/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2011 NetApp, Inc.
 * Copyright (c) 2013 Neel Natu <neel@freebsd.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY NETAPP, INC ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL NETAPP, INC OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#include <machine/asmacros.h>
#include <machine/specialreg.h>

#include "vmx_assym.h"

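/* LK prefixes the pm_active bit operations below with "lock" on SMP kernels. */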
#ifdef SMP
#define	LK	lock ;
#else
#define	LK
#endif

/* Be friendly to DTrace FBT's prologue/epilogue pattern matching */
#define VENTER  push %rbp ; mov %rsp,%rbp
#define VLEAVE  pop %rbp

/*
 * Save the guest context.  Assumes that %rsp holds a pointer to the
 * 'vmxctx' (it was installed as the host %rsp via the VMCS) and leaves
 * a pointer to the 'vmxctx' in %rdi on completion.
 */
#define	VMX_GUEST_SAVE							\
	movq	%rdi,VMXCTX_GUEST_RDI(%rsp);				\
	movq	%rsi,VMXCTX_GUEST_RSI(%rsp);				\
	movq	%rdx,VMXCTX_GUEST_RDX(%rsp);				\
	movq	%rcx,VMXCTX_GUEST_RCX(%rsp);				\
	movq	%r8,VMXCTX_GUEST_R8(%rsp);				\
	movq	%r9,VMXCTX_GUEST_R9(%rsp);				\
	movq	%rax,VMXCTX_GUEST_RAX(%rsp);				\
	movq	%rbx,VMXCTX_GUEST_RBX(%rsp);				\
	movq	%rbp,VMXCTX_GUEST_RBP(%rsp);				\
	movq	%r10,VMXCTX_GUEST_R10(%rsp);				\
	movq	%r11,VMXCTX_GUEST_R11(%rsp);				\
	movq	%r12,VMXCTX_GUEST_R12(%rsp);				\
	movq	%r13,VMXCTX_GUEST_R13(%rsp);				\
	movq	%r14,VMXCTX_GUEST_R14(%rsp);				\
	movq	%r15,VMXCTX_GUEST_R15(%rsp);				\
	movq	%cr2,%rdi;						\
	movq	%rdi,VMXCTX_GUEST_CR2(%rsp);				\
	movq	%rsp,%rdi;

/*
 * Assumes that %rdi holds a pointer to the 'vmxctx'.
 *
 * On "return" all registers are updated to reflect guest state. The two
 * exceptions are %rip and %rsp. These registers are atomically switched
 * by hardware from the guest area of the vmcs.
 *
 * We modify %rsp to point to the 'vmxctx' so we can use it to restore
 * host context in case of an error with 'vmlaunch' or 'vmresume'.
 */
#define	VMX_GUEST_RESTORE						\
	movq	%rdi,%rsp;						\
	movq	VMXCTX_GUEST_CR2(%rdi),%rsi;				\
	movq	%rsi,%cr2;						\
	movq	VMXCTX_GUEST_RSI(%rdi),%rsi;				\
	movq	VMXCTX_GUEST_RDX(%rdi),%rdx;				\
	movq	VMXCTX_GUEST_RCX(%rdi),%rcx;				\
	movq	VMXCTX_GUEST_R8(%rdi),%r8;				\
	movq	VMXCTX_GUEST_R9(%rdi),%r9;				\
	movq	VMXCTX_GUEST_RAX(%rdi),%rax;				\
	movq	VMXCTX_GUEST_RBX(%rdi),%rbx;				\
	movq	VMXCTX_GUEST_RBP(%rdi),%rbp;				\
	movq	VMXCTX_GUEST_R10(%rdi),%r10;				\
	movq	VMXCTX_GUEST_R11(%rdi),%r11;				\
	movq	VMXCTX_GUEST_R12(%rdi),%r12;				\
	movq	VMXCTX_GUEST_R13(%rdi),%r13;				\
	movq	VMXCTX_GUEST_R14(%rdi),%r14;				\
	movq	VMXCTX_GUEST_R15(%rdi),%r15;				\
	movq	VMXCTX_GUEST_RDI(%rdi),%rdi; /* restore rdi last */

/*
 * Zero the remaining registers, which still hold guest contents, so
 * that guest state cannot be misused.
 */
#define	VMX_GUEST_CLOBBER						\
	xor	%rax, %rax;						\
	xor	%rcx, %rcx;						\
	xor	%rdx, %rdx;						\
	xor	%rsi, %rsi;						\
	xor	%r8, %r8;						\
	xor	%r9, %r9;						\
	xor	%r10, %r10;						\
	xor	%r11, %r11;

/*
 * Save and restore the host context.  Only %rsp and the callee-saved
 * registers need to be preserved here; the rest is covered by the C
 * calling convention.
 *
 * Assumes that %rdi holds a pointer to the 'vmxctx'.
 */
#define	VMX_HOST_SAVE							\
	movq    %r15, VMXCTX_HOST_R15(%rdi);				\
	movq    %r14, VMXCTX_HOST_R14(%rdi);				\
	movq    %r13, VMXCTX_HOST_R13(%rdi);				\
	movq    %r12, VMXCTX_HOST_R12(%rdi);				\
	movq    %rbp, VMXCTX_HOST_RBP(%rdi);				\
	movq    %rsp, VMXCTX_HOST_RSP(%rdi);				\
	movq    %rbx, VMXCTX_HOST_RBX(%rdi);				\

#define	VMX_HOST_RESTORE						\
	movq	VMXCTX_HOST_R15(%rdi), %r15;				\
	movq	VMXCTX_HOST_R14(%rdi), %r14;				\
	movq	VMXCTX_HOST_R13(%rdi), %r13;				\
	movq	VMXCTX_HOST_R12(%rdi), %r12;				\
	movq	VMXCTX_HOST_RBP(%rdi), %rbp;				\
	movq	VMXCTX_HOST_RSP(%rdi), %rsp;				\
	movq	VMXCTX_HOST_RBX(%rdi), %rbx;				\

/*
 * vmx_enter_guest(struct vmxctx *vmxctx, struct vmx *vmx, int launched)
 * %rdi: pointer to the 'vmxctx'
 * %rsi: pointer to the 'vmx'
 * %edx: launch state of the VMCS
 * Interrupts must be disabled on entry.
 */
ENTRY(vmx_enter_guest)
	VENTER
	/*
	 * Save host state before doing anything else.
	 */
	VMX_HOST_SAVE

	/*
	 * Activate guest pmap on this cpu.
	 */
	movq	VMXCTX_PMAP(%rdi), %r11
	movl	PCPU(CPUID), %eax
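	/* %eax (curcpu) is reused below to index 'vmx->eptgen[]'. */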
	LK btsl	%eax, PM_ACTIVE(%r11)

	/*
	 * If 'vmx->eptgen[curcpu]' is not identical to 'pmap->pm_eptgen'
	 * then we must invalidate all mappings associated with this EPTP.
	 */
	movq	PM_EPTGEN(%r11), %r10
	cmpq	%r10, VMX_EPTGEN(%rsi, %rax, 8)
	je	guest_restore

	/* Refresh 'vmx->eptgen[curcpu]' */
	movq	%r10, VMX_EPTGEN(%rsi, %rax, 8)

	/* Set up the invept descriptor on the host stack */
	mov	%rsp, %r11
	movq	VMX_EPTP(%rsi), %rax
	movq	%rax, -16(%r11)
	movq	$0x0, -8(%r11)
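	/* The descriptor is 16 bytes: the EPTP in the low quadword, zero above. */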
	mov	$0x1, %eax		/* Single context invalidate */
	invept	-16(%r11), %rax
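	/* CF set means VMfailInvalid; ZF set means VMfailValid. */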
	jbe	invept_error		/* Check invept instruction error */

guest_restore:

	/*
	 * Flush L1D cache if requested.  Use IA32_FLUSH_CMD MSR if available,
	 * otherwise load enough of the data from the zero_region to flush
	 * existing L1D content.
	 */
#define	L1D_FLUSH_SIZE	(64 * 1024)
	movl	%edx, %r8d	/* save 'launched'; %edx is overwritten below */
	cmpb	$0, guest_l1d_flush(%rip)
	je	after_l1d
	movq	vmx_msr_flush_cmd(%rip), %rax
	testq	%rax, %rax
	jz	1f
	movq	%rax, %rdx
	shrq	$32, %rdx
	movl	$MSR_IA32_FLUSH_CMD, %ecx
	wrmsr
	jmp	after_l1d
1:	movq	$KERNBASE, %r9
	movq	$-L1D_FLUSH_SIZE, %rcx
	/*
	 * pass 1: Preload TLB.
	 * Kernel text is mapped using superpages.  TLB preload is
	 * done for the benefit of older CPUs which split a 2M page
	 * into 4K TLB entries.
	 */
2:	movb	L1D_FLUSH_SIZE(%r9, %rcx), %al
	addq	$PAGE_SIZE, %rcx
	jne	2b
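	/* cpuid serializes, fencing the preload pass from the flush pass. */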
	xorl	%eax, %eax
	cpuid
	movq	$-L1D_FLUSH_SIZE, %rcx
	/* pass 2: Read each cache line */
3:	movb	L1D_FLUSH_SIZE(%r9, %rcx), %al
	addq	$64, %rcx
	jne	3b
	lfence
#undef	L1D_FLUSH_SIZE
after_l1d:
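	/* %r8d holds the 'launched' argument saved before the flush. */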
	cmpl	$0, %r8d
	je	do_launch
	VMX_GUEST_RESTORE
	vmresume
	/*
	 * In the common case 'vmresume' returns to the host through
	 * 'vmx_exit_guest' with %rsp pointing to 'vmxctx'.
	 *
	 * If there is an error we return VMX_VMRESUME_ERROR to the caller.
	 */
	movq	%rsp, %rdi		/* point %rdi back to 'vmxctx' */
	movl	$VMX_VMRESUME_ERROR, %eax
	jmp	decode_inst_error

do_launch:
	VMX_GUEST_RESTORE
	vmlaunch
	/*
	 * In the common case 'vmlaunch' returns to the host through
	 * 'vmx_exit_guest' with %rsp pointing to 'vmxctx'.
	 *
	 * If there is an error we return VMX_VMLAUNCH_ERROR to the caller.
	 */
	movq	%rsp, %rdi		/* point %rdi back to 'vmxctx' */
	movl	$VMX_VMLAUNCH_ERROR, %eax
	jmp	decode_inst_error

invept_error:
	movl	$VMX_INVEPT_ERROR, %eax
	jmp	decode_inst_error

decode_inst_error:
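	/*
	 * 'movl' does not modify flags, so ZF here still reflects the failed
	 * VMX instruction: ZF set means VMfailValid, clear means VMfailInvalid.
	 */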
	movl	$VM_FAIL_VALID, %r11d
	jz	inst_error
	movl	$VM_FAIL_INVALID, %r11d
inst_error:
	movl	%r11d, VMXCTX_INST_FAIL_STATUS(%rdi)

	/*
	 * The return value is already populated in %eax so we cannot use
	 * it as a scratch register beyond this point.
	 */

	/*
	 * Deactivate guest pmap from this cpu.
	 */
	movq	VMXCTX_PMAP(%rdi), %r11
	movl	PCPU(CPUID), %r10d
	LK btrl	%r10d, PM_ACTIVE(%r11)

	VMX_HOST_RESTORE
	VLEAVE
	ret

/*
 * Non-error VM-exit from the guest. Make this a label so it can
 * be used by C code when setting up the VMCS.
 * The VMCS-restored %rsp points to the 'struct vmxctx'.
 */
	ALIGN_TEXT
	.globl	vmx_exit_guest_flush_rsb
vmx_exit_guest_flush_rsb:
	/*
	 * Save guest state that is not automatically saved in the vmcs.
	 */
	VMX_GUEST_SAVE

	/*
	 * Deactivate guest pmap from this cpu.
	 */
	movq	VMXCTX_PMAP(%rdi), %r11
	movl	PCPU(CPUID), %r10d
	LK btrl	%r10d, PM_ACTIVE(%r11)

	VMX_HOST_RESTORE

	VMX_GUEST_CLOBBER

	/*
	 * To prevent malicious branch target predictions from
	 * affecting the host, overwrite all entries in the RSB upon
	 * exiting a guest.
	 */
	mov	$16, %ecx	/* 16 iterations, two calls per loop */
	mov	%rsp, %rax
0:	call	2f		/* create an RSB entry. */
1:	pause
	call	1b		/* capture rogue speculation. */
2:	call	2f		/* create an RSB entry. */
1:	pause
	call	1b		/* capture rogue speculation. */
2:	sub	$1, %ecx
	jnz	0b
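	/* Restore the original stack pointer saved in %rax above. */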
	mov	%rax, %rsp

	/*
	 * This will return to the caller of 'vmx_enter_guest()' with a return
	 * value of VMX_GUEST_VMEXIT.
	 */
	movl	$VMX_GUEST_VMEXIT, %eax
	VLEAVE
	ret

	.globl	vmx_exit_guest
vmx_exit_guest:
	/*
	 * Save guest state that is not automatically saved in the vmcs.
	 */
	VMX_GUEST_SAVE

	/*
	 * Deactivate guest pmap from this cpu.
	 */
	movq	VMXCTX_PMAP(%rdi), %r11
	movl	PCPU(CPUID), %r10d
	LK btrl	%r10d, PM_ACTIVE(%r11)

	VMX_HOST_RESTORE

	VMX_GUEST_CLOBBER

	/*
	 * This will return to the caller of 'vmx_enter_guest()' with a return
	 * value of VMX_GUEST_VMEXIT.
	 */
	movl	$VMX_GUEST_VMEXIT, %eax
	VLEAVE
	ret
END(vmx_enter_guest)

/*
 * %rdi = interrupt handler entry point
 *
 * Calling sequence described in the "Instruction Set Reference" for the "INT"
 * instruction in Intel SDM, Vol 2.
 */
ENTRY(vmx_call_isr)
	VENTER
	mov	%rsp, %r11			/* save %rsp */
	and	$~0xf, %rsp			/* align on 16-byte boundary */
	pushq	$KERNEL_SS			/* %ss */
	pushq	%r11				/* %rsp */
	pushfq					/* %rflags */
	pushq	$KERNEL_CS			/* %cs */
	cli					/* disable interrupts */
	callq	*%rdi				/* push %rip and call isr */
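	/*
	 * The handler returns via iretq, which pops the frame built above
	 * and restores the pre-cli %rflags, re-enabling interrupts.
	 */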
	VLEAVE
	ret
END(vmx_call_isr)