/*-
 * Copyright (c) 2013, Anish Gupta (akgupt3@gmail.com)
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#include <machine/asmacros.h>

#include "svm_assym.h"

/*
 * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
 *
 * These macros are also responsible for saving/restoring the host %rbp
 * across VMRUN.
 */
#define VENTER  push %rbp ; mov %rsp,%rbp
#define VLEAVE  pop %rbp

/*
 * The SVM instructions are emitted as raw opcode bytes; each one takes the
 * physical address of a VMCB implicitly in %rax.
 */
#define VMLOAD  .byte 0x0f, 0x01, 0xda
#define VMRUN   .byte 0x0f, 0x01, 0xd8
#define VMSAVE  .byte 0x0f, 0x01, 0xdb

/*
 * svm_launch(uint64_t vmcb, struct svm_regctx *gctx, struct pcpu *pcpu)
 * %rdi: physical address of VMCB
 * %rsi: pointer to guest context
 * %rdx: pointer to the pcpu data
 */
ENTRY(svm_launch)
        VENTER

        /* Save the pointer to the pcpu data. */
        push %rdx

        /*
         * Host register state saved across a VMRUN.
         *
         * All "callee saved registers" except:
         * %rsp: because it is preserved by the processor across VMRUN.
         * %rbp: because it is saved/restored by the function prologue/epilogue.
         */
        push %rbx
        push %r12
        push %r13
        push %r14
        push %r15

        /* Save the physical address of the VMCB in %rax. */
        movq %rdi, %rax

        push %rsi               /* push guest context pointer on the stack */

        /*
         * Restore guest state.
         */
        movq SCTX_R8(%rsi), %r8
        movq SCTX_R9(%rsi), %r9
        movq SCTX_R10(%rsi), %r10
        movq SCTX_R11(%rsi), %r11
        movq SCTX_R12(%rsi), %r12
        movq SCTX_R13(%rsi), %r13
        movq SCTX_R14(%rsi), %r14
        movq SCTX_R15(%rsi), %r15
        movq SCTX_RBP(%rsi), %rbp
        movq SCTX_RBX(%rsi), %rbx
        movq SCTX_RCX(%rsi), %rcx
        movq SCTX_RDX(%rsi), %rdx
        movq SCTX_RDI(%rsi), %rdi
        movq SCTX_RSI(%rsi), %rsi       /* %rsi must be restored last */

        VMLOAD
        VMRUN
        VMSAVE
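
        /*
         * Control resumes here after #VMEXIT.  The processor has restored
         * the host %rax and %rsp from the host save area (which is why the
         * VMCB physical address was still in %rax for the VMSAVE above) and
         * has written the guest %rax and %rsp back into the VMCB, so only
         * the remaining general-purpose registers still hold guest values
         * and must be saved by hand.
         */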
        pop %rax                /* pop guest context pointer from the stack */

        /*
         * Save guest state.
         */
        movq %r8, SCTX_R8(%rax)
        movq %r9, SCTX_R9(%rax)
        movq %r10, SCTX_R10(%rax)
        movq %r11, SCTX_R11(%rax)
        movq %r12, SCTX_R12(%rax)
        movq %r13, SCTX_R13(%rax)
        movq %r14, SCTX_R14(%rax)
        movq %r15, SCTX_R15(%rax)
        movq %rbp, SCTX_RBP(%rax)
        movq %rbx, SCTX_RBX(%rax)
        movq %rcx, SCTX_RCX(%rax)
        movq %rdx, SCTX_RDX(%rax)
        movq %rdi, SCTX_RDI(%rax)
        movq %rsi, SCTX_RSI(%rax)

        /*
         * To prevent malicious branch target predictions from
         * affecting the host, overwrite all entries in the RSB upon
         * exiting a guest.
         */
        mov $16, %ecx           /* 16 iterations, two calls per loop */
        mov %rsp, %rax          /* save %rsp; the calls below grow the stack */
0:      call 2f                 /* create an RSB entry. */
1:      pause
        call 1b                 /* capture rogue speculation. */
2:      call 2f                 /* create an RSB entry. */
1:      pause
        call 1b                 /* capture rogue speculation. */
2:      sub $1, %ecx
        jnz 0b
        mov %rax, %rsp          /* discard the return addresses pushed above */

        /* Restore the host's callee-saved registers. */
        pop %r15
        pop %r14
        pop %r13
        pop %r12
        pop %rbx

        /* Restore %GS.base to point to the host's pcpu data. */
        pop %rdx                /* pcpu pointer saved at entry */
        mov %edx, %eax          /* low 32 bits of the MSR value */
        shr $32, %rdx           /* high 32 bits of the MSR value */
        mov $MSR_GSBASE, %rcx
        wrmsr

        /*
         * Clobber the remaining registers, which still hold guest contents,
         * so that they cannot be misused.
         */
        xor %rbp, %rbp
        xor %rdi, %rdi
        xor %rsi, %rsi
        xor %r8, %r8
        xor %r9, %r9
        xor %r10, %r10
        xor %r11, %r11

        VLEAVE
        ret
END(svm_launch)
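
/*
 * For reference, a rough sketch of the C-side interface this routine
 * assumes.  The SCTX_* offsets included from svm_assym.h correspond to
 * fields of struct svm_regctx; the declaration below is illustrative only
 * (the return type and parameter names are assumptions), and the
 * authoritative definitions live in the SVM headers.
 *
 *      struct svm_regctx;
 *      struct pcpu;
 *
 *      void svm_launch(uint64_t vmcb_pa, struct svm_regctx *gctx,
 *          struct pcpu *pcpu);
 */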