/*
 * Copyright (C) 2012 ARM Ltd.
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef __ASM__VIRT_H
#define __ASM__VIRT_H

/*
 * The arm64 hcall implementation uses x0 to specify the hcall type. A value
 * less than 0xfff indicates a special hcall, such as get/set vector.
 * Any other value is used as a pointer to the function to call.
 */

/* HVC_GET_VECTORS - Return the value of the vbar_el2 register. */
#define HVC_GET_VECTORS 0

/*
 * HVC_SET_VECTORS - Set the value of the vbar_el2 register.
 *
 * @x1: Physical address of the new vector table.
 */
#define HVC_SET_VECTORS 1

/*
 * HVC_SOFT_RESTART - CPU soft reset, used by the cpu_soft_restart routine.
 */
#define HVC_SOFT_RESTART 2

/* Error returned when an invalid stub number is passed into x0 */
#define HVC_STUB_ERR 0xbadca11

#define BOOT_CPU_MODE_EL1 (0xe11)
#define BOOT_CPU_MODE_EL2 (0xe12)

#ifndef __ASSEMBLY__

#include <asm/ptrace.h>
#include <asm/sections.h>
#include <asm/sysreg.h>
#include <asm/cpufeature.h>

/*
 * __boot_cpu_mode records what mode CPUs were booted in.
 * A correctly-implemented bootloader must start all CPUs in the same mode:
 * In this case, both 32bit halves of __boot_cpu_mode will contain the
 * same value (either 0 if booted in EL1, BOOT_CPU_MODE_EL2 if booted in EL2).
 *
 * Should the bootloader fail to do this, the two values will be different.
 * This allows the kernel to flag an error when the secondaries have come up.
 */
extern u32 __boot_cpu_mode[2];
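
/*
 * Illustrative sketch, not part of the stub ABI above: roughly how a caller
 * running at EL1 might issue a stub hcall following the convention described
 * at the top of this file, here installing a new EL2 vector table. The real
 * entry points are __hyp_set_vectors()/__hyp_get_vectors() below; the
 * function name used here is hypothetical.
 */
static inline void example_hvc_set_vectors(phys_addr_t vectors)
{
	register unsigned long x0 asm("x0") = HVC_SET_VECTORS;
	register unsigned long x1 asm("x1") = vectors;

	/* hvc #0 traps to EL2, where the stub reads the hcall type from x0 */
	asm volatile("hvc #0" : "+r" (x0) : "r" (x1) : "memory");
}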

void __hyp_set_vectors(phys_addr_t phys_vector_base);
phys_addr_t __hyp_get_vectors(void);

/* Reports the availability of HYP mode */
static inline bool is_hyp_mode_available(void)
{
	return (__boot_cpu_mode[0] == BOOT_CPU_MODE_EL2 &&
		__boot_cpu_mode[1] == BOOT_CPU_MODE_EL2);
}

/* Check if the bootloader has booted CPUs in different modes */
static inline bool is_hyp_mode_mismatched(void)
{
	return __boot_cpu_mode[0] != __boot_cpu_mode[1];
}

static inline bool is_kernel_in_hyp_mode(void)
{
	return read_sysreg(CurrentEL) == CurrentEL_EL2;
}

static inline bool has_vhe(void)
{
	if (cpus_have_const_cap(ARM64_HAS_VIRT_HOST_EXTN))
		return true;

	return false;
}

#ifdef CONFIG_ARM64_VHE
extern void verify_cpu_run_el(void);
#else
static inline void verify_cpu_run_el(void) {}
#endif

#endif /* __ASSEMBLY__ */

#endif /* ! __ASM__VIRT_H */
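
/*
 * Usage sketch, deliberately kept out of the build: one way early init code
 * might combine the boot-mode helpers above to decide whether EL2 is usable.
 * The function name is hypothetical and <linux/errno.h> is assumed for the
 * error codes.
 */
#if 0
static int example_probe_el2(void)
{
	/* The bootloader started the CPUs in different exception levels. */
	if (is_hyp_mode_mismatched())
		return -EINVAL;

	/* With VHE the kernel itself already runs at EL2. */
	if (is_kernel_in_hyp_mode())
		return 0;

	/* Every CPU entered the kernel at EL1: there is no EL2 to use. */
	if (!is_hyp_mode_available())
		return -ENODEV;

	/* EL2 is reachable through the hyp stub hcalls defined above. */
	return 0;
}
#endif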