/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2018 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

SYM_FUNC_START(__efi_rt_asm_wrapper)
	stp	x29, x30, [sp, #-112]!
	mov	x29, sp

	/*
	 * Register x18 is designated as the 'platform' register by the AAPCS,
	 * which means firmware running at the same exception level as the OS
	 * (such as UEFI) should never touch it.
	 */
	stp	x1, x18, [sp, #16]

	/*
	 * Preserve all callee saved registers and preserve the stack pointer
	 * value at the base of the EFI runtime stack so we can recover from
	 * synchronous exceptions occurring while executing the firmware
	 * routines.
	 */
	stp	x19, x20, [sp, #32]
	stp	x21, x22, [sp, #48]
	stp	x23, x24, [sp, #64]
	stp	x25, x26, [sp, #80]
	stp	x27, x28, [sp, #96]

	ldr_l	x16, efi_rt_stack_top
	mov	sp, x16
	stp	x18, x29, [sp, #-16]!

	/*
	 * We are lucky enough that no EFI runtime services take more than
	 * 5 arguments, so all are passed in registers rather than via the
	 * stack.
	 */
	mov	x8, x0
	mov	x0, x2
	mov	x1, x3
	mov	x2, x4
	mov	x3, x5
	mov	x4, x6
	blr	x8

	mov	x16, sp
	mov	sp, x29
	str	xzr, [x16, #8]			// clear recorded task SP value

	ldp	x1, x2, [sp, #16]
	cmp	x2, x18
	ldp	x29, x30, [sp], #112
	b.ne	0f
	ret
0:
	/*
	 * With CONFIG_SHADOW_CALL_STACK, the kernel uses x18 to store a
	 * shadow stack pointer, which we need to restore before returning to
	 * potentially instrumented code. This is safe because the wrapper is
	 * called with preemption disabled and a separate shadow stack is used
	 * for interrupts.
	 */
#ifdef CONFIG_SHADOW_CALL_STACK
	ldr_l	x18, efi_rt_stack_top
	ldr	x18, [x18, #-16]
#endif

	b	efi_handle_corrupted_x18	// tail call
SYM_FUNC_END(__efi_rt_asm_wrapper)

SYM_CODE_START(__efi_rt_asm_recover)
	mov	sp, x30

	ldr_l	x16, efi_rt_stack_top		// clear recorded task SP value
	str	xzr, [x16, #-8]

	ldp	x19, x20, [sp, #32]
	ldp	x21, x22, [sp, #48]
	ldp	x23, x24, [sp, #64]
	ldp	x25, x26, [sp, #80]
	ldp	x27, x28, [sp, #96]
	ldp	x29, x30, [sp], #112
	ret
SYM_CODE_END(__efi_rt_asm_recover)
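
/*
 * Illustrative sketch, not part of this file: the register shuffle in
 * __efi_rt_asm_wrapper implies a C-level calling convention in which x0
 * carries the EFI service pointer, x1 a name string (saved across the call
 * so it can be reported if firmware corrupts x18), and x2-x6 the up to
 * five real arguments that are then shifted down into x0-x4. Under that
 * assumption, a caller could look roughly like the hypothetical snippet
 * below; the variable names and the GetTime() example are made up for
 * illustration only.
 *
 *	efi_status_t __efi_rt_asm_wrapper(void *func, const char *name, ...);
 *
 *	// e.g. invoking the GetTime() runtime service (sketch only):
 *	status = __efi_rt_asm_wrapper(runtime->get_time, "get_time",
 *				      &time, &time_caps);
 */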