/* SPDX-License-Identifier: GPL-2.0-only */
.text
#include <linux/linkage.h>
#include <linux/objtool.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>
#include <asm/nospec-branch.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
SYM_FUNC_START(wakeup_long64)
	movq	saved_magic(%rip), %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	je	2f

	/* stop here on a saved_magic mismatch */
	movq	$0xbad6d61676963, %rcx
1:
	jmp	1b
2:
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp(%rip), %rsp

	movq	saved_rbx(%rip), %rbx
	movq	saved_rdi(%rip), %rdi
	movq	saved_rsi(%rip), %rsi
	movq	saved_rbp(%rip), %rbp

	movq	saved_rip(%rip), %rax
	ANNOTATE_RETPOLINE_SAFE
	jmp	*%rax
SYM_FUNC_END(wakeup_long64)

SYM_FUNC_START(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp
	xorl	%eax, %eax
	call	save_processor_state

	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp(%rip)
	movq	%rbp, saved_rbp(%rip)
	movq	%rbx, saved_rbx(%rip)
	movq	%rdi, saved_rdi(%rip)
	movq	%rsi, saved_rsi(%rip)

	addq	$8, %rsp
	movl	$3, %edi
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	ANNOTATE_NOENDBR
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#if defined(CONFIG_KASAN) && defined(CONFIG_KASAN_STACK)
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax
	addq	$8, %rsp
	FRAME_END
	jmp	restore_processor_state
SYM_FUNC_END(do_suspend_lowlevel)
STACK_FRAME_NON_STANDARD do_suspend_lowlevel

.data
saved_rbp:		.quad	0
saved_rsi:		.quad	0
saved_rdi:		.quad	0
saved_rbx:		.quad	0

saved_rip:		.quad	0
saved_rsp:		.quad	0

SYM_DATA(saved_magic,	.quad	0)
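
/*
 * A rough sketch of where the pt_regs_* and saved_context_cr* displacements
 * used above come from: they are not defined in this file but generated at
 * build time into include/generated/asm-offsets.h, which is pulled in
 * through <asm/asm-offsets.h>. The generator looks roughly like the
 * following, assuming the kbuild OFFSET() helper and the layout of
 * arch/x86/kernel/asm-offsets_64.c; exact contents vary by kernel version:
 *
 *	#include <linux/kbuild.h>
 *	#include <linux/ptrace.h>
 *	#include <asm/suspend.h>
 *
 *	int main(void)
 *	{
 *	#define ENTRY(entry) OFFSET(pt_regs_ ## entry, pt_regs, entry)
 *		ENTRY(bx); ENTRY(cx); ENTRY(dx); ENTRY(sp); ENTRY(bp);
 *		ENTRY(si); ENTRY(di); ENTRY(r8); ENTRY(r9); ENTRY(r10);
 *		ENTRY(r11); ENTRY(r12); ENTRY(r13); ENTRY(r14); ENTRY(r15);
 *		ENTRY(flags);
 *	#undef ENTRY
 *	#define ENTRY(entry) OFFSET(saved_context_ ## entry, saved_context, entry)
 *		ENTRY(cr0); ENTRY(cr2); ENTRY(cr3); ENTRY(cr4);
 *	#undef ENTRY
 *		return 0;
 *	}
 *
 * OFFSET(sym, str, mem) emits offsetof(struct str, mem) in a form the build
 * scrapes into the generated header, so the assembler only ever sees plain
 * numeric displacements.
 */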