/* SPDX-License-Identifier: GPL-2.0 */
/*
 * This code may not use any stack, nor any variable that is not "NoSave":
 *
 * It is rewriting one kernel image with another.  What is a stack page in
 * the "old" image could very well be a data page in the "new" image, and
 * overwriting your own stack out from under you is a bad idea.
 */

#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page_types.h>
#include <asm/asm-offsets.h>
#include <asm/processor-flags.h>
#include <asm/frame.h>

.text

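/*
 * Save the state the image kernel will need in order to resume: %esp and
 * the callee-saved registers go into the NoSave saved_context_* variables,
 * the flags are saved via pushfl/popl, and %cr3 is stashed in restore_cr3.
 * With that state preserved, swsusp_save() builds the in-memory
 * hibernation snapshot.
 */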
SYM_FUNC_START(swsusp_arch_suspend)
	movl %esp, saved_context_esp
	movl %ebx, saved_context_ebx
	movl %ebp, saved_context_ebp
	movl %esi, saved_context_esi
	movl %edi, saved_context_edi
	pushfl
	popl saved_context_eflags

	/* save cr3 */
	movl	%cr3, %eax
	movl	%eax, restore_cr3

	FRAME_BEGIN
	call swsusp_save
	FRAME_END
	RET
SYM_FUNC_END(swsusp_arch_suspend)

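/*
 * restore_image() is entered by the "boot" kernel once the hibernation
 * image has been loaded.  It picks up the parameters prepared by the
 * hibernation code (the image kernel's entry point, its %cr3 value and the
 * CR4 feature mask) and jumps to the copy of core_restore_code that was
 * relocated to a safe page, so that the copy loop cannot overwrite itself.
 */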
SYM_CODE_START(restore_image)
	/* prepare to jump to the image kernel */
	movl	restore_jump_address, %ebx
	movl	restore_cr3, %ebp

	movl	mmu_cr4_features, %ecx

	/* jump to relocated restore code */
	movl	relocated_restore_code, %eax
	jmpl	*%eax
SYM_CODE_END(restore_image)

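/*
 * core_restore_code() does the actual restore: it switches to the
 * temporary page tables in temp_pgt, flushes the TLB (clearing CR4.PGE
 * first when CR4 is available, so that global entries are flushed too),
 * and then copies every page on restore_pblist back over its original
 * location.  When the list is exhausted it jumps to the image kernel's
 * entry point, which restore_image() loaded into %ebx.
 */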
/* code below has been relocated to a safe page */
SYM_CODE_START(core_restore_code)
	movl	temp_pgt, %eax
	movl	%eax, %cr3

	jecxz	1f	# CR4 exists only on Pentium and later; skip if zero
	andl	$~(X86_CR4_PGE), %ecx
	movl	%ecx, %cr4;  # turn off PGE
	movl	%cr3, %eax;  # flush TLB
	movl	%eax, %cr3
1:
	movl	restore_pblist, %edx
	.p2align 4,,7

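/*
 * Copy loop: %edx walks the list of struct pbe entries.  For each entry,
 * pbe_address is the address of the saved copy of a page and
 * pbe_orig_address is where that page originally lived, so each page is
 * copied back with a single rep movsl of PAGE_SIZE bytes.
 */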
copy_loop:
	testl	%edx, %edx
	jz	done

	movl	pbe_address(%edx), %esi
	movl	pbe_orig_address(%edx), %edi

	movl	$(PAGE_SIZE >> 2), %ecx
	rep movsl

	movl	pbe_next(%edx), %edx
	jmp	copy_loop
	.p2align 4,,7

done:
	jmpl	*%ebx
SYM_CODE_END(core_restore_code)

	/* code below belongs to the image kernel */
	.align PAGE_SIZE
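/*
 * restore_registers() runs as image kernel code once the copy loop above
 * has finished: it switches back to the image kernel's page tables from
 * %ebp, re-enables PGE if CR4 is available, restores the registers and
 * flags saved by swsusp_arch_suspend(), reloads the GDT descriptor saved
 * by save_processor_state(), and finally clears in_suspend to tell the
 * hibernation core that memory has been restored.
 */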
SYM_FUNC_START(restore_registers)
	/* go back to the original page tables */
	movl	%ebp, %cr3
	movl	mmu_cr4_features, %ecx
	jecxz	1f	# CR4 exists only on Pentium and later; skip if zero
	movl	%ecx, %cr4;  # turn PGE back on
1:

	movl saved_context_esp, %esp
	movl saved_context_ebp, %ebp
	movl saved_context_ebx, %ebx
	movl saved_context_esi, %esi
	movl saved_context_edi, %edi

	pushl saved_context_eflags
	popfl

	/* Reload the GDT descriptor saved in save_processor_state(). */
	movl $saved_context, %eax
	lgdt saved_context_gdt_desc(%eax)

	xorl	%eax, %eax

	/* tell the hibernation core that we've just restored the memory */
	movl	%eax, in_suspend

	RET
SYM_FUNC_END(restore_registers)