xref: /linux/arch/x86/mm/mem_encrypt_boot.S (revision 160b8e75932fd51a49607d32dbfa1d417977b79c)
/*
 * AMD Memory Encryption Support
 *
 * Copyright (C) 2016 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/linkage.h>
#include <asm/pgtable.h>
#include <asm/page.h>
#include <asm/processor-flags.h>
#include <asm/msr-index.h>

	.text
	.code64
ENTRY(sme_encrypt_execute)

	/*
	 * Entry parameters:
	 *   RDI - virtual address for the encrypted mapping
	 *   RSI - virtual address for the decrypted mapping
	 *   RDX - length to encrypt
	 *   RCX - virtual address of the encryption workarea, including:
	 *     - stack page (PAGE_SIZE)
	 *     - encryption routine page (PAGE_SIZE)
	 *     - intermediate copy buffer (PMD_PAGE_SIZE)
	 *    R8 - physical address of the pagetables to use for encryption
	 */
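
	/*
	 * The C-side declaration is assumed to look roughly like the
	 * following, matching the register assignments above (System V
	 * AMD64 argument order RDI, RSI, RDX, RCX, R8):
	 *
	 *	void __init sme_encrypt_execute(unsigned long encrypted_kernel_vaddr,
	 *					unsigned long decrypted_kernel_vaddr,
	 *					unsigned long kernel_len,
	 *					unsigned long encryption_wa,
	 *					unsigned long encryption_pgd);
	 *
	 * Workarea layout implied by the code below:
	 *	RCX + 0			stack page (stack grows down from +PAGE_SIZE)
	 *	RCX + PAGE_SIZE		copy of __enc_copy
	 *	RCX + 2 * PAGE_SIZE	intermediate copy buffer (PMD_PAGE_SIZE)
	 */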

	push	%rbp
	movq	%rsp, %rbp		/* RBP now has original stack pointer */

	/* Set up a one page stack in the non-encrypted memory area */
	movq	%rcx, %rax		/* Workarea stack page */
	leaq	PAGE_SIZE(%rax), %rsp	/* Set new stack pointer */
	addq	$PAGE_SIZE, %rax	/* Workarea encryption routine */
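	/*
	 * RSP and RAX now both point at workarea + PAGE_SIZE: the new
	 * stack grows down into the first (stack) page, while RAX is
	 * the base of the second page that receives the copy of
	 * __enc_copy below.
	 */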

	push	%r12
	movq	%rdi, %r10		/* Encrypted area */
	movq	%rsi, %r11		/* Decrypted area */
	movq	%rdx, %r12		/* Area length */

	/* Copy encryption routine into the workarea */
	movq	%rax, %rdi				/* Workarea encryption routine */
	leaq	__enc_copy(%rip), %rsi			/* Encryption routine */
	movq	$(.L__enc_copy_end - __enc_copy), %rcx	/* Encryption routine length */
	rep	movsb

	/* Setup registers for call */
	movq	%r10, %rdi		/* Encrypted area */
	movq	%r11, %rsi		/* Decrypted area */
	movq	%r8, %rdx		/* Pagetables used for encryption */
	movq	%r12, %rcx		/* Area length */
	movq	%rax, %r8		/* Workarea encryption routine */
	addq	$PAGE_SIZE, %r8		/* Workarea intermediate copy buffer */

	call	*%rax			/* Call the encryption routine */

	pop	%r12

	movq	%rbp, %rsp		/* Restore original stack pointer */
	pop	%rbp

	ret
ENDPROC(sme_encrypt_execute)

ENTRY(__enc_copy)
/*
 * Routine used to encrypt memory in place.
 *   This routine must be run outside of the kernel proper since
 *   the kernel will be encrypted during the process. So this
 *   routine is defined here and then copied to an area outside
 *   of the kernel where it will remain and run decrypted
 *   during execution.
 *
 *   On entry the registers must be:
 *     RDI - virtual address for the encrypted mapping
 *     RSI - virtual address for the decrypted mapping
 *     RDX - address of the pagetables to use for encryption
 *     RCX - length of area
 *      R8 - intermediate copy buffer
 *
 *     RAX - points to this routine
 *
 * The area will be encrypted by copying from the non-encrypted
 * memory space to an intermediate buffer and then copying from the
 * intermediate buffer back to the encrypted memory space. The physical
 * addresses of the two mappings are the same which results in the area
 * being encrypted "in place".
 */
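/*
 * As a rough C-style sketch of what the copy loop below does (the
 * variable names are illustrative only, not taken from the kernel
 * sources):
 *
 *	while (len) {
 *		size_t chunk = min(len, (size_t)PMD_PAGE_SIZE);
 *
 *		memcpy(buffer, decrypted, chunk);	// read via decrypted mapping
 *		memcpy(encrypted, buffer, chunk);	// write via encrypted mapping
 *		decrypted += chunk;
 *		encrypted += chunk;
 *		len -= chunk;
 *	}
 *
 * Both mappings alias the same physical pages but differ in the C
 * (encryption) bit of their page table entries, so the second copy
 * stores the data back encrypted.
 */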
	/* Enable the new page tables */
	mov	%rdx, %cr3

	/* Flush any global TLBs */
	mov	%cr4, %rdx
	andq	$~X86_CR4_PGE, %rdx
	mov	%rdx, %cr4
	orq	$X86_CR4_PGE, %rdx
	mov	%rdx, %cr4
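	/*
	 * Clearing and then re-setting CR4.PGE invalidates all TLB
	 * entries, including global ones that the CR3 write above does
	 * not flush.
	 */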

	push	%r15
	push	%r12

	movq	%rcx, %r9		/* Save area length */
	movq	%rdi, %r10		/* Save encrypted area address */
	movq	%rsi, %r11		/* Save decrypted area address */

	/* Set the PAT register PA5 entry to write-protect */
	movl	$MSR_IA32_CR_PAT, %ecx
	rdmsr
	mov	%rdx, %r15		/* Save original PAT value */
	andl	$0xffff00ff, %edx	/* Clear PA5 */
	orl	$0x00000500, %edx	/* Set PA5 to WP */
	wrmsr
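	/*
	 * The IA32_PAT MSR holds eight one-byte entries: PA0-PA3 in EAX
	 * and PA4-PA7 in EDX, so PA5 occupies bits 15:8 of EDX and the
	 * value 0x05 encodes the write-protected (WP) memory type.
	 * Presumably the caller-supplied page tables reference PAT
	 * entry 5 for the mapping that should be WP during the copy.
	 */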

	wbinvd				/* Invalidate any cache entries */
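	/*
	 * WBINVD writes back and invalidates all caches, so that cache
	 * lines filled before the page table and PAT changes above are
	 * not reused once the data is accessed with different
	 * encryption and memory-type attributes.
	 */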

	/* Copy/encrypt up to 2MB at a time */
	movq	$PMD_PAGE_SIZE, %r12
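	/*
	 * R12 is the chunk size for each pass: it starts at
	 * PMD_PAGE_SIZE (2MB) and is clamped below to the remaining
	 * length (R9) for the final, possibly shorter, pass.
	 */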
1:
	cmpq	%r12, %r9
	jnb	2f
	movq	%r9, %r12

2:
	movq	%r11, %rsi		/* Source - decrypted area */
	movq	%r8, %rdi		/* Dest   - intermediate copy buffer */
	movq	%r12, %rcx
	rep	movsb

	movq	%r8, %rsi		/* Source - intermediate copy buffer */
	movq	%r10, %rdi		/* Dest   - encrypted area */
	movq	%r12, %rcx
	rep	movsb

	addq	%r12, %r11
	addq	%r12, %r10
	subq	%r12, %r9		/* Kernel length decrement */
	jnz	1b			/* Kernel length not zero? */

	/* Restore PAT register */
	movl	$MSR_IA32_CR_PAT, %ecx
	rdmsr
	mov	%r15, %rdx		/* Restore original PAT value */
	wrmsr

	pop	%r12
	pop	%r15

	ret
.L__enc_copy_end:
ENDPROC(__enc_copy)