xref: /linux/arch/arm64/include/asm/mem_encrypt.h (revision 876f5ebd58a9ac42f48a7ead3d5b274a314e0ace)
/* SPDX-License-Identifier: GPL-2.0-only */
#ifndef __ASM_MEM_ENCRYPT_H
#define __ASM_MEM_ENCRYPT_H

#include <asm/rsi.h>

struct device;

struct arm64_mem_crypt_ops {
	int (*encrypt)(unsigned long addr, int numpages);
	int (*decrypt)(unsigned long addr, int numpages);
};

int arm64_mem_crypt_ops_register(const struct arm64_mem_crypt_ops *ops);

int set_memory_encrypted(unsigned long addr, int numpages);
int set_memory_decrypted(unsigned long addr, int numpages);

int realm_register_memory_enc_ops(void);

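/*
 * Illustrative sketch, not part of this header: one way a backend could
 * implement and register the hooks declared above.  The helpers
 * my_protect_range() and my_share_range() are hypothetical stand-ins for
 * the platform-specific transition calls; realm_register_memory_enc_ops()
 * hooks up the Arm CCA Realm backend through the same registration path.
 */
static int my_mem_encrypt(unsigned long addr, int numpages)
{
	/* Make [addr, addr + numpages * PAGE_SIZE) private/protected again. */
	return my_protect_range(addr, numpages * PAGE_SIZE);
}

static int my_mem_decrypt(unsigned long addr, int numpages)
{
	/* Make the range shared/unprotected so the host can access it. */
	return my_share_range(addr, numpages * PAGE_SIZE);
}

static const struct arm64_mem_crypt_ops my_crypt_ops = {
	.encrypt	= my_mem_encrypt,
	.decrypt	= my_mem_decrypt,
};

static int __init my_mem_encrypt_init(void)
{
	/* Afterwards set_memory_encrypted()/set_memory_decrypted() reach these ops. */
	return arm64_mem_crypt_ops_register(&my_crypt_ops);
}
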
static inline bool force_dma_unencrypted(struct device *dev)
{
	return is_realm_world();
}

/*
 * For Arm CCA guests, canonical addresses are "encrypted", so no changes
 * are required for dma_addr_encrypted().
 * Unencrypted DMA buffers must be accessed via the unprotected IPA, i.e.
 * with the "top IPA bit" (PROT_NS_SHARED) set.
 */
#define dma_addr_unencrypted(x)		((x) | PROT_NS_SHARED)

/* Clear the "top" IPA bit while converting back */
#define dma_addr_canonical(x)		((x) & ~PROT_NS_SHARED)

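/*
 * Illustrative sketch, not part of this header: how the two conversion
 * macros above round-trip a DMA address.  It assumes PROT_NS_SHARED is
 * the single "top IPA bit" selecting the unprotected alias, so setting it
 * and clearing it again must return the original canonical address.
 */
static inline bool example_dma_addr_round_trip(dma_addr_t canonical)
{
	dma_addr_t shared = dma_addr_unencrypted(canonical);	/* set the top IPA bit */

	return dma_addr_canonical(shared) == canonical;		/* clear it again */
}
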
#endif	/* __ASM_MEM_ENCRYPT_H */