/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AMD Cryptographic Coprocessor (CCP) crypto API support
 *
 * Copyright (C) 2013,2017 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */

#ifndef __CCP_CRYPTO_H__
#define __CCP_CRYPTO_H__

#include <linux/list.h>
#include <linux/wait.h>
#include <linux/ccp.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/aead.h>
#include <crypto/aead.h>
#include <crypto/ctr.h>
#include <crypto/hash.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/akcipher.h>
#include <crypto/skcipher.h>
#include <crypto/internal/rsa.h>

/* We want the module name in front of our messages */
#undef pr_fmt
#define	pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#define	CCP_LOG_LEVEL	KERN_INFO

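/*
 * Used as the cra_priority of every algorithm this driver registers, so
 * the crypto core can choose between the CCP implementation and other
 * implementations of the same algorithm name.
 */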
#define CCP_CRA_PRIORITY	300

struct ccp_crypto_skcipher_alg {
	struct list_head entry;

	u32 mode;

	struct skcipher_alg alg;
};

struct ccp_crypto_aead {
	struct list_head entry;

	u32 mode;

	struct aead_alg alg;
};

struct ccp_crypto_ahash_alg {
	struct list_head entry;

	const __be32 *init;
	u32 type;
	u32 mode;

	/* Child algorithm used for HMAC, CMAC, etc */
	char child_alg[CRYPTO_MAX_ALG_NAME];

	struct ahash_alg alg;
};

struct ccp_crypto_akcipher_alg {
	struct list_head entry;

	struct akcipher_alg alg;
};

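/*
 * Helpers that map a crypto API handle back to the driver's wrapper
 * structure via container_of().  Illustrative (hypothetical) caller,
 * not taken verbatim from the .c files:
 *
 *	struct ccp_crypto_skcipher_alg *ccp_alg =
 *		ccp_crypto_skcipher_alg(crypto_skcipher_reqtfm(req));
 *
 *	ctx->u.aes.mode = ccp_alg->mode;
 */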
static inline struct ccp_crypto_skcipher_alg *
	ccp_crypto_skcipher_alg(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	return container_of(alg, struct ccp_crypto_skcipher_alg, alg);
}

static inline struct ccp_crypto_ahash_alg *
	ccp_crypto_ahash_alg(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct ahash_alg *ahash_alg;

	ahash_alg = container_of(alg, struct ahash_alg, halg.base);

	return container_of(ahash_alg, struct ccp_crypto_ahash_alg, alg);
}

/***** AES related defines *****/
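/*
 * Per-transform AES state.  key[] is sized for two AES keys because XTS
 * mode supplies a key pair; k1/k2 are the CMAC subkeys derived from the
 * cipher key at setkey time.
 */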
struct ccp_aes_ctx {
	/* Fallback cipher for XTS with unsupported unit sizes */
	struct crypto_skcipher *tfm_skcipher;

	enum ccp_engine engine;
	enum ccp_aes_type type;
	enum ccp_aes_mode mode;

	struct scatterlist key_sg;
	unsigned int key_len;
	u8 key[AES_MAX_KEY_SIZE * 2];

	u8 nonce[CTR_RFC3686_NONCE_SIZE];

	/* CMAC key structures */
	struct scatterlist k1_sg;
	struct scatterlist k2_sg;
	unsigned int kn_len;
	u8 k1[AES_BLOCK_SIZE];
	u8 k2[AES_BLOCK_SIZE];
};

struct ccp_aes_req_ctx {
	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct scatterlist tag_sg;
	u8 tag[AES_BLOCK_SIZE];

	/* Fields used for RFC3686 requests */
	u8 *rfc3686_info;
	u8 rfc3686_iv[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;

	struct skcipher_request fallback_req;	// keep at the end
};

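/*
 * Per-request state for AES CMAC hashing.  A rough description, based on
 * the field names: buf/buf_count carry the partial block left over after
 * an update pass, hash_cnt/hash_rem split the data into what is hashed
 * now and what is buffered for later, and data_sg chains the buffered
 * bytes with the caller's scatterlist.
 */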
struct ccp_aes_cmac_req_ctx {
	unsigned int null_msg;
	unsigned int final;

	struct scatterlist *src;
	unsigned int nbytes;

	u64 hash_cnt;
	unsigned int hash_rem;

	struct sg_table data_sg;

	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct scatterlist buf_sg;
	unsigned int buf_count;
	u8 buf[AES_BLOCK_SIZE];

	struct scatterlist pad_sg;
	unsigned int pad_count;
	u8 pad[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;
};

struct ccp_aes_cmac_exp_ctx {
	unsigned int null_msg;

	u8 iv[AES_BLOCK_SIZE];

	unsigned int buf_count;
	u8 buf[AES_BLOCK_SIZE];
};

/***** 3DES related defines *****/
struct ccp_des3_ctx {
	enum ccp_engine engine;
	enum ccp_des3_type type;
	enum ccp_des3_mode mode;

	struct scatterlist key_sg;
	unsigned int key_len;
	u8 key[AES_MAX_KEY_SIZE];
};

struct ccp_des3_req_ctx {
	struct scatterlist iv_sg;
	u8 iv[AES_BLOCK_SIZE];

	struct ccp_cmd cmd;
};

/* SHA-related defines
 * These values must be large enough to accommodate any variant
 */
#define MAX_SHA_CONTEXT_SIZE	SHA512_DIGEST_SIZE
#define MAX_SHA_BLOCK_SIZE	SHA512_BLOCK_SIZE

struct ccp_sha_ctx {
	struct scatterlist opad_sg;
	unsigned int opad_count;

	unsigned int key_len;
	u8 key[MAX_SHA_BLOCK_SIZE];
	u8 ipad[MAX_SHA_BLOCK_SIZE];
	u8 opad[MAX_SHA_BLOCK_SIZE];
	struct crypto_shash *hmac_tfm;
};

struct ccp_sha_req_ctx {
	enum ccp_sha_type type;

	u64 msg_bits;

	unsigned int first;
	unsigned int final;

	struct scatterlist *src;
	unsigned int nbytes;

	u64 hash_cnt;
	unsigned int hash_rem;

	struct sg_table data_sg;

	struct scatterlist ctx_sg;
	u8 ctx[MAX_SHA_CONTEXT_SIZE];

	struct scatterlist buf_sg;
	unsigned int buf_count;
	u8 buf[MAX_SHA_BLOCK_SIZE];

	/* CCP driver command */
	struct ccp_cmd cmd;
};

struct ccp_sha_exp_ctx {
	enum ccp_sha_type type;

	u64 msg_bits;

	unsigned int first;

	u8 ctx[MAX_SHA_CONTEXT_SIZE];

	unsigned int buf_count;
	u8 buf[MAX_SHA_BLOCK_SIZE];
};

/***** RSA related defines *****/

struct ccp_rsa_ctx {
	unsigned int key_len; /* in bits */
	struct scatterlist e_sg;
	u8 *e_buf;
	unsigned int e_len;
	struct scatterlist n_sg;
	u8 *n_buf;
	unsigned int n_len;
	struct scatterlist d_sg;
	u8 *d_buf;
	unsigned int d_len;
};

struct ccp_rsa_req_ctx {
	struct ccp_cmd cmd;
};

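/*
 * Maximum supported RSA modulus size, in bytes: 4096-bit moduli on the
 * original CCP and 16384-bit moduli on version 5 devices.
 */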
#define	CCP_RSA_MAXMOD	(4 * 1024 / 8)
#define	CCP5_RSA_MAXMOD	(16 * 1024 / 8)

/***** Common Context Structure *****/
struct ccp_ctx {
	int (*complete)(struct crypto_async_request *req, int ret);

	union {
		struct ccp_aes_ctx aes;
		struct ccp_rsa_ctx rsa;
		struct ccp_sha_ctx sha;
		struct ccp_des3_ctx des3;
	} u;
};
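
/*
 * Rough request flow, as a sketch rather than a verbatim description of
 * the .c files: each tfm's context embeds a ccp_ctx with the engine state
 * in the union and a per-algorithm completion callback in ->complete.
 * Request handlers fill the ccp_cmd embedded in their request context and
 * hand it off with ccp_crypto_enqueue_request(); when the hardware
 * finishes, the callback runs against the original crypto_async_request.
 */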

int ccp_crypto_enqueue_request(struct crypto_async_request *req,
			       struct ccp_cmd *cmd);
struct scatterlist *ccp_crypto_sg_table_add(struct sg_table *table,
					    struct scatterlist *sg_add);

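/*
 * Each ccp_register_*_algs() call below allocates the wrapper structures
 * above and links them onto @head so the main module can unregister and
 * free them on exit.
 */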
int ccp_register_aes_algs(struct list_head *head);
int ccp_register_aes_cmac_algs(struct list_head *head);
int ccp_register_aes_xts_algs(struct list_head *head);
int ccp_register_aes_aeads(struct list_head *head);
int ccp_register_sha_algs(struct list_head *head);
int ccp_register_des3_algs(struct list_head *head);
int ccp_register_rsa_algs(struct list_head *head);

#endif