1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * sun8i-ce-core.c - hardware cryptographic offloader for
4  * Allwinner H3/A64/H5/H2+/H6/R40 SoCs
5  *
6  * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com>
7  *
8  * Core file which registers crypto algorithms supported by the CryptoEngine.
9  *
10  * You can find a link to the datasheet in Documentation/arm/sunxi.rst
11  */
12 #include <linux/clk.h>
13 #include <linux/crypto.h>
14 #include <linux/delay.h>
15 #include <linux/dma-mapping.h>
16 #include <linux/interrupt.h>
17 #include <linux/io.h>
18 #include <linux/irq.h>
19 #include <linux/module.h>
20 #include <linux/of.h>
21 #include <linux/of_device.h>
22 #include <linux/platform_device.h>
23 #include <linux/pm_runtime.h>
24 #include <linux/reset.h>
25 #include <crypto/internal/rng.h>
26 #include <crypto/internal/skcipher.h>
27 
28 #include "sun8i-ce.h"
29 
30 /*
31  * The mod clock is lower on the H3 than on other SoCs because DMA timeouts
32  * occur at higher rates.
33  * To tune the mod clock, loading the driver and passing the selftests is not
34  * enough: also exercise the engine with a LUKS test (mount it and write to it).
35  */
36 static const struct ce_variant ce_h3_variant = {
37 	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
38 	},
39 	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
40 		CE_ALG_SHA384, CE_ALG_SHA512
41 	},
42 	.op_mode = { CE_OP_ECB, CE_OP_CBC
43 	},
44 	.ce_clks = {
45 		{ "bus", 0, 200000000 },
46 		{ "mod", 50000000, 0 },
47 		},
48 	.esr = ESR_H3,
49 	.prng = CE_ALG_PRNG,
50 	.trng = CE_ID_NOTSUPP,
51 };
52 
53 static const struct ce_variant ce_h5_variant = {
54 	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
55 	},
56 	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
57 		CE_ID_NOTSUPP, CE_ID_NOTSUPP
58 	},
59 	.op_mode = { CE_OP_ECB, CE_OP_CBC
60 	},
61 	.ce_clks = {
62 		{ "bus", 0, 200000000 },
63 		{ "mod", 300000000, 0 },
64 		},
65 	.esr = ESR_H5,
66 	.prng = CE_ALG_PRNG,
67 	.trng = CE_ID_NOTSUPP,
68 };
69 
70 static const struct ce_variant ce_h6_variant = {
71 	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
72 	},
73 	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
74 		CE_ALG_SHA384, CE_ALG_SHA512
75 	},
76 	.op_mode = { CE_OP_ECB, CE_OP_CBC
77 	},
78 	.cipher_t_dlen_in_bytes = true,
79 	.hash_t_dlen_in_bits = true,
80 	.prng_t_dlen_in_bytes = true,
81 	.trng_t_dlen_in_bytes = true,
82 	.ce_clks = {
83 		{ "bus", 0, 200000000 },
84 		{ "mod", 300000000, 0 },
85 		{ "ram", 0, 400000000 },
86 		},
87 	.esr = ESR_H6,
88 	.prng = CE_ALG_PRNG_V2,
89 	.trng = CE_ALG_TRNG_V2,
90 };
91 
92 static const struct ce_variant ce_a64_variant = {
93 	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
94 	},
95 	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
96 		CE_ID_NOTSUPP, CE_ID_NOTSUPP
97 	},
98 	.op_mode = { CE_OP_ECB, CE_OP_CBC
99 	},
100 	.ce_clks = {
101 		{ "bus", 0, 200000000 },
102 		{ "mod", 300000000, 0 },
103 		},
104 	.esr = ESR_A64,
105 	.prng = CE_ALG_PRNG,
106 	.trng = CE_ID_NOTSUPP,
107 };
108 
109 static const struct ce_variant ce_r40_variant = {
110 	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
111 	},
112 	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
113 		CE_ID_NOTSUPP, CE_ID_NOTSUPP
114 	},
115 	.op_mode = { CE_OP_ECB, CE_OP_CBC
116 	},
117 	.ce_clks = {
118 		{ "bus", 0, 200000000 },
119 		{ "mod", 300000000, 0 },
120 		},
121 	.esr = ESR_R40,
122 	.prng = CE_ALG_PRNG,
123 	.trng = CE_ID_NOTSUPP,
124 };
125 
126 /*
127  * sun8i_ce_get_engine_number() gets the next channel slot.
128  * This is a simple round-robin way of getting the next channel.
129  * Flow 3 is reserved for xRNG operations.
130  */
131 int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)
132 {
133 	return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);
134 }
135 
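/*
 * sun8i_ce_run_task() submits the task descriptor of the given flow to the
 * Crypto Engine and waits for its completion interrupt.
 * Returns 0 on success, or -EFAULT on a DMA timeout or when the engine
 * reports an error in its status register.
 */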
136 int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)
137 {
138 	u32 v;
139 	int err = 0;
140 	struct ce_task *cet = ce->chanlist[flow].tl;
141 
142 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
143 	ce->chanlist[flow].stat_req++;
144 #endif
145 
146 	mutex_lock(&ce->mlock);
147 
148 	v = readl(ce->base + CE_ICR);
149 	v |= 1 << flow;
150 	writel(v, ce->base + CE_ICR);
151 
152 	reinit_completion(&ce->chanlist[flow].complete);
153 	writel(ce->chanlist[flow].t_phy, ce->base + CE_TDQ);
154 
155 	ce->chanlist[flow].status = 0;
156 	/* Be sure all data is written before enabling the task */
157 	wmb();
158 
159 	/* Only H6 needs to write a part of t_common_ctl along with "1", but since it is ignored
160 	 * on older SoCs, we have no reason to complicate things.
161 	 */
162 	v = 1 | ((le32_to_cpu(ce->chanlist[flow].tl->t_common_ctl) & 0x7F) << 8);
163 	writel(v, ce->base + CE_TLR);
164 	mutex_unlock(&ce->mlock);
165 
166 	wait_for_completion_interruptible_timeout(&ce->chanlist[flow].complete,
167 			msecs_to_jiffies(ce->chanlist[flow].timeout));
168 
169 	if (ce->chanlist[flow].status == 0) {
170 		dev_err(ce->dev, "DMA timeout for %s (tm=%d) on flow %d\n", name,
171 			ce->chanlist[flow].timeout, flow);
172 		err = -EFAULT;
173 	}
174 	/* No need to lock for this read, the channel is locked so
175 	 * nothing could modify the error value for this channel
176 	 */
177 	v = readl(ce->base + CE_ESR);
178 	switch (ce->variant->esr) {
179 	case ESR_H3:
180 		/* Sadly, the error bit is not per flow */
181 		if (v) {
182 			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
183 			err = -EFAULT;
184 			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
185 				       cet, sizeof(struct ce_task), false);
186 		}
187 		if (v & CE_ERR_ALGO_NOTSUP)
188 			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
189 		if (v & CE_ERR_DATALEN)
190 			dev_err(ce->dev, "CE ERROR: data length error\n");
191 		if (v & CE_ERR_KEYSRAM)
192 			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
193 		break;
194 	case ESR_A64:
195 	case ESR_H5:
196 	case ESR_R40:
197 		v >>= (flow * 4);
198 		v &= 0xF;
199 		if (v) {
200 			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
201 			err = -EFAULT;
202 			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
203 				       cet, sizeof(struct ce_task), false);
204 		}
205 		if (v & CE_ERR_ALGO_NOTSUP)
206 			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
207 		if (v & CE_ERR_DATALEN)
208 			dev_err(ce->dev, "CE ERROR: data length error\n");
209 		if (v & CE_ERR_KEYSRAM)
210 			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
211 		break;
212 	case ESR_H6:
213 		v >>= (flow * 8);
214 		v &= 0xFF;
215 		if (v) {
216 			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
217 			err = -EFAULT;
218 			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
219 				       cet, sizeof(struct ce_task), false);
220 		}
221 		if (v & CE_ERR_ALGO_NOTSUP)
222 			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
223 		if (v & CE_ERR_DATALEN)
224 			dev_err(ce->dev, "CE ERROR: data length error\n");
225 		if (v & CE_ERR_KEYSRAM)
226 			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
227 		if (v & CE_ERR_ADDR_INVALID)
228 			dev_err(ce->dev, "CE ERROR: address invalid\n");
229 		if (v & CE_ERR_KEYLADDER)
230 			dev_err(ce->dev, "CE ERROR: key ladder configuration error\n");
231 		break;
232 	}
233 
234 	return err;
235 }
236 
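/*
 * Interrupt handler: acknowledge every completed flow in CE_ISR and wake up
 * the thread waiting on that flow's completion.
 */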
237 static irqreturn_t ce_irq_handler(int irq, void *data)
238 {
239 	struct sun8i_ce_dev *ce = (struct sun8i_ce_dev *)data;
240 	int flow = 0;
241 	u32 p;
242 
243 	p = readl(ce->base + CE_ISR);
244 	for (flow = 0; flow < MAXFLOW; flow++) {
245 		if (p & (BIT(flow))) {
246 			writel(BIT(flow), ce->base + CE_ISR);
247 			ce->chanlist[flow].status = 1;
248 			complete(&ce->chanlist[flow].complete);
249 		}
250 	}
251 
252 	return IRQ_HANDLED;
253 }
254 
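/*
 * Algorithm templates offered to the crypto API. Entries whose algorithm or
 * block mode is not supported by the current variant are skipped at
 * registration time.
 */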
255 static struct sun8i_ce_alg_template ce_algs[] = {
256 {
257 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
258 	.ce_algo_id = CE_ID_CIPHER_AES,
259 	.ce_blockmode = CE_ID_OP_CBC,
260 	.alg.skcipher = {
261 		.base = {
262 			.cra_name = "cbc(aes)",
263 			.cra_driver_name = "cbc-aes-sun8i-ce",
264 			.cra_priority = 400,
265 			.cra_blocksize = AES_BLOCK_SIZE,
266 			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
267 				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
268 				CRYPTO_ALG_NEED_FALLBACK,
269 			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
270 			.cra_module = THIS_MODULE,
271 			.cra_alignmask = 0xf,
272 			.cra_init = sun8i_ce_cipher_init,
273 			.cra_exit = sun8i_ce_cipher_exit,
274 		},
275 		.min_keysize	= AES_MIN_KEY_SIZE,
276 		.max_keysize	= AES_MAX_KEY_SIZE,
277 		.ivsize		= AES_BLOCK_SIZE,
278 		.setkey		= sun8i_ce_aes_setkey,
279 		.encrypt	= sun8i_ce_skencrypt,
280 		.decrypt	= sun8i_ce_skdecrypt,
281 	}
282 },
283 {
284 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
285 	.ce_algo_id = CE_ID_CIPHER_AES,
286 	.ce_blockmode = CE_ID_OP_ECB,
287 	.alg.skcipher = {
288 		.base = {
289 			.cra_name = "ecb(aes)",
290 			.cra_driver_name = "ecb-aes-sun8i-ce",
291 			.cra_priority = 400,
292 			.cra_blocksize = AES_BLOCK_SIZE,
293 			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
294 				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
295 				CRYPTO_ALG_NEED_FALLBACK,
296 			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
297 			.cra_module = THIS_MODULE,
298 			.cra_alignmask = 0xf,
299 			.cra_init = sun8i_ce_cipher_init,
300 			.cra_exit = sun8i_ce_cipher_exit,
301 		},
302 		.min_keysize	= AES_MIN_KEY_SIZE,
303 		.max_keysize	= AES_MAX_KEY_SIZE,
304 		.setkey		= sun8i_ce_aes_setkey,
305 		.encrypt	= sun8i_ce_skencrypt,
306 		.decrypt	= sun8i_ce_skdecrypt,
307 	}
308 },
309 {
310 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
311 	.ce_algo_id = CE_ID_CIPHER_DES3,
312 	.ce_blockmode = CE_ID_OP_CBC,
313 	.alg.skcipher = {
314 		.base = {
315 			.cra_name = "cbc(des3_ede)",
316 			.cra_driver_name = "cbc-des3-sun8i-ce",
317 			.cra_priority = 400,
318 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
319 			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
320 				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
321 				CRYPTO_ALG_NEED_FALLBACK,
322 			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
323 			.cra_module = THIS_MODULE,
324 			.cra_alignmask = 0xf,
325 			.cra_init = sun8i_ce_cipher_init,
326 			.cra_exit = sun8i_ce_cipher_exit,
327 		},
328 		.min_keysize	= DES3_EDE_KEY_SIZE,
329 		.max_keysize	= DES3_EDE_KEY_SIZE,
330 		.ivsize		= DES3_EDE_BLOCK_SIZE,
331 		.setkey		= sun8i_ce_des3_setkey,
332 		.encrypt	= sun8i_ce_skencrypt,
333 		.decrypt	= sun8i_ce_skdecrypt,
334 	}
335 },
336 {
337 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
338 	.ce_algo_id = CE_ID_CIPHER_DES3,
339 	.ce_blockmode = CE_ID_OP_ECB,
340 	.alg.skcipher = {
341 		.base = {
342 			.cra_name = "ecb(des3_ede)",
343 			.cra_driver_name = "ecb-des3-sun8i-ce",
344 			.cra_priority = 400,
345 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
346 			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
347 				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
348 				CRYPTO_ALG_NEED_FALLBACK,
349 			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
350 			.cra_module = THIS_MODULE,
351 			.cra_alignmask = 0xf,
352 			.cra_init = sun8i_ce_cipher_init,
353 			.cra_exit = sun8i_ce_cipher_exit,
354 		},
355 		.min_keysize	= DES3_EDE_KEY_SIZE,
356 		.max_keysize	= DES3_EDE_KEY_SIZE,
357 		.setkey		= sun8i_ce_des3_setkey,
358 		.encrypt	= sun8i_ce_skencrypt,
359 		.decrypt	= sun8i_ce_skdecrypt,
360 	}
361 },
362 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_HASH
363 {	.type = CRYPTO_ALG_TYPE_AHASH,
364 	.ce_algo_id = CE_ID_HASH_MD5,
365 	.alg.hash = {
366 		.init = sun8i_ce_hash_init,
367 		.update = sun8i_ce_hash_update,
368 		.final = sun8i_ce_hash_final,
369 		.finup = sun8i_ce_hash_finup,
370 		.digest = sun8i_ce_hash_digest,
371 		.export = sun8i_ce_hash_export,
372 		.import = sun8i_ce_hash_import,
373 		.halg = {
374 			.digestsize = MD5_DIGEST_SIZE,
375 			.statesize = sizeof(struct md5_state),
376 			.base = {
377 				.cra_name = "md5",
378 				.cra_driver_name = "md5-sun8i-ce",
379 				.cra_priority = 300,
380 				.cra_alignmask = 3,
381 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
382 					CRYPTO_ALG_ASYNC |
383 					CRYPTO_ALG_NEED_FALLBACK,
384 				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
385 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
386 				.cra_module = THIS_MODULE,
387 				.cra_init = sun8i_ce_hash_crainit,
388 				.cra_exit = sun8i_ce_hash_craexit,
389 			}
390 		}
391 	}
392 },
393 {	.type = CRYPTO_ALG_TYPE_AHASH,
394 	.ce_algo_id = CE_ID_HASH_SHA1,
395 	.alg.hash = {
396 		.init = sun8i_ce_hash_init,
397 		.update = sun8i_ce_hash_update,
398 		.final = sun8i_ce_hash_final,
399 		.finup = sun8i_ce_hash_finup,
400 		.digest = sun8i_ce_hash_digest,
401 		.export = sun8i_ce_hash_export,
402 		.import = sun8i_ce_hash_import,
403 		.halg = {
404 			.digestsize = SHA1_DIGEST_SIZE,
405 			.statesize = sizeof(struct sha1_state),
406 			.base = {
407 				.cra_name = "sha1",
408 				.cra_driver_name = "sha1-sun8i-ce",
409 				.cra_priority = 300,
410 				.cra_alignmask = 3,
411 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
412 					CRYPTO_ALG_ASYNC |
413 					CRYPTO_ALG_NEED_FALLBACK,
414 				.cra_blocksize = SHA1_BLOCK_SIZE,
415 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
416 				.cra_module = THIS_MODULE,
417 				.cra_init = sun8i_ce_hash_crainit,
418 				.cra_exit = sun8i_ce_hash_craexit,
419 			}
420 		}
421 	}
422 },
423 {	.type = CRYPTO_ALG_TYPE_AHASH,
424 	.ce_algo_id = CE_ID_HASH_SHA224,
425 	.alg.hash = {
426 		.init = sun8i_ce_hash_init,
427 		.update = sun8i_ce_hash_update,
428 		.final = sun8i_ce_hash_final,
429 		.finup = sun8i_ce_hash_finup,
430 		.digest = sun8i_ce_hash_digest,
431 		.export = sun8i_ce_hash_export,
432 		.import = sun8i_ce_hash_import,
433 		.halg = {
434 			.digestsize = SHA224_DIGEST_SIZE,
435 			.statesize = sizeof(struct sha256_state),
436 			.base = {
437 				.cra_name = "sha224",
438 				.cra_driver_name = "sha224-sun8i-ce",
439 				.cra_priority = 300,
440 				.cra_alignmask = 3,
441 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
442 					CRYPTO_ALG_ASYNC |
443 					CRYPTO_ALG_NEED_FALLBACK,
444 				.cra_blocksize = SHA224_BLOCK_SIZE,
445 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
446 				.cra_module = THIS_MODULE,
447 				.cra_init = sun8i_ce_hash_crainit,
448 				.cra_exit = sun8i_ce_hash_craexit,
449 			}
450 		}
451 	}
452 },
453 {	.type = CRYPTO_ALG_TYPE_AHASH,
454 	.ce_algo_id = CE_ID_HASH_SHA256,
455 	.alg.hash = {
456 		.init = sun8i_ce_hash_init,
457 		.update = sun8i_ce_hash_update,
458 		.final = sun8i_ce_hash_final,
459 		.finup = sun8i_ce_hash_finup,
460 		.digest = sun8i_ce_hash_digest,
461 		.export = sun8i_ce_hash_export,
462 		.import = sun8i_ce_hash_import,
463 		.halg = {
464 			.digestsize = SHA256_DIGEST_SIZE,
465 			.statesize = sizeof(struct sha256_state),
466 			.base = {
467 				.cra_name = "sha256",
468 				.cra_driver_name = "sha256-sun8i-ce",
469 				.cra_priority = 300,
470 				.cra_alignmask = 3,
471 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
472 					CRYPTO_ALG_ASYNC |
473 					CRYPTO_ALG_NEED_FALLBACK,
474 				.cra_blocksize = SHA256_BLOCK_SIZE,
475 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
476 				.cra_module = THIS_MODULE,
477 				.cra_init = sun8i_ce_hash_crainit,
478 				.cra_exit = sun8i_ce_hash_craexit,
479 			}
480 		}
481 	}
482 },
483 {	.type = CRYPTO_ALG_TYPE_AHASH,
484 	.ce_algo_id = CE_ID_HASH_SHA384,
485 	.alg.hash = {
486 		.init = sun8i_ce_hash_init,
487 		.update = sun8i_ce_hash_update,
488 		.final = sun8i_ce_hash_final,
489 		.finup = sun8i_ce_hash_finup,
490 		.digest = sun8i_ce_hash_digest,
491 		.export = sun8i_ce_hash_export,
492 		.import = sun8i_ce_hash_import,
493 		.halg = {
494 			.digestsize = SHA384_DIGEST_SIZE,
495 			.statesize = sizeof(struct sha512_state),
496 			.base = {
497 				.cra_name = "sha384",
498 				.cra_driver_name = "sha384-sun8i-ce",
499 				.cra_priority = 300,
500 				.cra_alignmask = 3,
501 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
502 					CRYPTO_ALG_ASYNC |
503 					CRYPTO_ALG_NEED_FALLBACK,
504 				.cra_blocksize = SHA384_BLOCK_SIZE,
505 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
506 				.cra_module = THIS_MODULE,
507 				.cra_init = sun8i_ce_hash_crainit,
508 				.cra_exit = sun8i_ce_hash_craexit,
509 			}
510 		}
511 	}
512 },
513 {	.type = CRYPTO_ALG_TYPE_AHASH,
514 	.ce_algo_id = CE_ID_HASH_SHA512,
515 	.alg.hash = {
516 		.init = sun8i_ce_hash_init,
517 		.update = sun8i_ce_hash_update,
518 		.final = sun8i_ce_hash_final,
519 		.finup = sun8i_ce_hash_finup,
520 		.digest = sun8i_ce_hash_digest,
521 		.export = sun8i_ce_hash_export,
522 		.import = sun8i_ce_hash_import,
523 		.halg = {
524 			.digestsize = SHA512_DIGEST_SIZE,
525 			.statesize = sizeof(struct sha512_state),
526 			.base = {
527 				.cra_name = "sha512",
528 				.cra_driver_name = "sha512-sun8i-ce",
529 				.cra_priority = 300,
530 				.cra_alignmask = 3,
531 				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
532 					CRYPTO_ALG_ASYNC |
533 					CRYPTO_ALG_NEED_FALLBACK,
534 				.cra_blocksize = SHA512_BLOCK_SIZE,
535 				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
536 				.cra_module = THIS_MODULE,
537 				.cra_init = sun8i_ce_hash_crainit,
538 				.cra_exit = sun8i_ce_hash_craexit,
539 			}
540 		}
541 	}
542 },
543 #endif
544 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG
545 {
546 	.type = CRYPTO_ALG_TYPE_RNG,
547 	.alg.rng = {
548 		.base = {
549 			.cra_name		= "stdrng",
550 			.cra_driver_name	= "sun8i-ce-prng",
551 			.cra_priority		= 300,
552 			.cra_ctxsize		= sizeof(struct sun8i_ce_rng_tfm_ctx),
553 			.cra_module		= THIS_MODULE,
554 			.cra_init		= sun8i_ce_prng_init,
555 			.cra_exit		= sun8i_ce_prng_exit,
556 		},
557 		.generate               = sun8i_ce_prng_generate,
558 		.seed                   = sun8i_ce_prng_seed,
559 		.seedsize               = PRNG_SEED_SIZE,
560 	}
561 },
562 #endif
563 };
564 
565 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
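/* Dump per-channel and per-algorithm request statistics to debugfs */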
566 static int sun8i_ce_debugfs_show(struct seq_file *seq, void *v)
567 {
568 	struct sun8i_ce_dev *ce = seq->private;
569 	unsigned int i;
570 
571 	for (i = 0; i < MAXFLOW; i++)
572 		seq_printf(seq, "Channel %d: nreq %lu\n", i, ce->chanlist[i].stat_req);
573 
574 	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
575 		if (!ce_algs[i].ce)
576 			continue;
577 		switch (ce_algs[i].type) {
578 		case CRYPTO_ALG_TYPE_SKCIPHER:
579 			seq_printf(seq, "%s %s %lu %lu\n",
580 				   ce_algs[i].alg.skcipher.base.cra_driver_name,
581 				   ce_algs[i].alg.skcipher.base.cra_name,
582 				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
583 			break;
584 		case CRYPTO_ALG_TYPE_AHASH:
585 			seq_printf(seq, "%s %s %lu %lu\n",
586 				   ce_algs[i].alg.hash.halg.base.cra_driver_name,
587 				   ce_algs[i].alg.hash.halg.base.cra_name,
588 				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
589 			break;
590 		case CRYPTO_ALG_TYPE_RNG:
591 			seq_printf(seq, "%s %s %lu %lu\n",
592 				   ce_algs[i].alg.rng.base.cra_driver_name,
593 				   ce_algs[i].alg.rng.base.cra_name,
594 				   ce_algs[i].stat_req, ce_algs[i].stat_bytes);
595 			break;
596 		}
597 	}
598 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
599 	seq_printf(seq, "HWRNG %lu %lu\n",
600 		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
601 #endif
602 	return 0;
603 }
604 
605 DEFINE_SHOW_ATTRIBUTE(sun8i_ce_debugfs);
606 #endif
607 
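/*
 * Free channels i..0: stop each crypto engine and release the DMA-coherent
 * memory holding its task descriptor.
 */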
608 static void sun8i_ce_free_chanlist(struct sun8i_ce_dev *ce, int i)
609 {
610 	while (i >= 0) {
611 		crypto_engine_exit(ce->chanlist[i].engine);
612 		if (ce->chanlist[i].tl)
613 			dma_free_coherent(ce->dev, sizeof(struct ce_task),
614 					  ce->chanlist[i].tl,
615 					  ce->chanlist[i].t_phy);
616 		i--;
617 	}
618 }
619 
620 /*
621  * Allocate the channel list structure
622  */
623 static int sun8i_ce_allocate_chanlist(struct sun8i_ce_dev *ce)
624 {
625 	int i, err;
626 
627 	ce->chanlist = devm_kcalloc(ce->dev, MAXFLOW,
628 				    sizeof(struct sun8i_ce_flow), GFP_KERNEL);
629 	if (!ce->chanlist)
630 		return -ENOMEM;
631 
632 	for (i = 0; i < MAXFLOW; i++) {
633 		init_completion(&ce->chanlist[i].complete);
634 
635 		ce->chanlist[i].engine = crypto_engine_alloc_init(ce->dev, true);
636 		if (!ce->chanlist[i].engine) {
637 			dev_err(ce->dev, "Cannot allocate engine\n");
638 			i--;
639 			err = -ENOMEM;
640 			goto error_engine;
641 		}
642 		err = crypto_engine_start(ce->chanlist[i].engine);
643 		if (err) {
644 			dev_err(ce->dev, "Cannot start engine\n");
645 			goto error_engine;
646 		}
647 		ce->chanlist[i].tl = dma_alloc_coherent(ce->dev,
648 							sizeof(struct ce_task),
649 							&ce->chanlist[i].t_phy,
650 							GFP_KERNEL);
651 		if (!ce->chanlist[i].tl) {
652 			dev_err(ce->dev, "Cannot get DMA memory for task %d\n",
653 				i);
654 			err = -ENOMEM;
655 			goto error_engine;
656 		}
657 	}
658 	return 0;
659 error_engine:
660 	sun8i_ce_free_chanlist(ce, i);
661 	return err;
662 }
663 
664 /*
665  * Power management strategy: The device is suspended unless a TFM exists for
666  * one of the algorithms offered by this driver.
667  */
668 static int sun8i_ce_pm_suspend(struct device *dev)
669 {
670 	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
671 	int i;
672 
673 	reset_control_assert(ce->reset);
674 	for (i = 0; i < CE_MAX_CLOCKS; i++)
675 		clk_disable_unprepare(ce->ceclks[i]);
676 	return 0;
677 }
678 
679 static int sun8i_ce_pm_resume(struct device *dev)
680 {
681 	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
682 	int err, i;
683 
684 	for (i = 0; i < CE_MAX_CLOCKS; i++) {
685 		if (!ce->variant->ce_clks[i].name)
686 			continue;
687 		err = clk_prepare_enable(ce->ceclks[i]);
688 		if (err) {
689 			dev_err(ce->dev, "Cannot prepare_enable %s\n",
690 				ce->variant->ce_clks[i].name);
691 			goto error;
692 		}
693 	}
694 	err = reset_control_deassert(ce->reset);
695 	if (err) {
696 		dev_err(ce->dev, "Cannot deassert reset control\n");
697 		goto error;
698 	}
699 	return 0;
700 error:
701 	sun8i_ce_pm_suspend(dev);
702 	return err;
703 }
704 
705 static const struct dev_pm_ops sun8i_ce_pm_ops = {
706 	SET_RUNTIME_PM_OPS(sun8i_ce_pm_suspend, sun8i_ce_pm_resume, NULL)
707 };
708 
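/* Enable runtime PM with a 2 second autosuspend delay */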
709 static int sun8i_ce_pm_init(struct sun8i_ce_dev *ce)
710 {
711 	int err;
712 
713 	pm_runtime_use_autosuspend(ce->dev);
714 	pm_runtime_set_autosuspend_delay(ce->dev, 2000);
715 
716 	err = pm_runtime_set_suspended(ce->dev);
717 	if (err)
718 		return err;
719 	pm_runtime_enable(ce->dev);
720 	return err;
721 }
722 
723 static void sun8i_ce_pm_exit(struct sun8i_ce_dev *ce)
724 {
725 	pm_runtime_disable(ce->dev);
726 }
727 
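/*
 * Acquire every clock listed in the variant, set the requested rate where one
 * is given and warn when a clock runs faster than the datasheet recommends.
 */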
728 static int sun8i_ce_get_clks(struct sun8i_ce_dev *ce)
729 {
730 	unsigned long cr;
731 	int err, i;
732 
733 	for (i = 0; i < CE_MAX_CLOCKS; i++) {
734 		if (!ce->variant->ce_clks[i].name)
735 			continue;
736 		ce->ceclks[i] = devm_clk_get(ce->dev, ce->variant->ce_clks[i].name);
737 		if (IS_ERR(ce->ceclks[i])) {
738 			err = PTR_ERR(ce->ceclks[i]);
739 			dev_err(ce->dev, "Cannot get %s CE clock err=%d\n",
740 				ce->variant->ce_clks[i].name, err);
741 			return err;
742 		}
743 		cr = clk_get_rate(ce->ceclks[i]);
744 		if (!cr)
745 			return -EINVAL;
746 		if (ce->variant->ce_clks[i].freq > 0 &&
747 		    cr != ce->variant->ce_clks[i].freq) {
748 			dev_info(ce->dev, "Set %s clock to %lu (%lu MHz) from %lu (%lu MHz)\n",
749 				 ce->variant->ce_clks[i].name,
750 				 ce->variant->ce_clks[i].freq,
751 				 ce->variant->ce_clks[i].freq / 1000000,
752 				 cr, cr / 1000000);
753 			err = clk_set_rate(ce->ceclks[i], ce->variant->ce_clks[i].freq);
754 			if (err)
755 				dev_err(ce->dev, "Failed to set %s clock rate to %lu Hz\n",
756 					ce->variant->ce_clks[i].name,
757 					ce->variant->ce_clks[i].freq);
758 		}
759 		if (ce->variant->ce_clks[i].max_freq > 0 &&
760 		    cr > ce->variant->ce_clks[i].max_freq)
761 			dev_warn(ce->dev, "Frequency for %s (%lu Hz) is higher than the datasheet's recommendation (%lu Hz)\n",
762 				 ce->variant->ce_clks[i].name, cr,
763 				 ce->variant->ce_clks[i].max_freq);
764 	}
765 	return 0;
766 }
767 
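/*
 * Register with the crypto API every algorithm of ce_algs that the current
 * variant supports.
 */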
768 static int sun8i_ce_register_algs(struct sun8i_ce_dev *ce)
769 {
770 	int ce_method, err, id;
771 	unsigned int i;
772 
773 	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
774 		ce_algs[i].ce = ce;
775 		switch (ce_algs[i].type) {
776 		case CRYPTO_ALG_TYPE_SKCIPHER:
777 			id = ce_algs[i].ce_algo_id;
778 			ce_method = ce->variant->alg_cipher[id];
779 			if (ce_method == CE_ID_NOTSUPP) {
780 				dev_dbg(ce->dev,
781 					"DEBUG: Algo of %s not supported\n",
782 					ce_algs[i].alg.skcipher.base.cra_name);
783 				ce_algs[i].ce = NULL;
784 				break;
785 			}
786 			id = ce_algs[i].ce_blockmode;
787 			ce_method = ce->variant->op_mode[id];
788 			if (ce_method == CE_ID_NOTSUPP) {
789 				dev_dbg(ce->dev, "DEBUG: Blockmode of %s not supported\n",
790 					ce_algs[i].alg.skcipher.base.cra_name);
791 				ce_algs[i].ce = NULL;
792 				break;
793 			}
794 			dev_info(ce->dev, "Register %s\n",
795 				 ce_algs[i].alg.skcipher.base.cra_name);
796 			err = crypto_register_skcipher(&ce_algs[i].alg.skcipher);
797 			if (err) {
798 				dev_err(ce->dev, "ERROR: Failed to register %s\n",
799 					ce_algs[i].alg.skcipher.base.cra_name);
800 				ce_algs[i].ce = NULL;
801 				return err;
802 			}
803 			break;
804 		case CRYPTO_ALG_TYPE_AHASH:
805 			id = ce_algs[i].ce_algo_id;
806 			ce_method = ce->variant->alg_hash[id];
807 			if (ce_method == CE_ID_NOTSUPP) {
808 				dev_info(ce->dev,
809 					 "DEBUG: Algo of %s not supported\n",
810 					 ce_algs[i].alg.hash.halg.base.cra_name);
811 				ce_algs[i].ce = NULL;
812 				break;
813 			}
814 			dev_info(ce->dev, "Register %s\n",
815 				 ce_algs[i].alg.hash.halg.base.cra_name);
816 			err = crypto_register_ahash(&ce_algs[i].alg.hash);
817 			if (err) {
818 				dev_err(ce->dev, "ERROR: Failed to register %s\n",
819 					ce_algs[i].alg.hash.halg.base.cra_name);
820 				ce_algs[i].ce = NULL;
821 				return err;
822 			}
823 			break;
824 		case CRYPTO_ALG_TYPE_RNG:
825 			if (ce->variant->prng == CE_ID_NOTSUPP) {
826 				dev_info(ce->dev,
827 					 "DEBUG: Algo of %s not supported\n",
828 					 ce_algs[i].alg.rng.base.cra_name);
829 				ce_algs[i].ce = NULL;
830 				break;
831 			}
832 			dev_info(ce->dev, "Register %s\n",
833 				 ce_algs[i].alg.rng.base.cra_name);
834 			err = crypto_register_rng(&ce_algs[i].alg.rng);
835 			if (err) {
836 				dev_err(ce->dev, "Failed to register %s\n",
837 					ce_algs[i].alg.rng.base.cra_name);
838 				ce_algs[i].ce = NULL;
839 			}
840 			break;
841 		default:
842 			ce_algs[i].ce = NULL;
843 			dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
844 		}
845 	}
846 	return 0;
847 }
848 
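/* Unregister every algorithm that was successfully registered */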
849 static void sun8i_ce_unregister_algs(struct sun8i_ce_dev *ce)
850 {
851 	unsigned int i;
852 
853 	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
854 		if (!ce_algs[i].ce)
855 			continue;
856 		switch (ce_algs[i].type) {
857 		case CRYPTO_ALG_TYPE_SKCIPHER:
858 			dev_info(ce->dev, "Unregister %d %s\n", i,
859 				 ce_algs[i].alg.skcipher.base.cra_name);
860 			crypto_unregister_skcipher(&ce_algs[i].alg.skcipher);
861 			break;
862 		case CRYPTO_ALG_TYPE_AHASH:
863 			dev_info(ce->dev, "Unregister %d %s\n", i,
864 				 ce_algs[i].alg.hash.halg.base.cra_name);
865 			crypto_unregister_ahash(&ce_algs[i].alg.hash);
866 			break;
867 		case CRYPTO_ALG_TYPE_RNG:
868 			dev_info(ce->dev, "Unregister %d %s\n", i,
869 				 ce_algs[i].alg.rng.base.cra_name);
870 			crypto_unregister_rng(&ce_algs[i].alg.rng);
871 			break;
872 		}
873 	}
874 }
875 
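/*
 * Probe: map the registers, get the clocks, reset control and non-secure IRQ,
 * allocate the flow channels, enable runtime PM and register the algorithms.
 */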
876 static int sun8i_ce_probe(struct platform_device *pdev)
877 {
878 	struct sun8i_ce_dev *ce;
879 	int err, irq;
880 	u32 v;
881 
882 	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
883 	if (!ce)
884 		return -ENOMEM;
885 
886 	ce->dev = &pdev->dev;
887 	platform_set_drvdata(pdev, ce);
888 
889 	ce->variant = of_device_get_match_data(&pdev->dev);
890 	if (!ce->variant) {
891 		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
892 		return -EINVAL;
893 	}
894 
895 	ce->base = devm_platform_ioremap_resource(pdev, 0);
896 	if (IS_ERR(ce->base))
897 		return PTR_ERR(ce->base);
898 
899 	err = sun8i_ce_get_clks(ce);
900 	if (err)
901 		return err;
902 
903 	/* Get the Non-Secure IRQ */
904 	irq = platform_get_irq(pdev, 0);
905 	if (irq < 0)
906 		return irq;
907 
908 	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
909 	if (IS_ERR(ce->reset))
910 		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
911 				     "No reset control found\n");
912 
913 	mutex_init(&ce->mlock);
914 	mutex_init(&ce->rnglock);
915 
916 	err = sun8i_ce_allocate_chanlist(ce);
917 	if (err)
918 		return err;
919 
920 	err = sun8i_ce_pm_init(ce);
921 	if (err)
922 		goto error_pm;
923 
924 	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0,
925 			       "sun8i-ce-ns", ce);
926 	if (err) {
927 		dev_err(ce->dev, "Cannot request CryptoEngine Non-secure IRQ (err=%d)\n", err);
928 		goto error_irq;
929 	}
930 
931 	err = sun8i_ce_register_algs(ce);
932 	if (err)
933 		goto error_alg;
934 
935 	err = pm_runtime_resume_and_get(ce->dev);
936 	if (err < 0)
937 		goto error_alg;
938 
939 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
940 	sun8i_ce_hwrng_register(ce);
941 #endif
942 
943 	v = readl(ce->base + CE_CTR);
944 	v >>= CE_DIE_ID_SHIFT;
945 	v &= CE_DIE_ID_MASK;
946 	dev_info(&pdev->dev, "CryptoEngine Die ID %x\n", v);
947 
948 	pm_runtime_put_sync(ce->dev);
949 
950 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
951 	/* Ignore debugfs errors */
952 	ce->dbgfs_dir = debugfs_create_dir("sun8i-ce", NULL);
953 	ce->dbgfs_stats = debugfs_create_file("stats", 0444,
954 					      ce->dbgfs_dir, ce,
955 					      &sun8i_ce_debugfs_fops);
956 #endif
957 
958 	return 0;
959 error_alg:
960 	sun8i_ce_unregister_algs(ce);
961 error_irq:
962 	sun8i_ce_pm_exit(ce);
963 error_pm:
964 	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
965 	return err;
966 }
967 
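/*
 * Remove: unregister the hwrng and the algorithms, remove the debugfs
 * entries, free the flow channels and disable runtime PM.
 */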
968 static int sun8i_ce_remove(struct platform_device *pdev)
969 {
970 	struct sun8i_ce_dev *ce = platform_get_drvdata(pdev);
971 
972 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
973 	sun8i_ce_hwrng_unregister(ce);
974 #endif
975 
976 	sun8i_ce_unregister_algs(ce);
977 
978 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
979 	debugfs_remove_recursive(ce->dbgfs_dir);
980 #endif
981 
982 	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
983 
984 	sun8i_ce_pm_exit(ce);
985 	return 0;
986 }
987 
988 static const struct of_device_id sun8i_ce_crypto_of_match_table[] = {
989 	{ .compatible = "allwinner,sun8i-h3-crypto",
990 	  .data = &ce_h3_variant },
991 	{ .compatible = "allwinner,sun8i-r40-crypto",
992 	  .data = &ce_r40_variant },
993 	{ .compatible = "allwinner,sun50i-a64-crypto",
994 	  .data = &ce_a64_variant },
995 	{ .compatible = "allwinner,sun50i-h5-crypto",
996 	  .data = &ce_h5_variant },
997 	{ .compatible = "allwinner,sun50i-h6-crypto",
998 	  .data = &ce_h6_variant },
999 	{}
1000 };
1001 MODULE_DEVICE_TABLE(of, sun8i_ce_crypto_of_match_table);
1002 
1003 static struct platform_driver sun8i_ce_driver = {
1004 	.probe		 = sun8i_ce_probe,
1005 	.remove		 = sun8i_ce_remove,
1006 	.driver		 = {
1007 		.name		= "sun8i-ce",
1008 		.pm		= &sun8i_ce_pm_ops,
1009 		.of_match_table	= sun8i_ce_crypto_of_match_table,
1010 	},
1011 };
1012 
1013 module_platform_driver(sun8i_ce_driver);
1014 
1015 MODULE_DESCRIPTION("Allwinner Crypto Engine cryptographic offloader");
1016 MODULE_LICENSE("GPL");
1017 MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>");
1018