xref: /linux/drivers/crypto/stm32/stm32-cryp.c (revision a1ff5a7d78a036d6c2178ee5acd6ba4946243800)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) STMicroelectronics SA 2017
4  * Author: Fabien Dessenne <fabien.dessenne@st.com>
5  * Ux500 support taken from snippets in the old Ux500 cryp driver
6  */
7 
8 #include <crypto/aes.h>
9 #include <crypto/engine.h>
10 #include <crypto/internal/aead.h>
11 #include <crypto/internal/des.h>
12 #include <crypto/internal/skcipher.h>
13 #include <crypto/scatterwalk.h>
14 #include <linux/bottom_half.h>
15 #include <linux/clk.h>
16 #include <linux/delay.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmaengine.h>
19 #include <linux/err.h>
20 #include <linux/iopoll.h>
21 #include <linux/interrupt.h>
22 #include <linux/kernel.h>
23 #include <linux/module.h>
24 #include <linux/of.h>
25 #include <linux/platform_device.h>
26 #include <linux/pm_runtime.h>
27 #include <linux/reset.h>
28 #include <linux/string.h>
29 
30 #define DRIVER_NAME             "stm32-cryp"
31 
32 /* Bit [0] encrypt / decrypt */
33 #define FLG_ENCRYPT             BIT(0)
34 /* Bit [8..1] algo & operation mode */
35 #define FLG_AES                 BIT(1)
36 #define FLG_DES                 BIT(2)
37 #define FLG_TDES                BIT(3)
38 #define FLG_ECB                 BIT(4)
39 #define FLG_CBC                 BIT(5)
40 #define FLG_CTR                 BIT(6)
41 #define FLG_GCM                 BIT(7)
42 #define FLG_CCM                 BIT(8)
43 /* Mode mask = bits [15..0] */
44 #define FLG_MODE_MASK           GENMASK(15, 0)
45 /* Bit [31..16] status  */
46 #define FLG_IN_OUT_DMA          BIT(16)
47 #define FLG_HEADER_DMA          BIT(17)
48 
49 /* Registers */
50 #define CRYP_CR                 0x00000000
51 #define CRYP_SR                 0x00000004
52 #define CRYP_DIN                0x00000008
53 #define CRYP_DOUT               0x0000000C
54 #define CRYP_DMACR              0x00000010
55 #define CRYP_IMSCR              0x00000014
56 #define CRYP_RISR               0x00000018
57 #define CRYP_MISR               0x0000001C
58 #define CRYP_K0LR               0x00000020
59 #define CRYP_K0RR               0x00000024
60 #define CRYP_K1LR               0x00000028
61 #define CRYP_K1RR               0x0000002C
62 #define CRYP_K2LR               0x00000030
63 #define CRYP_K2RR               0x00000034
64 #define CRYP_K3LR               0x00000038
65 #define CRYP_K3RR               0x0000003C
66 #define CRYP_IV0LR              0x00000040
67 #define CRYP_IV0RR              0x00000044
68 #define CRYP_IV1LR              0x00000048
69 #define CRYP_IV1RR              0x0000004C
70 #define CRYP_CSGCMCCM0R         0x00000050
71 #define CRYP_CSGCM0R            0x00000070
72 
73 #define UX500_CRYP_CR		0x00000000
74 #define UX500_CRYP_SR		0x00000004
75 #define UX500_CRYP_DIN		0x00000008
76 #define UX500_CRYP_DINSIZE	0x0000000C
77 #define UX500_CRYP_DOUT		0x00000010
78 #define UX500_CRYP_DOUSIZE	0x00000014
79 #define UX500_CRYP_DMACR	0x00000018
80 #define UX500_CRYP_IMSC		0x0000001C
81 #define UX500_CRYP_RIS		0x00000020
82 #define UX500_CRYP_MIS		0x00000024
83 #define UX500_CRYP_K1L		0x00000028
84 #define UX500_CRYP_K1R		0x0000002C
85 #define UX500_CRYP_K2L		0x00000030
86 #define UX500_CRYP_K2R		0x00000034
87 #define UX500_CRYP_K3L		0x00000038
88 #define UX500_CRYP_K3R		0x0000003C
89 #define UX500_CRYP_K4L		0x00000040
90 #define UX500_CRYP_K4R		0x00000044
91 #define UX500_CRYP_IV0L		0x00000048
92 #define UX500_CRYP_IV0R		0x0000004C
93 #define UX500_CRYP_IV1L		0x00000050
94 #define UX500_CRYP_IV1R		0x00000054
95 
96 /* Registers values */
97 #define CR_DEC_NOT_ENC          0x00000004
98 #define CR_TDES_ECB             0x00000000
99 #define CR_TDES_CBC             0x00000008
100 #define CR_DES_ECB              0x00000010
101 #define CR_DES_CBC              0x00000018
102 #define CR_AES_ECB              0x00000020
103 #define CR_AES_CBC              0x00000028
104 #define CR_AES_CTR              0x00000030
105 #define CR_AES_KP               0x00000038 /* Not on Ux500 */
106 #define CR_AES_XTS              0x00000038 /* Only on Ux500 */
107 #define CR_AES_GCM              0x00080000
108 #define CR_AES_CCM              0x00080008
109 #define CR_AES_UNKNOWN          0xFFFFFFFF
110 #define CR_ALGO_MASK            0x00080038
111 #define CR_DATA32               0x00000000
112 #define CR_DATA16               0x00000040
113 #define CR_DATA8                0x00000080
114 #define CR_DATA1                0x000000C0
115 #define CR_KEY128               0x00000000
116 #define CR_KEY192               0x00000100
117 #define CR_KEY256               0x00000200
118 #define CR_KEYRDEN              0x00000400 /* Only on Ux500 */
119 #define CR_KSE                  0x00000800 /* Only on Ux500 */
120 #define CR_FFLUSH               0x00004000
121 #define CR_CRYPEN               0x00008000
122 #define CR_PH_INIT              0x00000000
123 #define CR_PH_HEADER            0x00010000
124 #define CR_PH_PAYLOAD           0x00020000
125 #define CR_PH_FINAL             0x00030000
126 #define CR_PH_MASK              0x00030000
127 #define CR_NBPBL_SHIFT          20
128 
129 #define SR_IFNF                 BIT(1)
130 #define SR_OFNE                 BIT(2)
131 #define SR_BUSY                 BIT(8)
132 
133 #define DMACR_DIEN              BIT(0)
134 #define DMACR_DOEN              BIT(1)
135 
136 #define IMSCR_IN                BIT(0)
137 #define IMSCR_OUT               BIT(1)
138 
139 #define MISR_IN                 BIT(0)
140 #define MISR_OUT                BIT(1)
141 
142 /* Misc */
143 #define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
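/* GCM: counter value 1 is used for the tag, so payload encryption starts at 2 */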
144 #define GCM_CTR_INIT            2
145 #define CRYP_AUTOSUSPEND_DELAY  50
146 
147 #define CRYP_DMA_BURST_REG      4
148 
149 enum stm32_dma_mode {
150 	NO_DMA,
151 	DMA_PLAIN_SG,
152 	DMA_NEED_SG_TRUNC
153 };
154 
155 struct stm32_cryp_caps {
156 	bool			aeads_support;
157 	bool			linear_aes_key;
158 	bool			kp_mode;
159 	bool			iv_protection;
160 	bool			swap_final;
161 	bool			padding_wa;
162 	u32			cr;
163 	u32			sr;
164 	u32			din;
165 	u32			dout;
166 	u32			dmacr;
167 	u32			imsc;
168 	u32			mis;
169 	u32			k1l;
170 	u32			k1r;
171 	u32			k3r;
172 	u32			iv0l;
173 	u32			iv0r;
174 	u32			iv1l;
175 	u32			iv1r;
176 };
177 
178 struct stm32_cryp_ctx {
179 	struct stm32_cryp       *cryp;
180 	int                     keylen;
181 	__be32                  key[AES_KEYSIZE_256 / sizeof(u32)];
182 	unsigned long           flags;
183 };
184 
185 struct stm32_cryp_reqctx {
186 	unsigned long mode;
187 };
188 
189 struct stm32_cryp {
190 	struct list_head        list;
191 	struct device           *dev;
192 	void __iomem            *regs;
193 	phys_addr_t             phys_base;
194 	struct clk              *clk;
195 	unsigned long           flags;
196 	u32                     irq_status;
197 	const struct stm32_cryp_caps *caps;
198 	struct stm32_cryp_ctx   *ctx;
199 
200 	struct crypto_engine    *engine;
201 
202 	struct skcipher_request *req;
203 	struct aead_request     *areq;
204 
205 	size_t                  authsize;
206 	size_t                  hw_blocksize;
207 
208 	size_t                  payload_in;
209 	size_t                  header_in;
210 	size_t                  payload_out;
211 
212 	/* DMA process fields */
213 	struct scatterlist      *in_sg;
214 	struct scatterlist      *header_sg;
215 	struct scatterlist      *out_sg;
216 	size_t                  in_sg_len;
217 	size_t                  header_sg_len;
218 	size_t                  out_sg_len;
219 	struct completion	dma_completion;
220 
221 	struct dma_chan         *dma_lch_in;
222 	struct dma_chan         *dma_lch_out;
223 	enum stm32_dma_mode     dma_mode;
224 
225 	/* IT process fields */
226 	struct scatter_walk     in_walk;
227 	struct scatter_walk     out_walk;
228 
229 	__be32                  last_ctr[4];
230 	u32                     gcm_ctr;
231 };
232 
233 struct stm32_cryp_list {
234 	struct list_head        dev_list;
235 	spinlock_t              lock; /* protect dev_list */
236 };
237 
238 static struct stm32_cryp_list cryp_list = {
239 	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
240 	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
241 };
242 
243 static inline bool is_aes(struct stm32_cryp *cryp)
244 {
245 	return cryp->flags & FLG_AES;
246 }
247 
248 static inline bool is_des(struct stm32_cryp *cryp)
249 {
250 	return cryp->flags & FLG_DES;
251 }
252 
253 static inline bool is_tdes(struct stm32_cryp *cryp)
254 {
255 	return cryp->flags & FLG_TDES;
256 }
257 
258 static inline bool is_ecb(struct stm32_cryp *cryp)
259 {
260 	return cryp->flags & FLG_ECB;
261 }
262 
263 static inline bool is_cbc(struct stm32_cryp *cryp)
264 {
265 	return cryp->flags & FLG_CBC;
266 }
267 
268 static inline bool is_ctr(struct stm32_cryp *cryp)
269 {
270 	return cryp->flags & FLG_CTR;
271 }
272 
273 static inline bool is_gcm(struct stm32_cryp *cryp)
274 {
275 	return cryp->flags & FLG_GCM;
276 }
277 
278 static inline bool is_ccm(struct stm32_cryp *cryp)
279 {
280 	return cryp->flags & FLG_CCM;
281 }
282 
283 static inline bool is_encrypt(struct stm32_cryp *cryp)
284 {
285 	return cryp->flags & FLG_ENCRYPT;
286 }
287 
288 static inline bool is_decrypt(struct stm32_cryp *cryp)
289 {
290 	return !is_encrypt(cryp);
291 }
292 
293 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
294 {
295 	return readl_relaxed(cryp->regs + ofst);
296 }
297 
298 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
299 {
300 	writel_relaxed(val, cryp->regs + ofst);
301 }
302 
303 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
304 {
305 	u32 status;
306 
307 	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
308 			!(status & SR_BUSY), 10, 100000);
309 }
310 
311 static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
312 {
313 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
314 		       cryp->regs + cryp->caps->cr);
315 }
316 
317 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
318 {
319 	u32 status;
320 
321 	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
322 			!(status & CR_CRYPEN), 10, 100000);
323 }
324 
325 static inline int stm32_cryp_wait_input(struct stm32_cryp *cryp)
326 {
327 	u32 status;
328 
329 	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
330 			status & SR_IFNF, 1, 10);
331 }
332 
333 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
334 {
335 	u32 status;
336 
337 	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
338 			status & SR_OFNE, 1, 10);
339 }
340 
341 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
342 {
343 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
344 		       cryp->regs + cryp->caps->cr);
345 }
346 
347 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
348 {
349 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
350 		       cryp->regs + cryp->caps->cr);
351 }
352 
353 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp);
354 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp);
355 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp);
356 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
357 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
358 static int stm32_cryp_dma_start(struct stm32_cryp *cryp);
359 static int stm32_cryp_it_start(struct stm32_cryp *cryp);
360 
361 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
362 {
363 	struct stm32_cryp *tmp, *cryp = NULL;
364 
365 	spin_lock_bh(&cryp_list.lock);
366 	if (!ctx->cryp) {
367 		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
368 			cryp = tmp;
369 			break;
370 		}
371 		ctx->cryp = cryp;
372 	} else {
373 		cryp = ctx->cryp;
374 	}
375 
376 	spin_unlock_bh(&cryp_list.lock);
377 
378 	return cryp;
379 }
380 
381 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
382 {
383 	if (!iv)
384 		return;
385 
386 	stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
387 	stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));
388 
389 	if (is_aes(cryp)) {
390 		stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
391 		stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
392 	}
393 }
394 
395 static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
396 {
397 	struct skcipher_request *req = cryp->req;
398 	__be32 *tmp = (void *)req->iv;
399 
400 	if (!tmp)
401 		return;
402 
403 	if (cryp->caps->iv_protection)
404 		stm32_cryp_key_read_enable(cryp);
405 
406 	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
407 	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
408 
409 	if (is_aes(cryp)) {
410 		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
411 		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
412 	}
413 
414 	if (cryp->caps->iv_protection)
415 		stm32_cryp_key_read_disable(cryp);
416 }
417 
418 /**
419  * ux500_swap_bits_in_byte() - mirror the bits in a byte
420  * @b: the byte to be mirrored
421  *
422  * The bits are swapped the following way:
423  *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
424  *  nibble 2 (n2) bits 4-7.
425  *
426  *  Nibble 1 (n1):
427  *  (The "old" (moved) bit is replaced with a zero)
428  *  1. Move bit 6 and 7, 4 positions to the left.
429  *  2. Move bit 3 and 5, 2 positions to the left.
430  *  3. Move bit 1-4, 1 position to the left.
431  *
432  *  Nibble 2 (n2):
433  *  1. Move bit 0 and 1, 4 positions to the right.
434  *  2. Move bit 2 and 4, 2 positions to the right.
435  *  3. Move bit 3-6, 1 position to the right.
436  *
437  *  Combine the two nibbles to a complete and swapped byte.
438  */
439 static inline u8 ux500_swap_bits_in_byte(u8 b)
440 {
441 #define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
442 #define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
443 				  right shift 2 */
444 #define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
445 				  right shift 1 */
446 #define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
447 #define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
448 				  left shift 2 */
449 #define L_SHIFT_1_MASK  0x78 /* (After left shift 1) Bits 3-6,
450 				  left shift 1 */
451 
452 	u8 n1;
453 	u8 n2;
454 
455 	/* Swap most significant nibble */
456 	/* Right shift 4, bits 6 and 7 */
457 	n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
458 	/* Right shift 2, bits 3 and 5 */
459 	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
460 	/* Right shift 1, bits 1-4 */
461 	n1 = (n1  & R_SHIFT_1_MASK) >> 1;
462 
463 	/* Swap least significant nibble */
464 	/* Left shift 4, bits 0 and 1 */
465 	n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
466 	/* Left shift 2, bits 2 and 4 */
467 	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
468 	/* Left shift 1, bits 3-6 */
469 	n2 = (n2  & L_SHIFT_1_MASK) << 1;
470 
471 	return n1 | n2;
472 }
473 
474 /**
475  * ux500_swizzle_key() - Shuffle around words and bits in the AES key
476  * @in: key to swizzle
477  * @out: swizzled key
478  * @len: length of key, in bytes
479  *
480  * This "key swizzling procedure" is described in the examples in the
481  * DB8500 design specification. There is no real description of why
482  * the bits have been arranged like this in the hardware.
483  */
484 static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
485 {
486 	int i = 0;
487 	int bpw = sizeof(u32);
488 	int j;
489 	int index = 0;
490 
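	/* Reverse the 32-bit word order of the key while bit-mirroring every byte */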
491 	j = len - bpw;
492 	while (j >= 0) {
493 		for (i = 0; i < bpw; i++) {
494 			index = len - j - bpw + i;
495 			out[j + i] =
496 				ux500_swap_bits_in_byte(in[index]);
497 		}
498 		j -= bpw;
499 	}
500 }
501 
502 static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
503 {
504 	unsigned int i;
505 	int r_id;
506 
507 	if (is_des(c)) {
508 		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
509 		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
510 		return;
511 	}
512 
513 	/*
514 	 * On the Ux500 the AES key is considered as a single bit sequence
515 	 * of 128, 192 or 256 bits length. It is written linearly into the
516 	 * registers from K1L and down, and needs to be processed to become
517 	 * a proper big-endian bit sequence.
518 	 */
519 	if (is_aes(c) && c->caps->linear_aes_key) {
520 		u32 tmpkey[8];
521 
522 		ux500_swizzle_key((u8 *)c->ctx->key,
523 				  (u8 *)tmpkey, c->ctx->keylen);
524 
525 		r_id = c->caps->k1l;
526 		for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
527 			stm32_cryp_write(c, r_id, tmpkey[i]);
528 
529 		return;
530 	}
531 
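	/* Standard layout: the key is right-aligned, last word in K3R, walking down towards K0L */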
532 	r_id = c->caps->k3r;
533 	for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
534 		stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
535 }
536 
537 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
538 {
539 	if (is_aes(cryp) && is_ecb(cryp))
540 		return CR_AES_ECB;
541 
542 	if (is_aes(cryp) && is_cbc(cryp))
543 		return CR_AES_CBC;
544 
545 	if (is_aes(cryp) && is_ctr(cryp))
546 		return CR_AES_CTR;
547 
548 	if (is_aes(cryp) && is_gcm(cryp))
549 		return CR_AES_GCM;
550 
551 	if (is_aes(cryp) && is_ccm(cryp))
552 		return CR_AES_CCM;
553 
554 	if (is_des(cryp) && is_ecb(cryp))
555 		return CR_DES_ECB;
556 
557 	if (is_des(cryp) && is_cbc(cryp))
558 		return CR_DES_CBC;
559 
560 	if (is_tdes(cryp) && is_ecb(cryp))
561 		return CR_TDES_ECB;
562 
563 	if (is_tdes(cryp) && is_cbc(cryp))
564 		return CR_TDES_CBC;
565 
566 	dev_err(cryp->dev, "Unknown mode\n");
567 	return CR_AES_UNKNOWN;
568 }
569 
570 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
571 {
572 	return is_encrypt(cryp) ? cryp->areq->cryptlen :
573 				  cryp->areq->cryptlen - cryp->authsize;
574 }
575 
576 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
577 {
578 	int ret;
579 	__be32 iv[4];
580 
581 	/* Phase 1 : init */
582 	memcpy(iv, cryp->areq->iv, 12);
583 	iv[3] = cpu_to_be32(GCM_CTR_INIT);
584 	cryp->gcm_ctr = GCM_CTR_INIT;
585 	stm32_cryp_hw_write_iv(cryp, iv);
586 
587 	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
588 
589 	/* Wait for end of processing */
590 	ret = stm32_cryp_wait_enable(cryp);
591 	if (ret) {
592 		dev_err(cryp->dev, "Timeout (gcm init)\n");
593 		return ret;
594 	}
595 
596 	/* Prepare next phase */
597 	if (cryp->areq->assoclen) {
598 		cfg |= CR_PH_HEADER;
599 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
600 	} else if (stm32_cryp_get_input_text_len(cryp)) {
601 		cfg |= CR_PH_PAYLOAD;
602 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
603 	}
604 
605 	return 0;
606 }
607 
608 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
609 {
610 	u32 cfg;
611 	int err;
612 
613 	/* Check if whole header written */
614 	if (!cryp->header_in) {
615 		/* Wait for completion */
616 		err = stm32_cryp_wait_busy(cryp);
617 		if (err) {
618 			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
619 			stm32_cryp_write(cryp, cryp->caps->imsc, 0);
620 			stm32_cryp_finish_req(cryp, err);
621 			return;
622 		}
623 
624 		if (stm32_cryp_get_input_text_len(cryp)) {
625 			/* Phase 3 : payload */
626 			cfg = stm32_cryp_read(cryp, cryp->caps->cr);
627 			cfg &= ~CR_CRYPEN;
628 			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
629 
630 			cfg &= ~CR_PH_MASK;
631 			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
632 			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
633 		} else {
634 			/*
635 			 * Phase 4 : tag.
636 			 * Nothing to read, nothing to write; the caller has to
637 			 * end the request.
638 			 */
639 		}
640 	}
641 }
642 
643 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
644 {
645 	size_t written;
646 	size_t len;
647 	u32 alen = cryp->areq->assoclen;
648 	u32 block[AES_BLOCK_32] = {0};
649 	u8 *b8 = (u8 *)block;
650 
651 	if (alen <= 65280) {
652 		/* Write first u32 of B1 */
653 		b8[0] = (alen >> 8) & 0xFF;
654 		b8[1] = alen & 0xFF;
655 		len = 2;
656 	} else {
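		/* Larger associated data: RFC 3610 six-byte encoding 0xff 0xfe || be32(alen) */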
657 		/* Build the two first u32 of B1 */
658 		b8[0] = 0xFF;
659 		b8[1] = 0xFE;
660 		b8[2] = (alen & 0xFF000000) >> 24;
661 		b8[3] = (alen & 0x00FF0000) >> 16;
662 		b8[4] = (alen & 0x0000FF00) >> 8;
663 		b8[5] = alen & 0x000000FF;
664 		len = 6;
665 	}
666 
667 	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
668 
669 	scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
670 
671 	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
672 
673 	cryp->header_in -= written;
674 
675 	stm32_crypt_gcmccm_end_header(cryp);
676 }
677 
678 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
679 {
680 	int ret;
681 	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
682 	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
683 	__be32 *bd;
684 	u32 *d;
685 	unsigned int i, textlen;
686 
687 	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
688 	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
689 	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
690 	iv[AES_BLOCK_SIZE - 1] = 1;
691 	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
692 
693 	/* Build B0 */
694 	memcpy(b0, iv, AES_BLOCK_SIZE);
695 
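	/* B0 flags byte: bits [5:3] carry M' = (authsize - 2) / 2, per RFC 3610 */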
696 	b0[0] |= (8 * ((cryp->authsize - 2) / 2));
697 
698 	if (cryp->areq->assoclen)
699 		b0[0] |= 0x40;
700 
701 	textlen = stm32_cryp_get_input_text_len(cryp);
702 
703 	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
704 	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
705 
706 	/* Enable HW */
707 	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
708 
709 	/* Write B0 */
710 	d = (u32 *)b0;
711 	bd = (__be32 *)b0;
712 
713 	for (i = 0; i < AES_BLOCK_32; i++) {
714 		u32 xd = d[i];
715 
716 		if (!cryp->caps->padding_wa)
717 			xd = be32_to_cpu(bd[i]);
718 		stm32_cryp_write(cryp, cryp->caps->din, xd);
719 	}
720 
721 	/* Wait for end of processing */
722 	ret = stm32_cryp_wait_enable(cryp);
723 	if (ret) {
724 		dev_err(cryp->dev, "Timeout (ccm init)\n");
725 		return ret;
726 	}
727 
728 	/* Prepare next phase */
729 	if (cryp->areq->assoclen) {
730 		cfg |= CR_PH_HEADER | CR_CRYPEN;
731 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
732 
733 		/* Write first (special) block (may move to next phase [payload]) */
734 		stm32_cryp_write_ccm_first_header(cryp);
735 	} else if (stm32_cryp_get_input_text_len(cryp)) {
736 		cfg |= CR_PH_PAYLOAD;
737 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
738 	}
739 
740 	return 0;
741 }
742 
743 static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
744 {
745 	int ret;
746 	u32 cfg, hw_mode;
747 
748 	pm_runtime_get_sync(cryp->dev);
749 
750 	/* Disable interrupt */
751 	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
752 
753 	/* Set configuration */
754 	cfg = CR_DATA8 | CR_FFLUSH;
755 
756 	switch (cryp->ctx->keylen) {
757 	case AES_KEYSIZE_128:
758 		cfg |= CR_KEY128;
759 		break;
760 
761 	case AES_KEYSIZE_192:
762 		cfg |= CR_KEY192;
763 		break;
764 
765 	default:
766 	case AES_KEYSIZE_256:
767 		cfg |= CR_KEY256;
768 		break;
769 	}
770 
771 	hw_mode = stm32_cryp_get_hw_mode(cryp);
772 	if (hw_mode == CR_AES_UNKNOWN)
773 		return -EINVAL;
774 
775 	/* AES ECB/CBC decrypt: run key preparation first */
776 	if (is_decrypt(cryp) &&
777 	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
778 		/* Configure in key preparation mode */
779 		if (cryp->caps->kp_mode)
780 			stm32_cryp_write(cryp, cryp->caps->cr,
781 				cfg | CR_AES_KP);
782 		else
783 			stm32_cryp_write(cryp,
784 				cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);
785 
786 		/* Set key only after full configuration done */
787 		stm32_cryp_hw_write_key(cryp);
788 
789 		/* Start prepare key */
790 		stm32_cryp_enable(cryp);
791 		/* Wait for end of processing */
792 		ret = stm32_cryp_wait_busy(cryp);
793 		if (ret) {
794 			dev_err(cryp->dev, "Timeout (key preparation)\n");
795 			return ret;
796 		}
797 
798 		cfg |= hw_mode | CR_DEC_NOT_ENC;
799 
800 		/* Apply updated config (Decrypt + algo) and flush */
801 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
802 	} else {
803 		cfg |= hw_mode;
804 		if (is_decrypt(cryp))
805 			cfg |= CR_DEC_NOT_ENC;
806 
807 		/* Apply config and flush */
808 		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
809 
810 		/* Set key only after configuration done */
811 		stm32_cryp_hw_write_key(cryp);
812 	}
813 
814 	switch (hw_mode) {
815 	case CR_AES_GCM:
816 	case CR_AES_CCM:
817 		/* Phase 1 : init */
818 		if (hw_mode == CR_AES_CCM)
819 			ret = stm32_cryp_ccm_init(cryp, cfg);
820 		else
821 			ret = stm32_cryp_gcm_init(cryp, cfg);
822 
823 		if (ret)
824 			return ret;
825 
826 		break;
827 
828 	case CR_DES_CBC:
829 	case CR_TDES_CBC:
830 	case CR_AES_CBC:
831 	case CR_AES_CTR:
832 		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
833 		break;
834 
835 	default:
836 		break;
837 	}
838 
839 	/* Enable now */
840 	stm32_cryp_enable(cryp);
841 
842 	return 0;
843 }
844 
845 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
846 {
847 	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
848 		/* Phase 4 : output tag */
849 		err = stm32_cryp_read_auth_tag(cryp);
850 
851 	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
852 		stm32_cryp_get_iv(cryp);
853 
854 	pm_runtime_mark_last_busy(cryp->dev);
855 	pm_runtime_put_autosuspend(cryp->dev);
856 
857 	if (is_gcm(cryp) || is_ccm(cryp))
858 		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
859 	else
860 		crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);
861 }
862 
863 static void stm32_cryp_header_dma_callback(void *param)
864 {
865 	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
866 	int ret;
867 	u32 reg;
868 
869 	dma_unmap_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
870 
871 	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
872 	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));
873 
874 	kfree(cryp->header_sg);
875 
876 	reg = stm32_cryp_read(cryp, cryp->caps->cr);
877 
878 	if (cryp->header_in) {
879 		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);
880 
881 		ret = stm32_cryp_wait_input(cryp);
882 		if (ret) {
883 			dev_err(cryp->dev, "input header ready timeout after dma\n");
884 			stm32_cryp_finish_req(cryp, ret);
885 			return;
886 		}
887 		stm32_cryp_irq_write_gcmccm_header(cryp);
888 		WARN_ON(cryp->header_in);
889 	}
890 
891 	if (stm32_cryp_get_input_text_len(cryp)) {
892 		/* Phase 3 : payload */
893 		reg = stm32_cryp_read(cryp, cryp->caps->cr);
894 		stm32_cryp_write(cryp, cryp->caps->cr, reg & ~CR_CRYPEN);
895 
896 		reg &= ~CR_PH_MASK;
897 		reg |= CR_PH_PAYLOAD | CR_CRYPEN;
898 		stm32_cryp_write(cryp, cryp->caps->cr, reg);
899 
900 		if (cryp->flags & FLG_IN_OUT_DMA) {
901 			ret = stm32_cryp_dma_start(cryp);
902 			if (ret)
903 				stm32_cryp_finish_req(cryp, ret);
904 		} else {
905 			stm32_cryp_it_start(cryp);
906 		}
907 	} else {
908 		/*
909 		 * Phase 4 : tag.
910 		 * Nothing to read, nothing to write => end request
911 		 */
912 		stm32_cryp_finish_req(cryp, 0);
913 	}
914 }
915 
916 static void stm32_cryp_dma_callback(void *param)
917 {
918 	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
919 	int ret;
920 	u32 reg;
921 
922 	complete(&cryp->dma_completion); /* completion to indicate no timeout */
923 
924 	dma_sync_sg_for_device(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
925 
926 	if (cryp->in_sg != cryp->out_sg)
927 		dma_unmap_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
928 
929 	dma_unmap_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
930 
931 	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
932 	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));
933 
934 	reg = stm32_cryp_read(cryp, cryp->caps->cr);
935 
936 	if (is_gcm(cryp) || is_ccm(cryp)) {
937 		kfree(cryp->in_sg);
938 		kfree(cryp->out_sg);
939 	} else {
940 		if (cryp->in_sg != cryp->req->src)
941 			kfree(cryp->in_sg);
942 		if (cryp->out_sg != cryp->req->dst)
943 			kfree(cryp->out_sg);
944 	}
945 
946 	if (cryp->payload_in) {
947 		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);
948 
949 		ret = stm32_cryp_wait_input(cryp);
950 		if (ret) {
951 			dev_err(cryp->dev, "input ready timeout after dma\n");
952 			stm32_cryp_finish_req(cryp, ret);
953 			return;
954 		}
955 		stm32_cryp_irq_write_data(cryp);
956 
957 		ret = stm32_cryp_wait_output(cryp);
958 		if (ret) {
959 			dev_err(cryp->dev, "output ready timeout after dma\n");
960 			stm32_cryp_finish_req(cryp, ret);
961 			return;
962 		}
963 		stm32_cryp_irq_read_data(cryp);
964 	}
965 
966 	stm32_cryp_finish_req(cryp, 0);
967 }
968 
969 static int stm32_cryp_header_dma_start(struct stm32_cryp *cryp)
970 {
971 	int ret;
972 	struct dma_async_tx_descriptor *tx_in;
973 	u32 reg;
974 	size_t align_size;
975 
976 	ret = dma_map_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
977 	if (!ret) {
978 		dev_err(cryp->dev, "dma_map_sg() error\n");
979 		return -ENOMEM;
980 	}
981 
982 	dma_sync_sg_for_device(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
983 
984 	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->header_sg, cryp->header_sg_len,
985 					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
986 	if (!tx_in) {
987 		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
988 		return -EINVAL;
989 	}
990 
991 	tx_in->callback_param = cryp;
992 	tx_in->callback = stm32_cryp_header_dma_callback;
993 
994 	/* Advance the scatterwalk past the DMA'ed part, to the data handled by the CPU */
995 	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
996 	scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
997 	cryp->header_in -= align_size;
998 
999 	ret = dma_submit_error(dmaengine_submit(tx_in));
1000 	if (ret < 0) {
1001 		dev_err(cryp->dev, "DMA in submit failed\n");
1002 		return ret;
1003 	}
1004 	dma_async_issue_pending(cryp->dma_lch_in);
1005 
1006 	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
1007 	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DIEN);
1008 
1009 	return 0;
1010 }
1011 
1012 static int stm32_cryp_dma_start(struct stm32_cryp *cryp)
1013 {
1014 	int ret;
1015 	size_t align_size;
1016 	struct dma_async_tx_descriptor *tx_in, *tx_out;
1017 	u32 reg;
1018 
1019 	if (cryp->in_sg != cryp->out_sg) {
1020 		ret = dma_map_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
1021 		if (!ret) {
1022 			dev_err(cryp->dev, "dma_map_sg() error\n");
1023 			return -ENOMEM;
1024 		}
1025 	}
1026 
1027 	ret = dma_map_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
1028 	if (!ret) {
1029 		dev_err(cryp->dev, "dma_map_sg() error\n");
1030 		return -ENOMEM;
1031 	}
1032 
1033 	dma_sync_sg_for_device(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
1034 
1035 	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->in_sg, cryp->in_sg_len,
1036 					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1037 	if (!tx_in) {
1038 		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
1039 		return -EINVAL;
1040 	}
1041 
1042 	/* No callback necessary */
1043 	tx_in->callback_param = cryp;
1044 	tx_in->callback = NULL;
1045 
1046 	tx_out = dmaengine_prep_slave_sg(cryp->dma_lch_out, cryp->out_sg, cryp->out_sg_len,
1047 					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1048 	if (!tx_out) {
1049 		dev_err(cryp->dev, "OUT prep_slave_sg() failed\n");
1050 		return -EINVAL;
1051 	}
1052 
1053 	reinit_completion(&cryp->dma_completion);
1054 	tx_out->callback = stm32_cryp_dma_callback;
1055 	tx_out->callback_param = cryp;
1056 
1057 	/* Advance the scatterwalk past the DMA'ed part, to the data handled by the CPU */
1058 	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1059 	scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
1060 	cryp->payload_in -= align_size;
1061 
1062 	ret = dma_submit_error(dmaengine_submit(tx_in));
1063 	if (ret < 0) {
1064 		dev_err(cryp->dev, "DMA in submit failed\n");
1065 		return ret;
1066 	}
1067 	dma_async_issue_pending(cryp->dma_lch_in);
1068 
1069 	/* Advance the scatterwalk past the DMA'ed part, to the data handled by the CPU */
1070 	scatterwalk_copychunks(NULL, &cryp->out_walk, align_size, 2);
1071 	cryp->payload_out -= align_size;
1072 	ret = dma_submit_error(dmaengine_submit(tx_out));
1073 	if (ret < 0) {
1074 		dev_err(cryp->dev, "DMA out submit failed\n");
1075 		return ret;
1076 	}
1077 	dma_async_issue_pending(cryp->dma_lch_out);
1078 
1079 	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
1080 	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DOEN | DMACR_DIEN);
1081 
1082 	if (!wait_for_completion_timeout(&cryp->dma_completion, msecs_to_jiffies(1000))) {
1083 		dev_err(cryp->dev, "DMA out timed out\n");
1084 		dmaengine_terminate_sync(cryp->dma_lch_out);
1085 		return -ETIMEDOUT;
1086 	}
1087 
1088 	return 0;
1089 }
1090 
1091 static int stm32_cryp_it_start(struct stm32_cryp *cryp)
1092 {
1093 	/* Enable interrupt and let the IRQ handler do everything */
1094 	stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);
1095 
1096 	return 0;
1097 }
1098 
1099 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
1100 
1101 static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
1102 {
1103 	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
1104 
1105 	return 0;
1106 }
1107 
1108 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
1109 
1110 static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
1111 {
1112 	crypto_aead_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
1113 
1114 	return 0;
1115 }
1116 
1117 static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
1118 {
1119 	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1120 			crypto_skcipher_reqtfm(req));
1121 	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
1122 	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
1123 
1124 	if (!cryp)
1125 		return -ENODEV;
1126 
1127 	rctx->mode = mode;
1128 
1129 	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
1130 }
1131 
1132 static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
1133 {
1134 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1135 	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
1136 	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
1137 
1138 	if (!cryp)
1139 		return -ENODEV;
1140 
1141 	rctx->mode = mode;
1142 
1143 	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
1144 }
1145 
1146 static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
1147 			     unsigned int keylen)
1148 {
1149 	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
1150 
1151 	memcpy(ctx->key, key, keylen);
1152 	ctx->keylen = keylen;
1153 
1154 	return 0;
1155 }
1156 
1157 static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1158 				 unsigned int keylen)
1159 {
1160 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1161 	    keylen != AES_KEYSIZE_256)
1162 		return -EINVAL;
1163 	else
1164 		return stm32_cryp_setkey(tfm, key, keylen);
1165 }
1166 
1167 static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
1168 				 unsigned int keylen)
1169 {
1170 	return verify_skcipher_des_key(tfm, key) ?:
1171 	       stm32_cryp_setkey(tfm, key, keylen);
1172 }
1173 
1174 static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1175 				  unsigned int keylen)
1176 {
1177 	return verify_skcipher_des3_key(tfm, key) ?:
1178 	       stm32_cryp_setkey(tfm, key, keylen);
1179 }
1180 
1181 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
1182 				      unsigned int keylen)
1183 {
1184 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
1185 
1186 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1187 	    keylen != AES_KEYSIZE_256)
1188 		return -EINVAL;
1189 
1190 	memcpy(ctx->key, key, keylen);
1191 	ctx->keylen = keylen;
1192 
1193 	return 0;
1194 }
1195 
1196 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
1197 					  unsigned int authsize)
1198 {
1199 	switch (authsize) {
1200 	case 4:
1201 	case 8:
1202 	case 12:
1203 	case 13:
1204 	case 14:
1205 	case 15:
1206 	case 16:
1207 		break;
1208 	default:
1209 		return -EINVAL;
1210 	}
1211 
1212 	return 0;
1213 }
1214 
1215 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
1216 					  unsigned int authsize)
1217 {
1218 	switch (authsize) {
1219 	case 4:
1220 	case 6:
1221 	case 8:
1222 	case 10:
1223 	case 12:
1224 	case 14:
1225 	case 16:
1226 		break;
1227 	default:
1228 		return -EINVAL;
1229 	}
1230 
1231 	return 0;
1232 }
1233 
1234 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
1235 {
1236 	if (req->cryptlen % AES_BLOCK_SIZE)
1237 		return -EINVAL;
1238 
1239 	if (req->cryptlen == 0)
1240 		return 0;
1241 
1242 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
1243 }
1244 
1245 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
1246 {
1247 	if (req->cryptlen % AES_BLOCK_SIZE)
1248 		return -EINVAL;
1249 
1250 	if (req->cryptlen == 0)
1251 		return 0;
1252 
1253 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
1254 }
1255 
1256 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
1257 {
1258 	if (req->cryptlen % AES_BLOCK_SIZE)
1259 		return -EINVAL;
1260 
1261 	if (req->cryptlen == 0)
1262 		return 0;
1263 
1264 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
1265 }
1266 
1267 static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
1268 {
1269 	if (req->cryptlen % AES_BLOCK_SIZE)
1270 		return -EINVAL;
1271 
1272 	if (req->cryptlen == 0)
1273 		return 0;
1274 
1275 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
1276 }
1277 
1278 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
1279 {
1280 	if (req->cryptlen == 0)
1281 		return 0;
1282 
1283 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
1284 }
1285 
1286 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
1287 {
1288 	if (req->cryptlen == 0)
1289 		return 0;
1290 
1291 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
1292 }
1293 
1294 static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
1295 {
1296 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
1297 }
1298 
1299 static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
1300 {
1301 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
1302 }
1303 
1304 static inline int crypto_ccm_check_iv(const u8 *iv)
1305 {
1306 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
1307 	if (iv[0] < 1 || iv[0] > 7)
1308 		return -EINVAL;
1309 
1310 	return 0;
1311 }
1312 
1313 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
1314 {
1315 	int err;
1316 
1317 	err = crypto_ccm_check_iv(req->iv);
1318 	if (err)
1319 		return err;
1320 
1321 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
1322 }
1323 
1324 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
1325 {
1326 	int err;
1327 
1328 	err = crypto_ccm_check_iv(req->iv);
1329 	if (err)
1330 		return err;
1331 
1332 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
1333 }
1334 
1335 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
1336 {
1337 	if (req->cryptlen % DES_BLOCK_SIZE)
1338 		return -EINVAL;
1339 
1340 	if (req->cryptlen == 0)
1341 		return 0;
1342 
1343 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
1344 }
1345 
1346 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
1347 {
1348 	if (req->cryptlen % DES_BLOCK_SIZE)
1349 		return -EINVAL;
1350 
1351 	if (req->cryptlen == 0)
1352 		return 0;
1353 
1354 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
1355 }
1356 
1357 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
1358 {
1359 	if (req->cryptlen % DES_BLOCK_SIZE)
1360 		return -EINVAL;
1361 
1362 	if (req->cryptlen == 0)
1363 		return 0;
1364 
1365 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
1366 }
1367 
1368 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
1369 {
1370 	if (req->cryptlen % DES_BLOCK_SIZE)
1371 		return -EINVAL;
1372 
1373 	if (req->cryptlen == 0)
1374 		return 0;
1375 
1376 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
1377 }
1378 
1379 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
1380 {
1381 	if (req->cryptlen % DES_BLOCK_SIZE)
1382 		return -EINVAL;
1383 
1384 	if (req->cryptlen == 0)
1385 		return 0;
1386 
1387 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
1388 }
1389 
1390 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
1391 {
1392 	if (req->cryptlen % DES_BLOCK_SIZE)
1393 		return -EINVAL;
1394 
1395 	if (req->cryptlen == 0)
1396 		return 0;
1397 
1398 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
1399 }
1400 
1401 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
1402 {
1403 	if (req->cryptlen % DES_BLOCK_SIZE)
1404 		return -EINVAL;
1405 
1406 	if (req->cryptlen == 0)
1407 		return 0;
1408 
1409 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
1410 }
1411 
1412 static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
1413 {
1414 	if (req->cryptlen % DES_BLOCK_SIZE)
1415 		return -EINVAL;
1416 
1417 	if (req->cryptlen == 0)
1418 		return 0;
1419 
1420 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
1421 }
1422 
1423 static enum stm32_dma_mode stm32_cryp_dma_check_sg(struct scatterlist *test_sg, size_t len,
1424 						   size_t block_size)
1425 {
1426 	struct scatterlist *sg;
1427 	int i;
1428 
1429 	if (len <= 16)
1430 		return NO_DMA; /* Faster */
1431 
1432 	for_each_sg(test_sg, sg, sg_nents(test_sg), i) {
1433 		if (!IS_ALIGNED(sg->length, block_size) && !sg_is_last(sg))
1434 			return NO_DMA;
1435 
1436 		if (sg->offset % sizeof(u32))
1437 			return NO_DMA;
1438 
1439 		if (sg_is_last(sg) && !IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
1440 			return DMA_NEED_SG_TRUNC;
1441 	}
1442 
1443 	return DMA_PLAIN_SG;
1444 }
1445 
1446 static enum stm32_dma_mode stm32_cryp_dma_check(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1447 						struct scatterlist *out_sg)
1448 {
1449 	enum stm32_dma_mode ret = DMA_PLAIN_SG;
1450 
1451 	if (!is_aes(cryp))
1452 		return NO_DMA;
1453 
1454 	if (!cryp->dma_lch_in || !cryp->dma_lch_out)
1455 		return NO_DMA;
1456 
1457 	ret = stm32_cryp_dma_check_sg(in_sg, cryp->payload_in, AES_BLOCK_SIZE);
1458 	if (ret == NO_DMA)
1459 		return ret;
1460 
1461 	ret = stm32_cryp_dma_check_sg(out_sg, cryp->payload_out, AES_BLOCK_SIZE);
1462 	if (ret == NO_DMA)
1463 		return ret;
1464 
1465 	/* Check CTR counter overflow */
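	/* (the peripheral does not propagate the carry beyond 32 bits, see stm32_cryp_check_ctr_counter()) */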
1466 	if (is_aes(cryp) && is_ctr(cryp)) {
1467 		u32 c;
1468 		__be32 iv3;
1469 
1470 		memcpy(&iv3, &cryp->req->iv[3 * sizeof(u32)], sizeof(iv3));
1471 		c = be32_to_cpu(iv3);
1472 		if ((c + cryp->payload_in) < cryp->payload_in)
1473 			return NO_DMA;
1474 	}
1475 
1476 	/* Workaround */
1477 	if (is_aes(cryp) && is_ctr(cryp) && ret == DMA_NEED_SG_TRUNC)
1478 		return NO_DMA;
1479 
1480 	return ret;
1481 }
1482 
1483 static int stm32_cryp_truncate_sg(struct scatterlist **new_sg, size_t *new_sg_len,
1484 				  struct scatterlist *sg, off_t skip, size_t size)
1485 {
1486 	struct scatterlist *cur;
1487 	int alloc_sg_len;
1488 
1489 	*new_sg_len = 0;
1490 
1491 	if (!sg || !size) {
1492 		*new_sg = NULL;
1493 		return 0;
1494 	}
1495 
1496 	alloc_sg_len = sg_nents_for_len(sg, skip + size);
1497 	if (alloc_sg_len < 0)
1498 		return alloc_sg_len;
1499 
1500 	/* We may allocate too many sg entries, but it is easier */
1501 	*new_sg = kmalloc_array((size_t)alloc_sg_len, sizeof(struct scatterlist), GFP_KERNEL);
1502 	if (!*new_sg)
1503 		return -ENOMEM;
1504 
1505 	sg_init_table(*new_sg, (unsigned int)alloc_sg_len);
1506 
1507 	cur = *new_sg;
1508 	while (sg && size) {
1509 		unsigned int len = sg->length;
1510 		unsigned int offset = sg->offset;
1511 
1512 		if (skip > len) {
1513 			skip -= len;
1514 			sg = sg_next(sg);
1515 			continue;
1516 		}
1517 
1518 		if (skip) {
1519 			len -= skip;
1520 			offset += skip;
1521 			skip = 0;
1522 		}
1523 
1524 		if (size < len)
1525 			len = size;
1526 
1527 		if (len > 0) {
1528 			(*new_sg_len)++;
1529 			size -= len;
1530 			sg_set_page(cur, sg_page(sg), len, offset);
1531 			if (size == 0)
1532 				sg_mark_end(cur);
1533 			cur = sg_next(cur);
1534 		}
1535 
1536 		sg = sg_next(sg);
1537 	}
1538 
1539 	return 0;
1540 }
1541 
1542 static int stm32_cryp_cipher_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1543 				     struct scatterlist *out_sg)
1544 {
1545 	size_t align_size;
1546 	int ret;
1547 
1548 	cryp->dma_mode = stm32_cryp_dma_check(cryp, in_sg, out_sg);
1549 
1550 	scatterwalk_start(&cryp->in_walk, in_sg);
1551 	scatterwalk_start(&cryp->out_walk, out_sg);
1552 
1553 	if (cryp->dma_mode == NO_DMA) {
1554 		cryp->flags &= ~FLG_IN_OUT_DMA;
1555 
1556 		if (is_ctr(cryp))
1557 			memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
1558 
1559 	} else if (cryp->dma_mode == DMA_NEED_SG_TRUNC) {
1560 
1561 		cryp->flags |= FLG_IN_OUT_DMA;
1562 
1563 		align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1564 		ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, 0, align_size);
1565 		if (ret)
1566 			return ret;
1567 
1568 		ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0,
1569 					     align_size);
1570 		if (ret) {
1571 			kfree(cryp->in_sg);
1572 			return ret;
1573 		}
1574 	} else {
1575 		cryp->flags |= FLG_IN_OUT_DMA;
1576 
1577 		cryp->in_sg = in_sg;
1578 		cryp->out_sg = out_sg;
1579 
1580 		ret = sg_nents_for_len(cryp->in_sg, cryp->payload_in);
1581 		if (ret < 0)
1582 			return ret;
1583 		cryp->in_sg_len = (size_t)ret;
1584 
1585 		ret = sg_nents_for_len(out_sg, cryp->payload_out);
1586 		if (ret < 0)
1587 			return ret;
1588 		cryp->out_sg_len = (size_t)ret;
1589 	}
1590 
1591 	return 0;
1592 }
1593 
1594 static int stm32_cryp_aead_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1595 				   struct scatterlist *out_sg)
1596 {
1597 	size_t align_size;
1598 	off_t skip;
1599 	int ret, ret2;
1600 
1601 	cryp->header_sg = NULL;
1602 	cryp->in_sg = NULL;
1603 	cryp->out_sg = NULL;
1604 
1605 	if (!cryp->dma_lch_in || !cryp->dma_lch_out) {
1606 		cryp->dma_mode = NO_DMA;
1607 		cryp->flags &= ~(FLG_IN_OUT_DMA | FLG_HEADER_DMA);
1608 
1609 		return 0;
1610 	}
1611 
1612 	/* CCM hw_init may already have consumed part of the header */
1613 	skip = cryp->areq->assoclen - cryp->header_in;
1614 
1615 	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
1616 	ret = stm32_cryp_truncate_sg(&cryp->header_sg, &cryp->header_sg_len, in_sg, skip,
1617 				     align_size);
1618 	if (ret)
1619 		return ret;
1620 
1621 	ret = stm32_cryp_dma_check_sg(cryp->header_sg, align_size, AES_BLOCK_SIZE);
1622 	if (ret == NO_DMA) {
1623 		/* We cannot DMA the header */
1624 		kfree(cryp->header_sg);
1625 		cryp->header_sg = NULL;
1626 
1627 		cryp->flags &= ~FLG_HEADER_DMA;
1628 	} else {
1629 		cryp->flags |= FLG_HEADER_DMA;
1630 	}
1631 
1632 	/* Now skip all header to be at payload start */
1633 	skip = cryp->areq->assoclen;
1634 	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1635 	ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, skip, align_size);
1636 	if (ret) {
1637 		kfree(cryp->header_sg);
1638 		return ret;
1639 	}
1640 
1641 	/* For the out buffer, align_size is the same as for the in buffer */
1642 	ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size);
1643 	if (ret) {
1644 		kfree(cryp->header_sg);
1645 		kfree(cryp->in_sg);
1646 		return ret;
1647 	}
1648 
1649 	ret = stm32_cryp_dma_check_sg(cryp->in_sg, align_size, AES_BLOCK_SIZE);
1650 	ret2 = stm32_cryp_dma_check_sg(cryp->out_sg, align_size, AES_BLOCK_SIZE);
1651 	if (ret == NO_DMA || ret2 == NO_DMA) {
1652 		kfree(cryp->in_sg);
1653 		cryp->in_sg = NULL;
1654 
1655 		kfree(cryp->out_sg);
1656 		cryp->out_sg = NULL;
1657 
1658 		cryp->flags &= ~FLG_IN_OUT_DMA;
1659 	} else {
1660 		cryp->flags |= FLG_IN_OUT_DMA;
1661 	}
1662 
1663 	return 0;
1664 }
1665 
1666 static int stm32_cryp_prepare_req(struct skcipher_request *req,
1667 				  struct aead_request *areq)
1668 {
1669 	struct stm32_cryp_ctx *ctx;
1670 	struct stm32_cryp *cryp;
1671 	struct stm32_cryp_reqctx *rctx;
1672 	struct scatterlist *in_sg, *out_sg;
1673 	int ret;
1674 
1675 	if (!req && !areq)
1676 		return -EINVAL;
1677 
1678 	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
1679 		    crypto_aead_ctx(crypto_aead_reqtfm(areq));
1680 
1681 	cryp = ctx->cryp;
1682 
1683 	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
1684 	rctx->mode &= FLG_MODE_MASK;
1685 
1686 	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
1687 	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
1688 	cryp->ctx = ctx;
1689 
1690 	if (req) {
1691 		cryp->req = req;
1692 		cryp->areq = NULL;
1693 		cryp->header_in = 0;
1694 		cryp->payload_in = req->cryptlen;
1695 		cryp->payload_out = req->cryptlen;
1696 		cryp->authsize = 0;
1697 
1698 		in_sg = req->src;
1699 		out_sg = req->dst;
1700 
1701 		ret = stm32_cryp_cipher_prepare(cryp, in_sg, out_sg);
1702 		if (ret)
1703 			return ret;
1704 
1705 		ret = stm32_cryp_hw_init(cryp);
1706 	} else {
1707 		/*
1708 		 * Length of input and output data:
1709 		 * Encryption case:
1710 		 *  INPUT  = AssocData   ||     PlainText
1711 		 *          <- assoclen ->  <- cryptlen ->
1712 		 *
1713 		 *  OUTPUT = AssocData    ||   CipherText   ||      AuthTag
1714 		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
1715 		 *
1716 		 * Decryption case:
1717 		 *  INPUT  =  AssocData     ||    CipherText  ||       AuthTag
1718 		 *          <- assoclen --->  <---------- cryptlen ---------->
1719 		 *
1720 		 *  OUTPUT = AssocData    ||               PlainText
1721 		 *          <- assoclen ->  <- cryptlen - authsize ->
1722 		 */
1723 		cryp->areq = areq;
1724 		cryp->req = NULL;
1725 		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
1726 		if (is_encrypt(cryp)) {
1727 			cryp->payload_in = areq->cryptlen;
1728 			cryp->header_in = areq->assoclen;
1729 			cryp->payload_out = areq->cryptlen;
1730 		} else {
1731 			cryp->payload_in = areq->cryptlen - cryp->authsize;
1732 			cryp->header_in = areq->assoclen;
1733 			cryp->payload_out = cryp->payload_in;
1734 		}
1735 
1736 		in_sg = areq->src;
1737 		out_sg = areq->dst;
1738 
1739 		scatterwalk_start(&cryp->in_walk, in_sg);
1740 		scatterwalk_start(&cryp->out_walk, out_sg);
1741 		/* In output, jump after assoc data */
1742 		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
1743 
1744 		ret = stm32_cryp_hw_init(cryp);
1745 		if (ret)
1746 			return ret;
1747 
1748 		ret = stm32_cryp_aead_prepare(cryp, in_sg, out_sg);
1749 	}
1750 
1751 	return ret;
1752 }
1753 
1754 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
1755 {
1756 	struct skcipher_request *req = container_of(areq,
1757 						      struct skcipher_request,
1758 						      base);
1759 	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1760 			crypto_skcipher_reqtfm(req));
1761 	struct stm32_cryp *cryp = ctx->cryp;
1762 	int ret;
1763 
1764 	if (!cryp)
1765 		return -ENODEV;
1766 
1767 	ret = stm32_cryp_prepare_req(req, NULL);
1768 	if (ret)
1769 		return ret;
1770 
1771 	if (cryp->flags & FLG_IN_OUT_DMA)
1772 		ret = stm32_cryp_dma_start(cryp);
1773 	else
1774 		ret = stm32_cryp_it_start(cryp);
1775 
1776 	if (ret == -ETIMEDOUT)
1777 		stm32_cryp_finish_req(cryp, ret);
1778 
1779 	return ret;
1780 }
1781 
1782 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
1783 {
1784 	struct aead_request *req = container_of(areq, struct aead_request,
1785 						base);
1786 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1787 	struct stm32_cryp *cryp = ctx->cryp;
1788 	int err;
1789 
1790 	if (!cryp)
1791 		return -ENODEV;
1792 
1793 	err = stm32_cryp_prepare_req(NULL, req);
1794 	if (err)
1795 		return err;
1796 
1797 	if (!stm32_cryp_get_input_text_len(cryp) && !cryp->header_in &&
1798 	    !(cryp->flags & FLG_HEADER_DMA)) {
1799 		/* No input data to process: get tag and finish */
1800 		stm32_cryp_finish_req(cryp, 0);
1801 		return 0;
1802 	}
1803 
1804 	if (cryp->flags & FLG_HEADER_DMA)
1805 		return stm32_cryp_header_dma_start(cryp);
1806 
1807 	if (!cryp->header_in && cryp->flags & FLG_IN_OUT_DMA)
1808 		return stm32_cryp_dma_start(cryp);
1809 
1810 	return stm32_cryp_it_start(cryp);
1811 }
1812 
1813 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
1814 {
1815 	u32 cfg, size_bit;
1816 	unsigned int i;
1817 	int ret = 0;
1818 
1819 	/* Update Config */
1820 	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1821 
1822 	cfg &= ~CR_PH_MASK;
1823 	cfg |= CR_PH_FINAL;
1824 	cfg &= ~CR_DEC_NOT_ENC;
1825 	cfg |= CR_CRYPEN;
1826 
1827 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1828 
1829 	if (is_gcm(cryp)) {
1830 		/* GCM: write aad and payload size (in bits) */
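		/* Each length is a 64-bit value: the upper 32 bits (written first) are always zero here */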
1831 		size_bit = cryp->areq->assoclen * 8;
1832 		if (cryp->caps->swap_final)
1833 			size_bit = (__force u32)cpu_to_be32(size_bit);
1834 
1835 		stm32_cryp_write(cryp, cryp->caps->din, 0);
1836 		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1837 
1838 		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
1839 				cryp->areq->cryptlen - cryp->authsize;
1840 		size_bit *= 8;
1841 		if (cryp->caps->swap_final)
1842 			size_bit = (__force u32)cpu_to_be32(size_bit);
1843 
1844 		stm32_cryp_write(cryp, cryp->caps->din, 0);
1845 		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1846 	} else {
1847 		/* CCM: write CTR0 */
1848 		u32 iv32[AES_BLOCK_32];
1849 		u8 *iv = (u8 *)iv32;
1850 		__be32 *biv = (__be32 *)iv32;
1851 
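		/* Rebuild CTR0 (A0): keep the flags and nonce, zero the counter field (last iv[0] + 1 bytes) */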
1852 		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
1853 		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
1854 
1855 		for (i = 0; i < AES_BLOCK_32; i++) {
1856 			u32 xiv = iv32[i];
1857 
1858 			if (!cryp->caps->padding_wa)
1859 				xiv = be32_to_cpu(biv[i]);
1860 			stm32_cryp_write(cryp, cryp->caps->din, xiv);
1861 		}
1862 	}
1863 
1864 	/* Wait for output data */
1865 	ret = stm32_cryp_wait_output(cryp);
1866 	if (ret) {
1867 		dev_err(cryp->dev, "Timeout (read tag)\n");
1868 		return ret;
1869 	}
1870 
1871 	if (is_encrypt(cryp)) {
1872 		u32 out_tag[AES_BLOCK_32];
1873 
1874 		/* Get and write tag */
1875 		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1876 		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
1877 	} else {
1878 		/* Get and check tag */
1879 		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
1880 
1881 		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
1882 		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1883 
1884 		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
1885 			ret = -EBADMSG;
1886 	}
1887 
1888 	/* Disable cryp */
1889 	cfg &= ~CR_CRYPEN;
1890 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1891 
1892 	return ret;
1893 }
1894 
1895 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
1896 {
1897 	u32 cr;
1898 
1899 	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
1900 		/*
1901 		 * In this case, we need to manually increment the CTR counter,
1902 		 * as the HW doesn't handle the 32-bit carry.
1903 		 */
1904 		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
1905 
1906 		cr = stm32_cryp_read(cryp, cryp->caps->cr);
1907 		stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);
1908 
1909 		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
1910 
1911 		stm32_cryp_write(cryp, cryp->caps->cr, cr);
1912 	}
1913 
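	/* Save the counter so the next call can detect the 32-bit rollover */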
1914 	/* The IV registers are BE  */
1915 	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
1916 	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
1917 	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
1918 	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
1919 }
1920 
1921 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
1922 {
1923 	u32 block[AES_BLOCK_32];
1924 
1925 	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1926 	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1927 							     cryp->payload_out), 1);
1928 	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1929 				   cryp->payload_out);
1930 }
1931 
1932 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
1933 {
1934 	u32 block[AES_BLOCK_32] = {0};
1935 
1936 	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
1937 							    cryp->payload_in), 0);
1938 	writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
1939 	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
1940 }
1941 
1942 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
1943 {
1944 	int err;
1945 	u32 cfg, block[AES_BLOCK_32] = {0};
1946 	unsigned int i;
1947 
1948 	/* 'Special workaround' procedure described in the datasheet */
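	/*
	 * On hardware without NPBLB support, the last incomplete GCM block is
	 * encrypted in AES-CTR mode and then fed back in the final GCM phase,
	 * following the datasheet steps a) to h) below.
	 */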
1949 
1950 	/* a) disable ip */
1951 	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
1952 	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1953 	cfg &= ~CR_CRYPEN;
1954 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1955 
1956 	/* b) Update IV1R */
1957 	stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);
1958 
1959 	/* c) change mode to CTR */
1960 	cfg &= ~CR_ALGO_MASK;
1961 	cfg |= CR_AES_CTR;
1962 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1963 
1964 	/* a) enable IP */
1965 	cfg |= CR_CRYPEN;
1966 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1967 
1968 	/* b) pad and write the last block */
1969 	stm32_cryp_irq_write_block(cryp);
1970 	/* wait end of process */
1971 	err = stm32_cryp_wait_output(cryp);
1972 	if (err) {
1973 		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
1974 		return stm32_cryp_finish_req(cryp, err);
1975 	}
1976 
1977 	/* c) get and store encrypted data */
1978 	/*
1979 	 * Same code as stm32_cryp_irq_read_data(), but we also need to keep
1980 	 * the block value
1981 	 */
1982 	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1983 
1984 	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1985 							     cryp->payload_out), 1);
1986 	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1987 				   cryp->payload_out);
1988 
1989 	/* d) change mode back to AES GCM */
1990 	cfg &= ~CR_ALGO_MASK;
1991 	cfg |= CR_AES_GCM;
1992 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1993 
1994 	/* e) change phase to Final */
1995 	cfg &= ~CR_PH_MASK;
1996 	cfg |= CR_PH_FINAL;
1997 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1998 
1999 	/* f) write padded data */
2000 	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
2001 
2002 	/* g) Empty fifo out */
2003 	err = stm32_cryp_wait_output(cryp);
2004 	if (err) {
2005 		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
2006 		return stm32_cryp_finish_req(cryp, err);
2007 	}
2008 
2009 	for (i = 0; i < AES_BLOCK_32; i++)
2010 		stm32_cryp_read(cryp, cryp->caps->dout);
2011 
2012 	/* h) run the normal Final phase */
2013 	stm32_cryp_finish_req(cryp, 0);
2014 }
2015 
2016 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
2017 {
2018 	u32 cfg;
2019 
2020 	/* disable ip, set NPBLB and re-enable ip */
2021 	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
2022 	cfg &= ~CR_CRYPEN;
2023 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2024 
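	/* NPBLB = number of padding bytes in the last (incomplete) block */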
2025 	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
2026 	cfg |= CR_CRYPEN;
2027 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2028 }
2029 
2030 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
2031 {
2032 	int err = 0;
2033 	u32 cfg, iv1tmp;
2034 	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
2035 	u32 block[AES_BLOCK_32] = {0};
2036 	unsigned int i;
2037 
2038 	/* 'Special workaround' procedure described in the datasheet */
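	/*
	 * On hardware without NPBLB support, the last incomplete CCM block is
	 * processed in AES-CTR mode and then re-injected in header phase so
	 * the CBC-MAC covers it, following the datasheet steps a) to i) below.
	 */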
2039 
2040 	/* a) disable ip */
2041 	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
2042 
2043 	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
2044 	cfg &= ~CR_CRYPEN;
2045 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2046 
2047 	/* b) get IV1 from CRYP_CSGCMCCM7 */
2048 	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
2049 
2050 	/* c) Load CRYP_CSGCMCCMxR */
2051 	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
2052 		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
2053 
2054 	/* d) Write IV1R */
2055 	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);
2056 
2057 	/* e) change mode to CTR */
2058 	cfg &= ~CR_ALGO_MASK;
2059 	cfg |= CR_AES_CTR;
2060 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2061 
2062 	/* a) enable IP */
2063 	cfg |= CR_CRYPEN;
2064 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2065 
2066 	/* b) pad and write the last block */
2067 	stm32_cryp_irq_write_block(cryp);
2068 	/* wait end of process */
2069 	err = stm32_cryp_wait_output(cryp);
2070 	if (err) {
2071 		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
2072 		return stm32_cryp_finish_req(cryp, err);
2073 	}
2074 
2075 	/* c) get and store decrypted data */
2076 	/*
2077 	 * Same code as stm32_cryp_irq_read_data(), but we also need to keep
2078 	 * the block value
2079 	 */
2080 	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
2081 
2082 	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
2083 							     cryp->payload_out), 1);
2084 	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
2085 
2086 	/* d) Load again CRYP_CSGCMCCMxR */
2087 	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
2088 		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
2089 
2090 	/* e) change mode back to AES CCM */
2091 	cfg &= ~CR_ALGO_MASK;
2092 	cfg |= CR_AES_CCM;
2093 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2094 
2095 	/* f) change phase to header */
2096 	cfg &= ~CR_PH_MASK;
2097 	cfg |= CR_PH_HEADER;
2098 	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2099 
2100 	/* g) XOR and write padded data */
2101 	for (i = 0; i < ARRAY_SIZE(block); i++) {
2102 		block[i] ^= cstmp1[i];
2103 		block[i] ^= cstmp2[i];
2104 		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
2105 	}
2106 
2107 	/* h) wait for completion */
2108 	err = stm32_cryp_wait_busy(cryp);
2109 	if (err)
2110 		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
2111 
2112 	/* i) run the normal Final phase */
2113 	stm32_cryp_finish_req(cryp, err);
2114 }
2115 
2116 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
2117 {
2118 	if (unlikely(!cryp->payload_in)) {
2119 		dev_warn(cryp->dev, "No more data to process\n");
2120 		return;
2121 	}
2122 
2123 	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
2124 		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
2125 		     is_encrypt(cryp))) {
2126 		/* Padding for AES GCM encryption */
2127 		if (cryp->caps->padding_wa) {
2128 			/* Special case 1 */
2129 			stm32_cryp_irq_write_gcm_padded_data(cryp);
2130 			return;
2131 		}
2132 
2133 		/* Setting padding bytes (NPBLB) */
2134 		stm32_cryp_irq_set_npblb(cryp);
2135 	}
2136 
2137 	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
2138 		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
2139 		     is_decrypt(cryp))) {
2140 		/* Padding for AES CCM decryption */
2141 		if (cryp->caps->padding_wa) {
2142 			/* Special case 2 */
2143 			stm32_cryp_irq_write_ccm_padded_data(cryp);
2144 			return;
2145 		}
2146 
2147 		/* Setting padding bytes (NPBLB) */
2148 		stm32_cryp_irq_set_npblb(cryp);
2149 	}
2150 
2151 	if (is_aes(cryp) && is_ctr(cryp))
2152 		stm32_cryp_check_ctr_counter(cryp);
2153 
2154 	stm32_cryp_irq_write_block(cryp);
2155 }
2156 
2157 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
2158 {
2159 	u32 block[AES_BLOCK_32] = {0};
2160 	size_t written;
2161 
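	/* A full AES block is always written; it is zero-padded when fewer header bytes remain */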
2162 	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
2163 
2164 	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
2165 
2166 	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
2167 
2168 	cryp->header_in -= written;
2169 
2170 	stm32_crypt_gcmccm_end_header(cryp);
2171 }
2172 
2173 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
2174 {
2175 	struct stm32_cryp *cryp = arg;
2176 	u32 ph;
2177 	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);
2178 
2179 	if (cryp->irq_status & MISR_OUT)
2180 		/* Output FIFO IRQ: read data */
2181 		stm32_cryp_irq_read_data(cryp);
2182 
2183 	if (cryp->irq_status & MISR_IN) {
2184 		if (is_gcm(cryp) || is_ccm(cryp)) {
2185 			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
2186 			if (unlikely(ph == CR_PH_HEADER))
2187 				/* Write Header */
2188 				stm32_cryp_irq_write_gcmccm_header(cryp);
2189 			else
2190 				/* Input FIFO IRQ: write data */
2191 				stm32_cryp_irq_write_data(cryp);
2192 			if (is_gcm(cryp))
2193 				cryp->gcm_ctr++;
2194 		} else {
2195 			/* Input FIFO IRQ: write data */
2196 			stm32_cryp_irq_write_data(cryp);
2197 		}
2198 	}
2199 
2200 	/* Mask useless interrupts */
2201 	if (!cryp->payload_in && !cryp->header_in)
2202 		it_mask &= ~IMSCR_IN;
2203 	if (!cryp->payload_out)
2204 		it_mask &= ~IMSCR_OUT;
2205 	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);
2206 
2207 	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) {
2208 		local_bh_disable();
2209 		stm32_cryp_finish_req(cryp, 0);
2210 		local_bh_enable();
2211 	}
2212 
2213 	return IRQ_HANDLED;
2214 }
2215 
2216 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
2217 {
2218 	struct stm32_cryp *cryp = arg;
2219 
2220 	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);
2221 
2222 	return IRQ_WAKE_THREAD;
2223 }
2224 
2225 static int stm32_cryp_dma_init(struct stm32_cryp *cryp)
2226 {
2227 	struct dma_slave_config dma_conf;
2228 	struct dma_chan *chan;
2229 	int ret;
2230 
2231 	memset(&dma_conf, 0, sizeof(dma_conf));
2232 
2233 	dma_conf.direction = DMA_MEM_TO_DEV;
2234 	dma_conf.dst_addr = cryp->phys_base + cryp->caps->din;
2235 	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2236 	dma_conf.dst_maxburst = CRYP_DMA_BURST_REG;
2237 	dma_conf.device_fc = false;
2238 
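	/* The "in" and "out" DMA channels are optional: the probe falls back to interrupt mode on -ENODEV */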
2239 	chan = dma_request_chan(cryp->dev, "in");
2240 	if (IS_ERR(chan))
2241 		return PTR_ERR(chan);
2242 
2243 	cryp->dma_lch_in = chan;
2244 	ret = dmaengine_slave_config(cryp->dma_lch_in, &dma_conf);
2245 	if (ret) {
2246 		dma_release_channel(cryp->dma_lch_in);
2247 		cryp->dma_lch_in = NULL;
2248 		dev_err(cryp->dev, "Couldn't configure DMA in slave.\n");
2249 		return ret;
2250 	}
2251 
2252 	memset(&dma_conf, 0, sizeof(dma_conf));
2253 
2254 	dma_conf.direction = DMA_DEV_TO_MEM;
2255 	dma_conf.src_addr = cryp->phys_base + cryp->caps->dout;
2256 	dma_conf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2257 	dma_conf.src_maxburst = CRYP_DMA_BURST_REG;
2258 	dma_conf.device_fc = false;
2259 
2260 	chan = dma_request_chan(cryp->dev, "out");
2261 	if (IS_ERR(chan)) {
2262 		dma_release_channel(cryp->dma_lch_in);
2263 		cryp->dma_lch_in = NULL;
2264 		return PTR_ERR(chan);
2265 	}
2266 
2267 	cryp->dma_lch_out = chan;
2268 
2269 	ret = dmaengine_slave_config(cryp->dma_lch_out, &dma_conf);
2270 	if (ret) {
2271 		dma_release_channel(cryp->dma_lch_out);
2272 		cryp->dma_lch_out = NULL;
2273 		dev_err(cryp->dev, "Couldn't configure DMA out slave.\n");
2274 		dma_release_channel(cryp->dma_lch_in);
2275 		cryp->dma_lch_in = NULL;
2276 		return ret;
2277 	}
2278 
2279 	init_completion(&cryp->dma_completion);
2280 
2281 	return 0;
2282 }
2283 
2284 static struct skcipher_engine_alg crypto_algs[] = {
2285 {
2286 	.base = {
2287 		.base.cra_name		= "ecb(aes)",
2288 		.base.cra_driver_name	= "stm32-ecb-aes",
2289 		.base.cra_priority	= 300,
2290 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2291 		.base.cra_blocksize	= AES_BLOCK_SIZE,
2292 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2293 		.base.cra_alignmask	= 0,
2294 		.base.cra_module	= THIS_MODULE,
2295 
2296 		.init			= stm32_cryp_init_tfm,
2297 		.min_keysize		= AES_MIN_KEY_SIZE,
2298 		.max_keysize		= AES_MAX_KEY_SIZE,
2299 		.setkey			= stm32_cryp_aes_setkey,
2300 		.encrypt		= stm32_cryp_aes_ecb_encrypt,
2301 		.decrypt		= stm32_cryp_aes_ecb_decrypt,
2302 	},
2303 	.op = {
2304 		.do_one_request = stm32_cryp_cipher_one_req,
2305 	},
2306 },
2307 {
2308 	.base = {
2309 		.base.cra_name		= "cbc(aes)",
2310 		.base.cra_driver_name	= "stm32-cbc-aes",
2311 		.base.cra_priority	= 300,
2312 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2313 		.base.cra_blocksize	= AES_BLOCK_SIZE,
2314 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2315 		.base.cra_alignmask	= 0,
2316 		.base.cra_module	= THIS_MODULE,
2317 
2318 		.init			= stm32_cryp_init_tfm,
2319 		.min_keysize		= AES_MIN_KEY_SIZE,
2320 		.max_keysize		= AES_MAX_KEY_SIZE,
2321 		.ivsize			= AES_BLOCK_SIZE,
2322 		.setkey			= stm32_cryp_aes_setkey,
2323 		.encrypt		= stm32_cryp_aes_cbc_encrypt,
2324 		.decrypt		= stm32_cryp_aes_cbc_decrypt,
2325 	},
2326 	.op = {
2327 		.do_one_request = stm32_cryp_cipher_one_req,
2328 	},
2329 },
2330 {
2331 	.base = {
2332 		.base.cra_name		= "ctr(aes)",
2333 		.base.cra_driver_name	= "stm32-ctr-aes",
2334 		.base.cra_priority	= 300,
2335 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2336 		.base.cra_blocksize	= 1,
2337 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2338 		.base.cra_alignmask	= 0,
2339 		.base.cra_module	= THIS_MODULE,
2340 
2341 		.init			= stm32_cryp_init_tfm,
2342 		.min_keysize		= AES_MIN_KEY_SIZE,
2343 		.max_keysize		= AES_MAX_KEY_SIZE,
2344 		.ivsize			= AES_BLOCK_SIZE,
2345 		.setkey			= stm32_cryp_aes_setkey,
2346 		.encrypt		= stm32_cryp_aes_ctr_encrypt,
2347 		.decrypt		= stm32_cryp_aes_ctr_decrypt,
2348 	},
2349 	.op = {
2350 		.do_one_request = stm32_cryp_cipher_one_req,
2351 	},
2352 },
2353 {
2354 	.base = {
2355 		.base.cra_name		= "ecb(des)",
2356 		.base.cra_driver_name	= "stm32-ecb-des",
2357 		.base.cra_priority	= 300,
2358 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2359 		.base.cra_blocksize	= DES_BLOCK_SIZE,
2360 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2361 		.base.cra_alignmask	= 0,
2362 		.base.cra_module	= THIS_MODULE,
2363 
2364 		.init			= stm32_cryp_init_tfm,
2365 		.min_keysize		= DES_BLOCK_SIZE,
2366 		.max_keysize		= DES_BLOCK_SIZE,
2367 		.setkey			= stm32_cryp_des_setkey,
2368 		.encrypt		= stm32_cryp_des_ecb_encrypt,
2369 		.decrypt		= stm32_cryp_des_ecb_decrypt,
2370 	},
2371 	.op = {
2372 		.do_one_request = stm32_cryp_cipher_one_req,
2373 	},
2374 },
2375 {
2376 	.base = {
2377 		.base.cra_name		= "cbc(des)",
2378 		.base.cra_driver_name	= "stm32-cbc-des",
2379 		.base.cra_priority	= 300,
2380 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2381 		.base.cra_blocksize	= DES_BLOCK_SIZE,
2382 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2383 		.base.cra_alignmask	= 0,
2384 		.base.cra_module	= THIS_MODULE,
2385 
2386 		.init			= stm32_cryp_init_tfm,
2387 		.min_keysize		= DES_BLOCK_SIZE,
2388 		.max_keysize		= DES_BLOCK_SIZE,
2389 		.ivsize			= DES_BLOCK_SIZE,
2390 		.setkey			= stm32_cryp_des_setkey,
2391 		.encrypt		= stm32_cryp_des_cbc_encrypt,
2392 		.decrypt		= stm32_cryp_des_cbc_decrypt,
2393 	},
2394 	.op = {
2395 		.do_one_request = stm32_cryp_cipher_one_req,
2396 	},
2397 },
2398 {
2399 	.base = {
2400 		.base.cra_name		= "ecb(des3_ede)",
2401 		.base.cra_driver_name	= "stm32-ecb-des3",
2402 		.base.cra_priority	= 300,
2403 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2404 		.base.cra_blocksize	= DES_BLOCK_SIZE,
2405 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2406 		.base.cra_alignmask	= 0,
2407 		.base.cra_module	= THIS_MODULE,
2408 
2409 		.init			= stm32_cryp_init_tfm,
2410 		.min_keysize		= 3 * DES_BLOCK_SIZE,
2411 		.max_keysize		= 3 * DES_BLOCK_SIZE,
2412 		.setkey			= stm32_cryp_tdes_setkey,
2413 		.encrypt		= stm32_cryp_tdes_ecb_encrypt,
2414 		.decrypt		= stm32_cryp_tdes_ecb_decrypt,
2415 	},
2416 	.op = {
2417 		.do_one_request = stm32_cryp_cipher_one_req,
2418 	},
2419 },
2420 {
2421 	.base = {
2422 		.base.cra_name		= "cbc(des3_ede)",
2423 		.base.cra_driver_name	= "stm32-cbc-des3",
2424 		.base.cra_priority	= 300,
2425 		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2426 		.base.cra_blocksize	= DES_BLOCK_SIZE,
2427 		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
2428 		.base.cra_alignmask	= 0,
2429 		.base.cra_module	= THIS_MODULE,
2430 
2431 		.init			= stm32_cryp_init_tfm,
2432 		.min_keysize		= 3 * DES_BLOCK_SIZE,
2433 		.max_keysize		= 3 * DES_BLOCK_SIZE,
2434 		.ivsize			= DES_BLOCK_SIZE,
2435 		.setkey			= stm32_cryp_tdes_setkey,
2436 		.encrypt		= stm32_cryp_tdes_cbc_encrypt,
2437 		.decrypt		= stm32_cryp_tdes_cbc_decrypt,
2438 	},
2439 	.op = {
2440 		.do_one_request = stm32_cryp_cipher_one_req,
2441 	},
2442 },
2443 };
2444 
2445 static struct aead_engine_alg aead_algs[] = {
2446 {
2447 	.base.setkey		= stm32_cryp_aes_aead_setkey,
2448 	.base.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
2449 	.base.encrypt		= stm32_cryp_aes_gcm_encrypt,
2450 	.base.decrypt		= stm32_cryp_aes_gcm_decrypt,
2451 	.base.init		= stm32_cryp_aes_aead_init,
2452 	.base.ivsize		= 12,
2453 	.base.maxauthsize	= AES_BLOCK_SIZE,
2454 
2455 	.base.base = {
2456 		.cra_name		= "gcm(aes)",
2457 		.cra_driver_name	= "stm32-gcm-aes",
2458 		.cra_priority		= 300,
2459 		.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2460 		.cra_blocksize		= 1,
2461 		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
2462 		.cra_alignmask		= 0,
2463 		.cra_module		= THIS_MODULE,
2464 	},
2465 	.op = {
2466 		.do_one_request = stm32_cryp_aead_one_req,
2467 	},
2468 },
2469 {
2470 	.base.setkey		= stm32_cryp_aes_aead_setkey,
2471 	.base.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
2472 	.base.encrypt		= stm32_cryp_aes_ccm_encrypt,
2473 	.base.decrypt		= stm32_cryp_aes_ccm_decrypt,
2474 	.base.init		= stm32_cryp_aes_aead_init,
2475 	.base.ivsize		= AES_BLOCK_SIZE,
2476 	.base.maxauthsize	= AES_BLOCK_SIZE,
2477 
2478 	.base.base = {
2479 		.cra_name		= "ccm(aes)",
2480 		.cra_driver_name	= "stm32-ccm-aes",
2481 		.cra_priority		= 300,
2482 		.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
2483 		.cra_blocksize		= 1,
2484 		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
2485 		.cra_alignmask		= 0,
2486 		.cra_module		= THIS_MODULE,
2487 	},
2488 	.op = {
2489 		.do_one_request = stm32_cryp_aead_one_req,
2490 	},
2491 },
2492 };
2493 
2494 static const struct stm32_cryp_caps ux500_data = {
2495 	.aeads_support = false,
2496 	.linear_aes_key = true,
2497 	.kp_mode = false,
2498 	.iv_protection = true,
2499 	.swap_final = true,
2500 	.padding_wa = true,
2501 	.cr = UX500_CRYP_CR,
2502 	.sr = UX500_CRYP_SR,
2503 	.din = UX500_CRYP_DIN,
2504 	.dout = UX500_CRYP_DOUT,
2505 	.dmacr = UX500_CRYP_DMACR,
2506 	.imsc = UX500_CRYP_IMSC,
2507 	.mis = UX500_CRYP_MIS,
2508 	.k1l = UX500_CRYP_K1L,
2509 	.k1r = UX500_CRYP_K1R,
2510 	.k3r = UX500_CRYP_K3R,
2511 	.iv0l = UX500_CRYP_IV0L,
2512 	.iv0r = UX500_CRYP_IV0R,
2513 	.iv1l = UX500_CRYP_IV1L,
2514 	.iv1r = UX500_CRYP_IV1R,
2515 };
2516 
2517 static const struct stm32_cryp_caps f7_data = {
2518 	.aeads_support = true,
2519 	.linear_aes_key = false,
2520 	.kp_mode = true,
2521 	.iv_protection = false,
2522 	.swap_final = true,
2523 	.padding_wa = true,
2524 	.cr = CRYP_CR,
2525 	.sr = CRYP_SR,
2526 	.din = CRYP_DIN,
2527 	.dout = CRYP_DOUT,
2528 	.dmacr = CRYP_DMACR,
2529 	.imsc = CRYP_IMSCR,
2530 	.mis = CRYP_MISR,
2531 	.k1l = CRYP_K1LR,
2532 	.k1r = CRYP_K1RR,
2533 	.k3r = CRYP_K3RR,
2534 	.iv0l = CRYP_IV0LR,
2535 	.iv0r = CRYP_IV0RR,
2536 	.iv1l = CRYP_IV1LR,
2537 	.iv1r = CRYP_IV1RR,
2538 };
2539 
2540 static const struct stm32_cryp_caps mp1_data = {
2541 	.aeads_support = true,
2542 	.linear_aes_key = false,
2543 	.kp_mode = true,
2544 	.iv_protection = false,
2545 	.swap_final = false,
2546 	.padding_wa = false,
2547 	.cr = CRYP_CR,
2548 	.sr = CRYP_SR,
2549 	.din = CRYP_DIN,
2550 	.dout = CRYP_DOUT,
2551 	.dmacr = CRYP_DMACR,
2552 	.imsc = CRYP_IMSCR,
2553 	.mis = CRYP_MISR,
2554 	.k1l = CRYP_K1LR,
2555 	.k1r = CRYP_K1RR,
2556 	.k3r = CRYP_K3RR,
2557 	.iv0l = CRYP_IV0LR,
2558 	.iv0r = CRYP_IV0RR,
2559 	.iv1l = CRYP_IV1LR,
2560 	.iv1r = CRYP_IV1RR,
2561 };
2562 
2563 static const struct of_device_id stm32_dt_ids[] = {
2564 	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
2565 	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
2566 	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
2567 	{},
2568 };
2569 MODULE_DEVICE_TABLE(of, stm32_dt_ids);
2570 
2571 static int stm32_cryp_probe(struct platform_device *pdev)
2572 {
2573 	struct device *dev = &pdev->dev;
2574 	struct stm32_cryp *cryp;
2575 	struct reset_control *rst;
2576 	int irq, ret;
2577 
2578 	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
2579 	if (!cryp)
2580 		return -ENOMEM;
2581 
2582 	cryp->caps = of_device_get_match_data(dev);
2583 	if (!cryp->caps)
2584 		return -ENODEV;
2585 
2586 	cryp->dev = dev;
2587 
2588 	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
2589 	if (IS_ERR(cryp->regs))
2590 		return PTR_ERR(cryp->regs);
2591 
2592 	cryp->phys_base = platform_get_resource(pdev, IORESOURCE_MEM, 0)->start;
2593 
2594 	irq = platform_get_irq(pdev, 0);
2595 	if (irq < 0)
2596 		return irq;
2597 
2598 	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
2599 					stm32_cryp_irq_thread, IRQF_ONESHOT,
2600 					dev_name(dev), cryp);
2601 	if (ret) {
2602 		dev_err(dev, "Cannot grab IRQ\n");
2603 		return ret;
2604 	}
2605 
2606 	cryp->clk = devm_clk_get(dev, NULL);
2607 	if (IS_ERR(cryp->clk)) {
2608 		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");
2609 
2610 		return PTR_ERR(cryp->clk);
2611 	}
2612 
2613 	ret = clk_prepare_enable(cryp->clk);
2614 	if (ret) {
2615 		dev_err(cryp->dev, "Failed to enable clock\n");
2616 		return ret;
2617 	}
2618 
2619 	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
2620 	pm_runtime_use_autosuspend(dev);
2621 
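	/* Keep the device active during probe; the reference is dropped by pm_runtime_put_sync() below */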
2622 	pm_runtime_get_noresume(dev);
2623 	pm_runtime_set_active(dev);
2624 	pm_runtime_enable(dev);
2625 
2626 	rst = devm_reset_control_get(dev, NULL);
2627 	if (IS_ERR(rst)) {
2628 		ret = PTR_ERR(rst);
2629 		if (ret == -EPROBE_DEFER)
2630 			goto err_rst;
2631 	} else {
2632 		reset_control_assert(rst);
2633 		udelay(2);
2634 		reset_control_deassert(rst);
2635 	}
2636 
2637 	platform_set_drvdata(pdev, cryp);
2638 
2639 	ret = stm32_cryp_dma_init(cryp);
2640 	switch (ret) {
2641 	case 0:
2642 		break;
2643 	case -ENODEV:
2644 		dev_dbg(dev, "DMA mode not available\n");
2645 		break;
2646 	default:
2647 		goto err_dma;
2648 	}
2649 
2650 	spin_lock(&cryp_list.lock);
2651 	list_add(&cryp->list, &cryp_list.dev_list);
2652 	spin_unlock(&cryp_list.lock);
2653 
2654 	/* Initialize crypto engine */
2655 	cryp->engine = crypto_engine_alloc_init(dev, 1);
2656 	if (!cryp->engine) {
2657 		dev_err(dev, "Could not init crypto engine\n");
2658 		ret = -ENOMEM;
2659 		goto err_engine1;
2660 	}
2661 
2662 	ret = crypto_engine_start(cryp->engine);
2663 	if (ret) {
2664 		dev_err(dev, "Could not start crypto engine\n");
2665 		goto err_engine2;
2666 	}
2667 
2668 	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2669 	if (ret) {
2670 		dev_err(dev, "Could not register algs\n");
2671 		goto err_algs;
2672 	}
2673 
2674 	if (cryp->caps->aeads_support) {
2675 		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2676 		if (ret)
2677 			goto err_aead_algs;
2678 	}
2679 
2680 	dev_info(dev, "Initialized\n");
2681 
2682 	pm_runtime_put_sync(dev);
2683 
2684 	return 0;
2685 
2686 err_aead_algs:
2687 	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2688 err_algs:
2689 err_engine2:
2690 	crypto_engine_exit(cryp->engine);
2691 err_engine1:
2692 	spin_lock(&cryp_list.lock);
2693 	list_del(&cryp->list);
2694 	spin_unlock(&cryp_list.lock);
2695 
2696 	if (cryp->dma_lch_in)
2697 		dma_release_channel(cryp->dma_lch_in);
2698 	if (cryp->dma_lch_out)
2699 		dma_release_channel(cryp->dma_lch_out);
2700 err_dma:
2701 err_rst:
2702 	pm_runtime_disable(dev);
2703 	pm_runtime_put_noidle(dev);
2704 
2705 	clk_disable_unprepare(cryp->clk);
2706 
2707 	return ret;
2708 }
2709 
2710 static void stm32_cryp_remove(struct platform_device *pdev)
2711 {
2712 	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
2713 	int ret;
2714 
2715 	ret = pm_runtime_get_sync(cryp->dev);
2716 
2717 	if (cryp->caps->aeads_support)
2718 		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2719 	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
2720 
2721 	crypto_engine_exit(cryp->engine);
2722 
2723 	spin_lock(&cryp_list.lock);
2724 	list_del(&cryp->list);
2725 	spin_unlock(&cryp_list.lock);
2726 
2727 	if (cryp->dma_lch_in)
2728 		dma_release_channel(cryp->dma_lch_in);
2729 
2730 	if (cryp->dma_lch_out)
2731 		dma_release_channel(cryp->dma_lch_out);
2732 
2733 	pm_runtime_disable(cryp->dev);
2734 	pm_runtime_put_noidle(cryp->dev);
2735 
2736 	if (ret >= 0)
2737 		clk_disable_unprepare(cryp->clk);
2738 }
2739 
2740 #ifdef CONFIG_PM
2741 static int stm32_cryp_runtime_suspend(struct device *dev)
2742 {
2743 	struct stm32_cryp *cryp = dev_get_drvdata(dev);
2744 
2745 	clk_disable_unprepare(cryp->clk);
2746 
2747 	return 0;
2748 }
2749 
2750 static int stm32_cryp_runtime_resume(struct device *dev)
2751 {
2752 	struct stm32_cryp *cryp = dev_get_drvdata(dev);
2753 	int ret;
2754 
2755 	ret = clk_prepare_enable(cryp->clk);
2756 	if (ret) {
2757 		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
2758 		return ret;
2759 	}
2760 
2761 	return 0;
2762 }
2763 #endif
2764 
2765 static const struct dev_pm_ops stm32_cryp_pm_ops = {
2766 	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2767 				pm_runtime_force_resume)
2768 	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
2769 			   stm32_cryp_runtime_resume, NULL)
2770 };
2771 
2772 static struct platform_driver stm32_cryp_driver = {
2773 	.probe  = stm32_cryp_probe,
2774 	.remove_new = stm32_cryp_remove,
2775 	.driver = {
2776 		.name           = DRIVER_NAME,
2777 		.pm		= &stm32_cryp_pm_ops,
2778 		.of_match_table = stm32_dt_ids,
2779 	},
2780 };
2781 
2782 module_platform_driver(stm32_cryp_driver);
2783 
2784 MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
2785 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2786 MODULE_LICENSE("GPL");
2787