xref: /linux/drivers/crypto/stm32/stm32-cryp.c (revision 6f7e6393d1ce636bb7ec77a7fe7b77458fddf701)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) STMicroelectronics SA 2017
4  * Author: Fabien Dessenne <fabien.dessenne@st.com>
5  * Ux500 support taken from snippets in the old Ux500 cryp driver
6  */
7 
8 #include <crypto/aes.h>
9 #include <crypto/engine.h>
10 #include <crypto/internal/aead.h>
11 #include <crypto/internal/des.h>
12 #include <crypto/internal/skcipher.h>
13 #include <crypto/scatterwalk.h>
14 #include <linux/bottom_half.h>
15 #include <linux/clk.h>
16 #include <linux/delay.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmaengine.h>
19 #include <linux/err.h>
20 #include <linux/iopoll.h>
21 #include <linux/interrupt.h>
22 #include <linux/kernel.h>
23 #include <linux/module.h>
24 #include <linux/minmax.h>
25 #include <linux/of.h>
26 #include <linux/platform_device.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/reset.h>
29 #include <linux/string.h>
30 
31 #define DRIVER_NAME             "stm32-cryp"
32 
33 /* Bit [0] encrypt / decrypt */
34 #define FLG_ENCRYPT             BIT(0)
35 /* Bit [8..1] algo & operation mode */
36 #define FLG_AES                 BIT(1)
37 #define FLG_DES                 BIT(2)
38 #define FLG_TDES                BIT(3)
39 #define FLG_ECB                 BIT(4)
40 #define FLG_CBC                 BIT(5)
41 #define FLG_CTR                 BIT(6)
42 #define FLG_GCM                 BIT(7)
43 #define FLG_CCM                 BIT(8)
44 /* Mode mask = bits [15..0] */
45 #define FLG_MODE_MASK           GENMASK(15, 0)
46 /* Bit [31..16] status  */
47 #define FLG_IN_OUT_DMA          BIT(16)
48 #define FLG_HEADER_DMA          BIT(17)
49 
50 /* Registers */
51 #define CRYP_CR                 0x00000000
52 #define CRYP_SR                 0x00000004
53 #define CRYP_DIN                0x00000008
54 #define CRYP_DOUT               0x0000000C
55 #define CRYP_DMACR              0x00000010
56 #define CRYP_IMSCR              0x00000014
57 #define CRYP_RISR               0x00000018
58 #define CRYP_MISR               0x0000001C
59 #define CRYP_K0LR               0x00000020
60 #define CRYP_K0RR               0x00000024
61 #define CRYP_K1LR               0x00000028
62 #define CRYP_K1RR               0x0000002C
63 #define CRYP_K2LR               0x00000030
64 #define CRYP_K2RR               0x00000034
65 #define CRYP_K3LR               0x00000038
66 #define CRYP_K3RR               0x0000003C
67 #define CRYP_IV0LR              0x00000040
68 #define CRYP_IV0RR              0x00000044
69 #define CRYP_IV1LR              0x00000048
70 #define CRYP_IV1RR              0x0000004C
71 #define CRYP_CSGCMCCM0R         0x00000050
72 #define CRYP_CSGCM0R            0x00000070
73 
74 #define UX500_CRYP_CR		0x00000000
75 #define UX500_CRYP_SR		0x00000004
76 #define UX500_CRYP_DIN		0x00000008
77 #define UX500_CRYP_DINSIZE	0x0000000C
78 #define UX500_CRYP_DOUT		0x00000010
79 #define UX500_CRYP_DOUSIZE	0x00000014
80 #define UX500_CRYP_DMACR	0x00000018
81 #define UX500_CRYP_IMSC		0x0000001C
82 #define UX500_CRYP_RIS		0x00000020
83 #define UX500_CRYP_MIS		0x00000024
84 #define UX500_CRYP_K1L		0x00000028
85 #define UX500_CRYP_K1R		0x0000002C
86 #define UX500_CRYP_K2L		0x00000030
87 #define UX500_CRYP_K2R		0x00000034
88 #define UX500_CRYP_K3L		0x00000038
89 #define UX500_CRYP_K3R		0x0000003C
90 #define UX500_CRYP_K4L		0x00000040
91 #define UX500_CRYP_K4R		0x00000044
92 #define UX500_CRYP_IV0L		0x00000048
93 #define UX500_CRYP_IV0R		0x0000004C
94 #define UX500_CRYP_IV1L		0x00000050
95 #define UX500_CRYP_IV1R		0x00000054
96 
97 /* Registers values */
98 #define CR_DEC_NOT_ENC          0x00000004
99 #define CR_TDES_ECB             0x00000000
100 #define CR_TDES_CBC             0x00000008
101 #define CR_DES_ECB              0x00000010
102 #define CR_DES_CBC              0x00000018
103 #define CR_AES_ECB              0x00000020
104 #define CR_AES_CBC              0x00000028
105 #define CR_AES_CTR              0x00000030
106 #define CR_AES_KP               0x00000038 /* Not on Ux500 */
107 #define CR_AES_XTS              0x00000038 /* Only on Ux500 */
108 #define CR_AES_GCM              0x00080000
109 #define CR_AES_CCM              0x00080008
110 #define CR_AES_UNKNOWN          0xFFFFFFFF
111 #define CR_ALGO_MASK            0x00080038
112 #define CR_DATA32               0x00000000
113 #define CR_DATA16               0x00000040
114 #define CR_DATA8                0x00000080
115 #define CR_DATA1                0x000000C0
116 #define CR_KEY128               0x00000000
117 #define CR_KEY192               0x00000100
118 #define CR_KEY256               0x00000200
119 #define CR_KEYRDEN              0x00000400 /* Only on Ux500 */
120 #define CR_KSE                  0x00000800 /* Only on Ux500 */
121 #define CR_FFLUSH               0x00004000
122 #define CR_CRYPEN               0x00008000
123 #define CR_PH_INIT              0x00000000
124 #define CR_PH_HEADER            0x00010000
125 #define CR_PH_PAYLOAD           0x00020000
126 #define CR_PH_FINAL             0x00030000
127 #define CR_PH_MASK              0x00030000
128 #define CR_NBPBL_SHIFT          20
129 
130 #define SR_IFNF                 BIT(1)
131 #define SR_OFNE                 BIT(2)
132 #define SR_BUSY                 BIT(8)
133 
134 #define DMACR_DIEN              BIT(0)
135 #define DMACR_DOEN              BIT(1)
136 
137 #define IMSCR_IN                BIT(0)
138 #define IMSCR_OUT               BIT(1)
139 
140 #define MISR_IN                 BIT(0)
141 #define MISR_OUT                BIT(1)
142 
143 /* Misc */
144 #define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
145 #define GCM_CTR_INIT            2
146 #define CRYP_AUTOSUSPEND_DELAY  50
147 
148 #define CRYP_DMA_BURST_REG      4
149 
/* How request data is moved to/from the peripheral FIFOs. */
enum stm32_dma_mode {
	NO_DMA,			/* PIO only: CPU feeds the FIFOs */
	DMA_PLAIN_SG,		/* scatterlists usable by the DMA engine as-is */
	DMA_NEED_SG_TRUNC	/* presumably sg lists need truncation/rebuild first — confirm at use site */
};
155 
/*
 * Per-variant hardware description: feature flags plus the register
 * offsets of this instance (STM32 and Ux500 use different maps).
 */
struct stm32_cryp_caps {
	bool			aeads_support;	/* variant can run GCM/CCM — assumption: gates AEAD algo registration, not visible here */
	bool			linear_aes_key;	/* Ux500: AES key written linearly from K1L, needs swizzling */
	bool			kp_mode;	/* has dedicated key-preparation mode (CR_AES_KP); else use CR_KSE */
	bool			iv_protection;	/* IV readout gated behind CR_KEYRDEN */
	bool			swap_final;	/* NOTE(review): not used in this chunk — confirm meaning at tag readout */
	bool			padding_wa;	/* write FIFO words in CPU order (skip be32 swap), see CCM B0 write */
	/* Register offsets for this variant */
	u32			cr;
	u32			sr;
	u32			din;
	u32			dout;
	u32			dmacr;
	u32			imsc;
	u32			mis;
	u32			k1l;
	u32			k1r;
	u32			k3r;
	u32			iv0l;
	u32			iv0r;
	u32			iv1l;
	u32			iv1r;
};
178 
/* Per-tfm context. */
struct stm32_cryp_ctx {
	struct stm32_cryp       *cryp;	/* device bound to this tfm (see stm32_cryp_find_dev()) */
	int                     keylen;	/* key length in bytes */
	__be32                  key[AES_KEYSIZE_256 / sizeof(u32)];	/* key as big-endian 32-bit words */
	unsigned long           flags;	/* FLG_* algorithm/mode bits */
};
185 
/* Per-request context: carries the FLG_* mode until the engine runs the request. */
struct stm32_cryp_reqctx {
	unsigned long mode;
};
189 
/* Per-device driver state, one instance per CRYP peripheral. */
struct stm32_cryp {
	struct list_head        list;		/* node in cryp_list.dev_list */
	struct device           *dev;
	void __iomem            *regs;		/* mapped register bank */
	phys_addr_t             phys_base;	/* physical address of the register bank */
	struct clk              *clk;
	unsigned long           flags;		/* FLG_* of the request in flight */
	u32                     irq_status;
	const struct stm32_cryp_caps *caps;	/* variant features + register map */
	struct stm32_cryp_ctx   *ctx;		/* tfm context of the current request */

	struct crypto_engine    *engine;

	struct skcipher_request *req;		/* current skcipher request */
	struct aead_request     *areq;		/* current AEAD request (GCM/CCM) */

	size_t                  authsize;	/* AEAD tag length, bytes */
	size_t                  hw_blocksize;	/* hardware block size for the current algo */

	/* Bytes still to be transferred for the current request */
	size_t                  payload_in;
	size_t                  header_in;
	size_t                  payload_out;

	/* DMA process fields */
	struct scatterlist      *in_sg;
	struct scatterlist      *header_sg;
	struct scatterlist      *out_sg;
	size_t                  in_sg_len;
	size_t                  header_sg_len;
	size_t                  out_sg_len;
	struct completion	dma_completion;	/* signalled by the OUT DMA callback */

	struct dma_chan         *dma_lch_in;
	struct dma_chan         *dma_lch_out;
	enum stm32_dma_mode     dma_mode;

	/* IT process fields */
	struct scatter_walk     in_walk;
	struct scatter_walk     out_walk;

	__be32                  last_ctr[4];	/* last CTR counter value written to hw */
	u32                     gcm_ctr;	/* running GCM counter, starts at GCM_CTR_INIT */
};
233 
/* Registry of all probed CRYP instances. */
struct stm32_cryp_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};
243 
244 static inline bool is_aes(struct stm32_cryp *cryp)
245 {
246 	return cryp->flags & FLG_AES;
247 }
248 
249 static inline bool is_des(struct stm32_cryp *cryp)
250 {
251 	return cryp->flags & FLG_DES;
252 }
253 
254 static inline bool is_tdes(struct stm32_cryp *cryp)
255 {
256 	return cryp->flags & FLG_TDES;
257 }
258 
259 static inline bool is_ecb(struct stm32_cryp *cryp)
260 {
261 	return cryp->flags & FLG_ECB;
262 }
263 
264 static inline bool is_cbc(struct stm32_cryp *cryp)
265 {
266 	return cryp->flags & FLG_CBC;
267 }
268 
269 static inline bool is_ctr(struct stm32_cryp *cryp)
270 {
271 	return cryp->flags & FLG_CTR;
272 }
273 
274 static inline bool is_gcm(struct stm32_cryp *cryp)
275 {
276 	return cryp->flags & FLG_GCM;
277 }
278 
279 static inline bool is_ccm(struct stm32_cryp *cryp)
280 {
281 	return cryp->flags & FLG_CCM;
282 }
283 
284 static inline bool is_encrypt(struct stm32_cryp *cryp)
285 {
286 	return cryp->flags & FLG_ENCRYPT;
287 }
288 
289 static inline bool is_decrypt(struct stm32_cryp *cryp)
290 {
291 	return !is_encrypt(cryp);
292 }
293 
294 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
295 {
296 	return readl_relaxed(cryp->regs + ofst);
297 }
298 
299 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
300 {
301 	writel_relaxed(val, cryp->regs + ofst);
302 }
303 
/*
 * Poll the status register until the BUSY flag drops (10 us step,
 * 100 ms timeout). Returns 0 on success, -ETIMEDOUT otherwise.
 */
static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
			!(status & SR_BUSY), 10, 100000);
}
311 
312 static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
313 {
314 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
315 		       cryp->regs + cryp->caps->cr);
316 }
317 
/*
 * Poll the control register until the hardware clears CRYPEN itself
 * (used to detect end of the GCM/CCM init phase — see callers).
 * Returns 0 on success, -ETIMEDOUT otherwise.
 */
static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
			!(status & CR_CRYPEN), 10, 100000);
}
325 
/*
 * Busy-wait (atomic poll, 10 us timeout) until the input FIFO is not
 * full, i.e. the peripheral can accept another word.
 */
static inline int stm32_cryp_wait_input(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
			status & SR_IFNF, 1, 10);
}
333 
/*
 * Busy-wait (atomic poll, 10 us timeout) until the output FIFO is not
 * empty, i.e. a processed word can be read back.
 */
static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
			status & SR_OFNE, 1, 10);
}
341 
342 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
343 {
344 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
345 		       cryp->regs + cryp->caps->cr);
346 }
347 
348 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
349 {
350 	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
351 		       cryp->regs + cryp->caps->cr);
352 }
353 
354 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp);
355 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp);
356 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp);
357 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
358 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
359 static int stm32_cryp_dma_start(struct stm32_cryp *cryp);
360 static int stm32_cryp_it_start(struct stm32_cryp *cryp);
361 
362 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
363 {
364 	struct stm32_cryp *tmp, *cryp = NULL;
365 
366 	spin_lock_bh(&cryp_list.lock);
367 	if (!ctx->cryp) {
368 		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
369 			cryp = tmp;
370 			break;
371 		}
372 		ctx->cryp = cryp;
373 	} else {
374 		cryp = ctx->cryp;
375 	}
376 
377 	spin_unlock_bh(&cryp_list.lock);
378 
379 	return cryp;
380 }
381 
382 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
383 {
384 	if (!iv)
385 		return;
386 
387 	stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
388 	stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));
389 
390 	if (is_aes(cryp)) {
391 		stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
392 		stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
393 	}
394 }
395 
/*
 * Read the updated IV back from the hardware into the request buffer,
 * so a chained skcipher call can resume from where this one stopped.
 */
static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	/* Some variants gate IV readout behind CR_KEYRDEN */
	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_enable(cryp);

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));

	/* DES/TDES use a 64-bit IV; AES uses the full 128 bits */
	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
	}

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_disable(cryp);
}
418 
419 /**
420  * ux500_swap_bits_in_byte() - mirror the bits in a byte
421  * @b: the byte to be mirrored
422  *
423  * The bits are swapped the following way:
424  *  Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and
425  *  nibble 2 (n2) bits 4-7.
426  *
427  *  Nibble 1 (n1):
428  *  (The "old" (moved) bit is replaced with a zero)
429  *  1. Move bit 6 and 7, 4 positions to the left.
430  *  2. Move bit 3 and 5, 2 positions to the left.
431  *  3. Move bit 1-4, 1 position to the left.
432  *
433  *  Nibble 2 (n2):
434  *  1. Move bit 0 and 1, 4 positions to the right.
435  *  2. Move bit 2 and 4, 2 positions to the right.
436  *  3. Move bit 3-6, 1 position to the right.
437  *
438  *  Combine the two nibbles to a complete and swapped byte.
439  */
440 static inline u8 ux500_swap_bits_in_byte(u8 b)
441 {
442 #define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
443 #define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
444 				  right shift 2 */
445 #define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
446 				  right shift 1 */
447 #define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
448 #define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
449 				  left shift 2 */
450 #define L_SHIFT_1_MASK  0x78 /* (After left shift 1) Bits 3-6,
451 				  left shift 1 */
452 
453 	u8 n1;
454 	u8 n2;
455 
456 	/* Swap most significant nibble */
457 	/* Right shift 4, bits 6 and 7 */
458 	n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
459 	/* Right shift 2, bits 3 and 5 */
460 	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
461 	/* Right shift 1, bits 1-4 */
462 	n1 = (n1  & R_SHIFT_1_MASK) >> 1;
463 
464 	/* Swap least significant nibble */
465 	/* Left shift 4, bits 0 and 1 */
466 	n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
467 	/* Left shift 2, bits 2 and 4 */
468 	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
469 	/* Left shift 1, bits 3-6 */
470 	n2 = (n2  & L_SHIFT_1_MASK) << 1;
471 
472 	return n1 | n2;
473 }
474 
475 /**
476  * ux500_swizzle_key() - Shuffle around words and bits in the AES key
477  * @in: key to swizzle
478  * @out: swizzled key
479  * @len: length of key, in bytes
480  *
481  * This "key swizzling procedure" is described in the examples in the
482  * DB8500 design specification. There is no real description of why
483  * the bits have been arranged like this in the hardware.
484  */
485 static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
486 {
487 	int i = 0;
488 	int bpw = sizeof(u32);
489 	int j;
490 	int index = 0;
491 
492 	j = len - bpw;
493 	while (j >= 0) {
494 		for (i = 0; i < bpw; i++) {
495 			index = len - j - bpw + i;
496 			out[j + i] =
497 				ux500_swap_bits_in_byte(in[index]);
498 		}
499 		j -= bpw;
500 	}
501 }
502 
/*
 * Load the key registers for the current request. Must be called only
 * after the control register is fully configured (see stm32_cryp_hw_init()).
 */
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	/* DES: a single 64-bit key in K1L/K1R */
	if (is_des(c)) {
		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
		return;
	}

	/*
	 * On the Ux500 the AES key is considered as a single bit sequence
	 * of 128, 192 or 256 bits length. It is written linearly into the
	 * registers from K1L and down, and need to be processed to become
	 * a proper big-endian bit sequence.
	 */
	if (is_aes(c) && c->caps->linear_aes_key) {
		u32 tmpkey[8];

		ux500_swizzle_key((u8 *)c->ctx->key,
				  (u8 *)tmpkey, c->ctx->keylen);

		r_id = c->caps->k1l;
		for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
			stm32_cryp_write(c, r_id, tmpkey[i]);

		return;
	}

	/*
	 * STM32 (and TDES): key is right-aligned, written from K3R
	 * downwards so shorter keys occupy the highest-numbered registers.
	 */
	r_id = c->caps->k3r;
	for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
		stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
}
537 
538 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
539 {
540 	if (is_aes(cryp) && is_ecb(cryp))
541 		return CR_AES_ECB;
542 
543 	if (is_aes(cryp) && is_cbc(cryp))
544 		return CR_AES_CBC;
545 
546 	if (is_aes(cryp) && is_ctr(cryp))
547 		return CR_AES_CTR;
548 
549 	if (is_aes(cryp) && is_gcm(cryp))
550 		return CR_AES_GCM;
551 
552 	if (is_aes(cryp) && is_ccm(cryp))
553 		return CR_AES_CCM;
554 
555 	if (is_des(cryp) && is_ecb(cryp))
556 		return CR_DES_ECB;
557 
558 	if (is_des(cryp) && is_cbc(cryp))
559 		return CR_DES_CBC;
560 
561 	if (is_tdes(cryp) && is_ecb(cryp))
562 		return CR_TDES_ECB;
563 
564 	if (is_tdes(cryp) && is_cbc(cryp))
565 		return CR_TDES_CBC;
566 
567 	dev_err(cryp->dev, "Unknown mode\n");
568 	return CR_AES_UNKNOWN;
569 }
570 
571 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
572 {
573 	return is_encrypt(cryp) ? cryp->areq->cryptlen :
574 				  cryp->areq->cryptlen - cryp->authsize;
575 }
576 
/*
 * GCM phase 1 (init): program J0 (96-bit IV || counter = 2) and run the
 * hardware init phase, then switch to the header or payload phase
 * depending on what the request contains.
 *
 * Returns 0 on success, -ETIMEDOUT if the init phase never completes.
 */
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}
608 
/*
 * Called after feeding header (associated) data: once the whole header
 * has been written, wait for the hardware and move on to the payload
 * phase — or do nothing if only the tag phase remains (caller ends the
 * request in that case). A timeout aborts the request.
 */
static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if whole header written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			/* Mask interrupts before failing the request */
			stm32_cryp_write(cryp, cryp->caps->imsc, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, cryp->caps->cr);
			/* Disable, switch phase, then re-enable */
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write, caller have to
			 * end request
			 */
		}
	}
}
643 
/*
 * Write the first CCM header block (B1): the associated-data length is
 * encoded at the front (2 bytes for alen <= 0xFEFF, 6 bytes with the
 * 0xFFFE marker otherwise, per the CCM adata formatting), followed by as
 * much header data as fits in one AES block.
 */
static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

	if (alen <= 65280) {
		/* Write first u32 of B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Build the two first u32 of B1 */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	/* Fill the rest of the block with header data (zero-padded) */
	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	memcpy_from_scatterwalk((char *)block + len, &cryp->in_walk, written);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}
678 
/*
 * CCM phase 1 (init): derive the CTR IV from the request nonce, build
 * and feed the B0 block (flags, nonce, message length), then switch to
 * the header or payload phase as appropriate.
 *
 * Returns 0 on success, -ETIMEDOUT if the init phase never completes.
 */
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	/* iv[0] is the CCM L'-1 field: zero the counter bytes it covers */
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	/* Encode the tag length in the flags byte: (authsize - 2) / 2 << 3 */
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	/* Adata flag when associated data is present */
	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 (byte-swapped unless the variant needs the padding workaround) */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, cryp->caps->din, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}
743 
/*
 * Configure the peripheral for the current request: data width, key
 * size, algorithm/mode, optional AES decryption key schedule, key and
 * IV registers, and the GCM/CCM init phase — then enable the block.
 *
 * Returns 0 on success, -EINVAL for an unknown mode, or a timeout error.
 *
 * NOTE(review): error paths return with the pm_runtime reference still
 * held — presumably balanced by the caller's completion path; confirm.
 */
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		/* Configure in key preparation mode */
		if (cryp->caps->kp_mode)
			stm32_cryp_write(cryp, cryp->caps->cr,
				cfg | CR_AES_KP);
		else
			/* Ux500 uses ECB + the key schedule enable bit instead */
			stm32_cryp_write(cryp,
				cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);

		/* Set key only after full configuration done */
		stm32_cryp_hw_write_key(cryp);

		/* Start prepare key */
		stm32_cryp_enable(cryp);
		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}

		cfg |= hw_mode | CR_DEC_NOT_ENC;

		/* Apply updated config (Decrypt + algo) and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else {
		cfg |= hw_mode;
		if (is_decrypt(cryp))
			cfg |= CR_DEC_NOT_ENC;

		/* Apply config and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Set key only after configuration done */
		stm32_cryp_hw_write_key(cryp);
	}

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		/* ECB modes need no IV */
		break;
	}

	/* Enable now */
	stm32_cryp_enable(cryp);

	return 0;
}
845 
/*
 * Complete the current request: read the AEAD tag and/or the updated IV
 * from the hardware when relevant, drop the runtime-PM reference, then
 * report the result to the crypto engine.
 */
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	/* ECB has no IV; GCM/CCM handle theirs in the tag phase */
	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);
}
862 
/*
 * DMA completion callback for the AEAD header transfer: unmap and free
 * the header scatterlist, push any non block-aligned header tail by PIO,
 * then either start the payload phase (DMA or IRQ driven) or finish the
 * request when only the tag phase remains.
 */
static void stm32_cryp_header_dma_callback(void *param)
{
	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
	int ret;
	u32 reg;

	dma_unmap_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);

	/* Stop the DMA requests while header data is handled by PIO */
	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));

	kfree(cryp->header_sg);

	reg = stm32_cryp_read(cryp, cryp->caps->cr);

	/* Remaining (non block-aligned) header bytes go through PIO */
	if (cryp->header_in) {
		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);

		ret = stm32_cryp_wait_input(cryp);
		if (ret) {
			dev_err(cryp->dev, "input header ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_write_gcmccm_header(cryp);
		WARN_ON(cryp->header_in);
	}

	if (stm32_cryp_get_input_text_len(cryp)) {
		/* Phase 3 : payload */
		reg = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, reg & ~CR_CRYPEN);

		reg &= ~CR_PH_MASK;
		reg |= CR_PH_PAYLOAD | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, reg);

		if (cryp->flags & FLG_IN_OUT_DMA) {
			ret = stm32_cryp_dma_start(cryp);
			if (ret)
				stm32_cryp_finish_req(cryp, ret);
		} else {
			stm32_cryp_it_start(cryp);
		}
	} else {
		/*
		 * Phase 4 : tag.
		 * Nothing to read, nothing to write => end request
		 */
		stm32_cryp_finish_req(cryp, 0);
	}
}
915 
/*
 * DMA completion callback for the payload transfer (attached to the OUT
 * channel): unmap/free the scatterlists, handle any non block-aligned
 * payload tail by PIO, then complete the request.
 */
static void stm32_cryp_dma_callback(void *param)
{
	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
	int ret;
	u32 reg;

	complete(&cryp->dma_completion); /* completion to indicate no timeout */

	dma_sync_sg_for_device(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);

	/* in_sg may alias out_sg for in-place operation: unmap only once */
	if (cryp->in_sg != cryp->out_sg)
		dma_unmap_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);

	dma_unmap_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);

	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));

	reg = stm32_cryp_read(cryp, cryp->caps->cr);

	/* Free only the sg lists this driver allocated (not the request's own) */
	if (is_gcm(cryp) || is_ccm(cryp)) {
		kfree(cryp->in_sg);
		kfree(cryp->out_sg);
	} else {
		if (cryp->in_sg != cryp->req->src)
			kfree(cryp->in_sg);
		if (cryp->out_sg != cryp->req->dst)
			kfree(cryp->out_sg);
	}

	/* Remaining (non block-aligned) payload bytes go through PIO */
	if (cryp->payload_in) {
		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);

		ret = stm32_cryp_wait_input(cryp);
		if (ret) {
			dev_err(cryp->dev, "input ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_write_data(cryp);

		ret = stm32_cryp_wait_output(cryp);
		if (ret) {
			dev_err(cryp->dev, "output ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_read_data(cryp);
	}

	stm32_cryp_finish_req(cryp, 0);
}
968 
969 static int stm32_cryp_header_dma_start(struct stm32_cryp *cryp)
970 {
971 	int ret;
972 	struct dma_async_tx_descriptor *tx_in;
973 	u32 reg;
974 	size_t align_size;
975 
976 	ret = dma_map_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
977 	if (!ret) {
978 		dev_err(cryp->dev, "dma_map_sg() error\n");
979 		return -ENOMEM;
980 	}
981 
982 	dma_sync_sg_for_device(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
983 
984 	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->header_sg, cryp->header_sg_len,
985 					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
986 	if (!tx_in) {
987 		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
988 		return -EINVAL;
989 	}
990 
991 	tx_in->callback_param = cryp;
992 	tx_in->callback = stm32_cryp_header_dma_callback;
993 
994 	/* Advance scatterwalk to not DMA'ed data */
995 	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
996 	scatterwalk_skip(&cryp->in_walk, align_size);
997 	cryp->header_in -= align_size;
998 
999 	ret = dma_submit_error(dmaengine_submit(tx_in));
1000 	if (ret < 0) {
1001 		dev_err(cryp->dev, "DMA in submit failed\n");
1002 		return ret;
1003 	}
1004 	dma_async_issue_pending(cryp->dma_lch_in);
1005 
1006 	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
1007 	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DIEN);
1008 
1009 	return 0;
1010 }
1011 
/*
 * Start the bidirectional payload DMA (IN: memory -> peripheral,
 * OUT: peripheral -> memory) for the block-aligned part of the data,
 * then wait (up to 1 s) for the OUT transfer to complete. The non
 * block-aligned tail is handled by PIO in the completion callback.
 *
 * Returns 0 on success, a negative error code otherwise.
 *
 * NOTE(review): the error paths return with the scatterlists still
 * DMA-mapped (and the callback that unmaps them never runs) — looks
 * like a leak; cleanup is non-trivial here because of the
 * in_sg == out_sg in-place case, so the code is left untouched.
 */
static int stm32_cryp_dma_start(struct stm32_cryp *cryp)
{
	int ret;
	size_t align_size;
	struct dma_async_tx_descriptor *tx_in, *tx_out;
	u32 reg;

	/* In-place operation: map the shared list only once */
	if (cryp->in_sg != cryp->out_sg) {
		ret = dma_map_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
		if (!ret) {
			dev_err(cryp->dev, "dma_map_sg() error\n");
			return -ENOMEM;
		}
	}

	ret = dma_map_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
	if (!ret) {
		dev_err(cryp->dev, "dma_map_sg() error\n");
		return -ENOMEM;
	}

	dma_sync_sg_for_device(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);

	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->in_sg, cryp->in_sg_len,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx_in) {
		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
		return -EINVAL;
	}

	/* No callback necessary */
	tx_in->callback_param = cryp;
	tx_in->callback = NULL;

	tx_out = dmaengine_prep_slave_sg(cryp->dma_lch_out, cryp->out_sg, cryp->out_sg_len,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx_out) {
		dev_err(cryp->dev, "OUT prep_slave_sg() failed\n");
		return -EINVAL;
	}

	/* Completion is driven by the OUT channel (last data to move) */
	reinit_completion(&cryp->dma_completion);
	tx_out->callback = stm32_cryp_dma_callback;
	tx_out->callback_param = cryp;

	/* Advance scatterwalk to not DMA'ed data */
	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
	scatterwalk_skip(&cryp->in_walk, align_size);
	cryp->payload_in -= align_size;

	ret = dma_submit_error(dmaengine_submit(tx_in));
	if (ret < 0) {
		dev_err(cryp->dev, "DMA in submit failed\n");
		return ret;
	}
	dma_async_issue_pending(cryp->dma_lch_in);

	/* Advance scatterwalk to not DMA'ed data */
	scatterwalk_skip(&cryp->out_walk, align_size);
	cryp->payload_out -= align_size;
	ret = dma_submit_error(dmaengine_submit(tx_out));
	if (ret < 0) {
		dev_err(cryp->dev, "DMA out submit failed\n");
		return ret;
	}
	dma_async_issue_pending(cryp->dma_lch_out);

	/* Let the peripheral drive both DMA request lines */
	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DOEN | DMACR_DIEN);

	if (!wait_for_completion_timeout(&cryp->dma_completion, msecs_to_jiffies(1000))) {
		dev_err(cryp->dev, "DMA out timed out\n");
		dmaengine_terminate_sync(cryp->dma_lch_out);
		return -ETIMEDOUT;
	}

	return 0;
}
1090 
/* Start a PIO transfer: unmask the in/out FIFO interrupts and let the IRQ
 * handler move all the data.
 */
static int stm32_cryp_it_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);

	return 0;
}
1098 
1099 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
1100 
/* skcipher ->init: reserve room for this driver's per-request context. */
static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}
1107 
1108 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
1109 
/* AEAD ->init: reserve room for this driver's per-request context. */
static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}
1116 
/*
 * Queue a symmetric-cipher request on the crypto engine.
 * @mode: FLG_* algorithm and direction bits, stored in the request context
 * and applied when the engine invokes stm32_cryp_cipher_one_req().
 */
static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}
1131 
/*
 * Queue an AEAD request on the crypto engine.
 * @mode: FLG_* algorithm and direction bits, stored in the request context
 * and applied when the engine invokes stm32_cryp_aead_one_req().
 */
static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}
1145 
/*
 * Store the raw key in the transform context; it is loaded into the
 * hardware at request time. Key-length validation is performed by the
 * algorithm-specific wrappers that call this helper.
 */
static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
1156 
1157 static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1158 				 unsigned int keylen)
1159 {
1160 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1161 	    keylen != AES_KEYSIZE_256)
1162 		return -EINVAL;
1163 	else
1164 		return stm32_cryp_setkey(tfm, key, keylen);
1165 }
1166 
1167 static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
1168 				 unsigned int keylen)
1169 {
1170 	return verify_skcipher_des_key(tfm, key) ?:
1171 	       stm32_cryp_setkey(tfm, key, keylen);
1172 }
1173 
1174 static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1175 				  unsigned int keylen)
1176 {
1177 	return verify_skcipher_des3_key(tfm, key) ?:
1178 	       stm32_cryp_setkey(tfm, key, keylen);
1179 }
1180 
1181 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
1182 				      unsigned int keylen)
1183 {
1184 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
1185 
1186 	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1187 	    keylen != AES_KEYSIZE_256)
1188 		return -EINVAL;
1189 
1190 	memcpy(ctx->key, key, keylen);
1191 	ctx->keylen = keylen;
1192 
1193 	return 0;
1194 }
1195 
/* GCM tag lengths supported by the hardware: 4, 8 and 12..16 bytes. */
static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	if (authsize == 4 || authsize == 8 ||
	    (authsize >= 12 && authsize <= 16))
		return 0;

	return -EINVAL;
}
1214 
/* CCM tag lengths supported by the hardware: even values in 4..16 bytes. */
static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	if (authsize < 4 || authsize > 16 || (authsize & 1))
		return -EINVAL;

	return 0;
}
1233 
1234 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
1235 {
1236 	if (req->cryptlen % AES_BLOCK_SIZE)
1237 		return -EINVAL;
1238 
1239 	if (req->cryptlen == 0)
1240 		return 0;
1241 
1242 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
1243 }
1244 
1245 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
1246 {
1247 	if (req->cryptlen % AES_BLOCK_SIZE)
1248 		return -EINVAL;
1249 
1250 	if (req->cryptlen == 0)
1251 		return 0;
1252 
1253 	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
1254 }
1255 
1256 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
1257 {
1258 	if (req->cryptlen % AES_BLOCK_SIZE)
1259 		return -EINVAL;
1260 
1261 	if (req->cryptlen == 0)
1262 		return 0;
1263 
1264 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
1265 }
1266 
1267 static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
1268 {
1269 	if (req->cryptlen % AES_BLOCK_SIZE)
1270 		return -EINVAL;
1271 
1272 	if (req->cryptlen == 0)
1273 		return 0;
1274 
1275 	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
1276 }
1277 
1278 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
1279 {
1280 	if (req->cryptlen == 0)
1281 		return 0;
1282 
1283 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
1284 }
1285 
1286 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
1287 {
1288 	if (req->cryptlen == 0)
1289 		return 0;
1290 
1291 	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
1292 }
1293 
/* AES-GCM encrypt: queue the AEAD request on the engine. */
static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}
1298 
/* AES-GCM decrypt: queue the AEAD request on the engine. */
static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}
1303 
1304 static inline int crypto_ccm_check_iv(const u8 *iv)
1305 {
1306 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
1307 	if (iv[0] < 1 || iv[0] > 7)
1308 		return -EINVAL;
1309 
1310 	return 0;
1311 }
1312 
1313 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
1314 {
1315 	int err;
1316 
1317 	err = crypto_ccm_check_iv(req->iv);
1318 	if (err)
1319 		return err;
1320 
1321 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
1322 }
1323 
1324 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
1325 {
1326 	int err;
1327 
1328 	err = crypto_ccm_check_iv(req->iv);
1329 	if (err)
1330 		return err;
1331 
1332 	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
1333 }
1334 
1335 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
1336 {
1337 	if (req->cryptlen % DES_BLOCK_SIZE)
1338 		return -EINVAL;
1339 
1340 	if (req->cryptlen == 0)
1341 		return 0;
1342 
1343 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
1344 }
1345 
1346 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
1347 {
1348 	if (req->cryptlen % DES_BLOCK_SIZE)
1349 		return -EINVAL;
1350 
1351 	if (req->cryptlen == 0)
1352 		return 0;
1353 
1354 	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
1355 }
1356 
1357 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
1358 {
1359 	if (req->cryptlen % DES_BLOCK_SIZE)
1360 		return -EINVAL;
1361 
1362 	if (req->cryptlen == 0)
1363 		return 0;
1364 
1365 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
1366 }
1367 
1368 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
1369 {
1370 	if (req->cryptlen % DES_BLOCK_SIZE)
1371 		return -EINVAL;
1372 
1373 	if (req->cryptlen == 0)
1374 		return 0;
1375 
1376 	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
1377 }
1378 
1379 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
1380 {
1381 	if (req->cryptlen % DES_BLOCK_SIZE)
1382 		return -EINVAL;
1383 
1384 	if (req->cryptlen == 0)
1385 		return 0;
1386 
1387 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
1388 }
1389 
1390 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
1391 {
1392 	if (req->cryptlen % DES_BLOCK_SIZE)
1393 		return -EINVAL;
1394 
1395 	if (req->cryptlen == 0)
1396 		return 0;
1397 
1398 	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
1399 }
1400 
1401 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
1402 {
1403 	if (req->cryptlen % DES_BLOCK_SIZE)
1404 		return -EINVAL;
1405 
1406 	if (req->cryptlen == 0)
1407 		return 0;
1408 
1409 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
1410 }
1411 
1412 static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
1413 {
1414 	if (req->cryptlen % DES_BLOCK_SIZE)
1415 		return -EINVAL;
1416 
1417 	if (req->cryptlen == 0)
1418 		return 0;
1419 
1420 	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
1421 }
1422 
1423 static enum stm32_dma_mode stm32_cryp_dma_check_sg(struct scatterlist *test_sg, size_t len,
1424 						   size_t block_size)
1425 {
1426 	struct scatterlist *sg;
1427 	int i;
1428 
1429 	if (len <= 16)
1430 		return NO_DMA; /* Faster */
1431 
1432 	for_each_sg(test_sg, sg, sg_nents(test_sg), i) {
1433 		if (!IS_ALIGNED(sg->length, block_size) && !sg_is_last(sg))
1434 			return NO_DMA;
1435 
1436 		if (sg->offset % sizeof(u32))
1437 			return NO_DMA;
1438 
1439 		if (sg_is_last(sg) && !IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
1440 			return DMA_NEED_SG_TRUNC;
1441 	}
1442 
1443 	return DMA_PLAIN_SG;
1444 }
1445 
1446 static enum stm32_dma_mode stm32_cryp_dma_check(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1447 						struct scatterlist *out_sg)
1448 {
1449 	enum stm32_dma_mode ret = DMA_PLAIN_SG;
1450 
1451 	if (!is_aes(cryp))
1452 		return NO_DMA;
1453 
1454 	if (!cryp->dma_lch_in || !cryp->dma_lch_out)
1455 		return NO_DMA;
1456 
1457 	ret = stm32_cryp_dma_check_sg(in_sg, cryp->payload_in, AES_BLOCK_SIZE);
1458 	if (ret == NO_DMA)
1459 		return ret;
1460 
1461 	ret = stm32_cryp_dma_check_sg(out_sg, cryp->payload_out, AES_BLOCK_SIZE);
1462 	if (ret == NO_DMA)
1463 		return ret;
1464 
1465 	/* Check CTR counter overflow */
1466 	if (is_aes(cryp) && is_ctr(cryp)) {
1467 		u32 c;
1468 		__be32 iv3;
1469 
1470 		memcpy(&iv3, &cryp->req->iv[3 * sizeof(u32)], sizeof(iv3));
1471 		c = be32_to_cpu(iv3);
1472 		if ((c + cryp->payload_in) < cryp->payload_in)
1473 			return NO_DMA;
1474 	}
1475 
1476 	/* Workaround */
1477 	if (is_aes(cryp) && is_ctr(cryp) && ret == DMA_NEED_SG_TRUNC)
1478 		return NO_DMA;
1479 
1480 	return ret;
1481 }
1482 
/*
 * stm32_cryp_truncate_sg - build a new scatterlist covering bytes
 * [@skip, @skip + @size) of @sg.
 *
 * On success *@new_sg is a freshly kmalloc'ed sg table (NULL when @sg is
 * NULL or @size is zero) and *@new_sg_len holds the number of entries
 * actually used. The caller owns *@new_sg and must kfree() it.
 * Returns 0 on success or a negative errno.
 */
static int stm32_cryp_truncate_sg(struct scatterlist **new_sg, size_t *new_sg_len,
				  struct scatterlist *sg, off_t skip, size_t size)
{
	struct scatterlist *cur;
	int alloc_sg_len;

	*new_sg_len = 0;

	if (!sg || !size) {
		*new_sg = NULL;
		return 0;
	}

	alloc_sg_len = sg_nents_for_len(sg, skip + size);
	if (alloc_sg_len < 0)
		return alloc_sg_len;

	/* We may allocate more sg entries than needed, but it is simpler */
	*new_sg = kmalloc_array((size_t)alloc_sg_len, sizeof(struct scatterlist), GFP_KERNEL);
	if (!*new_sg)
		return -ENOMEM;

	sg_init_table(*new_sg, (unsigned int)alloc_sg_len);

	cur = *new_sg;
	while (sg && size) {
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;

		/* Entry lies entirely inside the area to skip */
		if (skip > len) {
			skip -= len;
			sg = sg_next(sg);
			continue;
		}

		/* Partial skip: advance the start of this entry */
		if (skip) {
			len -= skip;
			offset += skip;
			skip = 0;
		}

		/* Clamp the final entry to the remaining size */
		if (size < len)
			len = size;

		if (len > 0) {
			(*new_sg_len)++;
			size -= len;
			sg_set_page(cur, sg_page(sg), len, offset);
			if (size == 0)
				sg_mark_end(cur);
			cur = sg_next(cur);
		}

		sg = sg_next(sg);
	}

	return 0;
}
1541 
/*
 * stm32_cryp_cipher_prepare - select PIO or DMA for a cipher request and set
 * up cryp->in_sg/out_sg accordingly.
 *
 * DMA_NEED_SG_TRUNC allocates truncated copies of the request's sg lists
 * covering only the block-aligned part (NOTE(review): these are expected to
 * be freed once the transfer completes - confirm in the completion path).
 * DMA_PLAIN_SG uses the request's sg lists directly.
 */
static int stm32_cryp_cipher_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
				     struct scatterlist *out_sg)
{
	size_t align_size;
	int ret;

	cryp->dma_mode = stm32_cryp_dma_check(cryp, in_sg, out_sg);

	scatterwalk_start(&cryp->in_walk, in_sg);
	scatterwalk_start(&cryp->out_walk, out_sg);

	if (cryp->dma_mode == NO_DMA) {
		cryp->flags &= ~FLG_IN_OUT_DMA;

		/* Reset the saved counter state for the PIO CTR path */
		if (is_ctr(cryp))
			memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	} else if (cryp->dma_mode == DMA_NEED_SG_TRUNC) {

		cryp->flags |= FLG_IN_OUT_DMA;

		/* DMA only the block-aligned part; the tail is done in PIO */
		align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
		ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, 0, align_size);
		if (ret)
			return ret;

		ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0,
					     align_size);
		if (ret) {
			kfree(cryp->in_sg);
			return ret;
		}
	} else {
		cryp->flags |= FLG_IN_OUT_DMA;

		cryp->in_sg = in_sg;
		cryp->out_sg = out_sg;

		ret = sg_nents_for_len(cryp->in_sg, cryp->payload_in);
		if (ret < 0)
			return ret;
		cryp->in_sg_len = (size_t)ret;

		ret = sg_nents_for_len(out_sg, cryp->payload_out);
		if (ret < 0)
			return ret;
		cryp->out_sg_len = (size_t)ret;
	}

	return 0;
}
1593 
/*
 * stm32_cryp_aead_prepare - decide DMA usage for the header and the payload
 * of an AEAD request and build the truncated sg lists for the DMA-able
 * parts.
 *
 * Sets/clears FLG_HEADER_DMA and FLG_IN_OUT_DMA independently. The
 * allocated cryp->header_sg/in_sg/out_sg belong to @cryp (NOTE(review):
 * expected to be freed after the transfer - confirm in the completion path,
 * not visible here).
 */
static int stm32_cryp_aead_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
				   struct scatterlist *out_sg)
{
	size_t align_size;
	off_t skip;
	int ret, ret2;

	cryp->header_sg = NULL;
	cryp->in_sg = NULL;
	cryp->out_sg = NULL;

	/* Without both DMA channels, everything goes through PIO */
	if (!cryp->dma_lch_in || !cryp->dma_lch_out) {
		cryp->dma_mode = NO_DMA;
		cryp->flags &= ~(FLG_IN_OUT_DMA | FLG_HEADER_DMA);

		return 0;
	}

	/* CCM hw_init may have advanced in header */
	skip = cryp->areq->assoclen - cryp->header_in;

	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
	ret = stm32_cryp_truncate_sg(&cryp->header_sg, &cryp->header_sg_len, in_sg, skip,
				     align_size);
	if (ret)
		return ret;

	ret = stm32_cryp_dma_check_sg(cryp->header_sg, align_size, AES_BLOCK_SIZE);
	if (ret == NO_DMA) {
		/* We cannot DMA the header */
		kfree(cryp->header_sg);
		cryp->header_sg = NULL;

		cryp->flags &= ~FLG_HEADER_DMA;
	} else {
		cryp->flags |= FLG_HEADER_DMA;
	}

	/* Now skip all header to be at payload start */
	skip = cryp->areq->assoclen;
	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
	ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, skip, align_size);
	if (ret) {
		kfree(cryp->header_sg);
		return ret;
	}

	/* For out buffer align_size is same as in buffer */
	ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size);
	if (ret) {
		kfree(cryp->header_sg);
		kfree(cryp->in_sg);
		return ret;
	}

	/* Payload DMA requires both directions to be DMA-capable */
	ret = stm32_cryp_dma_check_sg(cryp->in_sg, align_size, AES_BLOCK_SIZE);
	ret2 = stm32_cryp_dma_check_sg(cryp->out_sg, align_size, AES_BLOCK_SIZE);
	if (ret == NO_DMA || ret2 == NO_DMA) {
		kfree(cryp->in_sg);
		cryp->in_sg = NULL;

		kfree(cryp->out_sg);
		cryp->out_sg = NULL;

		cryp->flags &= ~FLG_IN_OUT_DMA;
	} else {
		cryp->flags |= FLG_IN_OUT_DMA;
	}

	return 0;
}
1665 
/*
 * stm32_cryp_prepare_req - common preparation for cipher (@req) and AEAD
 * (@areq) requests; exactly one of the two must be non-NULL.
 *
 * Captures the request parameters in @cryp, initialises the scatterwalks,
 * programs the hardware and selects PIO vs DMA. Note the ordering
 * difference: for ciphers the sg setup precedes stm32_cryp_hw_init(),
 * while for AEAD the hardware is initialised first because CCM hw_init may
 * already consume part of the header.
 */
static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg, *out_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	/* Install the algorithm/direction bits requested at queue time */
	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;

		in_sg = req->src;
		out_sg = req->dst;

		ret = stm32_cryp_cipher_prepare(cryp, in_sg, out_sg);
		if (ret)
			return ret;

		ret = stm32_cryp_hw_init(cryp);
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  = AssocData   ||     PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *
		 *  OUTPUT = AssocData    ||   CipherText   ||      AuthTag
		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  =  AssocData     ||    CipherTex   ||       AuthTag
		 *          <- assoclen --->  <---------- cryptlen ---------->
		 *
		 *  OUTPUT = AssocData    ||               PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}

		in_sg = areq->src;
		out_sg = areq->dst;

		scatterwalk_start(&cryp->in_walk, in_sg);
		/* In output, jump after assoc data */
		scatterwalk_start_at_pos(&cryp->out_walk, out_sg,
					 areq->assoclen);

		ret = stm32_cryp_hw_init(cryp);
		if (ret)
			return ret;

		ret = stm32_cryp_aead_prepare(cryp, in_sg, out_sg);
	}

	return ret;
}
1753 
1754 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
1755 {
1756 	struct skcipher_request *req = container_of(areq,
1757 						      struct skcipher_request,
1758 						      base);
1759 	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1760 			crypto_skcipher_reqtfm(req));
1761 	struct stm32_cryp *cryp = ctx->cryp;
1762 	int ret;
1763 
1764 	if (!cryp)
1765 		return -ENODEV;
1766 
1767 	ret = stm32_cryp_prepare_req(req, NULL);
1768 	if (ret)
1769 		return ret;
1770 
1771 	if (cryp->flags & FLG_IN_OUT_DMA)
1772 		ret = stm32_cryp_dma_start(cryp);
1773 	else
1774 		ret = stm32_cryp_it_start(cryp);
1775 
1776 	if (ret == -ETIMEDOUT)
1777 		stm32_cryp_finish_req(cryp, ret);
1778 
1779 	return ret;
1780 }
1781 
/*
 * crypto_engine entry point for AEAD requests. Dispatch order: header DMA
 * first when selected, then payload DMA once no header remains, otherwise
 * interrupt-driven PIO. With no input at all, only the tag phase remains.
 */
static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;
	int err;

	if (!cryp)
		return -ENODEV;

	err = stm32_cryp_prepare_req(NULL, req);
	if (err)
		return err;

	if (!stm32_cryp_get_input_text_len(cryp) && !cryp->header_in &&
	    !(cryp->flags & FLG_HEADER_DMA)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	if (cryp->flags & FLG_HEADER_DMA)
		return stm32_cryp_header_dma_start(cryp);

	if (!cryp->header_in && cryp->flags & FLG_IN_OUT_DMA)
		return stm32_cryp_dma_start(cryp);

	return stm32_cryp_it_start(cryp);
}
1812 
/*
 * stm32_cryp_read_auth_tag - run the GCM/CCM Final phase and handle the tag.
 *
 * GCM: feeds the aad and payload bit lengths; CCM: feeds CTR0 rebuilt from
 * the request IV. For encryption the computed tag is copied to the
 * destination; for decryption it is compared with the transmitted tag and
 * -EBADMSG is returned on mismatch.
 */
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		/* Zero the length field: iv[0] holds L' = L - 1 */
		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, cryp->caps->din, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
		memcpy_to_scatterwalk(&cryp->out_walk, out_tag, cryp->authsize);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		memcpy_from_scatterwalk(in_tag, &cryp->in_walk, cryp->authsize);
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);

		/* Constant-time comparison to avoid leaking tag bytes */
		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	return ret;
}
1894 
/*
 * Snapshot the current CTR counter from the IV registers and, when the low
 * 32-bit word is about to wrap, propagate the carry manually (the hardware
 * only increments the last 32-bit word).
 */
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case, we need to increment manually the ctr counter,
		 * as HW doesn't handle the U32 carry.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		/* Reloading the IV requires the peripheral to be disabled */
		cr = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, cryp->caps->cr, cr);
	}

	/* The IV registers are BE  */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
}
1920 
/* Drain one hardware block from the output FIFO into the destination sg,
 * copying only the bytes still expected (payload_out may be short).
 */
static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32];

	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
	memcpy_to_scatterwalk(&cryp->out_walk, block, min(cryp->hw_blocksize,
							  cryp->payload_out));
	cryp->payload_out -= min(cryp->hw_blocksize, cryp->payload_out);
}
1930 
/* Feed one block from the source sg into the input FIFO; a short final
 * block is zero-padded to the hardware block size.
 */
static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};

	memcpy_from_scatterwalk(block, &cryp->in_walk, min(cryp->hw_blocksize,
							   cryp->payload_in));
	writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
	cryp->payload_in -= min(cryp->hw_blocksize, cryp->payload_in);
}
1940 
/*
 * Datasheet workaround for a partial last block in GCM encryption on IPs
 * without the NPBLB field: encrypt the padded block in CTR mode, then
 * replay the result through the GCM Final phase so the tag stays correct.
 */
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	memcpy_to_scatterwalk(&cryp->out_walk, block, min(cryp->hw_blocksize,
							  cryp->payload_out));
	cryp->payload_out -= min(cryp->hw_blocksize, cryp->payload_out);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) write padded data */
	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* discard the dummy output words */
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, cryp->caps->dout);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}
2013 
/*
 * Program the number of padding bytes of the last block (NPBLB) so the
 * hardware ignores them; used on IPs that support the field instead of the
 * padded-data workaround sequences.
 */
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
}
2027 
/*
 * Datasheet workaround for a partial last block in CCM decryption on IPs
 * without the NPBLB field: decrypt the padded block in CTR mode, then
 * re-inject it (XORed with the saved/current GCM-CCM context) through the
 * header phase so the MAC stays correct.
 */
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	memcpy_to_scatterwalk(&cryp->out_walk, block, min(cryp->hw_blocksize,
							  cryp->payload_out));
	cryp->payload_out -= min(cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}
2113 
/*
 * Write the next input block, first applying the partial-last-block
 * handling for GCM encryption and CCM decryption (dedicated workaround
 * sequence on older IPs, NPBLB field otherwise) and the CTR carry fixup.
 */
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}
2154 
/*
 * Write up to one block of associated data to the input FIFO (zero-padded
 * to a full block) and let stm32_crypt_gcmccm_end_header() switch phase
 * once the header is exhausted.
 */
static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min(AES_BLOCK_SIZE, cryp->header_in);

	memcpy_from_scatterwalk(block, &cryp->in_walk, written);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}
2170 
/*
 * Threaded IRQ handler: services the FIFOs according to the status snapshot
 * taken in hard-IRQ context (cryp->irq_status), masks interrupts that are no
 * longer needed, and finalizes the request once all data has been moved.
 */
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);

	/* Drain output first so the engine can accept the next input block. */
	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			/* AEAD modes: header (AAD) and payload phases differ. */
			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			/* Track the GCM counter advanced by each written block. */
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);

	/*
	 * All input consumed and all output produced: complete the request.
	 * BHs are disabled around the completion - NOTE(review): presumably
	 * because the engine completion path expects softirq context.
	 */
	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) {
		local_bh_disable();
		stm32_cryp_finish_req(cryp, 0);
		local_bh_enable();
	}

	return IRQ_HANDLED;
}
2213 
/*
 * Hard-IRQ handler: snapshot the masked interrupt status register for the
 * threaded handler, then wake it.
 */
static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);

	return IRQ_WAKE_THREAD;
}
2222 
2223 static int stm32_cryp_dma_init(struct stm32_cryp *cryp)
2224 {
2225 	struct dma_slave_config dma_conf;
2226 	struct dma_chan *chan;
2227 	int ret;
2228 
2229 	memset(&dma_conf, 0, sizeof(dma_conf));
2230 
2231 	dma_conf.direction = DMA_MEM_TO_DEV;
2232 	dma_conf.dst_addr = cryp->phys_base + cryp->caps->din;
2233 	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2234 	dma_conf.dst_maxburst = CRYP_DMA_BURST_REG;
2235 	dma_conf.device_fc = false;
2236 
2237 	chan = dma_request_chan(cryp->dev, "in");
2238 	if (IS_ERR(chan))
2239 		return PTR_ERR(chan);
2240 
2241 	cryp->dma_lch_in = chan;
2242 	ret = dmaengine_slave_config(cryp->dma_lch_in, &dma_conf);
2243 	if (ret) {
2244 		dma_release_channel(cryp->dma_lch_in);
2245 		cryp->dma_lch_in = NULL;
2246 		dev_err(cryp->dev, "Couldn't configure DMA in slave.\n");
2247 		return ret;
2248 	}
2249 
2250 	memset(&dma_conf, 0, sizeof(dma_conf));
2251 
2252 	dma_conf.direction = DMA_DEV_TO_MEM;
2253 	dma_conf.src_addr = cryp->phys_base + cryp->caps->dout;
2254 	dma_conf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2255 	dma_conf.src_maxburst = CRYP_DMA_BURST_REG;
2256 	dma_conf.device_fc = false;
2257 
2258 	chan = dma_request_chan(cryp->dev, "out");
2259 	if (IS_ERR(chan)) {
2260 		dma_release_channel(cryp->dma_lch_in);
2261 		cryp->dma_lch_in = NULL;
2262 		return PTR_ERR(chan);
2263 	}
2264 
2265 	cryp->dma_lch_out = chan;
2266 
2267 	ret = dmaengine_slave_config(cryp->dma_lch_out, &dma_conf);
2268 	if (ret) {
2269 		dma_release_channel(cryp->dma_lch_out);
2270 		cryp->dma_lch_out = NULL;
2271 		dev_err(cryp->dev, "Couldn't configure DMA out slave.\n");
2272 		dma_release_channel(cryp->dma_lch_in);
2273 		cryp->dma_lch_in = NULL;
2274 		return ret;
2275 	}
2276 
2277 	init_completion(&cryp->dma_completion);
2278 
2279 	return 0;
2280 }
2281 
/*
 * Symmetric skcipher algorithms exposed through the crypto engine: AES
 * (ECB/CBC/CTR), DES (ECB/CBC) and 3DES (ECB/CBC).  All entries share the
 * same engine callback, stm32_cryp_cipher_one_req.
 */
static struct skcipher_engine_alg crypto_algs[] = {
/* AES-ECB */
{
	.base = {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "stm32-ecb-aes",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_ecb_encrypt,
		.decrypt		= stm32_cryp_aes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* AES-CBC */
{
	.base = {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "stm32-cbc-aes",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_cbc_encrypt,
		.decrypt		= stm32_cryp_aes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* AES-CTR (stream mode: blocksize 1) */
{
	.base = {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "stm32-ctr-aes",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_ctr_encrypt,
		.decrypt		= stm32_cryp_aes_ctr_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* DES-ECB */
{
	.base = {
		.base.cra_name		= "ecb(des)",
		.base.cra_driver_name	= "stm32-ecb-des",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= DES_BLOCK_SIZE,
		.max_keysize		= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_des_setkey,
		.encrypt		= stm32_cryp_des_ecb_encrypt,
		.decrypt		= stm32_cryp_des_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* DES-CBC */
{
	.base = {
		.base.cra_name		= "cbc(des)",
		.base.cra_driver_name	= "stm32-cbc-des",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= DES_BLOCK_SIZE,
		.max_keysize		= DES_BLOCK_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_des_setkey,
		.encrypt		= stm32_cryp_des_cbc_encrypt,
		.decrypt		= stm32_cryp_des_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* 3DES-ECB */
{
	.base = {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "stm32-ecb-des3",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= 3 * DES_BLOCK_SIZE,
		.max_keysize		= 3 * DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_tdes_setkey,
		.encrypt		= stm32_cryp_tdes_ecb_encrypt,
		.decrypt		= stm32_cryp_tdes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
/* 3DES-CBC */
{
	.base = {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "stm32-cbc-des3",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= 3 * DES_BLOCK_SIZE,
		.max_keysize		= 3 * DES_BLOCK_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_tdes_setkey,
		.encrypt		= stm32_cryp_tdes_cbc_encrypt,
		.decrypt		= stm32_cryp_tdes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
};
2442 
/*
 * AEAD algorithms (AES-GCM and AES-CCM), registered only when the hardware
 * variant supports them (caps->aeads_support).
 */
static struct aead_engine_alg aead_algs[] = {
/* AES-GCM: 96-bit IV */
{
	.base.setkey		= stm32_cryp_aes_aead_setkey,
	.base.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.base.encrypt		= stm32_cryp_aes_gcm_encrypt,
	.base.decrypt		= stm32_cryp_aes_gcm_decrypt,
	.base.init		= stm32_cryp_aes_aead_init,
	.base.ivsize		= 12,
	.base.maxauthsize	= AES_BLOCK_SIZE,

	.base.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
/* AES-CCM: full-block IV */
{
	.base.setkey		= stm32_cryp_aes_aead_setkey,
	.base.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.base.encrypt		= stm32_cryp_aes_ccm_encrypt,
	.base.decrypt		= stm32_cryp_aes_ccm_decrypt,
	.base.init		= stm32_cryp_aes_aead_init,
	.base.ivsize		= AES_BLOCK_SIZE,
	.base.maxauthsize	= AES_BLOCK_SIZE,

	.base.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
};
2491 
/*
 * ST-Ericsson Ux500 variant: no AEAD support, linear AES key layout, IV
 * protection, and both the swap-final and padding workarounds; its register
 * map differs from the STM32 ones.
 */
static const struct stm32_cryp_caps ux500_data = {
	.aeads_support = false,
	.linear_aes_key = true,
	.kp_mode = false,
	.iv_protection = true,
	.swap_final = true,
	.padding_wa = true,
	.cr = UX500_CRYP_CR,
	.sr = UX500_CRYP_SR,
	.din = UX500_CRYP_DIN,
	.dout = UX500_CRYP_DOUT,
	.dmacr = UX500_CRYP_DMACR,
	.imsc = UX500_CRYP_IMSC,
	.mis = UX500_CRYP_MIS,
	.k1l = UX500_CRYP_K1L,
	.k1r = UX500_CRYP_K1R,
	.k3r = UX500_CRYP_K3R,
	.iv0l = UX500_CRYP_IV0L,
	.iv0r = UX500_CRYP_IV0R,
	.iv1l = UX500_CRYP_IV1L,
	.iv1r = UX500_CRYP_IV1R,
};
2514 
/*
 * STM32F7 variant: AEAD-capable, key-preparation mode, but still needs the
 * swap-final and padding workarounds; standard STM32 CRYP register map.
 */
static const struct stm32_cryp_caps f7_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = true,
	.padding_wa = true,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};
2537 
/*
 * STM32MP1 variant: AEAD-capable and free of the swap-final/padding
 * workarounds (the hardware handles partial final blocks via NPBLB).
 */
static const struct stm32_cryp_caps mp1_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = false,
	.padding_wa = false,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};
2560 
/* Device-tree match table mapping compatibles to per-variant capabilities. */
static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);
2568 
/*
 * Probe: map registers, request the IRQ and clock, pulse the reset line,
 * set up the (optional) DMA channels and register the algorithms with a
 * newly allocated crypto engine.  Runtime PM is enabled with autosuspend;
 * the device is kept active for the whole probe and released at the end.
 */
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	/* Per-variant capabilities come from the DT match table. */
	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	/*
	 * Keep the physical base address: the DMA slave configuration in
	 * stm32_cryp_dma_init() targets the DIN/DOUT registers by bus
	 * address.  The resource is known valid since the ioremap above
	 * succeeded.
	 */
	cryp->phys_base = platform_get_resource(pdev, IORESOURCE_MEM, 0)->start;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	/* Hard handler snapshots MIS; the threaded handler does the work. */
	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");

		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	/* Clock is already on: mark active and hold a reference for probe. */
	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	/* Best-effort reset pulse; only a probe deferral aborts probe. */
	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	/* DMA is optional: -ENODEV means no channels, continue without. */
	ret = stm32_cryp_dma_init(cryp);
	switch (ret) {
	case 0:
		break;
	case -ENODEV:
		dev_dbg(dev, "DMA mode not available\n");
		break;
	default:
		goto err_dma;
	}

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	/* GCM/CCM only where the hardware variant supports AEAD. */
	if (cryp->caps->aeads_support) {
		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
		if (ret)
			goto err_aead_algs;
	}

	dev_info(dev, "Initialized\n");

	/* Drop the probe-time PM reference; autosuspend takes over. */
	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);
	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);
err_dma:
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}
2707 
/*
 * Remove: unregister algorithms, stop the engine, drop the device from the
 * global list, release DMA channels and tear down runtime PM.
 */
static void stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	/* Resume the device so registers stay accessible during teardown. */
	ret = pm_runtime_get_sync(cryp->dev);

	if (cryp->caps->aeads_support)
		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);

	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	/* Only disable the clock if the resume above actually enabled it. */
	if (ret >= 0)
		clk_disable_unprepare(cryp->clk);
}
2737 
2738 #ifdef CONFIG_PM
/* Runtime suspend: gate the peripheral clock while the device is idle. */
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}
2747 
2748 static int stm32_cryp_runtime_resume(struct device *dev)
2749 {
2750 	struct stm32_cryp *cryp = dev_get_drvdata(dev);
2751 	int ret;
2752 
2753 	ret = clk_prepare_enable(cryp->clk);
2754 	if (ret) {
2755 		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
2756 		return ret;
2757 	}
2758 
2759 	return 0;
2760 }
2761 #endif
2762 
/*
 * PM operations: system sleep is routed through the runtime-PM force
 * helpers, so suspend/resume reuse the clock gating callbacks above.
 */
static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};
2769 
/* Platform driver glue; module init/exit generated by the macro below. */
static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);
2781 
2782 MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
2783 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2784 MODULE_LICENSE("GPL");
2785