xref: /linux/drivers/crypto/caam/caamalg_desc.c (revision 56fb34d86e875dbb0d3e6a81c5d3d035db373031)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * Shared descriptors for aead, skcipher algorithms
4  *
5  * Copyright 2016-2019 NXP
6  */
7 
8 #include "compat.h"
9 #include "desc_constr.h"
10 #include "caamalg_desc.h"
11 
12 /*
13  * For aead functions, read the payload from req->src and write the
14  * payload to req->dst.
15  */
16 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
17 {
18 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
19 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
20 			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
21 }
22 
23 /* Set DK bit in class 1 operation if shared */
24 static inline void append_dec_op1(u32 *desc, u32 type)
25 {
26 	u32 *jump_cmd, *uncond_jump_cmd;
27 
28 	/* DK bit is valid only for AES */
29 	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
30 		append_operation(desc, type | OP_ALG_AS_INITFINAL |
31 				 OP_ALG_DECRYPT);
32 		return;
33 	}
34 
35 	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
36 	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
37 	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
38 	set_jump_tgt_here(desc, jump_cmd);
39 	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
40 			 OP_ALG_AAI_DK);
41 	set_jump_tgt_here(desc, uncond_jump_cmd);
42 }
43 
44 /**
45  * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
46  *                               (non-protocol) with no (null) encryption.
47  * @desc: pointer to buffer used for descriptor construction
48  * @adata: pointer to authentication transform definitions.
49  *         A split key is required for SEC Era < 6; the size of the split key
50  *         is specified in this case. Valid algorithm values - one of
51  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
52  *         with OP_ALG_AAI_HMAC_PRECOMP.
53  * @icvsize: integrity check value (ICV) size (truncated or full)
54  * @era: SEC Era
55  */
56 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
57 				 unsigned int icvsize, int era)
58 {
59 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
60 
61 	init_sh_desc(desc, HDR_SHARE_SERIAL);
62 
63 	/* Skip if already shared */
64 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
65 				   JUMP_COND_SHRD);
66 	if (era < 6) {
67 		if (adata->key_inline)
68 			append_key_as_imm(desc, adata->key_virt,
69 					  adata->keylen_pad, adata->keylen,
70 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
71 					  KEY_ENC);
72 		else
73 			append_key(desc, adata->key_dma, adata->keylen,
74 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
75 	} else {
76 		append_proto_dkp(desc, adata);
77 	}
78 	set_jump_tgt_here(desc, key_jump_cmd);
79 
80 	/* assoclen + cryptlen = seqinlen */
81 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
82 
83 	/* Prepare to read and write cryptlen + assoclen bytes */
84 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
85 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
86 
87 	/*
88 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
89 	 * thus need to do some magic, i.e. self-patch the descriptor
90 	 * buffer.
91 	 */
92 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
93 				    MOVE_DEST_MATH3 |
94 				    (0x6 << MOVE_LEN_SHIFT));
95 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
96 				     MOVE_DEST_DESCBUF |
97 				     MOVE_WAITCOMP |
98 				     (0x8 << MOVE_LEN_SHIFT));
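	/*
	 * The DESCBUF offsets of the two MOVE commands above are not known
	 * yet; they are patched further down via set_move_tgt_here(), once
	 * the location of the descriptor words they read/overwrite is known.
	 */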
99 
100 	/* Class 2 operation */
101 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
102 			 OP_ALG_ENCRYPT);
103 
104 	/* Read and write cryptlen bytes */
105 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
106 
107 	set_move_tgt_here(desc, read_move_cmd);
108 	set_move_tgt_here(desc, write_move_cmd);
109 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
110 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
111 		    MOVE_AUX_LS);
112 
113 	/* Write ICV */
114 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
115 			 LDST_SRCDST_BYTE_CONTEXT);
116 
117 	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
118 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
119 			     1);
120 }
121 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
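
/*
 * Usage sketch (hypothetical caller, not part of this file): a driver would
 * typically build the descriptor in a per-tfm buffer, e.g.
 *
 *	cnstr_shdsc_aead_null_encap(ctx->sh_desc_enc, &ctx->adata,
 *				    ctx->authsize, ctrlpriv->era);
 *
 * where ctx->sh_desc_enc, ctx->adata, ctx->authsize and ctrlpriv->era are
 * assumed names on the calling side.
 */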
122 
123 /**
124  * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
125  *                               (non-protocol) with no (null) decryption.
126  * @desc: pointer to buffer used for descriptor construction
127  * @adata: pointer to authentication transform definitions.
128  *         A split key is required for SEC Era < 6; the size of the split key
129  *         is specified in this case. Valid algorithm values - one of
130  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
131  *         with OP_ALG_AAI_HMAC_PRECOMP.
132  * @icvsize: integrity check value (ICV) size (truncated or full)
133  * @era: SEC Era
134  */
135 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
136 				 unsigned int icvsize, int era)
137 {
138 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
139 
140 	init_sh_desc(desc, HDR_SHARE_SERIAL);
141 
142 	/* Skip if already shared */
143 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
144 				   JUMP_COND_SHRD);
145 	if (era < 6) {
146 		if (adata->key_inline)
147 			append_key_as_imm(desc, adata->key_virt,
148 					  adata->keylen_pad, adata->keylen,
149 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
150 					  KEY_ENC);
151 		else
152 			append_key(desc, adata->key_dma, adata->keylen,
153 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
154 	} else {
155 		append_proto_dkp(desc, adata);
156 	}
157 	set_jump_tgt_here(desc, key_jump_cmd);
158 
159 	/* Class 2 operation */
160 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
161 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
162 
163 	/* assoclen + cryptlen = seqoutlen */
164 	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
165 
166 	/* Prepare to read and write cryptlen + assoclen bytes */
167 	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
168 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
169 
170 	/*
171 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
172 	 * thus need to do some magic, i.e. self-patch the descriptor
173 	 * buffer.
174 	 */
175 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
176 				    MOVE_DEST_MATH2 |
177 				    (0x6 << MOVE_LEN_SHIFT));
178 	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
179 				     MOVE_DEST_DESCBUF |
180 				     MOVE_WAITCOMP |
181 				     (0x8 << MOVE_LEN_SHIFT));
182 
183 	/* Read and write cryptlen bytes */
184 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
185 
186 	/*
187 	 * Insert a NOP here, since we need at least 4 instructions between
188 	 * code patching the descriptor buffer and the location being patched.
189 	 */
190 	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
191 	set_jump_tgt_here(desc, jump_cmd);
192 
193 	set_move_tgt_here(desc, read_move_cmd);
194 	set_move_tgt_here(desc, write_move_cmd);
195 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
196 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
197 		    MOVE_AUX_LS);
198 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
199 
200 	/* Load ICV */
201 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
202 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
203 
204 	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
205 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
206 			     1);
207 }
208 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
209 
210 static void init_sh_desc_key_aead(u32 * const desc,
211 				  struct alginfo * const cdata,
212 				  struct alginfo * const adata,
213 				  const bool is_rfc3686, u32 *nonce, int era)
214 {
215 	u32 *key_jump_cmd;
216 	unsigned int enckeylen = cdata->keylen;
217 
218 	/* Note: Context registers are saved. */
219 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
220 
221 	/* Skip if already shared */
222 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
223 				   JUMP_COND_SHRD);
224 
225 	/*
226 	 * RFC3686 specific:
227 	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
228 	 *	| enckeylen = encryption key size + nonce size
229 	 */
230 	if (is_rfc3686)
231 		enckeylen -= CTR_RFC3686_NONCE_SIZE;
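	/*
	 * Worked example (hypothetical values): for rfc3686(ctr(aes)) with a
	 * 16-byte AES key the cipher keymat is ENC_KEY(16) | NONCE(4), so
	 * cdata->keylen is 20 and enckeylen becomes 16 after subtracting
	 * CTR_RFC3686_NONCE_SIZE.
	 */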
232 
233 	if (era < 6) {
234 		if (adata->key_inline)
235 			append_key_as_imm(desc, adata->key_virt,
236 					  adata->keylen_pad, adata->keylen,
237 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
238 					  KEY_ENC);
239 		else
240 			append_key(desc, adata->key_dma, adata->keylen,
241 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
242 	} else {
243 		append_proto_dkp(desc, adata);
244 	}
245 
246 	if (cdata->key_inline)
247 		append_key_as_imm(desc, cdata->key_virt, enckeylen,
248 				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
249 	else
250 		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
251 			   KEY_DEST_CLASS_REG);
252 
253 	/* Load Counter into CONTEXT1 reg */
254 	if (is_rfc3686) {
255 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
256 				   LDST_CLASS_IND_CCB |
257 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
258 		append_move(desc,
259 			    MOVE_SRC_OUTFIFO |
260 			    MOVE_DEST_CLASS1CTX |
261 			    (16 << MOVE_OFFSET_SHIFT) |
262 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
263 	}
264 
265 	set_jump_tgt_here(desc, key_jump_cmd);
266 }
267 
268 /**
269  * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
270  *                          (non-protocol).
271  * @desc: pointer to buffer used for descriptor construction
272  * @cdata: pointer to block cipher transform definitions
273  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
274  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
275  * @adata: pointer to authentication transform definitions.
276  *         A split key is required for SEC Era < 6; the size of the split key
277  *         is specified in this case. Valid algorithm values - one of
278  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
279  *         with OP_ALG_AAI_HMAC_PRECOMP.
280  * @ivsize: initialization vector size
281  * @icvsize: integrity check value (ICV) size (truncated or full)
282  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
283  * @nonce: pointer to rfc3686 nonce
284  * @ctx1_iv_off: IV offset in CONTEXT1 register
285  * @is_qi: true when called from caam/qi
286  * @era: SEC Era
287  */
288 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
289 			    struct alginfo *adata, unsigned int ivsize,
290 			    unsigned int icvsize, const bool is_rfc3686,
291 			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
292 			    int era)
293 {
294 	/* Note: Context registers are saved. */
295 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
296 
297 	/* Class 2 operation */
298 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
299 			 OP_ALG_ENCRYPT);
300 
301 	if (is_qi) {
302 		u32 *wait_load_cmd;
303 
304 		/* REG3 = assoclen */
305 		append_seq_load(desc, 4, LDST_CLASS_DECO |
306 				LDST_SRCDST_WORD_DECO_MATH3 |
307 				(4 << LDST_OFFSET_SHIFT));
308 
309 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
310 					    JUMP_COND_CALM | JUMP_COND_NCP |
311 					    JUMP_COND_NOP | JUMP_COND_NIP |
312 					    JUMP_COND_NIFP);
313 		set_jump_tgt_here(desc, wait_load_cmd);
314 
315 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
316 				LDST_SRCDST_BYTE_CONTEXT |
317 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
318 	}
319 
320 	/* Read and write assoclen bytes */
321 	if (is_qi || era < 3) {
322 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
323 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
324 	} else {
325 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
326 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
327 	}
328 
329 	/* Skip assoc data */
330 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
331 
332 	/* read assoc before reading payload */
333 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
334 				      FIFOLDST_VLF);
335 
336 	/* Load Counter into CONTEXT1 reg */
337 	if (is_rfc3686)
338 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
339 				     LDST_SRCDST_BYTE_CONTEXT |
340 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
341 				      LDST_OFFSET_SHIFT));
342 
343 	/* Class 1 operation */
344 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
345 			 OP_ALG_ENCRYPT);
346 
347 	/* Read and write cryptlen bytes */
348 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
349 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
350 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
351 
352 	/* Write ICV */
353 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
354 			 LDST_SRCDST_BYTE_CONTEXT);
355 
356 	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
357 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
358 			     1);
359 }
360 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
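
/*
 * Usage sketch (assumed caller-side names, not part of this file): for
 * authenc(hmac(sha256),cbc(aes)) on the job-ring backend this would look
 * roughly like
 *
 *	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
 *			       ivsize, ctx->authsize, false, NULL, 0, false,
 *			       ctrlpriv->era);
 *
 * i.e. is_rfc3686 = false, nonce = NULL, ctx1_iv_off = 0 and is_qi = false.
 */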
361 
362 /**
363  * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
364  *                          (non-protocol).
365  * @desc: pointer to buffer used for descriptor construction
366  * @cdata: pointer to block cipher transform definitions
367  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
368  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
369  * @adata: pointer to authentication transform definitions.
370  *         A split key is required for SEC Era < 6; the size of the split key
371  *         is specified in this case. Valid algorithm values - one of
372  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
373  *         with OP_ALG_AAI_HMAC_PRECOMP.
374  * @ivsize: initialization vector size
375  * @icvsize: integrity check value (ICV) size (truncated or full)
376  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
377  * @nonce: pointer to rfc3686 nonce
378  * @ctx1_iv_off: IV offset in CONTEXT1 register
379  * @is_qi: true when called from caam/qi
380  * @era: SEC Era
381  */
382 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
383 			    struct alginfo *adata, unsigned int ivsize,
384 			    unsigned int icvsize, const bool geniv,
385 			    const bool is_rfc3686, u32 *nonce,
386 			    const u32 ctx1_iv_off, const bool is_qi, int era)
387 {
388 	/* Note: Context registers are saved. */
389 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
390 
391 	/* Class 2 operation */
392 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
393 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
394 
395 	if (is_qi) {
396 		u32 *wait_load_cmd;
397 
398 		/* REG3 = assoclen */
399 		append_seq_load(desc, 4, LDST_CLASS_DECO |
400 				LDST_SRCDST_WORD_DECO_MATH3 |
401 				(4 << LDST_OFFSET_SHIFT));
402 
403 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
404 					    JUMP_COND_CALM | JUMP_COND_NCP |
405 					    JUMP_COND_NOP | JUMP_COND_NIP |
406 					    JUMP_COND_NIFP);
407 		set_jump_tgt_here(desc, wait_load_cmd);
408 
409 		if (!geniv)
410 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
411 					LDST_SRCDST_BYTE_CONTEXT |
412 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
413 	}
414 
415 	/* Read and write assoclen bytes */
416 	if (is_qi || era < 3) {
417 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
418 		if (geniv)
419 			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
420 						ivsize);
421 		else
422 			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
423 					CAAM_CMD_SZ);
424 	} else {
425 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
426 		if (geniv)
427 			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
428 						ivsize);
429 		else
430 			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
431 					CAAM_CMD_SZ);
432 	}
433 
434 	/* Skip assoc data */
435 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
436 
437 	/* read assoc before reading payload */
438 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
439 			     KEY_VLF);
440 
441 	if (geniv) {
442 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
443 				LDST_SRCDST_BYTE_CONTEXT |
444 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
445 		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
446 			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
447 	}
448 
449 	/* Load Counter into CONTEXT1 reg */
450 	if (is_rfc3686)
451 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
452 				     LDST_SRCDST_BYTE_CONTEXT |
453 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
454 				      LDST_OFFSET_SHIFT));
455 
456 	/* Choose operation */
457 	if (ctx1_iv_off)
458 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
459 				 OP_ALG_DECRYPT);
460 	else
461 		append_dec_op1(desc, cdata->algtype);
462 
463 	/* Read and write cryptlen bytes */
464 	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
465 	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
466 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
467 
468 	/* Load ICV */
469 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
470 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
471 
472 	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
473 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
474 			     1);
475 }
476 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
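
/*
 * Note on ctx1_iv_off (reflecting how the caam drivers are expected to use
 * these helpers): for CBC modes the IV sits at the start of CONTEXT1, so
 * ctx1_iv_off is 0; for CTR-based modes the counter block starts at byte 16
 * of CONTEXT1, and for rfc3686 the 4-byte nonce occupies bytes 16-19, so the
 * IV is loaded at offset 16 + CTR_RFC3686_NONCE_SIZE.
 */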
477 
478 /**
479  * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
480  *                             (non-protocol) with HW-generated initialization
481  *                             vector.
482  * @desc: pointer to buffer used for descriptor construction
483  * @cdata: pointer to block cipher transform definitions
484  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
485  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
486  * @adata: pointer to authentication transform definitions.
487  *         A split key is required for SEC Era < 6; the size of the split key
488  *         is specified in this case. Valid algorithm values - one of
489  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
490  *         with OP_ALG_AAI_HMAC_PRECOMP.
491  * @ivsize: initialization vector size
492  * @icvsize: integrity check value (ICV) size (truncated or full)
493  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
494  * @nonce: pointer to rfc3686 nonce
495  * @ctx1_iv_off: IV offset in CONTEXT1 register
496  * @is_qi: true when called from caam/qi
497  * @era: SEC Era
498  */
499 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
500 			       struct alginfo *adata, unsigned int ivsize,
501 			       unsigned int icvsize, const bool is_rfc3686,
502 			       u32 *nonce, const u32 ctx1_iv_off,
503 			       const bool is_qi, int era)
504 {
505 	u32 geniv, moveiv;
506 	u32 *wait_cmd;
507 
508 	/* Note: Context registers are saved. */
509 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
510 
511 	if (is_qi) {
512 		u32 *wait_load_cmd;
513 
514 		/* REG3 = assoclen */
515 		append_seq_load(desc, 4, LDST_CLASS_DECO |
516 				LDST_SRCDST_WORD_DECO_MATH3 |
517 				(4 << LDST_OFFSET_SHIFT));
518 
519 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
520 					    JUMP_COND_CALM | JUMP_COND_NCP |
521 					    JUMP_COND_NOP | JUMP_COND_NIP |
522 					    JUMP_COND_NIFP);
523 		set_jump_tgt_here(desc, wait_load_cmd);
524 	}
525 
526 	if (is_rfc3686) {
527 		if (is_qi)
528 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
529 					LDST_SRCDST_BYTE_CONTEXT |
530 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
531 
532 		goto copy_iv;
533 	}
534 
535 	/* Generate IV */
536 	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
537 		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
538 		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
539 	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
540 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
541 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
542 	append_move(desc, MOVE_WAITCOMP |
543 		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
544 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
545 		    (ivsize << MOVE_LEN_SHIFT));
546 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
547 
548 copy_iv:
549 	/* Copy IV from class 1 context to OFIFO */
550 	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
551 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
552 		    (ivsize << MOVE_LEN_SHIFT));
553 
554 	/* Return to encryption */
555 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
556 			 OP_ALG_ENCRYPT);
557 
558 	/* Read and write assoclen bytes */
559 	if (is_qi || era < 3) {
560 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
561 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
562 	} else {
563 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
564 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
565 	}
566 
567 	/* Skip assoc data */
568 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
569 
570 	/* read assoc before reading payload */
571 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
572 			     KEY_VLF);
573 
574 	/* Copy iv from outfifo to class 2 fifo */
575 	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
576 		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
577 	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
578 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
579 	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
580 			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
581 
582 	/* Load Counter into CONTEXT1 reg */
583 	if (is_rfc3686)
584 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
585 				     LDST_SRCDST_BYTE_CONTEXT |
586 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
587 				      LDST_OFFSET_SHIFT));
588 
589 	/* Class 1 operation */
590 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
591 			 OP_ALG_ENCRYPT);
592 
593 	/* Will write ivsize + cryptlen */
594 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
595 
596 	/* No need to reload the IV */
597 	append_seq_fifo_load(desc, ivsize,
598 			     FIFOLD_CLASS_SKIP);
599 
600 	/* Will read cryptlen */
601 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
602 
603 	/*
604 	 * Wait for IV transfer (ofifo -> class2) to finish before starting
605 	 * ciphertext transfer (ofifo -> external memory).
606 	 */
607 	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
608 	set_jump_tgt_here(desc, wait_cmd);
609 
610 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
611 			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
612 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
613 
614 	/* Write ICV */
615 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
616 			 LDST_SRCDST_BYTE_CONTEXT);
617 
618 	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
619 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
620 			     1);
621 }
622 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
623 
624 /**
625  * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
626  * @desc: pointer to buffer used for descriptor construction
627  * @cdata: pointer to block cipher transform definitions
628  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
629  * @ivsize: initialization vector size
630  * @icvsize: integrity check value (ICV) size (truncated or full)
631  * @is_qi: true when called from caam/qi
632  */
633 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
634 			   unsigned int ivsize, unsigned int icvsize,
635 			   const bool is_qi)
636 {
637 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
638 	    *zero_assoc_jump_cmd2;
639 
640 	init_sh_desc(desc, HDR_SHARE_SERIAL);
641 
642 	/* skip key loading if the key is loaded due to sharing */
643 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
644 				   JUMP_COND_SHRD);
645 	if (cdata->key_inline)
646 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
647 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
648 	else
649 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
650 			   KEY_DEST_CLASS_REG);
651 	set_jump_tgt_here(desc, key_jump_cmd);
652 
653 	/* class 1 operation */
654 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
655 			 OP_ALG_ENCRYPT);
656 
657 	if (is_qi) {
658 		u32 *wait_load_cmd;
659 
660 		/* REG3 = assoclen */
661 		append_seq_load(desc, 4, LDST_CLASS_DECO |
662 				LDST_SRCDST_WORD_DECO_MATH3 |
663 				(4 << LDST_OFFSET_SHIFT));
664 
665 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
666 					    JUMP_COND_CALM | JUMP_COND_NCP |
667 					    JUMP_COND_NOP | JUMP_COND_NIP |
668 					    JUMP_COND_NIFP);
669 		set_jump_tgt_here(desc, wait_load_cmd);
670 
671 		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
672 					ivsize);
673 	} else {
674 		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
675 				CAAM_CMD_SZ);
676 	}
677 
678 	/* if assoclen + cryptlen is ZERO, skip to ICV write */
679 	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
680 						 JUMP_COND_MATH_Z);
681 
682 	if (is_qi)
683 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
684 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
685 
686 	/* if assoclen is ZERO, skip reading the assoc data */
687 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
688 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
689 					   JUMP_COND_MATH_Z);
690 
691 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
692 
693 	/* skip assoc data */
694 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
695 
696 	/* cryptlen = seqinlen - assoclen */
697 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
698 
699 	/* if cryptlen is ZERO jump to zero-payload commands */
700 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
701 					    JUMP_COND_MATH_Z);
702 
703 	/* read assoc data */
704 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
705 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
706 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
707 
708 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
709 
710 	/* write encrypted data */
711 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
712 
713 	/* read payload data */
714 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
715 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
716 
717 	/* jump to ICV writing */
718 	if (is_qi)
719 		append_jump(desc, JUMP_TEST_ALL | 4);
720 	else
721 		append_jump(desc, JUMP_TEST_ALL | 2);
722 
723 	/* zero-payload commands */
724 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
725 
726 	/* read assoc data */
727 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
728 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
729 	if (is_qi)
730 		/* jump to ICV writing */
731 		append_jump(desc, JUMP_TEST_ALL | 2);
732 
733 	/* There is no input data */
734 	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
735 
736 	if (is_qi)
737 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
738 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
739 				     FIFOLD_TYPE_LAST1);
740 
741 	/* write ICV */
742 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
743 			 LDST_SRCDST_BYTE_CONTEXT);
744 
745 	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
746 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
747 			     1);
748 }
749 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
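
/*
 * Usage sketch (hypothetical values): for gcm(aes) the IV is the standard
 * 12-byte GCM nonce and the ICV size is the requested authsize, e.g.
 *
 *	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, GCM_AES_IV_SIZE,
 *			      ctx->authsize, false);
 *
 * with ctx->sh_desc_enc, ctx->cdata and ctx->authsize being assumed
 * caller-side names.
 */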
750 
751 /**
752  * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
753  * @desc: pointer to buffer used for descriptor construction
754  * @cdata: pointer to block cipher transform definitions
755  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
756  * @ivsize: initialization vector size
757  * @icvsize: integrity check value (ICV) size (truncated or full)
758  * @is_qi: true when called from caam/qi
759  */
760 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
761 			   unsigned int ivsize, unsigned int icvsize,
762 			   const bool is_qi)
763 {
764 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
765 
766 	init_sh_desc(desc, HDR_SHARE_SERIAL);
767 
768 	/* skip key loading if the key is loaded due to sharing */
769 	key_jump_cmd = append_jump(desc, JUMP_JSL |
770 				   JUMP_TEST_ALL | JUMP_COND_SHRD);
771 	if (cdata->key_inline)
772 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
773 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
774 	else
775 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
776 			   KEY_DEST_CLASS_REG);
777 	set_jump_tgt_here(desc, key_jump_cmd);
778 
779 	/* class 1 operation */
780 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
781 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
782 
783 	if (is_qi) {
784 		u32 *wait_load_cmd;
785 
786 		/* REG3 = assoclen */
787 		append_seq_load(desc, 4, LDST_CLASS_DECO |
788 				LDST_SRCDST_WORD_DECO_MATH3 |
789 				(4 << LDST_OFFSET_SHIFT));
790 
791 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
792 					    JUMP_COND_CALM | JUMP_COND_NCP |
793 					    JUMP_COND_NOP | JUMP_COND_NIP |
794 					    JUMP_COND_NIFP);
795 		set_jump_tgt_here(desc, wait_load_cmd);
796 
797 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
798 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
799 	}
800 
801 	/* if assoclen is ZERO, skip reading the assoc data */
802 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
803 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
804 						 JUMP_COND_MATH_Z);
805 
806 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
807 
808 	/* skip assoc data */
809 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
810 
811 	/* read assoc data */
812 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
813 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
814 
815 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
816 
817 	/* cryptlen = seqoutlen - assoclen */
818 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
819 
820 	/* jump to zero-payload command if cryptlen is zero */
821 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
822 					    JUMP_COND_MATH_Z);
823 
824 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
825 
826 	/* store encrypted data */
827 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
828 
829 	/* read payload data */
830 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
831 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
832 
833 	/* zero-payload command */
834 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
835 
836 	/* read ICV */
837 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
838 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
839 
840 	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
841 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
842 			     1);
843 }
844 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
845 
846 /**
847  * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
848  *                             (non-protocol).
849  * @desc: pointer to buffer used for descriptor construction
850  * @cdata: pointer to block cipher transform definitions
851  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
852  * @ivsize: initialization vector size
853  * @icvsize: integrity check value (ICV) size (truncated or full)
854  * @is_qi: true when called from caam/qi
855  *
856  * Input sequence: AAD | PTXT
857  * Output sequence: AAD | CTXT | ICV
858  * AAD length (assoclen), which includes the IV length, is available in Math3.
859  */
860 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
861 			       unsigned int ivsize, unsigned int icvsize,
862 			       const bool is_qi)
863 {
864 	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;
865 	init_sh_desc(desc, HDR_SHARE_SERIAL);
866 
867 	/* Skip key loading if it is loaded due to sharing */
868 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
869 				   JUMP_COND_SHRD);
870 	if (cdata->key_inline)
871 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
872 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
873 	else
874 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
875 			   KEY_DEST_CLASS_REG);
876 	set_jump_tgt_here(desc, key_jump_cmd);
877 
878 	/* Class 1 operation */
879 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
880 			 OP_ALG_ENCRYPT);
881 
882 	if (is_qi) {
883 		u32 *wait_load_cmd;
884 
885 		/* REG3 = assoclen */
886 		append_seq_load(desc, 4, LDST_CLASS_DECO |
887 				LDST_SRCDST_WORD_DECO_MATH3 |
888 				(4 << LDST_OFFSET_SHIFT));
889 
890 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
891 					    JUMP_COND_CALM | JUMP_COND_NCP |
892 					    JUMP_COND_NOP | JUMP_COND_NIP |
893 					    JUMP_COND_NIFP);
894 		set_jump_tgt_here(desc, wait_load_cmd);
895 
896 		/* Read salt and IV */
897 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
898 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
899 					FIFOLD_TYPE_IV);
900 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
901 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
902 	}
903 
904 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
905 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
906 
907 	/* Skip AAD */
908 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
909 
910 	/* Read cryptlen and set this value into VARSEQOUTLEN */
911 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
912 
913 	/* If cryptlen is ZERO jump to AAD command */
914 	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
915 					    JUMP_COND_MATH_Z);
916 
917 	/* Read AAD data */
918 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
919 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
920 
921 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
922 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);
923 
924 	/* Skip IV */
925 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
926 	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);
927 
928 	/* Write encrypted data */
929 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
930 
931 	/* Read payload data */
932 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
933 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
934 
935 	/* Jump instructions to avoid double reading of AAD */
936 	skip_instructions = append_jump(desc, JUMP_TEST_ALL);
937 
938 	/* There is no input data, cryptlen = 0 */
939 	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);
940 
941 	/* Read AAD */
942 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
943 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
944 
945 	set_jump_tgt_here(desc, skip_instructions);
946 
947 	/* Write ICV */
948 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
949 			 LDST_SRCDST_BYTE_CONTEXT);
950 
951 	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
952 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
953 			     1);
954 }
955 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
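
/*
 * Note: for rfc4106(gcm(aes)) the key material handed to the driver is the
 * AES key followed by a 4-byte salt; the QI path above relies on this layout
 * when it loads the salt from cdata->key_virt + cdata->keylen before reading
 * the per-request IV from the input sequence.
 */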
956 
957 /**
958  * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
959  *                             (non-protocol).
960  * @desc: pointer to buffer used for descriptor construction
961  * @cdata: pointer to block cipher transform definitions
962  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
963  * @ivsize: initialization vector size
964  * @icvsize: integrity check value (ICV) size (truncated or full)
965  * @is_qi: true when called from caam/qi
966  */
967 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
968 			       unsigned int ivsize, unsigned int icvsize,
969 			       const bool is_qi)
970 {
971 	u32 *key_jump_cmd;
972 
973 	init_sh_desc(desc, HDR_SHARE_SERIAL);
974 
975 	/* Skip key loading if it is loaded due to sharing */
976 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
977 				   JUMP_COND_SHRD);
978 	if (cdata->key_inline)
979 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
980 				  cdata->keylen, CLASS_1 |
981 				  KEY_DEST_CLASS_REG);
982 	else
983 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
984 			   KEY_DEST_CLASS_REG);
985 	set_jump_tgt_here(desc, key_jump_cmd);
986 
987 	/* Class 1 operation */
988 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
989 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
990 
991 	if (is_qi) {
992 		u32 *wait_load_cmd;
993 
994 		/* REG3 = assoclen */
995 		append_seq_load(desc, 4, LDST_CLASS_DECO |
996 				LDST_SRCDST_WORD_DECO_MATH3 |
997 				(4 << LDST_OFFSET_SHIFT));
998 
999 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1000 					    JUMP_COND_CALM | JUMP_COND_NCP |
1001 					    JUMP_COND_NOP | JUMP_COND_NIP |
1002 					    JUMP_COND_NIFP);
1003 		set_jump_tgt_here(desc, wait_load_cmd);
1004 
1005 		/* Read salt and IV */
1006 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1007 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1008 					FIFOLD_TYPE_IV);
1009 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1010 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1011 	}
1012 
1013 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
1014 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1015 
1016 	/* Read assoc data */
1017 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1018 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1019 
1020 	/* Skip IV */
1021 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
1022 
1023 	/* Will read cryptlen bytes */
1024 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1025 
1026 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1027 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1028 
1029 	/* Skip assoc data */
1030 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1031 
1032 	/* Will write cryptlen bytes */
1033 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1034 
1035 	/* Store payload data */
1036 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1037 
1038 	/* Read encrypted data */
1039 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1040 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1041 
1042 	/* Read ICV */
1043 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1044 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1045 
1046 	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
1047 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1048 			     1);
1049 }
1050 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
1051 
1052 /**
1053  * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
1054  *                             (non-protocol).
1055  * @desc: pointer to buffer used for descriptor construction
1056  * @cdata: pointer to block cipher transform definitions
1057  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1058  * @ivsize: initialization vector size
1059  * @icvsize: integrity check value (ICV) size (truncated or full)
1060  * @is_qi: true when called from caam/qi
1061  */
1062 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
1063 			       unsigned int ivsize, unsigned int icvsize,
1064 			       const bool is_qi)
1065 {
1066 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1067 
1068 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1069 
1070 	/* Skip key loading if it is loaded due to sharing */
1071 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1072 				   JUMP_COND_SHRD);
1073 	if (cdata->key_inline)
1074 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1075 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1076 	else
1077 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1078 			   KEY_DEST_CLASS_REG);
1079 	set_jump_tgt_here(desc, key_jump_cmd);
1080 
1081 	/* Class 1 operation */
1082 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1083 			 OP_ALG_ENCRYPT);
1084 
1085 	if (is_qi) {
1086 		/* assoclen is not needed, skip it */
1087 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1088 
1089 		/* Read salt and IV */
1090 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1091 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1092 					FIFOLD_TYPE_IV);
1093 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1094 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1095 	}
1096 
1097 	/* assoclen + cryptlen = seqinlen */
1098 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1099 
1100 	/*
1101 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
1102 	 * thus need to do some magic, i.e. self-patch the descriptor
1103 	 * buffer.
1104 	 */
1105 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1106 				    (0x6 << MOVE_LEN_SHIFT));
1107 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1108 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1109 
1110 	/* Will read assoclen + cryptlen bytes */
1111 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1112 
1113 	/* Will write assoclen + cryptlen bytes */
1114 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1115 
1116 	/* Read and write assoclen + cryptlen bytes */
1117 	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1118 
1119 	set_move_tgt_here(desc, read_move_cmd);
1120 	set_move_tgt_here(desc, write_move_cmd);
1121 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1122 	/* Move payload data to OFIFO */
1123 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1124 
1125 	/* Write ICV */
1126 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
1127 			 LDST_SRCDST_BYTE_CONTEXT);
1128 
1129 	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
1130 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1131 			     1);
1132 }
1133 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
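
/*
 * Note: rfc4543 (GMAC) authenticates but does not encrypt the payload. This
 * is why the descriptor above feeds the whole assoclen + cryptlen input to
 * the AES engine as AAD and simply moves the payload from the input FIFO to
 * the output FIFO.
 */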
1134 
1135 /**
1136  * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
1137  *                             (non-protocol).
1138  * @desc: pointer to buffer used for descriptor construction
1139  * @cdata: pointer to block cipher transform definitions
1140  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1141  * @ivsize: initialization vector size
1142  * @icvsize: integrity check value (ICV) size (truncated or full)
1143  * @is_qi: true when called from caam/qi
1144  */
1145 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
1146 			       unsigned int ivsize, unsigned int icvsize,
1147 			       const bool is_qi)
1148 {
1149 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1150 
1151 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1152 
1153 	/* Skip key loading if it is loaded due to sharing */
1154 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1155 				   JUMP_COND_SHRD);
1156 	if (cdata->key_inline)
1157 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1158 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1159 	else
1160 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1161 			   KEY_DEST_CLASS_REG);
1162 	set_jump_tgt_here(desc, key_jump_cmd);
1163 
1164 	/* Class 1 operation */
1165 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1166 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1167 
1168 	if (is_qi) {
1169 		/* assoclen is not needed, skip it */
1170 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1171 
1172 		/* Read salt and IV */
1173 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1174 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1175 					FIFOLD_TYPE_IV);
1176 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1177 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1178 	}
1179 
1180 	/* assoclen + cryptlen = seqoutlen */
1181 	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1182 
1183 	/*
1184 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
1185 	 * thus need to do some magic, i.e. self-patch the descriptor
1186 	 * buffer.
1187 	 */
1188 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1189 				    (0x6 << MOVE_LEN_SHIFT));
1190 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1191 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1192 
1193 	/* Will read assoclen + cryptlen bytes */
1194 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1195 
1196 	/* Will write assoclen + cryptlen bytes */
1197 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1198 
1199 	/* Store payload data */
1200 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1201 
1202 	/* In-snoop assoclen + cryptlen data */
1203 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1204 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1205 
1206 	set_move_tgt_here(desc, read_move_cmd);
1207 	set_move_tgt_here(desc, write_move_cmd);
1208 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1209 	/* Move payload data to OFIFO */
1210 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1211 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1212 
1213 	/* Read ICV */
1214 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1215 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1216 
1217 	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1218 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1219 			     1);
1220 }
1221 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1222 
1223 /**
1224  * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
1225  *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
1226  *                          descriptor (non-protocol).
1227  * @desc: pointer to buffer used for descriptor construction
1228  * @cdata: pointer to block cipher transform definitions
1229  *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
1230  *         OP_ALG_AAI_AEAD.
1231  * @adata: pointer to authentication transform definitions
1232  *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
1233  *         OP_ALG_AAI_AEAD.
1234  * @ivsize: initialization vector size
1235  * @icvsize: integrity check value (ICV) size (truncated or full)
1236  * @encap: true if encapsulation, false if decapsulation
1237  * @is_qi: true when called from caam/qi
1238  */
1239 void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
1240 			    struct alginfo *adata, unsigned int ivsize,
1241 			    unsigned int icvsize, const bool encap,
1242 			    const bool is_qi)
1243 {
1244 	u32 *key_jump_cmd, *wait_cmd;
1245 	u32 nfifo;
1246 	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
1247 
1248 	/* Note: Context registers are saved. */
1249 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1250 
1251 	/* skip key loading if the key is loaded due to sharing */
1252 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1253 				   JUMP_COND_SHRD);
1254 
1255 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
1256 			  CLASS_1 | KEY_DEST_CLASS_REG);
1257 
1258 	/* For IPsec load the salt from keymat in the context register */
1259 	if (is_ipsec)
1260 		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
1261 				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
1262 				   4 << LDST_OFFSET_SHIFT);
1263 
1264 	set_jump_tgt_here(desc, key_jump_cmd);
1265 
1266 	/* Class 2 and 1 operations: Poly & ChaCha */
1267 	if (encap) {
1268 		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1269 				 OP_ALG_ENCRYPT);
1270 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1271 				 OP_ALG_ENCRYPT);
1272 	} else {
1273 		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1274 				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1275 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1276 				 OP_ALG_DECRYPT);
1277 	}
1278 
1279 	if (is_qi) {
1280 		u32 *wait_load_cmd;
1281 		u32 ctx1_iv_off = is_ipsec ? 8 : 4;
1282 
1283 		/* REG3 = assoclen */
1284 		append_seq_load(desc, 4, LDST_CLASS_DECO |
1285 				LDST_SRCDST_WORD_DECO_MATH3 |
1286 				4 << LDST_OFFSET_SHIFT);
1287 
1288 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1289 					    JUMP_COND_CALM | JUMP_COND_NCP |
1290 					    JUMP_COND_NOP | JUMP_COND_NIP |
1291 					    JUMP_COND_NIFP);
1292 		set_jump_tgt_here(desc, wait_load_cmd);
1293 
1294 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
1295 				LDST_SRCDST_BYTE_CONTEXT |
1296 				ctx1_iv_off << LDST_OFFSET_SHIFT);
1297 	}
1298 
1299 	/*
1300 	 * MAGIC with NFIFO
1301 	 * Read the associated data from the input and send it to the class1
1302 	 * and class2 alignment blocks. From class1, send the data to the
1303 	 * output FIFO and write it to memory, since the AAD is not encrypted.
1304 	 */
1305 	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
1306 		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
1307 	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
1308 			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
1309 
1310 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
1311 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1312 	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
1313 			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
1314 	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
1315 			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
1316 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
1317 
1318 	/* IPsec - copy the IV to the output */
1319 	if (is_ipsec)
1320 		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
1321 				      0x2 << 25);
1322 
1323 	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
1324 			       JUMP_COND_NOP | JUMP_TEST_ALL);
1325 	set_jump_tgt_here(desc, wait_cmd);
1326 
1327 	if (encap) {
1328 		/* Read and write cryptlen bytes */
1329 		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1330 		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
1331 				CAAM_CMD_SZ);
1332 		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
1333 
1334 		/* Write ICV */
1335 		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
1336 				 LDST_SRCDST_BYTE_CONTEXT);
1337 	} else {
1338 		/* Read and write cryptlen bytes */
1339 		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
1340 				CAAM_CMD_SZ);
1341 		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
1342 				CAAM_CMD_SZ);
1343 		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
1344 
1345 		/* Load ICV for verification */
1346 		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
1347 				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
1348 	}
1349 
1350 	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
1351 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1352 			     1);
1353 }
1354 EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
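
/*
 * Usage note (assumed caller behaviour): for rfc7539(chacha20,poly1305) the
 * per-request IV is the full 12-byte nonce (ivsize == CHACHAPOLY_IV_SIZE),
 * while for rfc7539esp the IV is 8 bytes and the remaining 4-byte salt is
 * appended to the key material, which is how is_ipsec is inferred above from
 * ivsize.
 */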
1355 
1356 /* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
1357 static inline void skcipher_append_src_dst(u32 *desc)
1358 {
1359 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1360 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1361 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1362 			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1363 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1364 }
1365 
1366 /**
1367  * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
1368  * @desc: pointer to buffer used for descriptor construction
1369  * @cdata: pointer to block cipher transform definitions
1370  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1371  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1372  *                                - OP_ALG_ALGSEL_CHACHA20
1373  * @ivsize: initialization vector size
1374  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1375  * @ctx1_iv_off: IV offset in CONTEXT1 register
1376  */
1377 void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
1378 				unsigned int ivsize, const bool is_rfc3686,
1379 				const u32 ctx1_iv_off)
1380 {
1381 	u32 *key_jump_cmd;
1382 
1383 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1384 	/* Skip if already shared */
1385 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1386 				   JUMP_COND_SHRD);
1387 
1388 	/* Load class1 key only */
1389 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1390 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1391 
1392 	/* Load nonce into CONTEXT1 reg */
1393 	if (is_rfc3686) {
1394 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1395 
1396 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1397 				   LDST_CLASS_IND_CCB |
1398 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1399 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1400 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1401 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1402 	}
1403 
1404 	set_jump_tgt_here(desc, key_jump_cmd);
1405 
1406 	/* Load IV, if there is one */
1407 	if (ivsize)
1408 		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1409 				LDST_CLASS_1_CCB | (ctx1_iv_off <<
1410 				LDST_OFFSET_SHIFT));
1411 
1412 	/* Load counter into CONTEXT1 reg */
1413 	if (is_rfc3686)
1414 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1415 				     LDST_SRCDST_BYTE_CONTEXT |
1416 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1417 				      LDST_OFFSET_SHIFT));
1418 
1419 	/* Load operation */
1420 	append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
1421 			 OP_ALG_ENCRYPT);
1422 
1423 	/* Perform operation */
1424 	skcipher_append_src_dst(desc);
1425 
1426 	/* Store IV */
1427 	if (ivsize)
1428 		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1429 				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1430 				 LDST_OFFSET_SHIFT));
1431 
1432 	print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
1433 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1434 			     1);
1435 }
1436 EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
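
/*
 * Usage sketch (hypothetical caller-side values): for cbc(aes) this would
 * typically be
 *
 *	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata,
 *				   AES_BLOCK_SIZE, false, 0);
 *
 * whereas rfc3686(ctr(aes)) passes is_rfc3686 = true and an IV offset of
 * 16 + CTR_RFC3686_NONCE_SIZE, so that the counter block sits after the
 * nonce in CONTEXT1.
 */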
1437 
1438 /**
1439  * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
1440  * @desc: pointer to buffer used for descriptor construction
1441  * @cdata: pointer to block cipher transform definitions
1442  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1443  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1444  *                                - OP_ALG_ALGSEL_CHACHA20
1445  * @ivsize: initialization vector size
1446  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1447  * @ctx1_iv_off: IV offset in CONTEXT1 register
1448  */
1449 void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
1450 				unsigned int ivsize, const bool is_rfc3686,
1451 				const u32 ctx1_iv_off)
1452 {
1453 	u32 *key_jump_cmd;
1454 
1455 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1456 	/* Skip if already shared */
1457 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1458 				   JUMP_COND_SHRD);
1459 
1460 	/* Load class1 key only */
1461 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1462 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1463 
1464 	/* Load nonce into CONTEXT1 reg */
1465 	if (is_rfc3686) {
1466 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1467 
1468 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1469 				   LDST_CLASS_IND_CCB |
1470 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1471 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1472 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1473 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1474 	}
1475 
1476 	set_jump_tgt_here(desc, key_jump_cmd);
1477 
1478 	/* Load IV, if there is one */
1479 	if (ivsize)
1480 		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1481 				LDST_CLASS_1_CCB | (ctx1_iv_off <<
1482 				LDST_OFFSET_SHIFT));
1483 
1484 	/* Load counter into CONTEXT1 reg */
1485 	if (is_rfc3686)
1486 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1487 				     LDST_SRCDST_BYTE_CONTEXT |
1488 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1489 				      LDST_OFFSET_SHIFT));
1490 
1491 	/* Choose operation */
1492 	if (ctx1_iv_off)
1493 		append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
1494 				 OP_ALG_DECRYPT);
1495 	else
1496 		append_dec_op1(desc, cdata->algtype);
1497 
1498 	/* Perform operation */
1499 	skcipher_append_src_dst(desc);
1500 
1501 	/* Store IV */
1502 	if (ivsize)
1503 		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1504 				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1505 				 LDST_OFFSET_SHIFT));
1506 
1507 	print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
1508 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1509 			     1);
1510 }
1511 EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1512 
1513 /**
1514  * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
1515  * @desc: pointer to buffer used for descriptor construction
1516  * @cdata: pointer to block cipher transform definitions
1517  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1518  */
1519 void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
1520 {
1521 	__be64 sector_size = cpu_to_be64(512);
1522 	u32 *key_jump_cmd;
1523 
1524 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1525 	/* Skip if already shared */
1526 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1527 				   JUMP_COND_SHRD);
1528 
1529 	/* Load class1 keys only */
1530 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1531 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1532 
1533 	/* Load sector size with index 40 bytes (0x28) */
1534 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1535 			   LDST_SRCDST_BYTE_CONTEXT |
1536 			   (0x28 << LDST_OFFSET_SHIFT));
1537 
1538 	set_jump_tgt_here(desc, key_jump_cmd);
1539 
1540 	/*
1541 	 * create sequence for loading the sector index
1542 	 * Upper 8B of IV - will be used as sector index
1543 	 * Lower 8B of IV - will be discarded
1544 	 */
1545 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1546 			(0x20 << LDST_OFFSET_SHIFT));
1547 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1548 
1549 	/* Load operation */
1550 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1551 			 OP_ALG_ENCRYPT);
1552 
1553 	/* Perform operation */
1554 	skcipher_append_src_dst(desc);
1555 
1556 	/* Store upper 8B of IV */
1557 	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1558 			 (0x20 << LDST_OFFSET_SHIFT));
1559 
1560 	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
1561 			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
1562 			     desc, desc_bytes(desc), 1);
1563 }
1564 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
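
/*
 * Note: the XTS descriptors in this file assume 512-byte sectors; the sector
 * size is loaded at CONTEXT1 offset 0x28 and the sector index is taken from
 * the upper 8 bytes of the 16-byte IV and loaded at offset 0x20, while the
 * lower 8 bytes of the IV are skipped.
 */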
1565 
1566 /**
1567  * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
1568  * @desc: pointer to buffer used for descriptor construction
1569  * @cdata: pointer to block cipher transform definitions
1570  *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1571  */
1572 void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
1573 {
1574 	__be64 sector_size = cpu_to_be64(512);
1575 	u32 *key_jump_cmd;
1576 
1577 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1578 	/* Skip if already shared */
1579 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1580 				   JUMP_COND_SHRD);
1581 
1582 	/* Load class1 key only */
1583 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1584 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1585 
1586 	/* Load sector size with index 40 bytes (0x28) */
1587 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1588 			   LDST_SRCDST_BYTE_CONTEXT |
1589 			   (0x28 << LDST_OFFSET_SHIFT));
1590 
1591 	set_jump_tgt_here(desc, key_jump_cmd);
1592 
1593 	/*
1594 	 * create sequence for loading the sector index
1595 	 * Upper 8B of IV - will be used as sector index
1596 	 * Lower 8B of IV - will be discarded
1597 	 */
1598 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1599 			(0x20 << LDST_OFFSET_SHIFT));
1600 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1601 
1602 	/* Load operation */
1603 	append_dec_op1(desc, cdata->algtype);
1604 
1605 	/* Perform operation */
1606 	skcipher_append_src_dst(desc);
1607 
1608 	/* Store upper 8B of IV */
1609 	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1610 			 (0x20 << LDST_OFFSET_SHIFT));
1611 
1612 	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
1613 			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
1614 			     desc_bytes(desc), 1);
1615 }
1616 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
1617 
1618 MODULE_LICENSE("GPL");
1619 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1620 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
1621