xref: /linux/drivers/crypto/caam/caamalg_desc.c (revision 9dbe3072c6b1f28000961e34497237d0e3d13318)
1 /*
2  * Shared descriptors for aead, skcipher algorithms
3  *
4  * Copyright 2016-2018 NXP
5  */
6 
7 #include "compat.h"
8 #include "desc_constr.h"
9 #include "caamalg_desc.h"
10 
11 /*
12  * For aead functions, read the payload from req->src and write the
13  * processed payload to req->dst
14  */
15 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
16 {
17 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
18 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
19 			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
20 }
21 
22 /* Set DK bit in class 1 operation if shared */
23 static inline void append_dec_op1(u32 *desc, u32 type)
24 {
25 	u32 *jump_cmd, *uncond_jump_cmd;
26 
27 	/* DK bit is valid only for AES */
28 	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
29 		append_operation(desc, type | OP_ALG_AS_INITFINAL |
30 				 OP_ALG_DECRYPT);
31 		return;
32 	}
33 
34 	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
35 	append_operation(desc, type | OP_ALG_AS_INITFINAL |
36 			 OP_ALG_DECRYPT);
37 	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
38 	set_jump_tgt_here(desc, jump_cmd);
39 	append_operation(desc, type | OP_ALG_AS_INITFINAL |
40 			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
41 	set_jump_tgt_here(desc, uncond_jump_cmd);
42 }
43 
44 /**
45  * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
46  *                               (non-protocol) with no (null) encryption.
47  * @desc: pointer to buffer used for descriptor construction
48  * @adata: pointer to authentication transform definitions.
49  *         A split key is required for SEC Era < 6; the size of the split key
50  *         is specified in this case. Valid algorithm values - one of
51  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
52  *         with OP_ALG_AAI_HMAC_PRECOMP.
53  * @icvsize: integrity check value (ICV) size (truncated or full)
54  * @era: SEC Era
55  */
56 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
57 				 unsigned int icvsize, int era)
58 {
59 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
60 
61 	init_sh_desc(desc, HDR_SHARE_SERIAL);
62 
63 	/* Skip if already shared */
64 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
65 				   JUMP_COND_SHRD);
66 	if (era < 6) {
67 		if (adata->key_inline)
68 			append_key_as_imm(desc, adata->key_virt,
69 					  adata->keylen_pad, adata->keylen,
70 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
71 					  KEY_ENC);
72 		else
73 			append_key(desc, adata->key_dma, adata->keylen,
74 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
75 	} else {
76 		append_proto_dkp(desc, adata);
77 	}
78 	set_jump_tgt_here(desc, key_jump_cmd);
79 
80 	/* assoclen + cryptlen = seqinlen */
81 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
82 
83 	/* Prepare to read and write cryptlen + assoclen bytes */
84 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
85 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
86 
87 	/*
88 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
89 	 * thus need to do some magic, i.e. self-patch the descriptor
90 	 * buffer.
91 	 */
92 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
93 				    MOVE_DEST_MATH3 |
94 				    (0x6 << MOVE_LEN_SHIFT));
95 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
96 				     MOVE_DEST_DESCBUF |
97 				     MOVE_WAITCOMP |
98 				     (0x8 << MOVE_LEN_SHIFT));
99 
100 	/* Class 2 operation */
101 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
102 			 OP_ALG_ENCRYPT);
103 
104 	/* Read and write cryptlen bytes */
105 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
106 
107 	set_move_tgt_here(desc, read_move_cmd);
108 	set_move_tgt_here(desc, write_move_cmd);
109 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
110 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
111 		    MOVE_AUX_LS);
112 
113 	/* Write ICV */
114 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
115 			 LDST_SRCDST_BYTE_CONTEXT);
116 
117 #ifdef DEBUG
118 	print_hex_dump(KERN_ERR,
119 		       "aead null enc shdesc@" __stringify(__LINE__)": ",
120 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
121 #endif
122 }
123 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
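
/*
 * Usage sketch (illustrative only, not part of the driver): a minimal
 * caller of the constructor above. The buffer size and the split-key
 * variables are hypothetical names assumed for illustration; real callers
 * (e.g. caamalg) derive them from the tfm context.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo adata = {
 *		.algtype    = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
 *		.keylen     = split_keylen,	// MDHA split key for era < 6
 *		.keylen_pad = split_keylen_pad,
 *		.key_virt   = split_key,
 *		.key_inline = true,
 *	};
 *
 *	cnstr_shdsc_aead_null_encap(sh_desc, &adata, authsize, era);
 *	// desc_bytes(sh_desc) bytes are now ready to be DMA mapped and
 *	// referenced from job descriptors.
 */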
124 
125 /**
126  * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
127  *                               (non-protocol) with no (null) decryption.
128  * @desc: pointer to buffer used for descriptor construction
129  * @adata: pointer to authentication transform definitions.
130  *         A split key is required for SEC Era < 6; the size of the split key
131  *         is specified in this case. Valid algorithm values - one of
132  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
133  *         with OP_ALG_AAI_HMAC_PRECOMP.
134  * @icvsize: integrity check value (ICV) size (truncated or full)
135  * @era: SEC Era
136  */
137 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
138 				 unsigned int icvsize, int era)
139 {
140 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
141 
142 	init_sh_desc(desc, HDR_SHARE_SERIAL);
143 
144 	/* Skip if already shared */
145 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
146 				   JUMP_COND_SHRD);
147 	if (era < 6) {
148 		if (adata->key_inline)
149 			append_key_as_imm(desc, adata->key_virt,
150 					  adata->keylen_pad, adata->keylen,
151 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
152 					  KEY_ENC);
153 		else
154 			append_key(desc, adata->key_dma, adata->keylen,
155 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
156 	} else {
157 		append_proto_dkp(desc, adata);
158 	}
159 	set_jump_tgt_here(desc, key_jump_cmd);
160 
161 	/* Class 2 operation */
162 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
163 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
164 
165 	/* assoclen + cryptlen = seqoutlen */
166 	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
167 
168 	/* Prepare to read and write cryptlen + assoclen bytes */
169 	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
170 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
171 
172 	/*
173 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
174 	 * thus need to do some magic, i.e. self-patch the descriptor
175 	 * buffer.
176 	 */
177 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
178 				    MOVE_DEST_MATH2 |
179 				    (0x6 << MOVE_LEN_SHIFT));
180 	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
181 				     MOVE_DEST_DESCBUF |
182 				     MOVE_WAITCOMP |
183 				     (0x8 << MOVE_LEN_SHIFT));
184 
185 	/* Read and write cryptlen bytes */
186 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
187 
188 	/*
189 	 * Insert a jump acting as a NOP here: we need at least 4 instructions
190 	 * between code patching the descriptor buffer and the patched location.
191 	 */
192 	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
193 	set_jump_tgt_here(desc, jump_cmd);
194 
195 	set_move_tgt_here(desc, read_move_cmd);
196 	set_move_tgt_here(desc, write_move_cmd);
197 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
198 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
199 		    MOVE_AUX_LS);
200 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
201 
202 	/* Load ICV */
203 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
204 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
205 
206 #ifdef DEBUG
207 	print_hex_dump(KERN_ERR,
208 		       "aead null dec shdesc@" __stringify(__LINE__)": ",
209 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
210 #endif
211 }
212 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
213 
214 static void init_sh_desc_key_aead(u32 * const desc,
215 				  struct alginfo * const cdata,
216 				  struct alginfo * const adata,
217 				  const bool is_rfc3686, u32 *nonce, int era)
218 {
219 	u32 *key_jump_cmd;
220 	unsigned int enckeylen = cdata->keylen;
221 
222 	/* Note: Context registers are saved. */
223 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
224 
225 	/* Skip if already shared */
226 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
227 				   JUMP_COND_SHRD);
228 
229 	/*
230 	 * RFC3686 specific:
231 	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
232 	 *	| enckeylen = encryption key size + nonce size
233 	 */
234 	if (is_rfc3686)
235 		enckeylen -= CTR_RFC3686_NONCE_SIZE;
236 
237 	if (era < 6) {
238 		if (adata->key_inline)
239 			append_key_as_imm(desc, adata->key_virt,
240 					  adata->keylen_pad, adata->keylen,
241 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
242 					  KEY_ENC);
243 		else
244 			append_key(desc, adata->key_dma, adata->keylen,
245 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
246 	} else {
247 		append_proto_dkp(desc, adata);
248 	}
249 
250 	if (cdata->key_inline)
251 		append_key_as_imm(desc, cdata->key_virt, enckeylen,
252 				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
253 	else
254 		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
255 			   KEY_DEST_CLASS_REG);
256 
257 	/* Load Counter into CONTEXT1 reg */
258 	if (is_rfc3686) {
259 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
260 				   LDST_CLASS_IND_CCB |
261 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
262 		append_move(desc,
263 			    MOVE_SRC_OUTFIFO |
264 			    MOVE_DEST_CLASS1CTX |
265 			    (16 << MOVE_OFFSET_SHIFT) |
266 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
267 	}
268 
269 	set_jump_tgt_here(desc, key_jump_cmd);
270 }
271 
272 /**
273  * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
274  *                          (non-protocol).
275  * @desc: pointer to buffer used for descriptor construction
276  * @cdata: pointer to block cipher transform definitions
277  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
278  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
279  * @adata: pointer to authentication transform definitions.
280  *         A split key is required for SEC Era < 6; the size of the split key
281  *         is specified in this case. Valid algorithm values - one of
282  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
283  *         with OP_ALG_AAI_HMAC_PRECOMP.
284  * @ivsize: initialization vector size
285  * @icvsize: integrity check value (ICV) size (truncated or full)
286  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
287  * @nonce: pointer to rfc3686 nonce
288  * @ctx1_iv_off: IV offset in CONTEXT1 register
289  * @is_qi: true when called from caam/qi
290  * @era: SEC Era
291  */
292 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
293 			    struct alginfo *adata, unsigned int ivsize,
294 			    unsigned int icvsize, const bool is_rfc3686,
295 			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
296 			    int era)
297 {
298 	/* Note: Context registers are saved. */
299 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
300 
301 	/* Class 2 operation */
302 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
303 			 OP_ALG_ENCRYPT);
304 
305 	if (is_qi) {
306 		u32 *wait_load_cmd;
307 
308 		/* REG3 = assoclen */
309 		append_seq_load(desc, 4, LDST_CLASS_DECO |
310 				LDST_SRCDST_WORD_DECO_MATH3 |
311 				(4 << LDST_OFFSET_SHIFT));
312 
313 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
314 					    JUMP_COND_CALM | JUMP_COND_NCP |
315 					    JUMP_COND_NOP | JUMP_COND_NIP |
316 					    JUMP_COND_NIFP);
317 		set_jump_tgt_here(desc, wait_load_cmd);
318 
319 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
320 				LDST_SRCDST_BYTE_CONTEXT |
321 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
322 	}
323 
324 	/* Read and write assoclen bytes */
325 	if (is_qi || era < 3) {
326 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
327 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
328 	} else {
329 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
330 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
331 	}
332 
333 	/* Skip assoc data */
334 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
335 
336 	/* read assoc before reading payload */
337 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
338 				      FIFOLDST_VLF);
339 
340 	/* Load Counter into CONTEXT1 reg */
341 	if (is_rfc3686)
342 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
343 				     LDST_SRCDST_BYTE_CONTEXT |
344 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
345 				      LDST_OFFSET_SHIFT));
346 
347 	/* Class 1 operation */
348 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
349 			 OP_ALG_ENCRYPT);
350 
351 	/* Read and write cryptlen bytes */
352 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
353 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
354 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
355 
356 	/* Write ICV */
357 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
358 			 LDST_SRCDST_BYTE_CONTEXT);
359 
360 #ifdef DEBUG
361 	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
362 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
363 #endif
364 }
365 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
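
/*
 * Usage sketch (illustrative only): authenc(hmac(sha1),cbc(aes)) without
 * the rfc3686 wrapper, so nonce is NULL and the IV sits at offset 0 of the
 * class 1 context. The key/length variables below are hypothetical names.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo cdata = {
 *		.algtype    = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
 *		.keylen     = enckeylen,
 *		.key_virt   = enckey,
 *		.key_inline = true,
 *	};
 *	struct alginfo adata = {
 *		.algtype    = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
 *		.keylen     = split_keylen,
 *		.keylen_pad = split_keylen_pad,
 *		.key_virt   = split_key,
 *		.key_inline = true,
 *	};
 *
 *	cnstr_shdsc_aead_encap(sh_desc, &cdata, &adata, ivsize, authsize,
 *			       false, NULL, 0, false, era);
 *	// For rfc3686(ctr(aes)) the caller would instead pass is_rfc3686 =
 *	// true, a pointer to the 4-byte nonce and a nonzero ctx1_iv_off.
 */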
366 
367 /**
368  * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
369  *                          (non-protocol).
370  * @desc: pointer to buffer used for descriptor construction
371  * @cdata: pointer to block cipher transform definitions
372  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
373  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
374  * @adata: pointer to authentication transform definitions.
375  *         A split key is required for SEC Era < 6; the size of the split key
376  *         is specified in this case. Valid algorithm values - one of
377  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
378  *         with OP_ALG_AAI_HMAC_PRECOMP.
379  * @ivsize: initialization vector size
380  * @icvsize: integrity check value (ICV) size (truncated or full)
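 * @geniv: whether the IV is HW-generated, i.e. the encrypt-side descriptor
 *         was built by cnstr_shdsc_aead_givencap()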
381  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
382  * @nonce: pointer to rfc3686 nonce
383  * @ctx1_iv_off: IV offset in CONTEXT1 register
384  * @is_qi: true when called from caam/qi
385  * @era: SEC Era
386  */
387 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
388 			    struct alginfo *adata, unsigned int ivsize,
389 			    unsigned int icvsize, const bool geniv,
390 			    const bool is_rfc3686, u32 *nonce,
391 			    const u32 ctx1_iv_off, const bool is_qi, int era)
392 {
393 	/* Note: Context registers are saved. */
394 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
395 
396 	/* Class 2 operation */
397 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
398 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
399 
400 	if (is_qi) {
401 		u32 *wait_load_cmd;
402 
403 		/* REG3 = assoclen */
404 		append_seq_load(desc, 4, LDST_CLASS_DECO |
405 				LDST_SRCDST_WORD_DECO_MATH3 |
406 				(4 << LDST_OFFSET_SHIFT));
407 
408 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
409 					    JUMP_COND_CALM | JUMP_COND_NCP |
410 					    JUMP_COND_NOP | JUMP_COND_NIP |
411 					    JUMP_COND_NIFP);
412 		set_jump_tgt_here(desc, wait_load_cmd);
413 
414 		if (!geniv)
415 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
416 					LDST_SRCDST_BYTE_CONTEXT |
417 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
418 	}
419 
420 	/* Read and write assoclen bytes */
421 	if (is_qi || era < 3) {
422 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
423 		if (geniv)
424 			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
425 						ivsize);
426 		else
427 			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
428 					CAAM_CMD_SZ);
429 	} else {
430 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
431 		if (geniv)
432 			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
433 						ivsize);
434 		else
435 			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
436 					CAAM_CMD_SZ);
437 	}
438 
439 	/* Skip assoc data */
440 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
441 
442 	/* read assoc before reading payload */
443 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
444 			     KEY_VLF);
445 
446 	if (geniv) {
447 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
448 				LDST_SRCDST_BYTE_CONTEXT |
449 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
450 		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
451 			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
452 	}
453 
454 	/* Load Counter into CONTEXT1 reg */
455 	if (is_rfc3686)
456 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
457 				     LDST_SRCDST_BYTE_CONTEXT |
458 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
459 				      LDST_OFFSET_SHIFT));
460 
461 	/* Choose operation */
462 	if (ctx1_iv_off)
463 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
464 				 OP_ALG_DECRYPT);
465 	else
466 		append_dec_op1(desc, cdata->algtype);
467 
468 	/* Read and write cryptlen bytes */
469 	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
470 	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
471 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
472 
473 	/* Load ICV */
474 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
475 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
476 
477 #ifdef DEBUG
478 	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
479 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
480 #endif
481 }
482 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
483 
484 /**
485  * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
486  *                             (non-protocol) with HW-generated initialization
487  *                             vector.
488  * @desc: pointer to buffer used for descriptor construction
489  * @cdata: pointer to block cipher transform definitions
490  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
491  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
492  * @adata: pointer to authentication transform definitions.
493  *         A split key is required for SEC Era < 6; the size of the split key
494  *         is specified in this case. Valid algorithm values - one of
495  *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
496  *         with OP_ALG_AAI_HMAC_PRECOMP.
497  * @ivsize: initialization vector size
498  * @icvsize: integrity check value (ICV) size (truncated or full)
499  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
500  * @nonce: pointer to rfc3686 nonce
501  * @ctx1_iv_off: IV offset in CONTEXT1 register
502  * @is_qi: true when called from caam/qi
503  * @era: SEC Era
504  */
505 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
506 			       struct alginfo *adata, unsigned int ivsize,
507 			       unsigned int icvsize, const bool is_rfc3686,
508 			       u32 *nonce, const u32 ctx1_iv_off,
509 			       const bool is_qi, int era)
510 {
511 	u32 geniv, moveiv;
512 
513 	/* Note: Context registers are saved. */
514 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
515 
516 	if (is_qi) {
517 		u32 *wait_load_cmd;
518 
519 		/* REG3 = assoclen */
520 		append_seq_load(desc, 4, LDST_CLASS_DECO |
521 				LDST_SRCDST_WORD_DECO_MATH3 |
522 				(4 << LDST_OFFSET_SHIFT));
523 
524 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
525 					    JUMP_COND_CALM | JUMP_COND_NCP |
526 					    JUMP_COND_NOP | JUMP_COND_NIP |
527 					    JUMP_COND_NIFP);
528 		set_jump_tgt_here(desc, wait_load_cmd);
529 	}
530 
531 	if (is_rfc3686) {
532 		if (is_qi)
533 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
534 					LDST_SRCDST_BYTE_CONTEXT |
535 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
536 
537 		goto copy_iv;
538 	}
539 
540 	/* Generate IV */
541 	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
542 		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
543 		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
544 	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
545 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
546 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
547 	append_move(desc, MOVE_WAITCOMP |
548 		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
549 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
550 		    (ivsize << MOVE_LEN_SHIFT));
551 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
552 
553 copy_iv:
554 	/* Copy IV from class 1 context to OFIFO */
555 	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
556 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
557 		    (ivsize << MOVE_LEN_SHIFT));
558 
559 	/* Class 2 operation */
560 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
561 			 OP_ALG_ENCRYPT);
562 
563 	/* Read and write assoclen bytes */
564 	if (is_qi || era < 3) {
565 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
566 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
567 	} else {
568 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
569 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
570 	}
571 
572 	/* Skip assoc data */
573 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
574 
575 	/* read assoc before reading payload */
576 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
577 			     KEY_VLF);
578 
579 	/* Copy IV from OFIFO to the class 2 input FIFO */
580 	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
581 		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
582 	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
583 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
584 	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
585 			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
586 
587 	/* Load Counter into CONTEXT1 reg */
588 	if (is_rfc3686)
589 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
590 				     LDST_SRCDST_BYTE_CONTEXT |
591 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
592 				      LDST_OFFSET_SHIFT));
593 
594 	/* Class 1 operation */
595 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
596 			 OP_ALG_ENCRYPT);
597 
598 	/* Will write ivsize + cryptlen */
599 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
600 
601 	/* No need to reload the IV */
602 	append_seq_fifo_load(desc, ivsize,
603 			     FIFOLD_CLASS_SKIP);
604 
605 	/* Will read cryptlen */
606 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
607 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
608 			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
609 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
610 
611 	/* Write ICV */
612 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
613 			 LDST_SRCDST_BYTE_CONTEXT);
614 
615 #ifdef DEBUG
616 	print_hex_dump(KERN_ERR,
617 		       "aead givenc shdesc@" __stringify(__LINE__)": ",
618 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
619 #endif
620 }
621 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
622 
623 /**
624  * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
625  * @desc: pointer to buffer used for descriptor construction
626  * @cdata: pointer to block cipher transform definitions
627  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
628  * @ivsize: initialization vector size
629  * @icvsize: integrity check value (ICV) size (truncated or full)
630  * @is_qi: true when called from caam/qi
631  */
632 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
633 			   unsigned int ivsize, unsigned int icvsize,
634 			   const bool is_qi)
635 {
636 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
637 	    *zero_assoc_jump_cmd2;
638 
639 	init_sh_desc(desc, HDR_SHARE_SERIAL);
640 
641 	/* skip key loading if they are loaded due to sharing */
642 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
643 				   JUMP_COND_SHRD);
644 	if (cdata->key_inline)
645 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
646 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
647 	else
648 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
649 			   KEY_DEST_CLASS_REG);
650 	set_jump_tgt_here(desc, key_jump_cmd);
651 
652 	/* class 1 operation */
653 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
654 			 OP_ALG_ENCRYPT);
655 
656 	if (is_qi) {
657 		u32 *wait_load_cmd;
658 
659 		/* REG3 = assoclen */
660 		append_seq_load(desc, 4, LDST_CLASS_DECO |
661 				LDST_SRCDST_WORD_DECO_MATH3 |
662 				(4 << LDST_OFFSET_SHIFT));
663 
664 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
665 					    JUMP_COND_CALM | JUMP_COND_NCP |
666 					    JUMP_COND_NOP | JUMP_COND_NIP |
667 					    JUMP_COND_NIFP);
668 		set_jump_tgt_here(desc, wait_load_cmd);
669 
670 		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
671 					ivsize);
672 	} else {
673 		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
674 				CAAM_CMD_SZ);
675 	}
676 
677 	/* if assoclen + cryptlen is ZERO, skip to ICV write */
678 	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
679 						 JUMP_COND_MATH_Z);
680 
681 	if (is_qi)
682 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
683 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
684 
685 	/* if assoclen is ZERO, skip reading the assoc data */
686 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
687 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
688 					   JUMP_COND_MATH_Z);
689 
690 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
691 
692 	/* skip assoc data */
693 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
694 
695 	/* cryptlen = seqinlen - assoclen */
696 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
697 
698 	/* if cryptlen is ZERO jump to zero-payload commands */
699 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
700 					    JUMP_COND_MATH_Z);
701 
702 	/* read assoc data */
703 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
704 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
705 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
706 
707 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
708 
709 	/* write encrypted data */
710 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
711 
712 	/* read payload data */
713 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
714 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
715 
716 	/* jump to ICV writing */
717 	if (is_qi)
718 		append_jump(desc, JUMP_TEST_ALL | 4);
719 	else
720 		append_jump(desc, JUMP_TEST_ALL | 2);
721 
722 	/* zero-payload commands */
723 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
724 
725 	/* read assoc data */
726 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
727 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
728 	if (is_qi)
729 		/* jump to ICV writing */
730 		append_jump(desc, JUMP_TEST_ALL | 2);
731 
732 	/* There is no input data */
733 	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
734 
735 	if (is_qi)
736 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
737 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
738 				     FIFOLD_TYPE_LAST1);
739 
740 	/* write ICV */
741 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
742 			 LDST_SRCDST_BYTE_CONTEXT);
743 
744 #ifdef DEBUG
745 	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
746 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
747 #endif
748 }
749 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
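
/*
 * Usage sketch (illustrative only): gcm(aes) encapsulation descriptor for
 * the job-ring backend (is_qi = false), with a 12-byte IV. Variable names
 * below are hypothetical.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo cdata = {
 *		.algtype    = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
 *		.keylen     = aes_keylen,
 *		.key_virt   = aes_key,
 *		.key_inline = true,
 *	};
 *
 *	cnstr_shdsc_gcm_encap(sh_desc, &cdata, 12, authsize, false);
 */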
750 
751 /**
752  * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
753  * @desc: pointer to buffer used for descriptor construction
754  * @cdata: pointer to block cipher transform definitions
755  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
756  * @ivsize: initialization vector size
757  * @icvsize: integrity check value (ICV) size (truncated or full)
758  * @is_qi: true when called from caam/qi
759  */
760 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
761 			   unsigned int ivsize, unsigned int icvsize,
762 			   const bool is_qi)
763 {
764 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
765 
766 	init_sh_desc(desc, HDR_SHARE_SERIAL);
767 
768 	/* skip key loading if they are loaded due to sharing */
769 	key_jump_cmd = append_jump(desc, JUMP_JSL |
770 				   JUMP_TEST_ALL | JUMP_COND_SHRD);
771 	if (cdata->key_inline)
772 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
773 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
774 	else
775 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
776 			   KEY_DEST_CLASS_REG);
777 	set_jump_tgt_here(desc, key_jump_cmd);
778 
779 	/* class 1 operation */
780 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
781 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
782 
783 	if (is_qi) {
784 		u32 *wait_load_cmd;
785 
786 		/* REG3 = assoclen */
787 		append_seq_load(desc, 4, LDST_CLASS_DECO |
788 				LDST_SRCDST_WORD_DECO_MATH3 |
789 				(4 << LDST_OFFSET_SHIFT));
790 
791 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
792 					    JUMP_COND_CALM | JUMP_COND_NCP |
793 					    JUMP_COND_NOP | JUMP_COND_NIP |
794 					    JUMP_COND_NIFP);
795 		set_jump_tgt_here(desc, wait_load_cmd);
796 
797 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
798 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
799 	}
800 
801 	/* if assoclen is ZERO, skip reading the assoc data */
802 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
803 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
804 						 JUMP_COND_MATH_Z);
805 
806 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
807 
808 	/* skip assoc data */
809 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
810 
811 	/* read assoc data */
812 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
813 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
814 
815 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
816 
817 	/* cryptlen = seqoutlen - assoclen */
818 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
819 
820 	/* jump to zero-payload command if cryptlen is zero */
821 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
822 					    JUMP_COND_MATH_Z);
823 
824 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
825 
826 	/* store encrypted data */
827 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
828 
829 	/* read payload data */
830 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
831 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
832 
833 	/* zero-payload command */
834 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
835 
836 	/* read ICV */
837 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
838 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
839 
840 #ifdef DEBUG
841 	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
842 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
843 #endif
844 }
845 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
846 
847 /**
848  * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
849  *                             (non-protocol).
850  * @desc: pointer to buffer used for descriptor construction
851  * @cdata: pointer to block cipher transform definitions
852  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
853  * @ivsize: initialization vector size
854  * @icvsize: integrity check value (ICV) size (truncated or full)
855  * @is_qi: true when called from caam/qi
856  */
857 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
858 			       unsigned int ivsize, unsigned int icvsize,
859 			       const bool is_qi)
860 {
861 	u32 *key_jump_cmd;
862 
863 	init_sh_desc(desc, HDR_SHARE_SERIAL);
864 
865 	/* Skip key loading if it is loaded due to sharing */
866 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
867 				   JUMP_COND_SHRD);
868 	if (cdata->key_inline)
869 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
870 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
871 	else
872 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
873 			   KEY_DEST_CLASS_REG);
874 	set_jump_tgt_here(desc, key_jump_cmd);
875 
876 	/* Class 1 operation */
877 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
878 			 OP_ALG_ENCRYPT);
879 
880 	if (is_qi) {
881 		u32 *wait_load_cmd;
882 
883 		/* REG3 = assoclen */
884 		append_seq_load(desc, 4, LDST_CLASS_DECO |
885 				LDST_SRCDST_WORD_DECO_MATH3 |
886 				(4 << LDST_OFFSET_SHIFT));
887 
888 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
889 					    JUMP_COND_CALM | JUMP_COND_NCP |
890 					    JUMP_COND_NOP | JUMP_COND_NIP |
891 					    JUMP_COND_NIFP);
892 		set_jump_tgt_here(desc, wait_load_cmd);
893 
894 		/* Read salt and IV */
895 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
896 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
897 					FIFOLD_TYPE_IV);
898 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
899 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
900 	}
901 
902 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
903 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
904 
905 	/* Read assoc data */
906 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
907 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
908 
909 	/* Skip IV */
910 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
911 
912 	/* Will read cryptlen bytes */
913 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
914 
915 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
916 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
917 
918 	/* Skip assoc data */
919 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
920 
921 	/* cryptlen = seqoutlen - assoclen */
922 	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
923 
924 	/* Write encrypted data */
925 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
926 
927 	/* Read payload data */
928 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
929 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
930 
931 	/* Write ICV */
932 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
933 			 LDST_SRCDST_BYTE_CONTEXT);
934 
935 #ifdef DEBUG
936 	print_hex_dump(KERN_ERR,
937 		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
938 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
939 #endif
940 }
941 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
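
/*
 * Usage sketch (illustrative only): rfc4106(gcm(aes)) with an 8-byte IV.
 * Note that the QI path above reads a 4-byte salt from key_virt + keylen,
 * so the caller is expected to keep the salt stored right after the AES
 * key material. Variable names below are hypothetical.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo cdata = {
 *		.algtype    = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
 *		.keylen     = aes_keylen,	// salt follows at key_virt + keylen
 *		.key_virt   = key_and_salt,
 *		.key_inline = true,
 *	};
 *
 *	cnstr_shdsc_rfc4106_encap(sh_desc, &cdata, 8, authsize, true);
 */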
942 
943 /**
944  * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
945  *                             (non-protocol).
946  * @desc: pointer to buffer used for descriptor construction
947  * @cdata: pointer to block cipher transform definitions
948  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
949  * @ivsize: initialization vector size
950  * @icvsize: integrity check value (ICV) size (truncated or full)
951  * @is_qi: true when called from caam/qi
952  */
953 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
954 			       unsigned int ivsize, unsigned int icvsize,
955 			       const bool is_qi)
956 {
957 	u32 *key_jump_cmd;
958 
959 	init_sh_desc(desc, HDR_SHARE_SERIAL);
960 
961 	/* Skip key loading if it is loaded due to sharing */
962 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
963 				   JUMP_COND_SHRD);
964 	if (cdata->key_inline)
965 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
966 				  cdata->keylen, CLASS_1 |
967 				  KEY_DEST_CLASS_REG);
968 	else
969 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
970 			   KEY_DEST_CLASS_REG);
971 	set_jump_tgt_here(desc, key_jump_cmd);
972 
973 	/* Class 1 operation */
974 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
975 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
976 
977 	if (is_qi) {
978 		u32 *wait_load_cmd;
979 
980 		/* REG3 = assoclen */
981 		append_seq_load(desc, 4, LDST_CLASS_DECO |
982 				LDST_SRCDST_WORD_DECO_MATH3 |
983 				(4 << LDST_OFFSET_SHIFT));
984 
985 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
986 					    JUMP_COND_CALM | JUMP_COND_NCP |
987 					    JUMP_COND_NOP | JUMP_COND_NIP |
988 					    JUMP_COND_NIFP);
989 		set_jump_tgt_here(desc, wait_load_cmd);
990 
991 		/* Read salt and IV */
992 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
993 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
994 					FIFOLD_TYPE_IV);
995 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
996 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
997 	}
998 
999 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
1000 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1001 
1002 	/* Read assoc data */
1003 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1004 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1005 
1006 	/* Skip IV */
1007 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
1008 
1009 	/* Will read cryptlen bytes */
1010 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1011 
1012 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1013 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1014 
1015 	/* Skip assoc data */
1016 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1017 
1018 	/* Will write cryptlen bytes */
1019 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1020 
1021 	/* Store payload data */
1022 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1023 
1024 	/* Read encrypted data */
1025 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1026 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1027 
1028 	/* Read ICV */
1029 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1030 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1031 
1032 #ifdef DEBUG
1033 	print_hex_dump(KERN_ERR,
1034 		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
1035 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1036 #endif
1037 }
1038 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
1039 
1040 /**
1041  * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
1042  *                             (non-protocol).
1043  * @desc: pointer to buffer used for descriptor construction
1044  * @cdata: pointer to block cipher transform definitions
1045  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
1046  * @ivsize: initialization vector size
1047  * @icvsize: integrity check value (ICV) size (truncated or full)
1048  * @is_qi: true when called from caam/qi
1049  */
1050 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
1051 			       unsigned int ivsize, unsigned int icvsize,
1052 			       const bool is_qi)
1053 {
1054 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1055 
1056 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1057 
1058 	/* Skip key loading if it is loaded due to sharing */
1059 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1060 				   JUMP_COND_SHRD);
1061 	if (cdata->key_inline)
1062 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1063 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1064 	else
1065 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1066 			   KEY_DEST_CLASS_REG);
1067 	set_jump_tgt_here(desc, key_jump_cmd);
1068 
1069 	/* Class 1 operation */
1070 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1071 			 OP_ALG_ENCRYPT);
1072 
1073 	if (is_qi) {
1074 		/* assoclen is not needed, skip it */
1075 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1076 
1077 		/* Read salt and IV */
1078 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1079 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1080 					FIFOLD_TYPE_IV);
1081 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1082 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1083 	}
1084 
1085 	/* assoclen + cryptlen = seqinlen */
1086 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1087 
1088 	/*
1089 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
1090 	 * thus need to do some magic, i.e. self-patch the descriptor
1091 	 * buffer.
1092 	 */
1093 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1094 				    (0x6 << MOVE_LEN_SHIFT));
1095 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1096 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1097 
1098 	/* Will read assoclen + cryptlen bytes */
1099 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1100 
1101 	/* Will write assoclen + cryptlen bytes */
1102 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1103 
1104 	/* Read and write assoclen + cryptlen bytes */
1105 	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1106 
1107 	set_move_tgt_here(desc, read_move_cmd);
1108 	set_move_tgt_here(desc, write_move_cmd);
1109 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1110 	/* Move payload data to OFIFO */
1111 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1112 
1113 	/* Write ICV */
1114 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
1115 			 LDST_SRCDST_BYTE_CONTEXT);
1116 
1117 #ifdef DEBUG
1118 	print_hex_dump(KERN_ERR,
1119 		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
1120 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1121 #endif
1122 }
1123 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
1124 
1125 /**
1126  * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
1127  *                             (non-protocol).
1128  * @desc: pointer to buffer used for descriptor construction
1129  * @cdata: pointer to block cipher transform definitions
1130  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
1131  * @ivsize: initialization vector size
1132  * @icvsize: integrity check value (ICV) size (truncated or full)
1133  * @is_qi: true when called from caam/qi
1134  */
1135 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
1136 			       unsigned int ivsize, unsigned int icvsize,
1137 			       const bool is_qi)
1138 {
1139 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1140 
1141 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1142 
1143 	/* Skip key loading if it is loaded due to sharing */
1144 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1145 				   JUMP_COND_SHRD);
1146 	if (cdata->key_inline)
1147 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1148 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1149 	else
1150 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1151 			   KEY_DEST_CLASS_REG);
1152 	set_jump_tgt_here(desc, key_jump_cmd);
1153 
1154 	/* Class 1 operation */
1155 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1156 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1157 
1158 	if (is_qi) {
1159 		/* assoclen is not needed, skip it */
1160 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1161 
1162 		/* Read salt and IV */
1163 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1164 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1165 					FIFOLD_TYPE_IV);
1166 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1167 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1168 	}
1169 
1170 	/* assoclen + cryptlen = seqoutlen */
1171 	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1172 
1173 	/*
1174 	 * MOVE_LEN opcode is not available in all SEC HW revisions,
1175 	 * thus need to do some magic, i.e. self-patch the descriptor
1176 	 * buffer.
1177 	 */
1178 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1179 				    (0x6 << MOVE_LEN_SHIFT));
1180 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1181 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1182 
1183 	/* Will read assoclen + cryptlen bytes */
1184 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1185 
1186 	/* Will write assoclen + cryptlen bytes */
1187 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1188 
1189 	/* Store payload data */
1190 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1191 
1192 	/* In-snoop assoclen + cryptlen data */
1193 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1194 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1195 
1196 	set_move_tgt_here(desc, read_move_cmd);
1197 	set_move_tgt_here(desc, write_move_cmd);
1198 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1199 	/* Move payload data to OFIFO */
1200 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1201 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1202 
1203 	/* Read ICV */
1204 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1205 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1206 
1207 #ifdef DEBUG
1208 	print_hex_dump(KERN_ERR,
1209 		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1210 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1211 #endif
1212 }
1213 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1214 
1215 /* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
1216 static inline void skcipher_append_src_dst(u32 *desc)
1217 {
1218 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1219 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1220 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1221 			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1222 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1223 }
1224 
1225 /**
1226  * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
1227  * @desc: pointer to buffer used for descriptor construction
1228  * @cdata: pointer to block cipher transform definitions
1229  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
1230  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
1231  * @ivsize: initialization vector size
1232  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1233  * @ctx1_iv_off: IV offset in CONTEXT1 register
1234  */
1235 void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
1236 				unsigned int ivsize, const bool is_rfc3686,
1237 				const u32 ctx1_iv_off)
1238 {
1239 	u32 *key_jump_cmd;
1240 
1241 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1242 	/* Skip if already shared */
1243 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1244 				   JUMP_COND_SHRD);
1245 
1246 	/* Load class1 key only */
1247 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1248 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1249 
1250 	/* Load nonce into CONTEXT1 reg */
1251 	if (is_rfc3686) {
1252 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1253 
1254 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1255 				   LDST_CLASS_IND_CCB |
1256 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1257 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1258 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1259 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1260 	}
1261 
1262 	set_jump_tgt_here(desc, key_jump_cmd);
1263 
1264 	/* Load IV */
1265 	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1266 			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1267 
1268 	/* Load counter into CONTEXT1 reg */
1269 	if (is_rfc3686)
1270 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1271 				     LDST_SRCDST_BYTE_CONTEXT |
1272 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1273 				      LDST_OFFSET_SHIFT));
1274 
1275 	/* Load operation */
1276 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1277 			 OP_ALG_ENCRYPT);
1278 
1279 	/* Perform operation */
1280 	skcipher_append_src_dst(desc);
1281 
1282 #ifdef DEBUG
1283 	print_hex_dump(KERN_ERR,
1284 		       "skcipher enc shdesc@" __stringify(__LINE__)": ",
1285 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1286 #endif
1287 }
1288 EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
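
/*
 * Usage sketch (illustrative only): cbc(aes) skcipher encryption, so
 * is_rfc3686 is false and the IV is loaded at offset 0 of the class 1
 * context. Variable names below are hypothetical.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo cdata = {
 *		.algtype  = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
 *		.keylen   = aes_keylen,
 *		.key_virt = aes_key,
 *	};
 *
 *	cnstr_shdsc_skcipher_encap(sh_desc, &cdata, AES_BLOCK_SIZE, false, 0);
 *	// For rfc3686(ctr(aes)) the 4-byte nonce is expected right after the
 *	// key at key_virt + keylen (see the nonce load above).
 */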
1289 
1290 /**
1291  * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
1292  * @desc: pointer to buffer used for descriptor construction
1293  * @cdata: pointer to block cipher transform definitions
1294  *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
1295  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
1296  * @ivsize: initialization vector size
1297  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1298  * @ctx1_iv_off: IV offset in CONTEXT1 register
1299  */
1300 void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
1301 				unsigned int ivsize, const bool is_rfc3686,
1302 				const u32 ctx1_iv_off)
1303 {
1304 	u32 *key_jump_cmd;
1305 
1306 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1307 	/* Skip if already shared */
1308 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1309 				   JUMP_COND_SHRD);
1310 
1311 	/* Load class1 key only */
1312 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1313 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1314 
1315 	/* Load nonce into CONTEXT1 reg */
1316 	if (is_rfc3686) {
1317 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1318 
1319 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1320 				   LDST_CLASS_IND_CCB |
1321 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1322 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1323 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1324 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1325 	}
1326 
1327 	set_jump_tgt_here(desc, key_jump_cmd);
1328 
1329 	/* Load IV */
1330 	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1331 			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1332 
1333 	/* Load counter into CONTEXT1 reg */
1334 	if (is_rfc3686)
1335 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1336 				     LDST_SRCDST_BYTE_CONTEXT |
1337 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1338 				      LDST_OFFSET_SHIFT));
1339 
1340 	/* Choose operation */
1341 	if (ctx1_iv_off)
1342 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1343 				 OP_ALG_DECRYPT);
1344 	else
1345 		append_dec_op1(desc, cdata->algtype);
1346 
1347 	/* Perform operation */
1348 	skcipher_append_src_dst(desc);
1349 
1350 #ifdef DEBUG
1351 	print_hex_dump(KERN_ERR,
1352 		       "skcipher dec shdesc@" __stringify(__LINE__)": ",
1353 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1354 #endif
1355 }
1356 EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1357 
1358 /**
1359  * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
1360  * @desc: pointer to buffer used for descriptor construction
1361  * @cdata: pointer to block cipher transform definitions
1362  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
1363  */
1364 void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
1365 {
1366 	__be64 sector_size = cpu_to_be64(512);
1367 	u32 *key_jump_cmd;
1368 
1369 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1370 	/* Skip if already shared */
1371 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1372 				   JUMP_COND_SHRD);
1373 
1374 	/* Load class1 keys only */
1375 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1376 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1377 
1378 	/* Load sector size into CONTEXT1 reg at offset 40 (0x28) bytes */
1379 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1380 			   LDST_SRCDST_BYTE_CONTEXT |
1381 			   (0x28 << LDST_OFFSET_SHIFT));
1382 
1383 	set_jump_tgt_here(desc, key_jump_cmd);
1384 
1385 	/*
1386 	 * create sequence for loading the sector index
1387 	 * Upper 8B of IV - will be used as sector index
1388 	 * Lower 8B of IV - will be discarded
1389 	 */
1390 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1391 			(0x20 << LDST_OFFSET_SHIFT));
1392 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1393 
1394 	/* Load operation */
1395 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1396 			 OP_ALG_ENCRYPT);
1397 
1398 	/* Perform operation */
1399 	skcipher_append_src_dst(desc);
1400 
1401 #ifdef DEBUG
1402 	print_hex_dump(KERN_ERR,
1403 		       "xts skcipher enc shdesc@" __stringify(__LINE__) ": ",
1404 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1405 #endif
1406 }
1407 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
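
/*
 * Usage sketch (illustrative only): xts(aes). XTS keys the cipher with two
 * concatenated AES keys, so keylen covers both halves (e.g. 32 bytes for
 * two AES-128 keys). Variable names below are hypothetical.
 *
 *	u32 sh_desc[64];	// assumed upper bound on descriptor words
 *	struct alginfo cdata = {
 *		.algtype  = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
 *		.keylen   = xts_keylen,
 *		.key_virt = xts_key,
 *	};
 *
 *	cnstr_shdsc_xts_skcipher_encap(sh_desc, &cdata);
 */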
1408 
1409 /**
1410  * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
1411  * @desc: pointer to buffer used for descriptor construction
1412  * @cdata: pointer to block cipher transform definitions
1413  *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
1414  */
1415 void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
1416 {
1417 	__be64 sector_size = cpu_to_be64(512);
1418 	u32 *key_jump_cmd;
1419 
1420 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1421 	/* Skip if already shared */
1422 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1423 				   JUMP_COND_SHRD);
1424 
1425 	/* Load class1 keys only */
1426 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1427 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1428 
1429 	/* Load sector size into CONTEXT1 reg at offset 40 (0x28) bytes */
1430 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1431 			   LDST_SRCDST_BYTE_CONTEXT |
1432 			   (0x28 << LDST_OFFSET_SHIFT));
1433 
1434 	set_jump_tgt_here(desc, key_jump_cmd);
1435 
1436 	/*
1437 	 * create sequence for loading the sector index
1438 	 * Upper 8B of IV - will be used as sector index
1439 	 * Lower 8B of IV - will be discarded
1440 	 */
1441 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1442 			(0x20 << LDST_OFFSET_SHIFT));
1443 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1444 
1445 	/* Load operation */
1446 	append_dec_op1(desc, cdata->algtype);
1447 
1448 	/* Perform operation */
1449 	skcipher_append_src_dst(desc);
1450 
1451 #ifdef DEBUG
1452 	print_hex_dump(KERN_ERR,
1453 		       "xts skcipher dec shdesc@" __stringify(__LINE__) ": ",
1454 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1455 #endif
1456 }
1457 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
1458 
1459 MODULE_LICENSE("GPL");
1460 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1461 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
1462