1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * Shared descriptors for aead, skcipher algorithms
4 *
5 * Copyright 2016-2019, 2025 NXP
6 */
7
8 #include "compat.h"
9 #include "desc_constr.h"
10 #include "caamalg_desc.h"
11 #include <soc/fsl/caam-blob.h>
12
13 /*
14 * For aead functions, read payload and write payload,
15 * both of which are specified in req->src and req->dst
16 */
/*
 * Append the payload transfer pair for aead descriptors: first the
 * variable-length sequence FIFO STORE for the output message data, then
 * the sequence FIFO LOAD that feeds the input payload to both classes,
 * tagged with the caller-supplied message type and LASTBOTH.
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	u32 load_flags = FIFOLD_CLASS_BOTH | KEY_VLF | msg_type |
			 FIFOLD_TYPE_LASTBOTH;

	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, load_flags);
}
23
24 /* Set DK bit in class 1 operation if shared */
/*
 * Append the class 1 DECRYPT operation, setting the DK (Decrypt Key) bit
 * when the descriptor is entered with the key already shared.
 */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;
	u32 dec_op = type | OP_ALG_AS_INIT | OP_ALG_DECRYPT;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	/* Shared entry: take the DK variant; otherwise the plain one */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, dec_op);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, dec_op | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
44
45 /**
46 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
47 * (non-protocol) with no (null) encryption.
48 * @desc: pointer to buffer used for descriptor construction
49 * @adata: pointer to authentication transform definitions.
50 * A split key is required for SEC Era < 6; the size of the split key
51 * is specified in this case. Valid algorithm values - one of
52 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
53 * with OP_ALG_AAI_HMAC_PRECOMP.
54 * @icvsize: integrity check value (ICV) size (truncated or full)
55 * @era: SEC Era
56 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* SEC Era < 6: load the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* SEC Era >= 6: derive the split key via the DKP protocol */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below copy part of the descriptor into
	 * MATH3 and back; their targets are patched further down via
	 * set_move_tgt_here().
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation: HMAC over assoc + payload */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Patch the self-modifying MOVE commands to point here */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Null cipher: pass input straight through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV from the class 2 context register */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
123
124 /**
125 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
126 * (non-protocol) with no (null) decryption.
127 * @desc: pointer to buffer used for descriptor construction
128 * @adata: pointer to authentication transform definitions.
129 * A split key is required for SEC Era < 6; the size of the split key
130 * is specified in this case. Valid algorithm values - one of
131 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
132 * with OP_ALG_AAI_HMAC_PRECOMP.
133 * @icvsize: integrity check value (ICV) size (truncated or full)
134 * @era: SEC Era
135 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* SEC Era < 6: load the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* SEC Era >= 6: derive the split key via the DKP protocol */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation: HMAC verify (ICV_ON enables ICV checking) */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below copy part of the descriptor into
	 * MATH2 and back; their targets are patched further down via
	 * set_move_tgt_here().
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	/* Patch the self-modifying MOVE commands to point here */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Null cipher: pass input straight through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV for the hardware to verify against */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
210
/*
 * init_sh_desc_key_aead - start a shared descriptor and load both keys
 * @desc: descriptor buffer under construction
 * @cdata: cipher (class 1) key info
 * @adata: authentication (class 2) key info
 * @is_rfc3686: key blob carries a trailing CTR nonce (see below)
 * @nonce: rfc3686 nonce to load into CONTEXT1
 * @era: SEC Era; >= 6 derives the split key via DKP instead of loading it
 *
 * Initializes the header (context registers saved), then appends the
 * class 2 split key and class 1 cipher key, all guarded by a jump that
 * skips key loading when the descriptor is entered already shared.
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 * so strip the nonce from the length loaded as the cipher key.
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		/* SEC Era < 6: load the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* SEC Era >= 6: derive the split key via the DKP protocol */
		append_proto_dkp(desc, adata);
	}

	/* Cipher key goes to the class 1 key register */
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/*
	 * Load Counter into CONTEXT1 reg: the nonce is loaded as an
	 * immediate into the output FIFO and then moved to CONTEXT1 at
	 * byte offset 16.
	 */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
268
269 /**
270 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
271 * (non-protocol).
272 * @desc: pointer to buffer used for descriptor construction
273 * @cdata: pointer to block cipher transform definitions
274 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
275 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
276 * @adata: pointer to authentication transform definitions.
277 * A split key is required for SEC Era < 6; the size of the split key
278 * is specified in this case. Valid algorithm values - one of
279 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
280 * with OP_ALG_AAI_HMAC_PRECOMP.
281 * @ivsize: initialization vector size
282 * @icvsize: integrity check value (ICV) size (truncated or full)
283 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
284 * @nonce: pointer to rfc3686 nonce
285 * @ctx1_iv_off: IV offset in CONTEXT1 register
286 * @is_qi: true when called from caam/qi
287 * @era: SEC Era
288 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation: HMAC generation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (read from the sequence as a 4-byte word) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait (jump-to-next) until the assoclen load has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* IV travels in the sequence for QI; load it into CONTEXT1 */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/*
	 * Read and write assoclen bytes: assoclen comes from REG3 for QI
	 * and Era < 3, from DPOVRD otherwise.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output sequence (it is copied by caller) */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter (initial value 1, big endian) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation: encrypt payload */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes (remaining input sequence) */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV from the class 2 context register */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
362
363 /**
364 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
365 * (non-protocol).
366 * @desc: pointer to buffer used for descriptor construction
367 * @cdata: pointer to block cipher transform definitions
368 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
369 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
370 * @adata: pointer to authentication transform definitions.
371 * A split key is required for SEC Era < 6; the size of the split key
372 * is specified in this case. Valid algorithm values - one of
373 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
374 * with OP_ALG_AAI_HMAC_PRECOMP.
375 * @ivsize: initialization vector size
376 * @icvsize: integrity check value (ICV) size (truncated or full)
377 * @geniv: whether to generate Encrypted Chain IV
378 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
379 * @nonce: pointer to rfc3686 nonce
380 * @ctx1_iv_off: IV offset in CONTEXT1 register
381 * @is_qi: true when called from caam/qi
382 * @era: SEC Era
383 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation: HMAC verify (ICV_ON enables ICV checking) */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (read from the sequence as a 4-byte word) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait (jump-to-next) until the assoclen load has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* For geniv the IV is part of the input; don't load it here */
		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/*
	 * Read and write assoclen bytes: assoclen comes from REG3 for QI
	 * and Era < 3, from DPOVRD otherwise. For geniv the output also
	 * covers the IV, hence the + ivsize.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output sequence */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/*
	 * geniv: pull the IV from the sequence into CONTEXT1 and feed a
	 * copy of it to class 2 so it is covered by the MAC.
	 */
	if (geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter (initial value 1, big endian) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * Choose operation: with a non-zero IV offset (CTR modes) use a
	 * plain DECRYPT; otherwise append_dec_op1() handles the DK bit
	 * for AES when the descriptor is entered shared.
	 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes (remaining output sequence) */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV for the hardware to verify against */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
479
480 /**
481 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
482 * (non-protocol) with HW-generated initialization
483 * vector.
484 * @desc: pointer to buffer used for descriptor construction
485 * @cdata: pointer to block cipher transform definitions
486 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
487 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
488 * @adata: pointer to authentication transform definitions.
489 * A split key is required for SEC Era < 6; the size of the split key
490 * is specified in this case. Valid algorithm values - one of
491 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
492 * with OP_ALG_AAI_HMAC_PRECOMP.
493 * @ivsize: initialization vector size
494 * @icvsize: integrity check value (ICV) size (truncated or full)
495 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
496 * @nonce: pointer to rfc3686 nonce
497 * @ctx1_iv_off: IV offset in CONTEXT1 register
498 * @is_qi: true when called from caam/qi
499 * @era: SEC Era
500 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;
	u32 *wait_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (read from the sequence as a 4-byte word) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait (jump-to-next) until the assoclen load has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	/*
	 * rfc3686 does not generate the IV here; for QI it is loaded from
	 * the sequence instead, then we jump straight to the copy step.
	 */
	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV: random pad bytes routed into CONTEXT1 */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to the output FIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption: class 2 HMAC operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/*
	 * Read and write assoclen bytes: assoclen comes from REG3 for QI
	 * and Era < 3, from DPOVRD otherwise.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output sequence */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo so it is MAC'ed as well */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter (initial value 1, big endian) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation: encrypt payload */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv (it is already in CONTEXT1); skip it */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV from the class 2 context register */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
625
626 /**
627 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
628 * @desc: pointer to buffer used for descriptor construction
629 * @cdata: pointer to block cipher transform definitions
630 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
631 * @ivsize: initialization vector size
632 * @icvsize: integrity check value (ICV) size (truncated or full)
633 * @is_qi: true when called from caam/qi
634 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation: AES-GCM encrypt */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (read from the sequence as a 4-byte word) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait (jump-to-next) until the assoclen load has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* For QI the IV is in the input sequence; exclude it */
		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data on the output sequence */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/*
	 * jump to ICV writing: relative jump over the following commands;
	 * the QI path has two extra commands to skip, hence 4 vs 2.
	 */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data (it is the last input in this case) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* QI with no data at all still has to consume the IV */
	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV from the class 1 context register */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
752
753 /**
754 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
755 * @desc: pointer to buffer used for descriptor construction
756 * @cdata: pointer to block cipher transform definitions
757 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
758 * @ivsize: initialization vector size
759 * @icvsize: integrity check value (ICV) size (truncated or full)
760 * @is_qi: true when called from caam/qi
761 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation: AES-GCM decrypt with ICV checking */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (read from the sequence as a 4-byte word) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait (jump-to-next) until the assoclen load has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* For QI the IV travels in the input sequence */
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data on the output sequence */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store decrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV for the hardware to verify against */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
847
848 /**
849 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
850 * (non-protocol).
851 * @desc: pointer to buffer used for descriptor construction
852 * @cdata: pointer to block cipher transform definitions
853 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
854 * @ivsize: initialization vector size
855 * @icvsize: integrity check value (ICV) size (truncated or full)
856 * @is_qi: true when called from caam/qi
857 *
858 * Input sequence: AAD | PTXT
859 * Output sequence: AAD | CTXT | ICV
860 * AAD length (assoclen), which includes the IV length, is available in Math3.
861 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (loaded from the request context) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load above to settle before the FIFO loads */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt (stored right after the key material) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* VARSEQINLEN = assoclen - ivsize (REG3/assoclen includes the IV) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	/* VARSEQOUTLEN = assoclen */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip AAD */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Read cryptlen and set this value into VARSEQOUTLEN */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* If cryptlen is ZERO jump to AAD command */
	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* Read AAD data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
	/* VARSEQINLEN = cryptlen (copied from VARSEQOUTLEN computed above) */
	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Jump instructions to avoid double reading of AAD */
	skip_instructions = append_jump(desc, JUMP_TEST_ALL);

	/* There is no input data, cryptlen = 0 */
	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);

	/* Read AAD (last FIFO entry since there is no payload) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	set_jump_tgt_here(desc, skip_instructions);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
958
959 /**
960 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
961 * (non-protocol).
962 * @desc: pointer to buffer used for descriptor construction
963 * @cdata: pointer to block cipher transform definitions
964 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
965 * @ivsize: initialization vector size
966 * @icvsize: integrity check value (ICV) size (truncated or full)
967 * @is_qi: true when called from caam/qi
968 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation: decrypt with on-CHA ICV comparison */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen (loaded from the request context) */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load above to settle before the FIFO loads */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt (stored right after the key material) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* VARSEQINLEN = assoclen - ivsize (REG3/assoclen includes the IV) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	/* VARSEQOUTLEN = assoclen */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV (hardware verifies it against the computed tag) */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
1053
1054 /**
1055 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
1056 * (non-protocol).
1057 * @desc: pointer to buffer used for descriptor construction
1058 * @cdata: pointer to block cipher transform definitions
1059 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1060 * @ivsize: initialization vector size
1061 * @icvsize: integrity check value (ICV) size (truncated or full)
1062 * @is_qi: true when called from caam/qi
1063 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt (stored right after the key material) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* REG3 = assoclen + cryptlen (= seqinlen) */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer: copy descriptor words into MATH3, then write them back
	 * so the final MOVE below carries the runtime length.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes (GMAC: all data is AAD) */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	/* Patch targets of the self-modifying MOVE pair set up above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
1136
1137 /**
1138 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
1139 * (non-protocol).
1140 * @desc: pointer to buffer used for descriptor construction
1141 * @cdata: pointer to block cipher transform definitions
1142 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1143 * @ivsize: initialization vector size
1144 * @icvsize: integrity check value (ICV) size (truncated or full)
1145 * @is_qi: true when called from caam/qi
1146 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation: decrypt with on-CHA ICV comparison */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt (stored right after the key material) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* REG3 = assoclen + cryptlen (= seqoutlen) */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer: copy descriptor words into MATH3, then write them back
	 * so the final MOVE below carries the runtime length.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data (GMAC: all data is AAD) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	/* Patch targets of the self-modifying MOVE pair set up above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	/* Re-enable automatic info FIFO entries for the ICV read below */
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV (hardware verifies it against the computed tag) */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1224
1225 /**
1226 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
1227 * IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
1228 * descriptor (non-protocol).
1229 * @desc: pointer to buffer used for descriptor construction
1230 * @cdata: pointer to block cipher transform definitions
1231 * Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
1232 * OP_ALG_AAI_AEAD.
1233 * @adata: pointer to authentication transform definitions
1234 * Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
1235 * OP_ALG_AAI_AEAD.
1236 * @ivsize: initialization vector size
1237 * @icvsize: integrity check value (ICV) size (truncated or full)
1238 * @encap: true if encapsulation, false if decapsulation
1239 * @is_qi: true when called from caam/qi
1240 */
void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool encap,
			    const bool is_qi)
{
	u32 *key_jump_cmd, *wait_cmd;
	u32 nfifo;
	/* A non-standard IV size marks the IPsec (rfc7634/rfc7539esp) variant */
	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
			  CLASS_1 | KEY_DEST_CLASS_REG);

	/* For IPsec load the salt from keymat in the context register */
	if (is_ipsec)
		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
				   4 << LDST_OFFSET_SHIFT);

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 and 1 operations: Poly & ChaCha */
	if (encap) {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
	} else {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	}

	if (is_qi) {
		u32 *wait_load_cmd;
		/* For IPsec the IV goes after the 4-byte salt (offset 8) */
		u32 ctx1_iv_off = is_ipsec ? 8 : 4;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				4 << LDST_OFFSET_SHIFT);

		/* Wait for the DECO load above to settle before loading the IV */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				ctx1_iv_off << LDST_OFFSET_SHIFT);
	}

	/*
	 * MAGIC with NFIFO
	 * Read associated data from the input and send them to class1 and
	 * class2 alignment blocks. From class1 send data to output fifo and
	 * then write it to memory since we don't need to encrypt AD.
	 */
	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);

	/* VARSEQINLEN = VARSEQOUTLEN = assoclen */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);

	/*
	 * IPsec - copy IV at the output.
	 * NOTE(review): 0x2 << 25 selects a metadata subtype for the
	 * FIFO STORE - confirm against the SEC reference manual.
	 */
	if (is_ipsec)
		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
				      0x2 << 25);

	/* Sync point: wait before starting the payload cipher phase */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
			       JUMP_COND_NOP | JUMP_TEST_ALL);
	set_jump_tgt_here(desc, wait_cmd);

	if (encap) {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

		/* Write ICV */
		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
				 LDST_SRCDST_BYTE_CONTEXT);
	} else {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

		/* Load ICV for verification */
		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
	}

	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
1357
/*
 * For skcipher encrypt and decrypt, read the full payload from req->src
 * into the class 1 CHA and write the result to req->dst.
 */
static inline void skcipher_append_src_dst(u32 *desc)
{
	/* Both variable sequence lengths = remaining input length */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
1367
/**
 * cnstr_desc_skcipher_enc_dec - skcipher encryption/decryption job descriptor
 *	(non-shared), operating with a protected key
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions; protected_key_dma,
 *	plain_keylen and key_cmd_opt describe the key to load
 * @src: bus address of the input sequence
 * @dst: bus address of the output sequence
 * @data_sz: length of the data to process, in bytes
 * @in_options: option flags for the SEQ IN PTR command
 * @out_options: option flags for the SEQ OUT PTR command
 * @ivsize: initialization vector size; 0 when the mode takes no IV
 * @encrypt: true for encryption, false for decryption
 */
void cnstr_desc_skcipher_enc_dec(u32 * const desc, struct alginfo *cdata,
		dma_addr_t src, dma_addr_t dst, unsigned int data_sz,
		unsigned int in_options, unsigned int out_options,
		unsigned int ivsize, const bool encrypt)
{
	u32 options = cdata->algtype | OP_ALG_AS_INIT;

	if (encrypt)
		options |= OP_ALG_ENCRYPT;
	else
		options |= OP_ALG_DECRYPT;

	init_job_desc(desc, 0);

	/*
	 * NOTE(review): local JSL jump with offset 1 (i.e. to the next
	 * command) appears to serve as a serialization point before the
	 * KEY command - confirm against the SEC reference manual.
	 */
	append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
		    JUMP_COND_NOP | JUMP_TEST_ALL | 1);

	/* Load the protected key into the class 1 key register */
	append_key(desc, cdata->protected_key_dma, cdata->plain_keylen,
		   CLASS_1 | KEY_DEST_CLASS_REG | cdata->key_cmd_opt);

	append_seq_in_ptr(desc, src, data_sz, in_options);

	append_seq_out_ptr(desc, dst, data_sz, out_options);

	/* Load IV, if there is one */
	if (ivsize)
		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				LDST_CLASS_1_CCB);

	append_operation(desc, options);

	/* Read payload from src into class 1, write the result to dst */
	skcipher_append_src_dst(desc);

	/* Store (updated) IV */
	if (ivsize)
		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				 LDST_CLASS_1_CCB);

	print_hex_dump_debug("skcipher_enc_dec job desc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_desc_skcipher_enc_dec);
1411
/**
 * cnstr_desc_protected_blob_decap - job descriptor decapsulating a key blob
 *	into a protected (black) key
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to transform definitions; key_dma is the input blob of
 *	plain_keylen + CAAM_BLOB_OVERHEAD bytes, protected_key_dma the
 *	plain_keylen-byte output buffer, key_cmd_opt carries the EKT flag
 * @next_desc_addr: bus address of a job descriptor to jump to once the blob
 *	is decapsulated, or 0 to not chain another descriptor
 */
void cnstr_desc_protected_blob_decap(u32 * const desc, struct alginfo *cdata,
				     dma_addr_t next_desc_addr)
{
	u32 protected_store;

	init_job_desc(desc, 0);

	/* Load key modifier (KEYMOD defined elsewhere in this file) */
	append_load_as_imm(desc, KEYMOD, sizeof(KEYMOD) - 1,
			   LDST_CLASS_2_CCB | LDST_SRCDST_BYTE_KEY);

	/* Input sequence: the blob (key payload + blob overhead) */
	append_seq_in_ptr_intlen(desc, cdata->key_dma,
				 cdata->plain_keylen + CAAM_BLOB_OVERHEAD, 0);

	/* Output sequence: the decapsulated protected key */
	append_seq_out_ptr_intlen(desc, cdata->protected_key_dma,
				  cdata->plain_keylen, 0);

	/* Mirror the EKT bit from the KEY command options into the protocol */
	protected_store = OP_PCLID_BLOB | OP_PCL_BLOB_BLACK;
	if ((cdata->key_cmd_opt >> KEY_EKT_OFFSET) & 1)
		protected_store |= OP_PCL_BLOB_EKT;

	append_operation(desc, OP_TYPE_DECAP_PROTOCOL | protected_store);

	/* Optionally chain to the next job descriptor */
	if (next_desc_addr) {
		append_jump(desc, JUMP_TYPE_NONLOCAL | JUMP_TEST_ALL);
		append_ptr(desc, next_desc_addr);
	}

	print_hex_dump_debug("protected blob decap job desc@" __stringify(__LINE__) ":",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc,
			     desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_desc_protected_blob_decap);
1445
1446 /**
1447 * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
1448 * @desc: pointer to buffer used for descriptor construction
1449 * @cdata: pointer to block cipher transform definitions
1450 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1451 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1452 * - OP_ALG_ALGSEL_CHACHA20
1453 * @ivsize: initialization vector size
1454 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1455 * @ctx1_iv_off: IV offset in CONTEXT1 register
1456 */
void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
				unsigned int ivsize, const bool is_rfc3686,
				const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;
	u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
			   OP_ALG_ALGSEL_CHACHA20);

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * Load class1 key only. keylen is the length of the key material,
	 * plain_keylen the plaintext key length declared to the KEY command
	 * (they differ for protected keys).
	 */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->plain_keylen, CLASS_1 | KEY_DEST_CLASS_REG
			  | cdata->key_cmd_opt);

	/* Load nonce (stored right after the key) into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV, if there is one */
	if (ivsize)
		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				LDST_CLASS_1_CCB | (ctx1_iv_off <<
				LDST_OFFSET_SHIFT));

	/* Load counter (initial value 1) into CONTEXT1 reg, after the IV */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation (ChaCha20 needs AS_FINALIZE as well) */
	if (is_chacha20)
		options |= OP_ALG_AS_FINALIZE;
	append_operation(desc, options);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store (updated) IV for chaining; ChaCha20 keeps no output IV */
	if (!is_chacha20 && ivsize)
		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
				 LDST_OFFSET_SHIFT));

	print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
1522
1523 /**
1524 * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
1525 * @desc: pointer to buffer used for descriptor construction
1526 * @cdata: pointer to block cipher transform definitions
1527 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1528 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1529 * - OP_ALG_ALGSEL_CHACHA20
1530 * @ivsize: initialization vector size
1531 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1532 * @ctx1_iv_off: IV offset in CONTEXT1 register
1533 */
void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
				unsigned int ivsize, const bool is_rfc3686,
				const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;
	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
			   OP_ALG_ALGSEL_CHACHA20);

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * Load class1 key only. keylen is the length of the key material,
	 * plain_keylen the plaintext key length declared to the KEY command
	 * (they differ for protected keys).
	 */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->plain_keylen, CLASS_1 | KEY_DEST_CLASS_REG
			  | cdata->key_cmd_opt);

	/* Load nonce (stored right after the key) into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV, if there is one */
	if (ivsize)
		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				LDST_CLASS_1_CCB | (ctx1_iv_off <<
				LDST_OFFSET_SHIFT));

	/* Load counter (initial value 1) into CONTEXT1 reg, after the IV */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * Choose operation: non-zero IV offset means CTR mode (plain
	 * decrypt); otherwise use append_dec_op1(), which sets the DK
	 * (decrypt key) bit for AES when the descriptor is shared.
	 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store (updated) IV for chaining; ChaCha20 keeps no output IV */
	if (!is_chacha20 && ivsize)
		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
				 LDST_OFFSET_SHIFT));

	print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1600
1601 /**
1602 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
1603 * @desc: pointer to buffer used for descriptor construction
1604 * @cdata: pointer to block cipher transform definitions
1605 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1606 */
void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only (XTS uses two concatenated keys) */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index / 16B tweak value
	 * Lower 8B of IV - sector index / tweak lower half (ctx offset 0x20)
	 * Upper 8B of IV - upper half of 16B tweak (ctx offset 0x30)
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x30 << LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store lower 8B and upper 8B of IV */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x30 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
			     desc, desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
1662
1663 /**
1664 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
1665 * @desc: pointer to buffer used for descriptor construction
1666 * @cdata: pointer to block cipher transform definitions
1667 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1668 */
void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only (XTS uses two concatenated keys) */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index / 16B tweak value
	 * Lower 8B of IV - sector index / tweak lower half (ctx offset 0x20)
	 * Upper 8B of IV - upper half of 16B tweak (ctx offset 0x30)
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x30 << LDST_OFFSET_SHIFT));
	/* Load operation (sets the AES DK bit when descriptor is shared) */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store lower 8B and upper 8B of IV */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x30 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
			     desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
1722
1723 MODULE_LICENSE("GPL");
1724 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1725 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
1726