/* SPDX-License-Identifier: BSD-3-Clause */
/* Copyright(c) 2007-2025 Intel Corporation */

/**
 ***************************************************************************
 * @file lac_sym_qat_cipher.c	QAT-related support functions for Cipher
 *
 * @ingroup LacSymQat_Cipher
 *
 * @description Functions to support the QAT related operations for Cipher
 ***************************************************************************/

/*
 *******************************************************************************
 * Include public/global header files
 *******************************************************************************
 */

#include "cpa.h"
#include "icp_accel_devices.h"
#include "icp_adf_debug.h"
#include "lac_sym_qat.h"
#include "lac_sym_qat_cipher.h"
#include "lac_mem.h"
#include "lac_common.h"
#include "cpa_cy_sym.h"
#include "lac_sym_cipher_defs.h"
#include "icp_qat_hw.h"
#include "icp_qat_fw_la.h"
#include "sal_hw_gen.h"

#define LAC_UNUSED_POS_MASK 0x3

/*****************************************************************************
 * Internal data
 *****************************************************************************/

typedef enum _icp_qat_hw_key_depend {
	IS_KEY_DEP_NO = 0,
	IS_KEY_DEP_YES,
} icp_qat_hw_key_depend;
/* LAC_CIPHER_IS_XTS_MODE */
static const uint8_t key_size_xts[] = {
	[ICP_QAT_HW_AES_128_XTS_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_256_XTS_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};
/* LAC_CIPHER_IS_AES */
static const uint8_t key_size_aes[] = {
	[ICP_QAT_HW_AES_128_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_192_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES192,
	[ICP_QAT_HW_AES_256_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};
/* LAC_CIPHER_IS_AES_F8 */
static const uint8_t key_size_f8[] = {
	[ICP_QAT_HW_AES_128_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_192_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES192,
	[ICP_QAT_HW_AES_256_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};
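
/*
 * Usage note (illustrative only): the three tables above are indexed directly
 * by the session key length in bytes, as done in LacSymQat_CipherGetCfgData()
 * below, e.g.
 *
 *	algo = key_size_aes[pSession->cipherKeyLenInBytes];
 *
 * so a 32-byte AES key selects ICP_QAT_HW_CIPHER_ALGO_AES256. Entries not
 * listed above remain 0, which is assumed here to correspond to
 * ICP_QAT_HW_CIPHER_ALGO_NULL for unsupported key lengths.
 */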

/* This array must be kept aligned with CpaCySymCipherAlgorithm enum but
 * offset by -1 as that enum starts at 1. LacSymQat_CipherGetCfgData()
 * below relies on that alignment and uses that enum -1 to index into this
 * array.
 */
typedef struct _icp_qat_hw_cipher_info {
	icp_qat_hw_cipher_algo_t algorithm;
	icp_qat_hw_cipher_mode_t mode;
	icp_qat_hw_cipher_convert_t key_convert[2];
	icp_qat_hw_cipher_dir_t dir[2];
	icp_qat_hw_key_depend isKeyLenDepend;
	const uint8_t *pAlgByKeySize;
} icp_qat_hw_cipher_info;
static const icp_qat_hw_cipher_info icp_qat_alg_info[] = {
	/* CPA_CY_SYM_CIPHER_NULL */
	{
	    ICP_QAT_HW_CIPHER_ALGO_NULL,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_ARC4 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_ARC4,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_AES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* CTR-based modes use the forward key for both directions,
	     * so no key conversion is required
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CCM */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* CTR-based modes use the forward key for both directions,
	     * so no key conversion is required
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_GCM */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* CTR-based modes use the forward key for both directions,
	     * so no key conversion is required
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_DES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_DES,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_DES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_DES,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_KASUMI_F8 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_KASUMI,
	    ICP_QAT_HW_CIPHER_F8_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SNOW3G_UEA2 */
	{
	    /* The KEY_CONVERT bit has to be set for Snow_3G operation */
	    ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_AES_F8 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_F8_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_f8,
	},
	/* CPA_CY_SYM_CIPHER_AES_XTS */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_XTS_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_xts,
	},
	/* CPA_CY_SYM_CIPHER_ZUC_EEA3 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_CHACHA */
	{
	    ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
};
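
/*
 * Example (illustrative only): an AES-256-CBC decrypt session resolves its
 * hardware configuration as
 *
 *	info = &icp_qat_alg_info[CPA_CY_SYM_CIPHER_AES_CBC - 1];
 *	algorithm   = info->pAlgByKeySize[32];  // ICP_QAT_HW_CIPHER_ALGO_AES256
 *	key_convert = info->key_convert[ICP_QAT_HW_CIPHER_DECRYPT];
 *	dir         = info->dir[ICP_QAT_HW_CIPHER_DECRYPT];
 *
 * which is what LacSymQat_CipherGetCfgData() below does; this sketch assumes,
 * as that function does, that ICP_QAT_HW_CIPHER_DECRYPT is usable directly as
 * an array index.
 */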

/*****************************************************************************
 * Internal functions
 *****************************************************************************/

void
LacSymQat_CipherCtrlBlockWrite(icp_qat_la_bulk_req_ftr_t *pMsg,
			       Cpa32U cipherAlgorithm,
			       Cpa32U targetKeyLenInBytes,
			       Cpa32U sliceType,
			       icp_qat_fw_slice_t nextSlice,
			       Cpa8U cipherCfgOffsetInQuadWord)
{
	icp_qat_fw_cipher_cd_ctrl_hdr_t *cd_ctrl =
	    (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pMsg->cd_ctrl);

	/* cipher_padding_sz is nonzero for F8 mode only */
	cd_ctrl->cipher_padding_sz = 0;

	/* Special handling of the AES-192 key for the UCS slice.
	 * UCS requires it to be 32 bytes - set that as targetKeyLen in this
	 * case, and add padding. It makes no sense to force applications to
	 * provide such a key length for a couple of reasons:
	 * 1. It would not be possible to distinguish between AES 192 and 256
	 *    based on key length only
	 * 2. Only some AES modes use the UCS slice, so the application would
	 *    have to know which ones */
	if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType &&
	    ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) {
		targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ;
	}

	switch (cipherAlgorithm) {
	/* Base key is not passed down to QAT in the case of ARC4 or NULL */
	case CPA_CY_SYM_CIPHER_ARC4:
	case CPA_CY_SYM_CIPHER_NULL:
		cd_ctrl->cipher_key_sz = 0;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_KASUMI_F8_KEY_SZ);
		cd_ctrl->cipher_padding_sz =
		    ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR;
		break;
	/* For Snow3G UEA2 the content descriptor key size is the
	   key size plus the IV size */
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
					   ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		break;
	case CPA_CY_SYM_CIPHER_AES_F8:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes);
		cd_ctrl->cipher_padding_sz =
		    (2 * ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR);
		break;
	/* For ZUC EEA3 the content descriptor key size is the
	   key size plus the IV size */
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
					   ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
		break;
	default:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes);
	}

	cd_ctrl->cipher_state_sz = LAC_BYTES_TO_QUADWORDS(
	    LacSymQat_CipherIvSizeBytesGet(cipherAlgorithm));

	cd_ctrl->cipher_cfg_offset = cipherCfgOffsetInQuadWord;

	ICP_QAT_FW_COMN_NEXT_ID_SET(cd_ctrl, nextSlice);
	ICP_QAT_FW_COMN_CURR_ID_SET(cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
}
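
/*
 * Worked example (illustrative only): for an AES-256-CBC session the default
 * branch above gives cipher_key_sz = LAC_BYTES_TO_QUADWORDS(32) = 4 and
 * cipher_state_sz = LAC_BYTES_TO_QUADWORDS(16) = 2, since the CBC IV equals
 * the 16-byte AES block size; cipher_padding_sz stays 0 because padding is
 * only used by the F8 modes.
 */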

void
LacSymQat_CipherGetCfgData(lac_session_desc_t *pSession,
			   icp_qat_hw_cipher_algo_t *pAlgorithm,
			   icp_qat_hw_cipher_mode_t *pMode,
			   icp_qat_hw_cipher_dir_t *pDir,
			   icp_qat_hw_cipher_convert_t *pKey_convert)
{
	sal_crypto_service_t *pService =
	    (sal_crypto_service_t *)pSession->pInstance;

	int cipherIdx = 0;
	icp_qat_hw_cipher_dir_t cipherDirection = 0;

	/* Set defaults */
	*pKey_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	*pAlgorithm = ICP_QAT_HW_CIPHER_ALGO_NULL;
	*pMode = ICP_QAT_HW_CIPHER_ECB_MODE;
	*pDir = ICP_QAT_HW_CIPHER_ENCRYPT;

	/* Offset the index as the CpaCySymCipherAlgorithm enum starts from 1,
	 * not from 0 */
	cipherIdx = pSession->cipherAlgorithm - 1;
	cipherDirection =
	    pSession->cipherDirection == CPA_CY_SYM_CIPHER_DIRECTION_ENCRYPT ?
	    ICP_QAT_HW_CIPHER_ENCRYPT :
	    ICP_QAT_HW_CIPHER_DECRYPT;

	/* Boundary check against the last value in the algorithm enum */
	if (!(pSession->cipherAlgorithm <= CPA_CY_SYM_CIPHER_SM4_CTR)) {
		QAT_UTILS_LOG("Invalid cipherAlgorithm value\n");
		return;
	}

	if (!(cipherDirection <= ICP_QAT_HW_CIPHER_DECRYPT)) {
		QAT_UTILS_LOG("Invalid cipherDirection value\n");
		return;
	}

	*pAlgorithm = icp_qat_alg_info[cipherIdx].algorithm;
	*pMode = icp_qat_alg_info[cipherIdx].mode;
	*pDir = icp_qat_alg_info[cipherIdx].dir[cipherDirection];
	*pKey_convert =
	    icp_qat_alg_info[cipherIdx].key_convert[cipherDirection];

	if (IS_KEY_DEP_NO != icp_qat_alg_info[cipherIdx].isKeyLenDepend) {
		*pAlgorithm = icp_qat_alg_info[cipherIdx]
				  .pAlgByKeySize[pSession->cipherKeyLenInBytes];
	}

	/* CCP and AES_GCM single pass, despite being limited to CTR/AEAD
	 * mode, support both encrypt/decrypt directions - this is because of
	 * the differences in the hash computation/verification paths in
	 * encrypt/decrypt modes respectively.
	 * By default CCP is set as CTR mode. Set AEAD mode for AES_GCM.
	 */
	if (SPC == pSession->singlePassState) {
		if (LAC_CIPHER_IS_GCM(pSession->cipherAlgorithm))
			*pMode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		else if (isCyGen4x(pService) &&
			 LAC_CIPHER_IS_CCM(pSession->cipherAlgorithm))
			*pMode = ICP_QAT_HW_CIPHER_CCM_MODE;

		if (cipherDirection == ICP_QAT_HW_CIPHER_DECRYPT)
			*pDir = ICP_QAT_HW_CIPHER_DECRYPT;
	}
}

void
LacSymQat_CipherHwBlockPopulateCfgData(lac_session_desc_t *pSession,
				       const void *pCipherHwBlock,
				       Cpa32U *pSizeInBytes)
{
	icp_qat_hw_cipher_algo_t algorithm = ICP_QAT_HW_CIPHER_ALGO_NULL;
	icp_qat_hw_cipher_mode_t mode = ICP_QAT_HW_CIPHER_ECB_MODE;
	icp_qat_hw_cipher_dir_t dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	icp_qat_hw_cipher_convert_t key_convert;
	icp_qat_hw_cipher_config_t *pCipherConfig =
	    (icp_qat_hw_cipher_config_t *)pCipherHwBlock;
	icp_qat_hw_ucs_cipher_config_t *pUCSCipherConfig =
	    (icp_qat_hw_ucs_cipher_config_t *)pCipherHwBlock;

	Cpa32U val, reserved;
	Cpa32U aed_hash_cmp_length = 0;

	*pSizeInBytes = 0;

	LacSymQat_CipherGetCfgData(
	    pSession, &algorithm, &mode, &dir, &key_convert);

	/* Build the cipher config into the hardware setup block */
	if (SPC == pSession->singlePassState) {
		aed_hash_cmp_length = pSession->hashResultSize;
		reserved = ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
		    pSession->aadLenInBytes);
	} else {
		reserved = 0;
	}

	val = ICP_QAT_HW_CIPHER_CONFIG_BUILD(
	    mode, algorithm, key_convert, dir, aed_hash_cmp_length);

	/* The UCS slice has a 128-bit configuration register.
	   The legacy cipher slice has a 64-bit config register */
	if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == pSession->cipherSliceType) {
		pUCSCipherConfig->val = val;
		pUCSCipherConfig->reserved[0] = reserved;
		pUCSCipherConfig->reserved[1] = 0;
		pUCSCipherConfig->reserved[2] = 0;
		*pSizeInBytes = sizeof(icp_qat_hw_ucs_cipher_config_t);
	} else {
		pCipherConfig->val = val;
		pCipherConfig->reserved = reserved;
		*pSizeInBytes = sizeof(icp_qat_hw_cipher_config_t);
	}
}
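
/*
 * Note (summary of the code above, not additional behaviour): for a
 * single-pass AES-GCM session the config word packs the AEAD mode, the AES
 * algorithm selected by key length, the key-convert flag, the direction and
 * the ICV length (pSession->hashResultSize), while the upper/reserved word
 * carries the AAD length; non single-pass sessions leave that upper word 0.
 */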

void
LacSymQat_CipherHwBlockPopulateKeySetup(
    lac_session_desc_t *pSessionDesc,
    const CpaCySymCipherSetupData *pCipherSetupData,
    Cpa32U targetKeyLenInBytes,
    Cpa32U sliceType,
    const void *pCipherHwBlock,
    Cpa32U *pSizeInBytes)
{
	Cpa8U *pCipherKey = (Cpa8U *)pCipherHwBlock;
	Cpa32U actualKeyLenInBytes = pCipherSetupData->cipherKeyLenInBytes;

	*pSizeInBytes = 0;

	/* The key is copied into the content descriptor for all cases except
	 * ARC4 and the NULL cipher */
	if (!(LAC_CIPHER_IS_ARC4(pCipherSetupData->cipherAlgorithm) ||
	      LAC_CIPHER_IS_NULL(pCipherSetupData->cipherAlgorithm))) {
		/* Special handling of the AES-192 key for the UCS slice.
		 * UCS requires it to be 32 bytes - set that as targetKeyLen
		 * in this case, and add padding. It makes no sense to force
		 * applications to provide such a key length for a couple of
		 * reasons:
		 * 1. It would not be possible to distinguish between AES 192
		 *    and 256 based on key length only
		 * 2. Only some AES modes use the UCS slice, so the
		 *    application would have to know which ones */
		if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType &&
		    ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) {
			targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ;
		}

		/* Set the cipher key field in the cipher block */
		memcpy(pCipherKey,
		       pCipherSetupData->pCipherKey,
		       actualKeyLenInBytes);
		/* Pad the key with 0's if required */
		if (0 < (targetKeyLenInBytes - actualKeyLenInBytes)) {
			LAC_OS_BZERO(pCipherKey + actualKeyLenInBytes,
				     targetKeyLenInBytes -
					 actualKeyLenInBytes);
		}
		*pSizeInBytes += targetKeyLenInBytes;

		switch (pCipherSetupData->cipherAlgorithm) {
		/* For Kasumi in F8 mode the cipher key is concatenated with
		 * the cipher key XOR-ed with the key modifier (CK||CK^KM) */
		case CPA_CY_SYM_CIPHER_KASUMI_F8: {
			Cpa32U wordIndex = 0;
			Cpa32U *pu32CipherKey =
			    (Cpa32U *)pCipherSetupData->pCipherKey;
			Cpa32U *pTempKey =
			    (Cpa32U *)(pCipherKey + targetKeyLenInBytes);

			/* XOR the key with the KASUMI F8 key modifier at the
			 * 4-byte level */
			for (wordIndex = 0; wordIndex <
			     LAC_BYTES_TO_LONGWORDS(targetKeyLenInBytes);
			     wordIndex++) {
				pTempKey[wordIndex] = pu32CipherKey[wordIndex] ^
				    LAC_CIPHER_KASUMI_F8_KEY_MODIFIER_4_BYTES;
			}

			*pSizeInBytes += targetKeyLenInBytes;

			/* also add padding for F8 */
			*pSizeInBytes += LAC_QUADWORDS_TO_BYTES(
			    ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR);
			LAC_OS_BZERO((Cpa8U *)pTempKey + targetKeyLenInBytes,
				     LAC_QUADWORDS_TO_BYTES(
					 ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR));
		} break;
		/* For AES in F8 mode the cipher key is concatenated with the
		 * cipher key XOR-ed with the key mask (CK||CK^KM) */
		case CPA_CY_SYM_CIPHER_AES_F8: {
			Cpa32U index = 0;
			Cpa8U *pTempKey =
			    pCipherKey + (targetKeyLenInBytes / 2);
			*pSizeInBytes += targetKeyLenInBytes;
			/* XOR the key with the key mask */
			for (index = 0; index < targetKeyLenInBytes; index++) {
				pTempKey[index] =
				    pCipherKey[index] ^ pTempKey[index];
			}
			pTempKey = (pCipherKey + targetKeyLenInBytes);
			/* also add padding for AES F8 */
			*pSizeInBytes += 2 * targetKeyLenInBytes;
			LAC_OS_BZERO(pTempKey, 2 * targetKeyLenInBytes);
		} break;
		case CPA_CY_SYM_CIPHER_SNOW3G_UEA2: {
			/* For Snow3G zero the area after the key for the FW */
			LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes,
				     ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);

			*pSizeInBytes += ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		} break;
		case CPA_CY_SYM_CIPHER_ZUC_EEA3: {
			/* For ZUC zero the area after the key for the FW */
			LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes,
				     ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

			*pSizeInBytes += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		} break;
		case CPA_CY_SYM_CIPHER_AES_XTS: {
			/* For AES in XTS mode the cipher key is concatenated
			 * with a second cipher key which is used for the tweak
			 * calculation (CK1||CK2). For decryption the cipher
			 * key needs to be converted to the reverse key. */
			if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType) {
				Cpa32U key_len =
				    pCipherSetupData->cipherKeyLenInBytes / 2;
				memcpy(pSessionDesc->cipherAesXtsKey1Forward,
				       pCipherSetupData->pCipherKey,
				       key_len);

				qatUtilsAESKeyExpansionForward(
				    pSessionDesc->cipherAesXtsKey1Forward,
				    key_len,
				    (uint32_t *)
					pSessionDesc->cipherAesXtsKey1Reverse);

				memcpy(pSessionDesc->cipherAesXtsKey2,
				       pCipherSetupData->pCipherKey + key_len,
				       key_len);

				if (CPA_CY_SYM_CIPHER_DIRECTION_DECRYPT ==
				    pCipherSetupData->cipherDirection) {
					memcpy(pCipherKey,
					       pSessionDesc
						   ->cipherAesXtsKey1Reverse,
					       key_len);
				} else {
					memcpy(pCipherKey,
					       pSessionDesc
						   ->cipherAesXtsKey1Forward,
					       key_len);
				}
			}
		} break;
		default:
			break;
		}
	}
}
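
/*
 * Summary of the key blocks built above (derived from the cases in
 * LacSymQat_CipherHwBlockPopulateKeySetup):
 *   - Kasumi F8:  CK || CK^KM || F8 register padding
 *   - AES F8:     CK || CK^KM (per the case comment) || 2*keylen of zeros
 *   - Snow3G/ZUC: key || zeroed IV-sized area for the FW
 *   - AES XTS on the UCS slice: forward or reverse key 1 (by direction),
 *     with key 1 (both forms) and key 2 also cached in the session for
 *     tweak computation
 */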

/*****************************************************************************
 * External functions
 *****************************************************************************/

Cpa8U
LacSymQat_CipherBlockSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa8U blockSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		blockSize = LAC_CIPHER_ARC4_BLOCK_LEN_BYTES;
		break;
	/* Handle AES or AES_F8 */
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_AES_CBC:
	case CPA_CY_SYM_CIPHER_AES_CTR:
	case CPA_CY_SYM_CIPHER_AES_CCM:
	case CPA_CY_SYM_CIPHER_AES_GCM:
	case CPA_CY_SYM_CIPHER_AES_XTS:
	case CPA_CY_SYM_CIPHER_AES_F8:
		blockSize = ICP_QAT_HW_AES_BLK_SZ;
		break;
	/* Handle DES */
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_DES_CBC:
		blockSize = ICP_QAT_HW_DES_BLK_SZ;
		break;
	/* Handle TRIPLE DES */
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_CBC:
	case CPA_CY_SYM_CIPHER_3DES_CTR:
		blockSize = ICP_QAT_HW_3DES_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		blockSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		blockSize = ICP_QAT_HW_SNOW_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		blockSize = ICP_QAT_HW_ZUC_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_NULL:
		blockSize = LAC_CIPHER_NULL_BLOCK_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		blockSize = ICP_QAT_HW_CHACHAPOLY_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_SM4_CBC:
	case CPA_CY_SYM_CIPHER_SM4_CTR:
		blockSize = ICP_QAT_HW_SM4_BLK_SZ;
		break;
	default:
		QAT_UTILS_LOG("Algorithm not supported in Cipher\n");
	}
	return blockSize;
}

Cpa32U
LacSymQat_CipherIvSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa32U ivSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		ivSize = LAC_CIPHER_ARC4_STATE_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		ivSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		ivSize = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		ivSize = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		ivSize = ICP_QAT_HW_CHACHAPOLY_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_NULL:
		/* For all ECB modes the IV size is 0 */
		break;
	default:
		ivSize = LacSymQat_CipherBlockSizeBytesGet(cipherAlgorithm);
	}
	return ivSize;
}

inline CpaStatus
LacSymQat_CipherRequestParamsPopulate(lac_session_desc_t *pSessionDesc,
				      icp_qat_fw_la_bulk_req_t *pReq,
				      Cpa32U cipherOffsetInBytes,
				      Cpa32U cipherLenInBytes,
				      Cpa64U ivBufferPhysAddr,
				      Cpa8U *pIvBufferVirt)
{
	icp_qat_fw_la_cipher_req_params_t *pCipherReqParams;
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherCdCtrlHdr;
	icp_qat_fw_serv_specif_flags *pCipherSpecificFlags;
	Cpa32U usedBufSize = 0;
	Cpa32U totalBufSize = 0;

	pCipherReqParams = (icp_qat_fw_la_cipher_req_params_t
				*)((Cpa8U *)&(pReq->serv_specif_rqpars) +
				   ICP_QAT_FW_CIPHER_REQUEST_PARAMETERS_OFFSET);
	pCipherCdCtrlHdr = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pReq->cd_ctrl);
	pCipherSpecificFlags = &(pReq->comn_hdr.serv_specif_flags);

	pCipherReqParams->cipher_offset = cipherOffsetInBytes;
	pCipherReqParams->cipher_length = cipherLenInBytes;

	/* Don't copy the buffer into the message if
	 * it is too big for the cipher_IV_array
	 * OR if the FW needs to update it
	 * OR if there is no buffer supplied
	 * OR if this is the last partial
	 */
	if ((pCipherCdCtrlHdr->cipher_state_sz >
	     LAC_SYM_QAT_HASH_IV_REQ_MAX_SIZE_QW) ||
	    (ICP_QAT_FW_LA_UPDATE_STATE_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_UPDATE_STATE) ||
	    (pIvBufferVirt == NULL) ||
	    (ICP_QAT_FW_LA_PARTIAL_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_PARTIAL_END)) {
		/* Populate the field with a ptr to the flat buffer */
		pCipherReqParams->u.s.cipher_IV_ptr = ivBufferPhysAddr;
		pCipherReqParams->u.s.resrvd1 = 0;
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_64BIT_PTR);
	} else {
		/* Populate the field with the contents of the buffer,
		 * zero the field first as the data may be smaller than the
		 * field */

		/* In the case of XTS mode using the UCS slice, always encrypt
		 * the embedded IV. The IV provided by the user needs to be
		 * encrypted to calculate the initial tweak; use
		 * pCipherReqParams->u.cipher_IV_array as the destination
		 * buffer for the tweak value */
		if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE ==
			pSessionDesc->cipherSliceType &&
		    LAC_CIPHER_IS_XTS_MODE(pSessionDesc->cipherAlgorithm)) {
			memset(pCipherReqParams->u.cipher_IV_array,
			       0,
			       LAC_LONGWORDS_TO_BYTES(
				   ICP_QAT_FW_NUM_LONGWORDS_4));
			qatUtilsAESEncrypt(
			    pSessionDesc->cipherAesXtsKey2,
			    pSessionDesc->cipherKeyLenInBytes / 2,
			    pIvBufferVirt,
			    (Cpa8U *)pCipherReqParams->u.cipher_IV_array);
		} else {
			totalBufSize =
			    LAC_LONGWORDS_TO_BYTES(ICP_QAT_FW_NUM_LONGWORDS_4);
			usedBufSize = LAC_QUADWORDS_TO_BYTES(
			    pCipherCdCtrlHdr->cipher_state_sz);
			/* Only initialise the unused buffer if applicable */
			if (usedBufSize < totalBufSize) {
				memset(
				    (&pCipherReqParams->u.cipher_IV_array
					 [usedBufSize & LAC_UNUSED_POS_MASK]),
				    0,
				    totalBufSize - usedBufSize);
			}
			memcpy(pCipherReqParams->u.cipher_IV_array,
			       pIvBufferVirt,
			       usedBufSize);
		}
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
	}

	return CPA_STATUS_SUCCESS;
}
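
/*
 * The routine below is the standard RC4 key-scheduling algorithm (KSA): it
 * initialises the key matrix to the identity permutation, then swaps entries
 * while stepping through the key (repeating the key as needed). The two bytes
 * appended after the permutation are the initial i and j indices expected by
 * the QAT firmware.
 */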

void
LacSymQat_CipherArc4StateInit(const Cpa8U *pKey,
			      Cpa32U keyLenInBytes,
			      Cpa8U *pArc4CipherState)
{
	Cpa32U i = 0;
	Cpa32U j = 0;
	Cpa32U k = 0;

	for (i = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES; ++i) {
		pArc4CipherState[i] = (Cpa8U)i;
	}

	for (i = 0, k = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES;
	     ++i, ++k) {
		Cpa8U swap = 0;

		if (k >= keyLenInBytes)
			k -= keyLenInBytes;

		j = (j + pArc4CipherState[i] + pKey[k]);
		if (j >= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES)
			j %= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES;

		/* Swap state[i] & state[j] */
		swap = pArc4CipherState[i];
		pArc4CipherState[i] = pArc4CipherState[j];
		pArc4CipherState[j] = swap;
	}

	/* Initialise i & j values for QAT */
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES] = 0;
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES + 1] = 0;
}

/* Update the cipher_key_sz in the Request cache prepared and stored
 * in the session */
void
LacSymQat_CipherXTSModeUpdateKeyLen(lac_session_desc_t *pSessionDesc,
				    Cpa32U newKeySizeInBytes)
{
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherControlBlock = NULL;

	pCipherControlBlock = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(
	    pSessionDesc->reqCacheFtr.cd_ctrl);

	pCipherControlBlock->cipher_key_sz =
	    LAC_BYTES_TO_QUADWORDS(newKeySizeInBytes);
}