/linux/arch/arm/crypto/

aes-neonbs-glue.c
  48 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
  98 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
  99 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
  103 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
  110 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
  155 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in cbc_encrypt()
  161 crypto_xor_cpy(dst, src, prev, AES_BLOCK_SIZE); in cbc_encrypt()
  165 src += AES_BLOCK_SIZE; in cbc_encrypt()
  166 dst += AES_BLOCK_SIZE; in cbc_encrypt()
  167 nbytes -= AES_BLOCK_SIZE; in cbc_encrypt()
  [all …]
|
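The cbc_encrypt() hits above (lines 155-167) show the plain-C shape of CBC chaining: XOR the previous ciphertext block (or the IV) into the next plaintext block with crypto_xor_cpy(), encrypt, and advance all three pointers by AES_BLOCK_SIZE. Below is a minimal sketch of that pattern outside the kernel; cbc_encrypt_blocks and block_enc_fn are illustrative names, and the actual block cipher is abstracted behind a callback rather than the NEON/CE primitive.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* One-block cipher callback standing in for the asm primitive. */
    typedef void (*block_enc_fn)(const void *key, const uint8_t in[AES_BLOCK_SIZE],
                                 uint8_t out[AES_BLOCK_SIZE]);

    /* CBC-encrypt whole blocks; returns the number of tail bytes left over. */
    static size_t cbc_encrypt_blocks(block_enc_fn enc, const void *key,
                                     uint8_t *dst, const uint8_t *src,
                                     size_t nbytes, uint8_t iv[AES_BLOCK_SIZE])
    {
        const uint8_t *prev = iv;            /* IV, then previous ciphertext block */
        uint8_t buf[AES_BLOCK_SIZE];

        while (nbytes >= AES_BLOCK_SIZE) {
            for (int i = 0; i < AES_BLOCK_SIZE; i++)   /* like crypto_xor_cpy() */
                buf[i] = src[i] ^ prev[i];
            enc(key, buf, dst);              /* dst = E_K(src XOR prev) */
            prev = dst;
            src += AES_BLOCK_SIZE;
            dst += AES_BLOCK_SIZE;
            nbytes -= AES_BLOCK_SIZE;
        }
        if (prev != iv)
            memcpy(iv, prev, AES_BLOCK_SIZE);   /* chain across calls, like walk.iv */
        return nbytes;                          /* partial tail is the caller's problem */
    }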
aes-ce-glue.c
  54 u8 b[AES_BLOCK_SIZE];
  177 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
  182 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
  197 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
  202 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
  215 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
  221 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
  245 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
  251 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
  271 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
  [all …]
|
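Line 271's cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2 counts how many leading blocks can be run as ordinary CBC before the final two (possibly short) blocks are handed to the ciphertext-stealing step. A sketch of just that arithmetic, with DIV_ROUND_UP spelled out; cts_cbc_blocks is an illustrative helper, not a kernel function.

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /*
     * The CTS tail covers the last 16..31 bytes, so everything before it is
     * plain CBC:
     *   cryptlen = 16  ->  -1  (single block: no CBC phase, nothing to steal)
     *   cryptlen = 31  ->   0  (both blocks go to the CTS tail)
     *   cryptlen = 48  ->   1  (one CBC block, then a 32-byte CTS tail)
     *   cryptlen = 49  ->   2  (two CBC blocks, then a 17-byte CTS tail)
     */
    static int cts_cbc_blocks(unsigned int cryptlen)
    {
        return (int)DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
    }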
ghash-ce-glue.c
  394 aes_encrypt(&aes_ctx, (u8 *)&k, (u8[AES_BLOCK_SIZE]){}); in gcm_aes_setkey()
  495 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
  516 while (walk.nbytes >= AES_BLOCK_SIZE) { in gcm_encrypt()
  517 int nblocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_encrypt()
  524 src += nblocks * AES_BLOCK_SIZE; in gcm_encrypt()
  525 dst += nblocks * AES_BLOCK_SIZE; in gcm_encrypt()
  532 walk.nbytes % AES_BLOCK_SIZE); in gcm_encrypt()
  547 tail = walk.nbytes % AES_BLOCK_SIZE; in gcm_encrypt()
  586 u8 otag[AES_BLOCK_SIZE]; in gcm_decrypt()
  587 u8 buf[AES_BLOCK_SIZE]; in gcm_decrypt()
  [all …]
|
/linux/arch/arm64/crypto/

aes-glue.c
  134 u8 dg[AES_BLOCK_SIZE];
  189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
  194 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
  209 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
  214 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
  227 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
  232 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
  256 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
  261 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
  282 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
  [all …]
|
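Both ecb loops above follow the same contract: on each walk step, process walk.nbytes / AES_BLOCK_SIZE whole blocks and hand the walk.nbytes % AES_BLOCK_SIZE remainder back to skcipher_walk_done() instead of touching a partial block. The bookkeeping on its own, outside the skcipher API; ecb_walk_step and block_fn are illustrative names, and the per-block callback stands in for the asm routine.

    #include <stddef.h>
    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    typedef void (*block_fn)(const void *key, const uint8_t in[AES_BLOCK_SIZE],
                             uint8_t out[AES_BLOCK_SIZE]);

    /* Process as many whole blocks as the buffer holds; report the leftover. */
    static size_t ecb_walk_step(block_fn crypt, const void *key,
                                uint8_t *dst, const uint8_t *src, size_t nbytes)
    {
        size_t blocks = nbytes / AES_BLOCK_SIZE;   /* full blocks this step */

        while (blocks--) {
            crypt(key, src, dst);
            src += AES_BLOCK_SIZE;
            dst += AES_BLOCK_SIZE;
        }
        /* the kernel passes this remainder to skcipher_walk_done() */
        return nbytes % AES_BLOCK_SIZE;
    }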
aes-neonbs-glue.c
  60 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
  62 } __aligned(AES_BLOCK_SIZE);
  106 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
  107 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
  111 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
  118 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
  166 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
  167 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
  175 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
  189 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
  [all …]
|
aes-ce-ccm-glue.c
  65 __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8]; in ccm_init_mac()
  83 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
  96 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
  103 int enc_after = (macp + abytes) % AES_BLOCK_SIZE; in ce_aes_ccm_auth_data()
  106 u32 blocks = abytes / AES_BLOCK_SIZE; in ce_aes_ccm_auth_data()
  108 if (macp == AES_BLOCK_SIZE || (!macp && blocks > 0)) { in ce_aes_ccm_auth_data()
  111 u32 adv = (blocks - rem) * AES_BLOCK_SIZE; in ce_aes_ccm_auth_data()
  113 macp = enc_after ? 0 : AES_BLOCK_SIZE; in ce_aes_ccm_auth_data()
  123 u32 l = min(AES_BLOCK_SIZE - macp, abytes); in ce_aes_ccm_auth_data()
  142 u32 macp = AES_BLOCK_SIZE; in ccm_calculate_auth_mac()
  [all …]
|
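ccm_init_mac() above is filling in the RFC 3610 block layout: a 16-byte block holds flags, the nonce, and a big-endian field in the last l bytes, where that field is the message length in B0 and the running counter in the A_i blocks (l = 15 - nonce length). The following is a simplified sketch of that layout only; it skips the flag adjustments the real code makes (the Adata and tag-length bits in B0) and assumes the length fits in l bytes, and ccm_set_tail_be/ccm_format are illustrative names.

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Write a big-endian value into the last l bytes of a 16-byte CCM block. */
    static void ccm_set_tail_be(uint8_t block[AES_BLOCK_SIZE], unsigned int l,
                                uint64_t value)
    {
        for (unsigned int i = 0; i < l; i++) {
            block[AES_BLOCK_SIZE - 1 - i] = (uint8_t)value;
            value >>= 8;
        }
    }

    /* B0 carries the message length; the counter block (A0) starts from 0. */
    static void ccm_format(uint8_t b0[AES_BLOCK_SIZE], uint8_t ctr[AES_BLOCK_SIZE],
                           const uint8_t *iv_with_flags, unsigned int l,
                           uint64_t msglen)
    {
        memcpy(b0, iv_with_flags, AES_BLOCK_SIZE - l);   /* flags + nonce      */
        ccm_set_tail_be(b0, l, msglen);                  /* l(m), big-endian   */

        memcpy(ctr, iv_with_flags, AES_BLOCK_SIZE - l);  /* same flags + nonce */
        ccm_set_tail_be(ctr, l, 0);                      /* counter = 0 (A0)   */
    }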
ghash-ce-glue.c
  244 aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){}); in gcm_aes_setkey()
  341 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
  364 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) { in gcm_encrypt()
  368 nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_encrypt()
  381 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_encrypt()
  405 u8 otag[AES_BLOCK_SIZE]; in gcm_decrypt()
  406 u8 buf[AES_BLOCK_SIZE]; in gcm_decrypt()
  434 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) { in gcm_decrypt()
  438 nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_decrypt()
  451 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_decrypt()
  [all …]
|
/linux/arch/riscv/crypto/

aes-riscv64-glue.c
  27 const u8 in[AES_BLOCK_SIZE],
  28 u8 out[AES_BLOCK_SIZE]);
  30 const u8 in[AES_BLOCK_SIZE],
  31 u8 out[AES_BLOCK_SIZE]);
  40 u8 iv[AES_BLOCK_SIZE]);
  43 u8 iv[AES_BLOCK_SIZE]);
  47 const u8 iv[AES_BLOCK_SIZE], bool enc);
  51 u8 iv[AES_BLOCK_SIZE]);
  56 u8 tweak[AES_BLOCK_SIZE]);
  61 u8 tweak[AES_BLOCK_SIZE]);
  [all …]
|
/linux/net/mac80211/

fils_aead.c
  28 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
  35 crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d); in aes_s2v()
  41 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
  46 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
  49 crypto_shash_update(desc, addr[i], len[i] - AES_BLOCK_SIZE); in aes_s2v()
  50 crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE, in aes_s2v()
  51 AES_BLOCK_SIZE); in aes_s2v()
  60 crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v); in aes_s2v()
  71 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
  106 memcpy(out, v, AES_BLOCK_SIZE); in aes_siv_encrypt()
  [all …]
|
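aes_s2v() implements the S2V construction from AES-SIV (RFC 5297): D starts as the CMAC of an all-zero block, and each intermediate input folds in as D = dbl(D) XOR CMAC(input), where dbl() is multiplication by x in GF(2^128) with the 0x87 reduction constant. The doubling step is small enough to show on its own; gf128_dbl is an illustrative name, and the kernel reaches this result through its CMAC/shash machinery rather than a standalone helper.

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    /* dbl(): shift the 128-bit big-endian value left by one bit; if the bit
     * shifted out was set, XOR the last byte with 0x87 (GF(2^128) reduction). */
    static void gf128_dbl(uint8_t d[AES_BLOCK_SIZE])
    {
        uint8_t carry = d[0] & 0x80;

        for (int i = 0; i < AES_BLOCK_SIZE - 1; i++)
            d[i] = (uint8_t)((d[i] << 1) | (d[i + 1] >> 7));
        d[AES_BLOCK_SIZE - 1] <<= 1;
        if (carry)
            d[AES_BLOCK_SIZE - 1] ^= 0x87;
    }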
/linux/lib/crypto/

aescfb.c
  43 int len, const u8 iv[AES_BLOCK_SIZE]) in aescfb_encrypt() argument
  45 u8 ks[AES_BLOCK_SIZE]; in aescfb_encrypt()
  50 crypto_xor_cpy(dst, src, ks, min(len, AES_BLOCK_SIZE)); in aescfb_encrypt()
  53 dst += AES_BLOCK_SIZE; in aescfb_encrypt()
  54 src += AES_BLOCK_SIZE; in aescfb_encrypt()
  55 len -= AES_BLOCK_SIZE; in aescfb_encrypt()
  72 int len, const u8 iv[AES_BLOCK_SIZE]) in aescfb_decrypt() argument
  74 u8 ks[2][AES_BLOCK_SIZE]; in aescfb_decrypt()
  79 if (len > AES_BLOCK_SIZE) in aescfb_decrypt()
  87 crypto_xor_cpy(dst, src, ks[i], min(len, AES_BLOCK_SIZE)); in aescfb_decrypt()
  [all …]
|
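aescfb_encrypt() above is textbook full-block CFB: the keystream block is the encryption of the previous ciphertext block (the IV for the first one), the data is XORed against it, and min(len, AES_BLOCK_SIZE) keeps the final, possibly short, block from overrunning the buffer. A self-contained sketch with the block cipher as a callback; cfb_encrypt and block_enc_fn are illustrative names, not the lib/crypto API.

    #include <stddef.h>
    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    typedef void (*block_enc_fn)(const void *key, const uint8_t in[AES_BLOCK_SIZE],
                                 uint8_t out[AES_BLOCK_SIZE]);

    static void cfb_encrypt(block_enc_fn enc, const void *key,
                            uint8_t *dst, const uint8_t *src, size_t len,
                            const uint8_t iv[AES_BLOCK_SIZE])
    {
        uint8_t ks[AES_BLOCK_SIZE];
        const uint8_t *feedback = iv;        /* IV, then previous ciphertext */

        while (len > 0) {
            size_t n = len < AES_BLOCK_SIZE ? len : AES_BLOCK_SIZE;

            enc(key, feedback, ks);          /* keystream = E_K(feedback) */
            for (size_t i = 0; i < n; i++)   /* XOR at most one block     */
                dst[i] = src[i] ^ ks[i];
            feedback = dst;                  /* ciphertext feeds the next block */
            dst += n;
            src += n;
            len -= n;
        }
    }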
aesgcm.c
  49 u8 kin[AES_BLOCK_SIZE] = {}; in aesgcm_expandkey()
  93 u8 buf[AES_BLOCK_SIZE]; in aesgcm_mac()
  111 u8 buf[AES_BLOCK_SIZE]; in aesgcm_crypt()
  124 crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE)); in aesgcm_crypt()
  126 dst += AES_BLOCK_SIZE; in aesgcm_crypt()
  127 src += AES_BLOCK_SIZE; in aesgcm_crypt()
  128 len -= AES_BLOCK_SIZE; in aesgcm_crypt()
  183 u8 tagbuf[AES_BLOCK_SIZE]; in aesgcm_decrypt()
  697 u8 tagbuf[AES_BLOCK_SIZE]; in libaesgcm_init()
|
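The aesgcm_crypt() hits show the CTR core of the GCM library: encrypt a counter block to get keystream, XOR min(len, AES_BLOCK_SIZE) bytes so the tail stays in bounds, and advance. The standalone sketch below assumes GCM's usual 32-bit big-endian counter increment in the last word of the block; ctr32_inc and ctr32_crypt are illustrative names, and the block cipher is again a callback rather than the kernel's aes_encrypt().

    #include <stddef.h>
    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    typedef void (*block_enc_fn)(const void *key, const uint8_t in[AES_BLOCK_SIZE],
                                 uint8_t out[AES_BLOCK_SIZE]);

    /* Bump the 32-bit big-endian counter in the last four bytes of the block. */
    static void ctr32_inc(uint8_t ctr[AES_BLOCK_SIZE])
    {
        for (int i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - 4; i--)
            if (++ctr[i] != 0)
                break;
    }

    static void ctr32_crypt(block_enc_fn enc, const void *key,
                            uint8_t *dst, const uint8_t *src, size_t len,
                            uint8_t ctr[AES_BLOCK_SIZE])
    {
        uint8_t buf[AES_BLOCK_SIZE];

        while (len > 0) {
            size_t n = len < AES_BLOCK_SIZE ? len : AES_BLOCK_SIZE;

            enc(key, ctr, buf);              /* keystream block           */
            ctr32_inc(ctr);
            for (size_t i = 0; i < n; i++)   /* XOR at most one block     */
                dst[i] = src[i] ^ buf[i];
            dst += n;
            src += n;
            len -= n;
        }
    }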
/linux/drivers/crypto/nx/

nx-aes-xcbc.c
  23 u8 state[AES_BLOCK_SIZE];
  25 u8 buffer[AES_BLOCK_SIZE];
  64 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
  71 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
  72 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
  107 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
  114 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
  118 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
  132 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
  189 if (total <= AES_BLOCK_SIZE) { in nx_xcbc_update()
  [all …]
|
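The keys[2][AES_BLOCK_SIZE] scratch space and the key shuffling in nx_xcbc_empty() exist because AES-XCBC-MAC (RFC 3566) derives three keys from the user key: K1 = E_K(0x01…01) keys the CBC-MAC chain, while K2 = E_K(0x02…02) and K3 = E_K(0x03…03) are XORed into the final complete or padded block. A sketch of that derivation only; xcbc_derive_keys is an illustrative name, and the NX driver drives the coprocessor instead of calling a software cipher.

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    typedef void (*block_enc_fn)(const void *key, const uint8_t in[AES_BLOCK_SIZE],
                                 uint8_t out[AES_BLOCK_SIZE]);

    /* RFC 3566: derive K1..K3 by encrypting constant blocks under the user key. */
    static void xcbc_derive_keys(block_enc_fn enc, const void *user_key,
                                 uint8_t k1[AES_BLOCK_SIZE],
                                 uint8_t k2[AES_BLOCK_SIZE],
                                 uint8_t k3[AES_BLOCK_SIZE])
    {
        uint8_t c[AES_BLOCK_SIZE];

        memset(c, 0x01, sizeof(c));
        enc(user_key, c, k1);   /* K1 keys the CBC-MAC chain                  */
        memset(c, 0x02, sizeof(c));
        enc(user_key, c, k2);   /* K2: XORed into a complete final block      */
        memset(c, 0x03, sizeof(c));
        enc(user_key, c, k3);   /* K3: XORed into a padded (short) final block */
    }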
nx-aes-gcm.c
  112 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca()
  155 AES_BLOCK_SIZE); in nx_gca()
  164 memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE); in nx_gca()
  192 memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, iv, AES_BLOCK_SIZE); in gmac()
  224 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gmac()
  226 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gmac()
  248 char out[AES_BLOCK_SIZE]; in gcm_empty()
  266 len = AES_BLOCK_SIZE; in gcm_empty()
  272 if (len != AES_BLOCK_SIZE) in gcm_empty()
  376 memcpy(rctx->iv, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
  [all …]
|
/linux/arch/powerpc/crypto/

aes-spe-glue.c
  192 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ecb_crypt()
  231 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_cbc_crypt()
  273 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ctr_crypt()
  300 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_xts_crypt()
  324 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_encrypt()
  325 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_encrypt()
  327 u8 b[2][AES_BLOCK_SIZE]; in ppc_xts_encrypt()
  330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
  344 scatterwalk_map_and_copy(b[0], req->dst, offset, AES_BLOCK_SIZE, 0); in ppc_xts_encrypt()
  346 scatterwalk_map_and_copy(b[0], req->src, offset + AES_BLOCK_SIZE, tail, 0); in ppc_xts_encrypt()
  [all …]
|
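Lines 324-325 locate the ciphertext-stealing pair for XTS: tail is the length of the short final piece and offset is where the last full block begins, which is what the two scatterwalk_map_and_copy() calls then shuffle. The offset arithmetic alone, with a worked case; xts_cts_split is an illustrative helper, and the actual block swap is not reproduced.

    #define AES_BLOCK_SIZE 16

    struct xts_cts_layout {
        unsigned int tail;   /* bytes in the short final block (0 if aligned) */
        int offset;          /* byte offset of the last full block            */
    };

    /*
     * Example: cryptlen = 37  ->  tail = 5, offset = 16.
     * Bytes [16, 32) are the last full block; bytes [32, 37) are the 5-byte
     * tail that "steals" ciphertext from that block.
     */
    static struct xts_cts_layout xts_cts_split(unsigned int cryptlen)
    {
        struct xts_cts_layout l;

        l.tail = cryptlen % AES_BLOCK_SIZE;
        l.offset = (int)cryptlen - (int)l.tail - AES_BLOCK_SIZE;
        return l;
    }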
aes_ctr.c
  73 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
  87 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
  107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
  113 nbytes / AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
  120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
  121 } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
  147 .ivsize = AES_BLOCK_SIZE,
  148 .chunksize = AES_BLOCK_SIZE,
|
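p8_aes_ctr_crypt() treats the whole 16-byte IV as one big-endian counter and bumps it with crypto_inc(walk.iv, AES_BLOCK_SIZE) after each block; p8_aes_ctr_final() then encrypts that counter into a keystream buffer for the short tail. The increment itself is just a ripple carry from the last byte toward the first; ctr128_inc is an illustrative name for what crypto_inc() does over a 16-byte buffer.

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    /* Big-endian increment over the whole block: add one to the last byte
     * and ripple the carry toward byte 0. */
    static void ctr128_inc(uint8_t ctr[AES_BLOCK_SIZE])
    {
        for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
            if (++ctr[i] != 0)
                break;
    }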
aes_xts.c
  84 u8 tweak[AES_BLOCK_SIZE]; in p8_aes_xts_crypt()
  87 if (req->cryptlen < AES_BLOCK_SIZE) in p8_aes_xts_crypt()
  120 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
  125 round_down(nbytes, AES_BLOCK_SIZE), in p8_aes_xts_crypt()
  131 ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in p8_aes_xts_crypt()
  152 .base.cra_blocksize = AES_BLOCK_SIZE,
  161 .ivsize = AES_BLOCK_SIZE,
|
/linux/arch/s390/crypto/

aes_s390.c
  71 u8 buf[AES_BLOCK_SIZE];
  118 cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE); in crypto_aes_encrypt()
  130 &sctx->key, out, in, AES_BLOCK_SIZE); in crypto_aes_decrypt()
  164 .cra_blocksize = AES_BLOCK_SIZE,
  241 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt()
  290 .base.cra_blocksize = AES_BLOCK_SIZE,
  331 u8 iv[AES_BLOCK_SIZE]; in cbc_aes_crypt()
  341 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
  345 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_aes_crypt()
  348 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
  [all …]
|
paes_s390.c
  287 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
  317 .base.cra_blocksize = AES_BLOCK_SIZE,
  386 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
  397 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
  404 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_paes_crypt()
  408 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
  436 .base.cra_blocksize = AES_BLOCK_SIZE,
  444 .ivsize = AES_BLOCK_SIZE,
  622 n = nbytes & ~(AES_BLOCK_SIZE - 1); in paes_xts_crypt_full()
  679 n = nbytes & ~(AES_BLOCK_SIZE - 1); in paes_xts_crypt()
  [all …]
|
/linux/drivers/crypto/ccp/

ccp-crypto.h
  109 u8 k1[AES_BLOCK_SIZE];
  110 u8 k2[AES_BLOCK_SIZE];
  115 u8 iv[AES_BLOCK_SIZE];
  118 u8 tag[AES_BLOCK_SIZE];
  122 u8 rfc3686_iv[AES_BLOCK_SIZE];
  142 u8 iv[AES_BLOCK_SIZE];
  146 u8 buf[AES_BLOCK_SIZE];
  150 u8 pad[AES_BLOCK_SIZE];
  158 u8 iv[AES_BLOCK_SIZE];
  161 u8 buf[AES_BLOCK_SIZE];
  [all …]
|
/linux/arch/x86/crypto/

aesni-intel_glue.c
  44 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))
  193 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
  215 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
  237 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
  259 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
  270 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
  281 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
  282 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
  289 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
  296 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
  [all …]
|
/linux/drivers/crypto/

padlock-aes.c
  36 #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
  40 #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
  218 u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in ecb_crypt_copy()
  221 memcpy(tmp, in, count * AES_BLOCK_SIZE); in ecb_crypt_copy()
  232 u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in cbc_crypt_copy()
  235 memcpy(tmp, in, count * AES_BLOCK_SIZE); in cbc_crypt_copy()
  329 .cra_blocksize = AES_BLOCK_SIZE,
  359 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
  360 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
  384 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
  [all …]
|
atmel-aes.c
  107 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
  120 __be32 j0[AES_BLOCK_SIZE / sizeof(u32)];
  121 u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
  122 __be32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
  146 u8 lastc[AES_BLOCK_SIZE];
  383 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
  389 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
  512 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_update_req_iv()
  595 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
  597 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
  [all …]
|
/linux/arch/sparc/crypto/

aes_glue.c
  235 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_encrypt()
  236 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
  260 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_decrypt()
  261 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
  284 round_down(nbytes, AES_BLOCK_SIZE), in cbc_encrypt()
  286 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
  310 round_down(nbytes, AES_BLOCK_SIZE), in cbc_decrypt()
  312 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
  323 u64 keystream[AES_BLOCK_SIZE / sizeof(u64)]; in ctr_crypt_final()
  329 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
  [all …]
|
/linux/drivers/crypto/ccree/

cc_aead.h
  17 #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
  52 u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
  55 u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
  56 u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
  57 u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
|
/linux/drivers/crypto/intel/qat/qat_common/

qat_algs.c
  281 cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_aead_init_enc_session()
  368 cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_aead_init_dec_session()
  445 cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_skcipher_init_com()
  474 key = (u8 *)aes_expanded.key_enc + (AES_BLOCK_SIZE * nrounds); in qat_alg_xts_reverse_key()
  475 memcpy(key_reverse, key, AES_BLOCK_SIZE); in qat_alg_xts_reverse_key()
  479 key = (u8 *)aes_expanded.key_enc + (AES_BLOCK_SIZE * nrounds); in qat_alg_xts_reverse_key()
  480 memcpy(key_reverse, key, AES_BLOCK_SIZE); in qat_alg_xts_reverse_key()
  481 memcpy(key_reverse + AES_BLOCK_SIZE, key - AES_BLOCK_SIZE, in qat_alg_xts_reverse_key()
  482 AES_BLOCK_SIZE); in qat_alg_xts_reverse_key()
  688 memcpy(qat_req->iv, sreq->iv, AES_BLOCK_SIZE); in qat_alg_update_iv_ctr_mode()
  [all …]
|