Home
last modified time | relevance | path

Searched refs:AES_BLOCK_SIZE (Results 1 – 25 of 38) sorted by relevance

12

/linux/arch/arm64/crypto/
aes-neonbs-glue.c:47  u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
49 } __aligned(AES_BLOCK_SIZE);
97 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
98 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
102 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
108 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
159 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
160 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
168 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
182 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
[all …]
/linux/drivers/crypto/ccp/
ccp-crypto.h:109  u8 k1[AES_BLOCK_SIZE];
110 u8 k2[AES_BLOCK_SIZE];
115 u8 iv[AES_BLOCK_SIZE];
118 u8 tag[AES_BLOCK_SIZE];
122 u8 rfc3686_iv[AES_BLOCK_SIZE];
142 u8 iv[AES_BLOCK_SIZE];
146 u8 buf[AES_BLOCK_SIZE];
150 u8 pad[AES_BLOCK_SIZE];
158 u8 iv[AES_BLOCK_SIZE];
161 u8 buf[AES_BLOCK_SIZE];
[all …]
ccp-ops.c:491  if (aes->src_len & (AES_BLOCK_SIZE - 1)) in ccp_run_aes_cmac_cmd()
494 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
501 if (aes->cmac_key_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
554 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
567 AES_BLOCK_SIZE, DMA_TO_DEVICE); in ccp_run_aes_cmac_cmd()
572 ccp_prepare_data(&src, NULL, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_cmac_cmd()
617 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
674 authsize = aes->authsize ? aes->authsize : AES_BLOCK_SIZE; in ccp_run_aes_gcm_cmd()
760 AES_BLOCK_SIZE, in ccp_run_aes_gcm_cmd()
769 ccp_prepare_data(&wa->aad, NULL, &wa->op, AES_BLOCK_SIZE, true); in ccp_run_aes_gcm_cmd()
[all …]
/linux/drivers/crypto/
padlock-aes.c:36  #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
40 #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
218 u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in ecb_crypt_copy()
221 memcpy(tmp, in, count * AES_BLOCK_SIZE); in ecb_crypt_copy()
232 u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in cbc_crypt_copy()
235 memcpy(tmp, in, count * AES_BLOCK_SIZE); in cbc_crypt_copy()
329 .cra_blocksize = AES_BLOCK_SIZE,
359 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
360 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
384 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
[all …]
geode-aes.c:173  geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL, in geode_encrypt()
188 geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL, in geode_decrypt()
224 .cra_blocksize = AES_BLOCK_SIZE,
286 round_down(nbytes, AES_BLOCK_SIZE), in geode_skcipher_crypt()
288 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in geode_skcipher_crypt()
321 .base.cra_blocksize = AES_BLOCK_SIZE,
332 .ivsize = AES_BLOCK_SIZE,
339 .base.cra_blocksize = AES_BLOCK_SIZE,
/linux/drivers/crypto/ccree/
cc_aead.h:17  #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
52 u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
55 u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
56 u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
57 u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
cc_buffer_mgr.c:296  AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size); in cc_set_aead_conf_buf()
309 (AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size), in cc_set_aead_conf_buf()
472 AES_BLOCK_SIZE, DMA_BIDIRECTIONAL); in cc_unmap_aead_request()
477 AES_BLOCK_SIZE, DMA_TO_DEVICE); in cc_unmap_aead_request()
482 AES_BLOCK_SIZE, DMA_TO_DEVICE); in cc_unmap_aead_request()
487 AES_BLOCK_SIZE, DMA_TO_DEVICE); in cc_unmap_aead_request()
494 AES_BLOCK_SIZE, DMA_TO_DEVICE); in cc_unmap_aead_request()
990 dma_addr = dma_map_single(dev, addr, AES_BLOCK_SIZE, in cc_map_aead_request()
995 AES_BLOCK_SIZE, addr); in cc_map_aead_request()
1009 dma_addr = dma_map_single(dev, areq_ctx->hkey, AES_BLOCK_SIZE, in cc_map_aead_request()
[all …]
cc_hash.h:31  u8 state[AES_BLOCK_SIZE];
33 u8 buffer[AES_BLOCK_SIZE];
/linux/lib/crypto/powerpc/
aes.h:62  u8 out[AES_BLOCK_SIZE], in aes_encrypt_arch() argument
63 const u8 in[AES_BLOCK_SIZE]) in aes_encrypt_arch() argument
71 u8 out[AES_BLOCK_SIZE], in aes_decrypt_arch() argument
72 const u8 in[AES_BLOCK_SIZE]) in aes_decrypt_arch() argument
167 u8 out[AES_BLOCK_SIZE], in aes_encrypt_arch() argument
168 const u8 in[AES_BLOCK_SIZE]) in aes_encrypt_arch() argument
196 static void aes_decrypt_arch(const struct aes_key *key, u8 out[AES_BLOCK_SIZE], in aes_decrypt_arch() argument
197 const u8 in[AES_BLOCK_SIZE]) in aes_decrypt_arch() argument
/linux/net/ceph/
crypto.c:270  char iv[AES_BLOCK_SIZE] __aligned(8); in ceph_aes_crypt()
271 int pad_byte = AES_BLOCK_SIZE - (in_len & (AES_BLOCK_SIZE - 1)); in ceph_aes_crypt()
282 memcpy(iv, aes_iv, AES_BLOCK_SIZE); in ceph_aes_crypt()
312 if (pad_byte > 0 && pad_byte <= AES_BLOCK_SIZE && in ceph_aes_crypt()
343 sgt.sgl, sgt.nents, buf_len, AES_BLOCK_SIZE, in ceph_krb5_encrypt()
381 WARN_ON(data_off != AES_BLOCK_SIZE); in ceph_krb5_decrypt()
418 return AES_BLOCK_SIZE; in ceph_crypt_data_offset()
431 return data_len + AES_BLOCK_SIZE - in ceph_crypt_buflen()
432 (data_len & (AES_BLOCK_SIZE - 1)); in ceph_crypt_buflen()
435 return AES_BLOCK_SIZE + data_len + 24; in ceph_crypt_buflen()
/linux/drivers/crypto/caam/
caamalg_qi.c:1487  .cra_blocksize = AES_BLOCK_SIZE,
1494 .ivsize = AES_BLOCK_SIZE,
1542 .ivsize = AES_BLOCK_SIZE,
1543 .chunksize = AES_BLOCK_SIZE,
1563 .chunksize = AES_BLOCK_SIZE,
1577 .cra_blocksize = AES_BLOCK_SIZE,
1584 .ivsize = AES_BLOCK_SIZE,
1603 .maxauthsize = AES_BLOCK_SIZE,
1622 .maxauthsize = AES_BLOCK_SIZE,
1642 .maxauthsize = AES_BLOCK_SIZE,
[all …]
caamalg.c:1978  .cra_blocksize = AES_BLOCK_SIZE,
1987 .ivsize = AES_BLOCK_SIZE,
1999 .cra_blocksize = AES_BLOCK_SIZE,
2006 .ivsize = AES_BLOCK_SIZE,
2063 .ivsize = AES_BLOCK_SIZE,
2064 .chunksize = AES_BLOCK_SIZE,
2087 .chunksize = AES_BLOCK_SIZE,
2104 .cra_blocksize = AES_BLOCK_SIZE,
2111 .ivsize = AES_BLOCK_SIZE,
2141 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
caamalg_qi2.c:1682  .cra_blocksize = AES_BLOCK_SIZE,
1689 .ivsize = AES_BLOCK_SIZE,
1737 .ivsize = AES_BLOCK_SIZE,
1738 .chunksize = AES_BLOCK_SIZE,
1758 .chunksize = AES_BLOCK_SIZE,
1772 .cra_blocksize = AES_BLOCK_SIZE,
1779 .ivsize = AES_BLOCK_SIZE,
1814 .maxauthsize = AES_BLOCK_SIZE,
1833 .maxauthsize = AES_BLOCK_SIZE,
1853 .maxauthsize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/cavium/nitrox/
nitrox_skcipher.c:390  .cra_blocksize = AES_BLOCK_SIZE,
397 .ivsize = AES_BLOCK_SIZE,
409 .cra_blocksize = AES_BLOCK_SIZE,
416 .ivsize = AES_BLOCK_SIZE,
428 .cra_blocksize = AES_BLOCK_SIZE,
435 .ivsize = AES_BLOCK_SIZE,
466 .cra_blocksize = AES_BLOCK_SIZE,
473 .ivsize = AES_BLOCK_SIZE,
/linux/drivers/crypto/inside-secure/
safexcel_hash.c:354  if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) { in safexcel_ahash_send_req()
355 if (unlikely(cache_len < AES_BLOCK_SIZE)) { in safexcel_ahash_send_req()
359 extra = AES_BLOCK_SIZE - cache_len; in safexcel_ahash_send_req()
376 for (i = 0; i < AES_BLOCK_SIZE / 4; i++) { in safexcel_ahash_send_req()
385 cache_len = AES_BLOCK_SIZE; in safexcel_ahash_send_req()
390 crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE); in safexcel_ahash_send_req()
808 } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE && in safexcel_ahash_final()
811 memset(areq->result, 0, AES_BLOCK_SIZE); in safexcel_ahash_final()
813 } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE && in safexcel_ahash_final()
818 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) { in safexcel_ahash_final()
[all …]
/linux/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c:425  req->cryptlen - AES_BLOCK_SIZE, in __virtio_crypto_skcipher_do_req()
426 AES_BLOCK_SIZE, 0); in __virtio_crypto_skcipher_do_req()
477 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_encrypt()
500 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_decrypt()
554 req->cryptlen - AES_BLOCK_SIZE, in virtio_crypto_skcipher_finalize_req()
555 AES_BLOCK_SIZE, 0); in virtio_crypto_skcipher_finalize_req()
572 .base.cra_blocksize = AES_BLOCK_SIZE,
582 .ivsize = AES_BLOCK_SIZE,
/linux/drivers/crypto/tegra/
tegra-se-aes.c:217  data_count = rctx->len / AES_BLOCK_SIZE; in tegra_aes_prep_cmd()
218 res_bits = (rctx->len % AES_BLOCK_SIZE) * 8; in tegra_aes_prep_cmd()
280 if (rctx->len % AES_BLOCK_SIZE) in tegra_aes_do_one_req()
281 rctx->len += AES_BLOCK_SIZE - (rctx->len % AES_BLOCK_SIZE); in tegra_aes_do_one_req()
530 .ivsize = AES_BLOCK_SIZE,
536 .cra_blocksize = AES_BLOCK_SIZE,
557 .cra_blocksize = AES_BLOCK_SIZE,
573 .ivsize = AES_BLOCK_SIZE,
595 .ivsize = AES_BLOCK_SIZE,
601 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux/net/mac80211/
aes_gmac.c:21  u8 *zero, *__aad, iv[AES_BLOCK_SIZE]; in ieee80211_aes_gmac()
59 iv[AES_BLOCK_SIZE - 1] = 0x01; in ieee80211_aes_gmac()
/linux/drivers/crypto/cavium/cpt/
cptvf_algs.c:338  .base.cra_blocksize = AES_BLOCK_SIZE,
346 .ivsize = AES_BLOCK_SIZE,
356 .base.cra_blocksize = AES_BLOCK_SIZE,
364 .ivsize = AES_BLOCK_SIZE,
374 .base.cra_blocksize = AES_BLOCK_SIZE,
/linux/drivers/crypto/qce/
common.h:24  #define QCE_AES_IV_LENGTH AES_BLOCK_SIZE
26 #define QCE_MAX_IV_SIZE AES_BLOCK_SIZE
/linux/drivers/crypto/bcm/
cipher.h:37  #define MAX_IV_SIZE AES_BLOCK_SIZE
132 u8 iv_ctr[ALIGN(2 * AES_BLOCK_SIZE, SPU_MSG_ALIGN)];
167 u8 gcmpad[ALIGN(AES_BLOCK_SIZE, SPU_MSG_ALIGN)];
cipher.c:1809  ((ctx->max_payload % AES_BLOCK_SIZE) != 0)); in aes_setkey()
3029 .cra_blocksize = AES_BLOCK_SIZE,
3034 .maxauthsize = AES_BLOCK_SIZE,
3052 .cra_blocksize = AES_BLOCK_SIZE,
3057 .maxauthsize = AES_BLOCK_SIZE,
3075 .cra_blocksize = AES_BLOCK_SIZE,
3080 .maxauthsize = AES_BLOCK_SIZE,
3098 .cra_blocksize = AES_BLOCK_SIZE,
3103 .maxauthsize = AES_BLOCK_SIZE,
3121 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux/security/keys/trusted-keys/
trusted_dcp.c:307  buf = kmalloc(AES_BLOCK_SIZE, GFP_KERNEL); in test_for_zero_key()
313 memset(buf, 0x55, AES_BLOCK_SIZE); in test_for_zero_key()
319 if (memcmp(buf, bad, AES_BLOCK_SIZE) == 0) { in test_for_zero_key()
/linux/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c:1226  .base.cra_blocksize = AES_BLOCK_SIZE,
1233 .ivsize = AES_BLOCK_SIZE,
1243 .base.cra_blocksize = AES_BLOCK_SIZE,
1250 .ivsize = AES_BLOCK_SIZE,
1260 .base.cra_blocksize = AES_BLOCK_SIZE,
1313 .cra_blocksize = AES_BLOCK_SIZE,
1326 .ivsize = AES_BLOCK_SIZE,
1332 .cra_blocksize = AES_BLOCK_SIZE,
1345 .ivsize = AES_BLOCK_SIZE,
1351 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux/drivers/crypto/rockchip/
rk3288_crypto.h:247  u8 iv[AES_BLOCK_SIZE];
253 u8 backup_iv[AES_BLOCK_SIZE];

12