Searched refs:SM4_BLOCK_SIZE (Results 1 – 8 of 8) sorted by relevance
/linux/arch/arm64/crypto/
sm4-ce-glue.c
    66   u8 digest[SM4_BLOCK_SIZE];
    123  nbytes -= nblks * SM4_BLOCK_SIZE;   in sm4_ecb_do_crypt()
    165  nblocks = nbytes / SM4_BLOCK_SIZE;   in sm4_cbc_crypt()
    177  err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);   in sm4_cbc_crypt()
    211  if (req->cryptlen < SM4_BLOCK_SIZE)   in sm4_cbc_cts_crypt()
    214  if (req->cryptlen == SM4_BLOCK_SIZE)   in sm4_cbc_cts_crypt()
    222  cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;   in sm4_cbc_cts_crypt()
    225  cbc_blocks * SM4_BLOCK_SIZE,   in sm4_cbc_cts_crypt()
    240  req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE,   in sm4_cbc_cts_crypt()
    288  dst += nblks * SM4_BLOCK_SIZE;   in sm4_ctr_crypt()
    [all …]
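The sm4_cbc_cts_crypt() hits above show how the CBC-CTS path sizes its two passes: a request shorter than one block is rejected, a single block falls back to plain CBC, and anything longer runs all but the final two (possibly partial) blocks through ordinary CBC. A minimal user-space sketch of that split arithmetic follows; cts_split() is a hypothetical helper for illustration, not a function from the kernel source.

    #include <stdio.h>

    #define SM4_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /*
     * Split a cryptlen-byte CBC-CTS request into a leading run of plain-CBC
     * bytes and a CTS tail covering the final two (possibly partial) blocks,
     * mirroring the arithmetic in the sm4_cbc_cts_crypt() hits above.
     * Returns -1 for requests shorter than one block.
     */
    int cts_split(unsigned int cryptlen, unsigned int *cbc_bytes,
                  unsigned int *cts_bytes)
    {
        unsigned int cbc_blocks;

        if (cryptlen < SM4_BLOCK_SIZE)
            return -1;                      /* too short for CTS */

        if (cryptlen == SM4_BLOCK_SIZE) {
            *cbc_bytes = cryptlen;          /* exactly one block: plain CBC */
            *cts_bytes = 0;
            return 0;
        }

        /* everything except the final two blocks goes through ordinary CBC */
        cbc_blocks = DIV_ROUND_UP(cryptlen, SM4_BLOCK_SIZE) - 2;
        *cbc_bytes = cbc_blocks * SM4_BLOCK_SIZE;
        *cts_bytes = cryptlen - *cbc_bytes;
        return 0;
    }

    int main(void)
    {
        unsigned int lens[] = { 16, 17, 32, 33, 50 };
        unsigned int cbc, cts;

        for (unsigned int i = 0; i < sizeof(lens) / sizeof(lens[0]); i++)
            if (cts_split(lens[i], &cbc, &cts) == 0)
                printf("len=%u -> cbc=%u cts=%u\n", lens[i], cbc, cts);
        return 0;
    }
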
sm4-neon-glue.c
    49   nblocks = nbytes / SM4_BLOCK_SIZE;   in sm4_ecb_do_crypt()
    55   err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);   in sm4_ecb_do_crypt()
    92   while (nbytes >= SM4_BLOCK_SIZE) {   in sm4_cbc_encrypt()
    93   crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    96   src += SM4_BLOCK_SIZE;   in sm4_cbc_encrypt()
    97   dst += SM4_BLOCK_SIZE;   in sm4_cbc_encrypt()
    98   nbytes -= SM4_BLOCK_SIZE;   in sm4_cbc_encrypt()
    101  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    124  nblocks = nbytes / SM4_BLOCK_SIZE;   in sm4_cbc_decrypt()
    131  err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);   in sm4_cbc_decrypt()
    [all …]
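The sm4_cbc_encrypt() hits show the classic CBC chaining loop: XOR the next plaintext block with the running IV, encrypt the result, and carry the ciphertext forward as the new IV. A minimal sketch of that pattern, assuming a stand-in single-block routine block_encrypt() in place of the kernel's NEON SM4 code:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define SM4_BLOCK_SIZE 16

    /* Stand-in for a real single-block cipher; NOT the kernel's SM4 routine. */
    void block_encrypt(uint8_t out[SM4_BLOCK_SIZE], const uint8_t in[SM4_BLOCK_SIZE])
    {
        for (size_t i = 0; i < SM4_BLOCK_SIZE; i++)
            out[i] = in[i] ^ 0xA5;          /* toy transform, illustration only */
    }

    /* CBC-encrypt nbytes (a whole number of blocks), updating iv in place. */
    void cbc_encrypt(uint8_t *dst, const uint8_t *src, size_t nbytes,
                     uint8_t iv[SM4_BLOCK_SIZE])
    {
        while (nbytes >= SM4_BLOCK_SIZE) {
            /* XOR the plaintext block into the chaining value ... */
            for (size_t i = 0; i < SM4_BLOCK_SIZE; i++)
                dst[i] = src[i] ^ iv[i];

            /* ... encrypt it, and carry the ciphertext forward as the new IV */
            block_encrypt(dst, dst);
            memcpy(iv, dst, SM4_BLOCK_SIZE);

            src += SM4_BLOCK_SIZE;
            dst += SM4_BLOCK_SIZE;
            nbytes -= SM4_BLOCK_SIZE;
        }
    }
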
sm4-ce-ccm-glue.c
    66   memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l);   in ccm_format_input()
    68   memcpy(info, req->iv, SM4_BLOCK_SIZE);   in ccm_format_input()
    85   memcpy(&info[SM4_BLOCK_SIZE - l], (u8 *)&len + 4 - l, l);   in ccm_format_input()
    124  if (len == SM4_BLOCK_SIZE) {   in ccm_calculate_auth_mac()
    125  if (n < SM4_BLOCK_SIZE) {   in ccm_calculate_auth_mac()
    131  nblocks = n / SM4_BLOCK_SIZE;   in ccm_calculate_auth_mac()
    135  p += nblocks * SM4_BLOCK_SIZE;   in ccm_calculate_auth_mac()
    136  n %= SM4_BLOCK_SIZE;   in ccm_calculate_auth_mac()
    142  l = min(n, SM4_BLOCK_SIZE - len);   in ccm_calculate_auth_mac()
    162  u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];   in ccm_crypt()
    [all …]
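The ccm_format_input() hits zero the tail of the 16-byte B0 block and then copy the message length into its last l bytes in big-endian order, l being the width of the CCM length field. A portable sketch of that encoding, assuming a hypothetical ccm_put_msglen() helper rather than the kernel's pointer arithmetic:

    #include <stdint.h>
    #include <string.h>

    #define SM4_BLOCK_SIZE 16

    /*
     * Write msglen into the last l bytes of the 16-byte CCM B0 block in
     * big-endian order, as the memset()/memcpy() pair in ccm_format_input()
     * does; l is the CCM length-field width (15 minus the nonce length).
     */
    void ccm_put_msglen(uint8_t b0[SM4_BLOCK_SIZE], uint64_t msglen, unsigned int l)
    {
        memset(&b0[SM4_BLOCK_SIZE - l], 0, l);      /* clear the length field */

        /* fill it from the least significant byte upwards */
        for (unsigned int i = 0; i < l && i < sizeof(msglen); i++) {
            b0[SM4_BLOCK_SIZE - 1 - i] = (uint8_t)(msglen & 0xff);
            msglen >>= 8;
        }
    }
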
sm4-ce-gcm-glue.c
    139  u8 __aligned(8) iv[SM4_BLOCK_SIZE];   in gcm_crypt()
    142  memset(ghash, 0, SM4_BLOCK_SIZE);   in gcm_crypt()
    155  unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;   in gcm_crypt()
    178  u8 __aligned(8) ghash[SM4_BLOCK_SIZE];   in gcm_encrypt()
    198  u8 __aligned(8) ghash[SM4_BLOCK_SIZE];   in gcm_decrypt()
    199  u8 authtag[SM4_BLOCK_SIZE];   in gcm_decrypt()
    229  .chunksize = SM4_BLOCK_SIZE,
    230  .maxauthsize = SM4_BLOCK_SIZE,
sm4-ce-cipher-glue.c
    57   .cra_blocksize = SM4_BLOCK_SIZE,
/linux/arch/riscv/crypto/
sm4-riscv64-glue.c
    24   const u8 in[SM4_BLOCK_SIZE],
    25   u8 out[SM4_BLOCK_SIZE]);
    71   .cra_blocksize = SM4_BLOCK_SIZE,
/linux/include/crypto/
sm4.h
    16   #define SM4_BLOCK_SIZE 16   (macro)
/linux/drivers/crypto/inside-secure/
safexcel_cipher.c
    3018  if (req->cryptlen & (SM4_BLOCK_SIZE - 1))   in safexcel_sm4_blk_encrypt()
    3028  if (req->cryptlen & (SM4_BLOCK_SIZE - 1))   in safexcel_sm4_blk_decrypt()
    3063  .cra_blocksize = SM4_BLOCK_SIZE,
    3079  ctx->blocksz = SM4_BLOCK_SIZE;   in safexcel_skcipher_sm4_cbc_cra_init()
    3093  .ivsize = SM4_BLOCK_SIZE,
    3101  .cra_blocksize = SM4_BLOCK_SIZE,
    3131  ctx->blocksz = SM4_BLOCK_SIZE;   in safexcel_skcipher_sm4_ctr_cra_init()
    3167  if (req->cryptlen & (SM4_BLOCK_SIZE - 1))   in safexcel_aead_sm4_blk_encrypt()
    3179  if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))   in safexcel_aead_sm4_blk_decrypt()
    3192  ctx->blocksz = SM4_BLOCK_SIZE;   in safexcel_aead_sm4cbc_sha1_cra_init()
    [all …]
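The safexcel_sm4_blk_encrypt()/safexcel_sm4_blk_decrypt() hits reject any request whose length is not a whole number of blocks with cryptlen & (SM4_BLOCK_SIZE - 1); because the block size is a power of two, that mask test is equivalent to a modulo check. A tiny sketch of the idiom, with a hypothetical sm4_len_aligned() helper:

    #include <assert.h>
    #include <stdbool.h>

    #define SM4_BLOCK_SIZE 16   /* power of two, so the mask test below is valid */

    /* True if len is a whole number of SM4 blocks. */
    bool sm4_len_aligned(unsigned int len)
    {
        return (len & (SM4_BLOCK_SIZE - 1)) == 0;
    }

    int main(void)
    {
        assert(sm4_len_aligned(0));
        assert(sm4_len_aligned(32));
        assert(!sm4_len_aligned(33));   /* such a request is rejected by the driver */
        return 0;
    }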