Lines referencing AES_BLOCK_SIZE (file line number on the left, enclosing function on the right):
48 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
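The rk field above is the round-key buffer in the cipher context. A minimal userspace check of the size arithmetic; the reading of the constants (up to 13 rounds of 8 bit-sliced blocks, plus 32 spare bytes) is an assumption, not something the listing states:

	#include <assert.h>

	#define AES_BLOCK_SIZE 16

	int main(void)
	{
		/* 13 * (8 * 16) + 32 = 1696 bytes; the kernel additionally
		 * aligns this to AES_BLOCK_SIZE. */
		unsigned char rk[13 * (8 * AES_BLOCK_SIZE) + 32];

		assert(sizeof(rk) == 1696);
		return 0;
	}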
98 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
99 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
103 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
110 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
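The four __ecb_crypt() lines are the usual skcipher walk shape: consume as many whole blocks as the current chunk holds, then report the leftover bytes back to the walk layer. A self-contained userspace model of that loop, with the stride rounding simplified away and block_fn standing in for the bulk NEON routine:

	#include <stddef.h>

	#define AES_BLOCK_SIZE 16

	static size_t ecb_walk(unsigned char *dst, const unsigned char *src,
			       size_t nbytes,
			       void (*block_fn)(unsigned char *dst,
						const unsigned char *src,
						unsigned int blocks))
	{
		while (nbytes >= AES_BLOCK_SIZE) {
			unsigned int blocks = nbytes / AES_BLOCK_SIZE;

			block_fn(dst, src, blocks);	/* bulk ECB pass */
			src += (size_t)blocks * AES_BLOCK_SIZE;
			dst += (size_t)blocks * AES_BLOCK_SIZE;
			nbytes -= (size_t)blocks * AES_BLOCK_SIZE;
		}
		return nbytes;	/* leftover bytes, < AES_BLOCK_SIZE */
	}

In the kernel the loop iterates once per walk chunk rather than once total, which is why it is a while loop rather than a single division.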
155 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in cbc_encrypt()
161 crypto_xor_cpy(dst, src, prev, AES_BLOCK_SIZE); in cbc_encrypt()
165 src += AES_BLOCK_SIZE; in cbc_encrypt()
166 dst += AES_BLOCK_SIZE; in cbc_encrypt()
167 nbytes -= AES_BLOCK_SIZE; in cbc_encrypt()
168 } while (nbytes >= AES_BLOCK_SIZE); in cbc_encrypt()
169 memcpy(walk.iv, prev, AES_BLOCK_SIZE); in cbc_encrypt()
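Unlike ECB, cbc_encrypt() cannot batch blocks: each block's input depends on the previous ciphertext, so it XORs and encrypts one block at a time and saves the last ciphertext block as the IV for the next chunk (line 169). A sketch of the same chaining, with encrypt_one() standing in for the single-block AES primitive and xor_cpy() for crypto_xor_cpy():

	#include <string.h>

	#define AES_BLOCK_SIZE 16

	/* dst = a ^ b over n bytes, a stand-in for crypto_xor_cpy() */
	static void xor_cpy(unsigned char *dst, const unsigned char *a,
			    const unsigned char *b, size_t n)
	{
		while (n--)
			*dst++ = *a++ ^ *b++;
	}

	static void cbc_encrypt_walk(unsigned char *dst, const unsigned char *src,
				     size_t nbytes,
				     unsigned char iv[AES_BLOCK_SIZE],
				     void (*encrypt_one)(unsigned char *))
	{
		const unsigned char *prev = iv;

		while (nbytes >= AES_BLOCK_SIZE) {
			xor_cpy(dst, src, prev, AES_BLOCK_SIZE); /* P ^ prev C */
			encrypt_one(dst);			 /* in place */
			prev = dst;				 /* chain */
			src += AES_BLOCK_SIZE;
			dst += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		}
		memcpy(iv, prev, AES_BLOCK_SIZE); /* IV for the next chunk */
	}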
184 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
185 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
189 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
197 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
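cbc_decrypt(), by contrast, is shaped like __ecb_crypt(): CBC decryption of each block is independent, so the NEON routine can decrypt in bulk and the chaining XOR is applied afterwards. A model of that order of operations; it assumes dst and src do not overlap (the kernel version handles in-place operation inside the NEON routine, which also takes the IV directly):

	#include <string.h>

	#define AES_BLOCK_SIZE 16

	static void cbc_decrypt_walk(unsigned char *dst, const unsigned char *src,
				     unsigned int blocks,	/* >= 1 */
				     unsigned char iv[AES_BLOCK_SIZE],
				     void (*decrypt_blocks)(unsigned char *,
							    const unsigned char *,
							    unsigned int))
	{
		unsigned char next_iv[AES_BLOCK_SIZE];
		unsigned int i, j;

		/* The last ciphertext block is the IV for the next call. */
		memcpy(next_iv, src + (size_t)(blocks - 1) * AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);

		decrypt_blocks(dst, src, blocks);	/* bulk, chain-free */

		/* Undo the chaining: XOR block i with C[i-1] (IV for i = 0). */
		for (i = 0; i < blocks; i++) {
			const unsigned char *prev =
				i ? src + (size_t)(i - 1) * AES_BLOCK_SIZE : iv;
			for (j = 0; j < AES_BLOCK_SIZE; j++)
				dst[(size_t)i * AES_BLOCK_SIZE + j] ^= prev[j];
		}
		memcpy(iv, next_iv, AES_BLOCK_SIZE);
	}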
227 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
237 if (unlikely(bytes < AES_BLOCK_SIZE)) in ctr_encrypt()
241 bytes &= ~(8 * AES_BLOCK_SIZE - 1); in ctr_encrypt()
247 if (unlikely(bytes < AES_BLOCK_SIZE)) in ctr_encrypt()
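The ctr_encrypt() matches show its two boundary cases: a chunk shorter than one block is bounced through the stack buffer buf (line 227), and an intermediate chunk is rounded down to a multiple of 8 blocks (line 241; the mask ~(8 * AES_BLOCK_SIZE - 1) is ~127) so the NEON code always sees full 8-block batches. A sketch of just that size logic; the function name and the `last` flag are illustrative:

	#define AES_BLOCK_SIZE 16

	/* How many bytes of this walk chunk to process now.
	 * `last` is nonzero when no further chunks follow. */
	static unsigned int ctr_chunk_len(unsigned int bytes, int last)
	{
		if (bytes < AES_BLOCK_SIZE)
			return bytes;	/* tail: crypt via a bounce buffer */
		if (!last)
			bytes &= ~(8 * AES_BLOCK_SIZE - 1); /* 8-block multiple */
		return bytes;
	}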
300 int tail = req->cryptlen % AES_BLOCK_SIZE; in __xts_crypt()
302 u8 buf[2 * AES_BLOCK_SIZE]; in __xts_crypt()
306 if (req->cryptlen < AES_BLOCK_SIZE) in __xts_crypt()
325 while (walk.nbytes >= AES_BLOCK_SIZE) { in __xts_crypt()
326 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __xts_crypt()
331 walk.stride / AES_BLOCK_SIZE); in __xts_crypt()
340 walk.nbytes - blocks * AES_BLOCK_SIZE); in __xts_crypt()
347 scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE, in __xts_crypt()
348 AES_BLOCK_SIZE, 0); in __xts_crypt()
349 memcpy(buf + AES_BLOCK_SIZE, buf, tail); in __xts_crypt()
352 crypto_xor(buf, req->iv, AES_BLOCK_SIZE); in __xts_crypt()
359 crypto_xor(buf, req->iv, AES_BLOCK_SIZE); in __xts_crypt()
361 scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE, in __xts_crypt()
362 AES_BLOCK_SIZE + tail, 1); in __xts_crypt()
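Lines 300-362 are XTS ciphertext stealing: when cryptlen is not a block multiple (tail != 0, line 300), the last full output block is combined with the partial final block and passed through the cipher once more, and its stolen prefix becomes the short final ciphertext, which is what the two scatterwalk copies and the two crypto_xor() calls implement. A standalone model of that buffer shuffle; crypt_one() stands in for one block-cipher pass under the tweak saved in req->iv:

	#include <string.h>

	#define AES_BLOCK_SIZE 16

	/* On entry buf[0..15] holds the last full ciphertext block C[n-1];
	 * last_partial is the trailing partial input P[n] of length tail
	 * (1..15); tweak is the saved XTS tweak. On return buf holds
	 * AES_BLOCK_SIZE + tail output bytes: new C[n-1], then short C[n]. */
	static void xts_cts_tail(unsigned char buf[2 * AES_BLOCK_SIZE],
				 const unsigned char *last_partial, int tail,
				 const unsigned char tweak[AES_BLOCK_SIZE],
				 void (*crypt_one)(unsigned char *))
	{
		int i;

		memcpy(buf + AES_BLOCK_SIZE, buf, tail); /* stolen prefix -> C[n] */
		memcpy(buf, last_partial, tail); /* P[n], padded by rest of C[n-1] */

		for (i = 0; i < AES_BLOCK_SIZE; i++)	/* pre-whitening */
			buf[i] ^= tweak[i];
		crypt_one(buf);
		for (i = 0; i < AES_BLOCK_SIZE; i++)	/* post-whitening */
			buf[i] ^= tweak[i];
	}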
380 .base.cra_blocksize = AES_BLOCK_SIZE,
387 .walksize = 8 * AES_BLOCK_SIZE,
395 .base.cra_blocksize = AES_BLOCK_SIZE,
402 .walksize = 8 * AES_BLOCK_SIZE,
403 .ivsize = AES_BLOCK_SIZE,
418 .chunksize = AES_BLOCK_SIZE,
419 .walksize = 8 * AES_BLOCK_SIZE,
420 .ivsize = AES_BLOCK_SIZE,
434 .chunksize = AES_BLOCK_SIZE,
435 .walksize = 8 * AES_BLOCK_SIZE,
436 .ivsize = AES_BLOCK_SIZE,
444 .base.cra_blocksize = AES_BLOCK_SIZE,
451 .walksize = 8 * AES_BLOCK_SIZE,
452 .ivsize = AES_BLOCK_SIZE,
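The remaining matches are skcipher_alg template fields. cra_blocksize is AES_BLOCK_SIZE for the ECB, CBC and XTS instances (lines 380, 395, 444); the two CTR instances show only chunksize, presumably because CTR registers as a stream cipher with cra_blocksize 1 (as kernel CTR modes usually do), which would explain why no cra_blocksize line matched for them. All instances set walksize to 8 * AES_BLOCK_SIZE so the walk layer hands the bit-sliced code 8-block multiples whenever it can. A fragment showing how these fields sit together in the kernel's struct skcipher_alg; surrounding fields are elided, so this is a sketch, not standalone-compilable code:

	static struct skcipher_alg ctr_alg_example = {
		.base.cra_blocksize	= 1,	/* stream-cipher facade (assumed) */
		.chunksize	= AES_BLOCK_SIZE,	/* true block granularity */
		.walksize	= 8 * AES_BLOCK_SIZE,	/* NEON batch size */
		.ivsize		= AES_BLOCK_SIZE,	/* counter block */
		/* .setkey, .encrypt, .decrypt, key sizes etc. omitted */
	};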