Searched refs:cryptlen (Results 1 – 25 of 49) sorted by relevance

/linux/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
30 unsigned int ileft = areq->cryptlen; in sun4i_ss_opti_poll()
31 unsigned int oleft = areq->cryptlen; in sun4i_ss_opti_poll()
41 if (!areq->cryptlen) in sun4i_ss_opti_poll()
51 areq->cryptlen - ivsize, ivsize, 0); in sun4i_ss_opti_poll()
57 algt->stat_bytes += areq->cryptlen; in sun4i_ss_opti_poll()
74 ileft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
75 oleft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
137 scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize, in sun4i_ss_opti_poll()
166 areq->cryptlen, areq->iv); in sun4i_ss_cipher_poll_fallback()
196 unsigned int ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
[all …]
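
The copies at lines 51 and 137 of these hits are the usual CBC output-IV copyback: after encryption, the last ciphertext block sits at offset cryptlen - ivsize in the destination scatterlist and becomes the IV for the next chained request. A minimal sketch of that pattern, assuming a completed CBC encryption request (cbc_copy_output_iv is an illustrative helper, not a kernel function):

#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

/*
 * Illustrative helper: save the output IV of a finished CBC encryption.
 * The next chained request must start from the last ciphertext block,
 * which lives at offset cryptlen - ivsize in the destination scatterlist.
 */
static void cbc_copy_output_iv(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (req->cryptlen < ivsize)
		return;

	scatterwalk_map_and_copy(req->iv, req->dst,
				 req->cryptlen - ivsize, ivsize, 0);
}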
/linux/drivers/crypto/cavium/nitrox/
nitrox_aead.c
165 creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
166 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
227 rctx->cryptlen = areq->cryptlen; in nitrox_aes_gcm_enc()
229 rctx->srclen = areq->assoclen + areq->cryptlen; in nitrox_aes_gcm_enc()
261 rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_aes_gcm_dec()
263 rctx->srclen = areq->cryptlen + areq->assoclen; in nitrox_aes_gcm_dec()
449 aead_rctx->cryptlen = areq->cryptlen; in nitrox_rfc4106_enc()
451 aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen; in nitrox_rfc4106_enc()
481 aead_rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_rfc4106_dec()
484 areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; in nitrox_rfc4106_dec()
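
One thing these nitrox_aead.c hits make visible, and the other AEAD drivers below repeat: on encryption req->cryptlen is the plaintext length, while on decryption it also covers the authentication tag, so the driver subtracts authsize before programming the engine. A hedged sketch of that bookkeeping (the struct and function names are illustrative, loosely modelled on the fields above):

#include <crypto/aead.h>
#include <linux/types.h>

/* Illustrative request context, loosely modelled on the hits above. */
struct example_aead_rctx {
	unsigned int cryptlen;	/* payload bytes the engine must transform */
	unsigned int assoclen;	/* associated data bytes */
	unsigned int srclen;	/* total bytes to read from req->src */
};

static void example_fill_lengths(struct aead_request *req,
				 struct example_aead_rctx *rctx, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);

	rctx->assoclen = req->assoclen;
	rctx->srclen = req->assoclen + req->cryptlen;
	/* On the decrypt path req->cryptlen includes the authentication tag. */
	rctx->cryptlen = decrypt ? req->cryptlen - authsize : req->cryptlen;
}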
nitrox_skcipher.c
85 unsigned int start = skreq->cryptlen - ivsize; in nitrox_cbc_cipher_callback()
221 skreq->cryptlen); in alloc_src_sglist()
242 skreq->cryptlen); in alloc_dst_sglist()
266 creq->gph.param0 = cpu_to_be16(skreq->cryptlen); in nitrox_skcipher_crypt()
297 unsigned int start = skreq->cryptlen - ivsize; in nitrox_cbc_decrypt()
/linux/drivers/crypto/tegra/
tegra-se-aes.c
60 unsigned int cryptlen; member
109 offset = req->cryptlen - ctx->ivsize; in tegra_cbc_iv_copyback()
124 num = req->cryptlen / ctx->ivsize; in tegra_aes_update_iv()
125 if (req->cryptlen % ctx->ivsize) in tegra_aes_update_iv()
273 rctx->len = req->cryptlen; in tegra_aes_do_one_req()
291 scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0); in tegra_aes_do_one_req()
322 scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1); in tegra_aes_do_one_req()
489 if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) { in tegra_aes_crypt()
490 dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen); in tegra_aes_crypt()
493 } else if (req->cryptlen < XTS_BLOCK_SIZE) { in tegra_aes_crypt()
[all …]
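
The checks at lines 489–493 above are the standard cryptlen validation for block modes: reject a length that is not a multiple of the cipher block size, and for XTS anything shorter than one block. A minimal sketch of the generic part of that check (example_check_cryptlen is illustrative):

#include <crypto/skcipher.h>
#include <linux/errno.h>
#include <linux/kernel.h>

/* Sketch of the usual length validation for a block-mode skcipher request. */
static int example_check_cryptlen(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	if (!req->cryptlen)
		return 0;	/* nothing to process */

	if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm)))
		return -EINVAL;

	return 0;
}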
/linux/arch/arm64/crypto/
aes-glue.c
278 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
288 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
289 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
304 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
307 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
310 subreq.cryptlen); in cts_cbc_encrypt()
315 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
334 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
344 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
345 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
[all …]
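
These cts_cbc_* hits, and the matching aesni-intel_glue.c ones further down, split the request the same way: everything except the last two blocks is handled as plain CBC, and only the remainder goes through the ciphertext-stealing step. A sketch of the length arithmetic, assuming cryptlen is at least one block (cts_cbc_split is illustrative):

#include <crypto/aes.h>
#include <linux/kernel.h>

/*
 * Illustrative split of a CTS-CBC request: all but the last two blocks are
 * processed as ordinary CBC, the remainder uses ciphertext stealing.
 * Example: cryptlen = 40 -> cbc_bytes = 16, cts_bytes = 24.
 */
static void cts_cbc_split(unsigned int cryptlen,
			  unsigned int *cbc_bytes, unsigned int *cts_bytes)
{
	int cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

	if (cbc_blocks <= 0) {
		/* At most two blocks: the CTS step handles everything. */
		*cbc_bytes = 0;
		*cts_bytes = cryptlen;
		return;
	}

	*cbc_bytes = cbc_blocks * AES_BLOCK_SIZE;
	*cts_bytes = cryptlen - *cbc_bytes;
}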
sm4-ce-glue.c
211 if (req->cryptlen < SM4_BLOCK_SIZE) in sm4_cbc_cts_crypt()
214 if (req->cryptlen == SM4_BLOCK_SIZE) in sm4_cbc_cts_crypt()
222 cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2; in sm4_cbc_cts_crypt()
232 dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen); in sm4_cbc_cts_crypt()
235 subreq.cryptlen); in sm4_cbc_cts_crypt()
240 req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE, in sm4_cbc_cts_crypt()
314 int tail = req->cryptlen % SM4_BLOCK_SIZE; in sm4_xts_crypt()
323 if (req->cryptlen < SM4_BLOCK_SIZE) in sm4_xts_crypt()
331 int nblocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2; in sm4_xts_crypt()
375 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in sm4_xts_crypt()
[all …]
sm4-ce-ccm-glue.c
199 err = ccm_format_input(mac, req, req->cryptlen); in ccm_encrypt()
212 scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen, in ccm_encrypt()
228 err = ccm_format_input(mac, req, req->cryptlen - authsize); in ccm_decrypt()
242 req->assoclen + req->cryptlen - authsize, in ccm_decrypt()
aes-ce-ccm-glue.c
174 u32 len = req->cryptlen; in ccm_encrypt()
225 scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen, in ccm_encrypt()
239 u32 len = req->cryptlen - authsize; in ccm_decrypt()
291 req->assoclen + req->cryptlen - authsize, in ccm_decrypt()
/linux/drivers/crypto/gemini/
sl3516-ce-cipher.c
34 if (areq->cryptlen == 0 || areq->cryptlen % 16) { in sl3516_ce_need_fallback()
119 areq->cryptlen, areq->iv); in sl3516_ce_cipher_fallback()
147 areq->cryptlen, in sl3516_ce_cipher()
179 len = areq->cryptlen; in sl3516_ce_cipher()
189 areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo); in sl3516_ce_cipher()
196 dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs); in sl3516_ce_cipher()
201 len = areq->cryptlen; in sl3516_ce_cipher()
211 areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo); in sl3516_ce_cipher()
234 ecb->cipher.algorithm_len = areq->cryptlen; in sl3516_ce_cipher()
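
sl3516_ce_need_fallback() at line 34 shows the common pattern for engines with length limits: when cryptlen is 0 or not a multiple of 16 the request is bounced to a software fallback transform; the amlogic and rockchip drivers below do the same. A hedged sketch of that hand-off, assuming the tfm context already holds a fallback skcipher allocated with CRYPTO_ALG_NEED_FALLBACK (all names here are illustrative):

#include <crypto/internal/skcipher.h>

struct example_cipher_ctx {
	struct crypto_skcipher *fallback_tfm;
};

struct example_cipher_rctx {
	/* Kept last; reqsize is extended by crypto_skcipher_reqsize(fallback). */
	struct skcipher_request fallback_req;
};

static int example_cipher_fallback(struct skcipher_request *areq, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct example_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct example_cipher_rctx *rctx = skcipher_request_ctx(areq);

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req)
		       : crypto_skcipher_decrypt(&rctx->fallback_req);
}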
/linux/crypto/
seqiv.c
59 if (req->cryptlen < ivsize) in seqiv_aead_encrypt()
70 req->assoclen + req->cryptlen); in seqiv_aead_encrypt()
87 req->cryptlen - ivsize, info); in seqiv_aead_encrypt()
108 if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) in seqiv_aead_decrypt()
118 req->cryptlen - ivsize, req->iv); in seqiv_aead_decrypt()
authenc.c
119 req->assoclen + req->cryptlen, in authenc_geniv_ahash_finish()
160 req->assoclen + req->cryptlen); in crypto_authenc_genicv()
169 scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen, in crypto_authenc_genicv()
195 unsigned int cryptlen = req->cryptlen; in crypto_authenc_encrypt() local
212 skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); in crypto_authenc_encrypt()
261 req->cryptlen - authsize, req->iv); in crypto_authenc_decrypt_tail()
293 req->assoclen + req->cryptlen - authsize); in crypto_authenc_decrypt()
aegis128-neon.c
54 unsigned int cryptlen, in crypto_aegis128_final_simd() argument
59 cryptlen, authsize); in crypto_aegis128_final_simd()
hctr2.c
140 bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE; in hctr2_hash_tweak()
159 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in hctr2_hash_message()
173 if (req->cryptlen % BLOCKCIPHER_BLOCK_SIZE) in hctr2_hash_message()
215 int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in hctr2_crypt()
218 if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE) in hctr2_crypt()
/linux/drivers/crypto/amlogic/
amlogic-gxl-cipher.c
30 if (areq->cryptlen == 0) in meson_cipher_need_fallback()
75 areq->cryptlen, areq->iv); in meson_cipher_do_fallback()
108 areq->cryptlen, in meson_cipher()
131 if (ivsize > areq->cryptlen) { in meson_cipher()
132 dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen); in meson_cipher()
144 offset = areq->cryptlen - ivsize; in meson_cipher()
205 len = areq->cryptlen; in meson_cipher()
250 areq->cryptlen - ivsize, in meson_cipher()
/linux/arch/x86/crypto/
aesni-intel_glue.c
247 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
258 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
259 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
273 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
276 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
279 subreq.cryptlen); in cts_cbc_encrypt()
284 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
303 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
314 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
315 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
[all …]
/linux/arch/riscv/crypto/
aes-riscv64-glue.c
224 if (req->cryptlen < AES_BLOCK_SIZE) in riscv64_aes_cbc_cts_crypt()
236 if (unlikely(walk.nbytes != req->cryptlen)) { in riscv64_aes_cbc_cts_crypt()
237 cbc_len = round_down(req->cryptlen - AES_BLOCK_SIZE - 1, in riscv64_aes_cbc_cts_crypt()
253 req->cryptlen - cbc_len, req->iv); in riscv64_aes_cbc_cts_crypt()
358 int tail = req->cryptlen % AES_BLOCK_SIZE; in riscv64_aes_xts_crypt()
365 if (req->cryptlen < AES_BLOCK_SIZE) in riscv64_aes_xts_crypt()
390 req->cryptlen - tail - AES_BLOCK_SIZE, in riscv64_aes_xts_crypt()
422 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in riscv64_aes_xts_crypt()
424 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in riscv64_aes_xts_crypt()
/linux/drivers/crypto/qce/
common.c
299 unsigned int enckeylen, unsigned int cryptlen) in qce_xtskey() argument
311 qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen); in qce_xtskey()
352 rctx->cryptlen); in qce_setup_regs_skcipher()
373 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_skcipher()
383 qce_write(qce, REG_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_skcipher()
514 totallen = rctx->cryptlen + rctx->assoclen; in qce_setup_regs_aead()
518 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize); in qce_setup_regs_aead()
520 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_aead()
/linux/include/crypto/
aead.h
94 unsigned int cryptlen; member
608 unsigned int cryptlen, u8 *iv) in aead_request_set_crypt() argument
612 req->cryptlen = cryptlen; in aead_request_set_crypt()
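
This aead.h hit is the definition behind every req->cryptlen in the AEAD drivers above: aead_request_set_crypt() records the number of bytes to transform. For encryption that is the plaintext length (the output grows by authsize); for decryption it is the ciphertext length including the authentication tag. A minimal synchronous caller, as a hedged sketch (example_gcm_encrypt and its parameters are illustrative, not kernel code):

#include <crypto/aead.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/*
 * Sketch: one synchronous AEAD encryption. src must cover assoclen + ptlen
 * bytes, dst must have room for assoclen + ptlen + authsize bytes. For a
 * decryption call, cryptlen would be the ciphertext length including the tag.
 */
static int example_gcm_encrypt(struct crypto_aead *tfm,
			       struct scatterlist *src, struct scatterlist *dst,
			       unsigned int assoclen, unsigned int ptlen, u8 *iv)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct aead_request *req;
	int err;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, src, dst, ptlen, iv);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
	aead_request_free(req);
	return err;
}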
/linux/drivers/crypto/inside-secure/
safexcel_cipher.c
164 u32 cryptlen, u32 assoclen, u32 digestsize) in safexcel_aead_token() argument
174 cryptlen -= digestsize; in safexcel_aead_token()
196 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen); in safexcel_aead_token()
208 cbcmaciv[14] = cryptlen >> 8; in safexcel_aead_token()
209 cbcmaciv[15] = cryptlen & 255; in safexcel_aead_token()
243 if (likely(cryptlen)) { in safexcel_aead_token()
300 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) { in safexcel_aead_token()
306 atoken->packet_length = cryptlen; in safexcel_aead_token()
323 cryptlen &= 15; in safexcel_aead_token()
324 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) { in safexcel_aead_token()
[all …]
/linux/arch/powerpc/crypto/
aes-spe-glue.c
324 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_encrypt()
325 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_encrypt()
330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
336 req->cryptlen - tail, req->iv); in ppc_xts_encrypt()
362 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_decrypt()
363 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_decrypt()
369 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_decrypt()
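
The powerpc XTS glue here, like the sm4 and riscv64 versions above, derives the ciphertext-stealing split from cryptlen: tail = cryptlen % AES_BLOCK_SIZE, and when a tail exists the bulk pass stops one full block early so that the last full block can be paired with the partial one. A worked sketch of that arithmetic, assuming cryptlen >= AES_BLOCK_SIZE (xts_split is illustrative):

#include <crypto/aes.h>

/*
 * Split an XTS request of cryptlen bytes into a bulk part and a
 * ciphertext-stealing part.  Example: cryptlen = 100 -> tail = 4,
 * bulk = 80 (five full blocks), cts = 20 (last full block + 4 bytes).
 */
static void xts_split(unsigned int cryptlen,
		      unsigned int *bulk, unsigned int *cts)
{
	unsigned int tail = cryptlen % AES_BLOCK_SIZE;

	if (!tail) {
		/* Aligned input: no stealing needed. */
		*bulk = cryptlen;
		*cts = 0;
		return;
	}

	/* Hold back the last full block so it can be paired with the tail. */
	*bulk = cryptlen - tail - AES_BLOCK_SIZE;
	*cts = AES_BLOCK_SIZE + tail;
}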
/linux/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c
162 start = sreq->cryptlen - ivsize; in output_iv_copyback()
243 u32 start = req->cryptlen - ivsize; in create_ctx_hdr()
269 req_info->req.param1 = req->cryptlen; in create_ctx_hdr()
316 update_input_data(req_info, req->src, req->cryptlen, &argcnt); in create_input_list()
337 update_output_data(req_info, req->dst, 0, req->cryptlen, &argcnt); in create_output_list()
351 if (req->cryptlen > OTX_CPT_MAX_REQ_SIZE) in cpt_enc_dec()
939 req_info->req.param1 = req->cryptlen; in create_aead_ctx_hdr()
940 req_info->req.param2 = req->cryptlen + req->assoclen; in create_aead_ctx_hdr()
943 req_info->req.param1 = req->cryptlen - mac_len; in create_aead_ctx_hdr()
944 req_info->req.param2 = req->cryptlen + req->assoclen - mac_len; in create_aead_ctx_hdr()
[all …]
/linux/drivers/crypto/ti/
dthev2-aes.c
269 unsigned int len = req->cryptlen; in dthe_aes_run()
356 writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0); in dthe_aes_run()
357 writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1); in dthe_aes_run()
412 if (req->cryptlen % AES_BLOCK_SIZE) { in dthe_aes_crypt()
419 req->cryptlen, req->iv); in dthe_aes_crypt()
431 if (req->cryptlen == 0) { in dthe_aes_crypt()
/linux/drivers/crypto/ccree/
cc_buffer_mgr.c
56 u32 skip = req->assoclen + req->cryptlen; in cc_copy_mac()
519 areq_ctx->assoclen, req->cryptlen); in cc_unmap_aead_request()
690 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
730 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
734 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
762 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
766 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
802 unsigned int size_for_map = req->assoclen + req->cryptlen; in cc_aead_chain_data()
841 size_for_map = req->assoclen + req->cryptlen; in cc_aead_chain_data()
972 areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type == in cc_map_aead_request()
[all …]
/linux/drivers/crypto/rockchip/
rk3288_crypto_skcipher.c
32 if (!req->cryptlen) in rk_cipher_need_fallback()
35 len = req->cryptlen; in rk_cipher_need_fallback()
83 areq->cryptlen, areq->iv); in rk_cipher_fallback()
311 unsigned int len = areq->cryptlen; in rk_cipher_run()
326 offset = areq->cryptlen - ivsize; in rk_cipher_run()
404 offset = areq->cryptlen - ivsize; in rk_cipher_run()
/linux/drivers/crypto/ccp/
ccp-crypto-aes-xts.c
130 if (req->cryptlen == xts_unit_sizes[unit].size) { in ccp_aes_xts_crypt()
158 req->dst, req->cryptlen, req->iv); in ccp_aes_xts_crypt()
179 rctx->cmd.u.xts.src_len = req->cryptlen; in ccp_aes_xts_crypt()
