/linux/crypto/

authencesn.c
    92  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv_tail()  [local]
    98  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_genicv_tail()
   101  scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);  in crypto_authenc_esn_genicv_tail()
   124  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv()  [local]
   134  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);  in crypto_authenc_esn_genicv()
   140  ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);  in crypto_authenc_esn_genicv()
   168  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_encrypt()  [local]
   188  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_esn_encrypt()
   208  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_authenc_esn_decrypt_tail()  [local]
   219  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_decrypt_tail()
    [all …]

seqiv.c
    59  if (req->cryptlen < ivsize)  in seqiv_aead_encrypt()
    70  req->assoclen + req->cryptlen);  in seqiv_aead_encrypt()
    87  req->cryptlen - ivsize, info);  in seqiv_aead_encrypt()
   108  if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))  in seqiv_aead_decrypt()
   118  req->cryptlen - ivsize, req->iv);  in seqiv_aead_decrypt()

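The seqiv.c hits above gate on req->cryptlen because the generated IV travels as the first ivsize bytes of the ciphertext, and on the decrypt side the authentication tag must fit as well. A minimal sketch of that length check, assuming the hypothetical helper name seqiv_lengths_ok (not a kernel symbol):

    #include <crypto/aead.h>

    /* Hypothetical helper: cryptlen must cover the transmitted IV, and on
     * decrypt also the authentication tag appended after the payload. */
    static bool seqiv_lengths_ok(struct aead_request *req, bool enc)
    {
            struct crypto_aead *geniv = crypto_aead_reqtfm(req);
            unsigned int ivsize = crypto_aead_ivsize(geniv);

            if (enc)
                    return req->cryptlen >= ivsize;

            return req->cryptlen >= ivsize + crypto_aead_authsize(geniv);
    }
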
authenc.c
   119  req->assoclen + req->cryptlen,  in authenc_geniv_ahash_finish()
   160  req->assoclen + req->cryptlen);  in crypto_authenc_genicv()
   169  scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen,  in crypto_authenc_genicv()
   195  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_encrypt()  [local]
   212  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_encrypt()
   261  req->cryptlen - authsize, req->iv);  in crypto_authenc_decrypt_tail()
   293  req->assoclen + req->cryptlen - authsize);  in crypto_authenc_decrypt()

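The authenc.c and authencesn.c hits share one buffer convention: the ICV sits immediately after the payload, at offset assoclen + cryptlen, and on decrypt req->cryptlen still includes it, hence the cryptlen - authsize arithmetic. A sketch of the tag placement under that assumption (append_icv is a hypothetical name, not a kernel symbol):

    #include <crypto/aead.h>
    #include <crypto/scatterwalk.h>

    /*
     * Assumed layout of the destination scatterlist on the encrypt side:
     *
     *   [ assoclen bytes AAD ][ cryptlen bytes ciphertext ][ authsize bytes ICV ]
     */
    static void append_icv(struct aead_request *req, u8 *hash, unsigned int authsize)
    {
            /* final argument 1 = copy from the hash buffer into the scatterlist */
            scatterwalk_map_and_copy(hash, req->dst,
                                     req->assoclen + req->cryptlen, authsize, 1);
    }
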
aegis128-neon.c
    54  unsigned int cryptlen,  in crypto_aegis128_final_simd()  [argument]
    59  cryptlen, authsize);  in crypto_aegis128_final_simd()

hctr2.c
   140  bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;  in hctr2_hash_tweak()
   159  const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;  in hctr2_hash_message()
   173  if (req->cryptlen % BLOCKCIPHER_BLOCK_SIZE)  in hctr2_hash_message()
   215  int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;  in hctr2_crypt()
   218  if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)  in hctr2_crypt()

aegis-neon.h
    14  unsigned int cryptlen,

/linux/drivers/crypto/allwinner/sun4i-ss/

sun4i-ss-cipher.c
    30  unsigned int ileft = areq->cryptlen;  in sun4i_ss_opti_poll()
    31  unsigned int oleft = areq->cryptlen;  in sun4i_ss_opti_poll()
    41  if (!areq->cryptlen)  in sun4i_ss_opti_poll()
    51  areq->cryptlen - ivsize, ivsize, 0);  in sun4i_ss_opti_poll()
    57  algt->stat_bytes += areq->cryptlen;  in sun4i_ss_opti_poll()
    74  ileft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    75  oleft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
   137  scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize,  in sun4i_ss_opti_poll()
   166  areq->cryptlen, areq->iv);  in sun4i_ss_cipher_poll_fallback()
   196  unsigned int ileft = areq->cryptlen;  in sun4i_ss_cipher_poll()
    [all …]

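The areq->cryptlen - ivsize offsets above implement the usual CBC output-IV convention: once the operation finishes, the last ciphertext block is copied back into req->iv so that a chained request continues the stream. A minimal sketch of that copy-back (cbc_copyback_iv is a hypothetical name):

    #include <crypto/skcipher.h>
    #include <crypto/scatterwalk.h>

    /* Read the final ciphertext block out of the destination scatterlist and
     * store it as the next IV; the trailing 0 means "scatterlist -> buffer". */
    static void cbc_copyback_iv(struct skcipher_request *req)
    {
            struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
            unsigned int ivsize = crypto_skcipher_ivsize(tfm);

            scatterwalk_map_and_copy(req->iv, req->dst,
                                     req->cryptlen - ivsize, ivsize, 0);
    }
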
/linux/drivers/crypto/cavium/nitrox/

nitrox_aead.c
   165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);  in nitrox_set_creq()
   166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
   227  rctx->cryptlen = areq->cryptlen;  in nitrox_aes_gcm_enc()
   229  rctx->srclen = areq->assoclen + areq->cryptlen;  in nitrox_aes_gcm_enc()
   261  rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_aes_gcm_dec()
   263  rctx->srclen = areq->cryptlen + areq->assoclen;  in nitrox_aes_gcm_dec()
   449  aead_rctx->cryptlen = areq->cryptlen;  in nitrox_rfc4106_enc()
   451  aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;  in nitrox_rfc4106_enc()
   481  aead_rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_rfc4106_dec()
   484  areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;  in nitrox_rfc4106_dec()

nitrox_skcipher.c
    85  unsigned int start = skreq->cryptlen - ivsize;  in nitrox_cbc_cipher_callback()
   221  skreq->cryptlen);  in alloc_src_sglist()
   242  skreq->cryptlen);  in alloc_dst_sglist()
   266  creq->gph.param0 = cpu_to_be16(skreq->cryptlen);  in nitrox_skcipher_crypt()
   297  unsigned int start = skreq->cryptlen - ivsize;  in nitrox_cbc_decrypt()

/linux/drivers/crypto/tegra/

tegra-se-aes.c
    60  unsigned int cryptlen;  [member]
   109  offset = req->cryptlen - ctx->ivsize;  in tegra_cbc_iv_copyback()
   124  num = req->cryptlen / ctx->ivsize;  in tegra_aes_update_iv()
   125  if (req->cryptlen % ctx->ivsize)  in tegra_aes_update_iv()
   273  rctx->len = req->cryptlen;  in tegra_aes_do_one_req()
   291  scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0);  in tegra_aes_do_one_req()
   322  scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1);  in tegra_aes_do_one_req()
   489  if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) {  in tegra_aes_crypt()
   490  dev_dbg(ctx->se->dev, "invalid length (%d)", req->cryptlen);  in tegra_aes_crypt()
   493  } else if (req->cryptlen < XTS_BLOCK_SIZE) {  in tegra_aes_crypt()
    [all …]

/linux/drivers/crypto/gemini/

sl3516-ce-cipher.c
    34  if (areq->cryptlen == 0 || areq->cryptlen % 16) {  in sl3516_ce_need_fallback()
   119  areq->cryptlen, areq->iv);  in sl3516_ce_cipher_fallback()
   147  areq->cryptlen,  in sl3516_ce_cipher()
   179  len = areq->cryptlen;  in sl3516_ce_cipher()
   189  areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);  in sl3516_ce_cipher()
   196  dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs);  in sl3516_ce_cipher()
   201  len = areq->cryptlen;  in sl3516_ce_cipher()
   211  areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);  in sl3516_ce_cipher()
   234  ecb->cipher.algorithm_len = areq->cryptlen;  in sl3516_ce_cipher()

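The sl3516_ce_need_fallback()/sl3516_ce_cipher_fallback() pair above shows the common pattern for hardware that cannot handle every cryptlen: unsupported lengths are forwarded unchanged to a software skcipher. A sketch of such a fallback path; the context and field names here (my_cipher_ctx, fallback_tfm, fallback_req) are illustrative, not the driver's:

    #include <crypto/skcipher.h>

    struct my_cipher_ctx {                          /* illustrative tfm context */
            struct crypto_skcipher *fallback_tfm;
    };

    struct my_cipher_reqctx {                       /* illustrative request context */
            struct skcipher_request fallback_req;
    };

    static int my_cipher_do_fallback(struct skcipher_request *areq, bool enc)
    {
            struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
            struct my_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
            struct my_cipher_reqctx *rctx = skcipher_request_ctx(areq);

            skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
            skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
                                          areq->base.complete, areq->base.data);
            /* cryptlen and iv are passed through untouched */
            skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
                                       areq->cryptlen, areq->iv);

            return enc ? crypto_skcipher_encrypt(&rctx->fallback_req) :
                         crypto_skcipher_decrypt(&rctx->fallback_req);
    }

In a real driver the request context would also have to reserve crypto_skcipher_reqsize() bytes for the fallback's own state; that detail is omitted here.
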
/linux/drivers/crypto/amlogic/

amlogic-gxl-cipher.c
    30  if (areq->cryptlen == 0)  in meson_cipher_need_fallback()
    75  areq->cryptlen, areq->iv);  in meson_cipher_do_fallback()
   108  areq->cryptlen,  in meson_cipher()
   131  if (ivsize > areq->cryptlen) {  in meson_cipher()
   132  dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen);  in meson_cipher()
   144  offset = areq->cryptlen - ivsize;  in meson_cipher()
   205  len = areq->cryptlen;  in meson_cipher()
   250  areq->cryptlen - ivsize,  in meson_cipher()

/linux/arch/arm64/crypto/

sm4-ce-glue.c
   211  if (req->cryptlen < SM4_BLOCK_SIZE)  in sm4_cbc_cts_crypt()
   214  if (req->cryptlen == SM4_BLOCK_SIZE)  in sm4_cbc_cts_crypt()
   222  cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;  in sm4_cbc_cts_crypt()
   232  dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen);  in sm4_cbc_cts_crypt()
   235  subreq.cryptlen);  in sm4_cbc_cts_crypt()
   240  req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE,  in sm4_cbc_cts_crypt()
   314  int tail = req->cryptlen % SM4_BLOCK_SIZE;  in sm4_xts_crypt()
   323  if (req->cryptlen < SM4_BLOCK_SIZE)  in sm4_xts_crypt()
   331  int nblocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;  in sm4_xts_crypt()
   375  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);  in sm4_xts_crypt()
    [all …]

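In the CTS and XTS paths above, DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2 is the number of whole blocks the plain CBC/XTS core handles before the final (possibly partial) blocks are passed to the ciphertext-stealing tail. A tiny sketch of that arithmetic (cts_cbc_blocks is a hypothetical helper; it assumes cryptlen > SM4_BLOCK_SIZE, as checked above):

    #include <linux/math.h>
    #include <crypto/sm4.h>

    /* For example cryptlen = 37: DIV_ROUND_UP(37, 16) - 2 = 1, so one block of
     * ordinary CBC and the remaining 21 bytes go to the ciphertext-stealing tail. */
    static unsigned int cts_cbc_blocks(unsigned int cryptlen)
    {
            return DIV_ROUND_UP(cryptlen, SM4_BLOCK_SIZE) - 2;
    }
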
sm4-ce-ccm-glue.c
   199  err = ccm_format_input(mac, req, req->cryptlen);  in ccm_encrypt()
   212  scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,  in ccm_encrypt()
   228  err = ccm_format_input(mac, req, req->cryptlen - authsize);  in ccm_decrypt()
   242  req->assoclen + req->cryptlen - authsize,  in ccm_decrypt()

aes-neonbs-glue.c
   281  int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);  in __xts_crypt()
   291  if (req->cryptlen < AES_BLOCK_SIZE)  in __xts_crypt()
   296  int xts_blocks = DIV_ROUND_UP(req->cryptlen,  in __xts_crypt()
   354  dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);  in __xts_crypt()
   356  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in __xts_crypt()

/linux/include/crypto/

aead.h
    94  unsigned int cryptlen;  [member]
   608  unsigned int cryptlen, u8 *iv)  in aead_request_set_crypt()  [argument]
   612  req->cryptlen = cryptlen;  in aead_request_set_crypt()

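aead.h is where the cryptlen member lives: aead_request_set_crypt() simply records it in the request. A minimal caller-side sketch of that convention (cryptlen is the plaintext length on encrypt; on decrypt it would instead cover ciphertext plus tag). The gcm(aes) choice, the single linear buffer, and the helper name are assumptions for illustration:

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>
    #include <crypto/aead.h>

    /* buf holds [ AAD | plaintext | room for the tag ] back to back. */
    static int gcm_encrypt_buf(struct crypto_aead *tfm, u8 *buf,
                               unsigned int assoclen, unsigned int ptlen, u8 *iv)
    {
            DECLARE_CRYPTO_WAIT(wait);
            struct aead_request *req;
            struct scatterlist sg;
            int err;

            req = aead_request_alloc(tfm, GFP_KERNEL);
            if (!req)
                    return -ENOMEM;

            sg_init_one(&sg, buf, assoclen + ptlen + crypto_aead_authsize(tfm));
            aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
            aead_request_set_ad(req, assoclen);
            /* cryptlen = plaintext length; the tag is written right after it */
            aead_request_set_crypt(req, &sg, &sg, ptlen, iv);

            err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
            aead_request_free(req);
            return err;
    }
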
/linux/drivers/crypto/qce/

common.c
   299  unsigned int enckeylen, unsigned int cryptlen)  in qce_xtskey()  [argument]
   311  qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);  in qce_xtskey()
   352  rctx->cryptlen);  in qce_setup_regs_skcipher()
   373  qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);  in qce_setup_regs_skcipher()
   383  qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);  in qce_setup_regs_skcipher()
   514  totallen = rctx->cryptlen + rctx->assoclen;  in qce_setup_regs_aead()
   518  qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize);  in qce_setup_regs_aead()
   520  qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);  in qce_setup_regs_aead()

cipher.h
    44  unsigned int cryptlen;  [member]

/linux/drivers/crypto/ti/

dthev2-aes.c
   269  unsigned int len = req->cryptlen;  in dthe_aes_run()
   356  writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0);  in dthe_aes_run()
   357  writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1);  in dthe_aes_run()
   412  if (req->cryptlen % AES_BLOCK_SIZE) {  in dthe_aes_crypt()
   419  req->cryptlen, req->iv);  in dthe_aes_crypt()
   431  if (req->cryptlen == 0) {  in dthe_aes_crypt()

/linux/drivers/crypto/marvell/octeontx/

otx_cptvf_algs.c
   162  start = sreq->cryptlen - ivsize;  in output_iv_copyback()
   243  u32 start = req->cryptlen - ivsize;  in create_ctx_hdr()
   269  req_info->req.param1 = req->cryptlen;  in create_ctx_hdr()
   316  update_input_data(req_info, req->src, req->cryptlen, &argcnt);  in create_input_list()
   337  update_output_data(req_info, req->dst, 0, req->cryptlen, &argcnt);  in create_output_list()
   351  if (req->cryptlen > OTX_CPT_MAX_REQ_SIZE)  in cpt_enc_dec()
   939  req_info->req.param1 = req->cryptlen;  in create_aead_ctx_hdr()
   940  req_info->req.param2 = req->cryptlen + req->assoclen;  in create_aead_ctx_hdr()
   943  req_info->req.param1 = req->cryptlen - mac_len;  in create_aead_ctx_hdr()
   944  req_info->req.param2 = req->cryptlen + req->assoclen - mac_len;  in create_aead_ctx_hdr()
    [all …]

/linux/drivers/crypto/ccree/

cc_buffer_mgr.c
    56  u32 skip = req->assoclen + req->cryptlen;  in cc_copy_mac()
   519  areq_ctx->assoclen, req->cryptlen);  in cc_unmap_aead_request()
   690  areq_ctx->src_sgl, areq_ctx->cryptlen,  in cc_prepare_aead_data_mlli()
   730  areq_ctx->src_sgl, areq_ctx->cryptlen,  in cc_prepare_aead_data_mlli()
   734  areq_ctx->dst_sgl, areq_ctx->cryptlen,  in cc_prepare_aead_data_mlli()
   762  areq_ctx->dst_sgl, areq_ctx->cryptlen,  in cc_prepare_aead_data_mlli()
   766  areq_ctx->src_sgl, areq_ctx->cryptlen,  in cc_prepare_aead_data_mlli()
   802  unsigned int size_for_map = req->assoclen + req->cryptlen;  in cc_aead_chain_data()
   841  size_for_map = req->assoclen + req->cryptlen;  in cc_aead_chain_data()
   972  areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type ==  in cc_map_aead_request()
    [all …]

/linux/drivers/crypto/ccp/

ccp-crypto-aes-xts.c
   130  if (req->cryptlen == xts_unit_sizes[unit].size) {  in ccp_aes_xts_crypt()
   158  req->dst, req->cryptlen, req->iv);  in ccp_aes_xts_crypt()
   179  rctx->cmd.u.xts.src_len = req->cryptlen;  in ccp_aes_xts_crypt()

/linux/drivers/crypto/rockchip/

rk3288_crypto_skcipher.c
    32  if (!req->cryptlen)  in rk_cipher_need_fallback()
    35  len = req->cryptlen;  in rk_cipher_need_fallback()
    83  areq->cryptlen, areq->iv);  in rk_cipher_fallback()
   311  unsigned int len = areq->cryptlen;  in rk_cipher_run()
   326  offset = areq->cryptlen - ivsize;  in rk_cipher_run()
   404  offset = areq->cryptlen - ivsize;  in rk_cipher_run()

/linux/drivers/crypto/caam/

caamalg_qi.c
   974  src_len = req->assoclen + req->cryptlen +  in aead_edesc_alloc()
   993  src_len = req->assoclen + req->cryptlen;  in aead_edesc_alloc()
  1135  out_len = req->assoclen + req->cryptlen +  in aead_edesc_alloc()
  1137  in_len = 4 + ivsize + req->assoclen + req->cryptlen;  in aead_edesc_alloc()
  1238  edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);  in skcipher_done()
  1276  src_nents = sg_nents_for_len(req->src, req->cryptlen);  in skcipher_edesc_alloc()
  1279  req->cryptlen);  in skcipher_edesc_alloc()
  1284  dst_nents = sg_nents_for_len(req->dst, req->cryptlen);  in skcipher_edesc_alloc()
  1287  req->cryptlen);  in skcipher_edesc_alloc()
  1377  sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0);  in skcipher_edesc_alloc()
    [all …]

caamalg.c
  1121  edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);  in skcipher_crypt_done()
  1168  append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,  in init_aead_job()
  1191  req->assoclen + req->cryptlen + authsize,  in init_aead_job()
  1195  req->assoclen + req->cryptlen - authsize,  in init_aead_job()
  1215  if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))  in init_gcm_job()
  1335  (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);  in init_skcipher_job()
  1339  edesc->src_nents > 1 ? 100 : req->cryptlen, 1);  in init_skcipher_job()
  1363  src_dma, dst_dma, req->cryptlen + ivsize,  in init_skcipher_job()
  1376  append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);  in init_skcipher_job()
  1378  append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);  in init_skcipher_job()
    [all …]