/linux/drivers/crypto/aspeed/aspeed-hace-hash.c
  aspeed_ahash_fill_padding():
     78  struct aspeed_sham_reqctx *rctx)  (argument)
     83  AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags);
     85  switch (rctx->flags & SHA_FLAGS_MASK) {
     89  bits[0] = cpu_to_be64(rctx->digcnt[0] << 3);
     90  index = rctx->bufcnt & 0x3f;
     92  *(rctx->buffer + rctx->bufcnt) = 0x80;
     93  memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1);
     94  memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8);
     95  rctx->bufcnt += padlen + 8;
     98  bits[1] = cpu_to_be64(rctx->digcnt[0] << 3);
  [all …]
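The hits above are the classic MD-style padding used for SHA-1/SHA-224/SHA-256: append a 0x80 marker byte, zero-fill up to the last eight bytes of a 64-byte block, then store the total message length in bits as a big-endian 64-bit value (line 98 is presumably the start of the SHA-384/512 branch, which uses a 128-bit length field). A minimal sketch of that rule; the function and field names are illustrative, not the Aspeed driver's:

    #include <linux/types.h>
    #include <linux/string.h>
    #include <asm/byteorder.h>

    /* MD-style padding for SHA-1/SHA-224/SHA-256: a 0x80 marker byte, zeros
     * up to byte 56 of the final 64-byte block, then the message length in
     * bits as a big-endian 64-bit value.  buf needs room for up to 72 extra
     * bytes.  Returns the padded length (a multiple of 64).
     */
    static unsigned int sha_fill_padding(u8 *buf, unsigned int bufcnt, u64 digcnt)
    {
            __be64 bits = cpu_to_be64(digcnt << 3);         /* total length in bits */
            unsigned int index = bufcnt & 0x3f;             /* bytes in last block  */
            unsigned int padlen = (index < 56) ? (56 - index) : (64 + 56 - index);

            buf[bufcnt] = 0x80;                             /* the mandatory 1 bit  */
            memset(buf + bufcnt + 1, 0, padlen - 1);        /* zero fill            */
            memcpy(buf + bufcnt + padlen, &bits, sizeof(bits));

            return bufcnt + padlen + 8;
    }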
/linux/drivers/crypto/tegra/tegra-se-aes.c
  tegra_cbc_iv_copyback():
    100  struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req);  (local)
    105  if (rctx->encrypt)
    106  memcpy(req->iv, rctx->datbuf.buf + offset, ctx->ivsize);
  tegra_aes_prep_cmd():
    203  struct tegra_aes_reqctx *rctx)  (argument)
    208  dma_addr_t addr = rctx->datbuf.addr;
    210  data_count = rctx->len / AES_BLOCK_SIZE;
    211  res_bits = (rctx->len % AES_BLOCK_SIZE) * 8;
    220  if (rctx->iv) {
    224  cpuvaddr[i++] = rctx->iv[j];
    232  cpuvaddr[i++] = rctx->config;
  [all …]
/linux/drivers/crypto/tegra/tegra-se-hash.c
  tegra_sha_fallback_init():
    114  struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);  (local)
    118  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    119  rctx->fallback_req.base.flags = req->base.flags &
    122  return crypto_ahash_init(&rctx->fallback_req);
  tegra_sha_fallback_update():
    127  struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);  (local)
    131  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    132  rctx->fallback_req.base.flags = req->base.flags &
    134  rctx->fallback_req.nbytes = req->nbytes;
    135  rctx->fallback_req.src = req->src;
    137  return crypto_ahash_update(&rctx->fallback_req);
  [all …]
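tegra-se-hash.c (like several drivers further down) delegates to a software ahash when the hardware cannot handle a request: the driver's request context embeds a second struct ahash_request for the fallback tfm and forwards each operation to it. A sketch of the update path under that assumption; the my_sha_* names and the fallback_tfm field mirror the snippet and are not the Tegra code itself:

    #include <crypto/hash.h>
    #include <crypto/internal/hash.h>

    struct my_sha_ctx {
            struct crypto_ahash *fallback_tfm;      /* from crypto_alloc_ahash() at init */
    };

    struct my_sha_reqctx {
            struct ahash_request fallback_req;      /* keep last: fallback's req ctx trails it */
    };

    static int my_sha_fallback_update(struct ahash_request *req)
    {
            struct my_sha_reqctx *rctx = ahash_request_ctx(req);
            struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
            struct my_sha_ctx *ctx = crypto_ahash_ctx(tfm);

            /* Run the update on the software implementation instead of the HW. */
            ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
            rctx->fallback_req.base.flags = req->base.flags &
                                            CRYPTO_TFM_REQ_MAY_SLEEP;
            rctx->fallback_req.nbytes = req->nbytes;
            rctx->fallback_req.src = req->src;

            return crypto_ahash_update(&rctx->fallback_req);
    }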
/linux/drivers/crypto/qce/aead.c
  qce_aead_done():
     27  struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);  (local)
     49  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
     51  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
     53  if (IS_CCM(rctx->flags)) {
     55  sg_free_table(&rctx->src_tbl);
     57  sg_free_table(&rctx->dst_tbl);
     59  if (!(IS_DECRYPT(rctx->flags) && !diff_dst))
     60  sg_free_table(&rctx->dst_tbl);
     63  sg_free_table(&rctx->dst_tbl);
     70  if (IS_ENCRYPT(rctx->flags)) {
  [all …]
/linux/drivers/crypto/qce/sha.c
  qce_ahash_done():
     41  struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);  (local)
     53  dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
     54  dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
     56  memcpy(rctx->digest, result->auth_iv, digestsize);
     57  if (req->result && rctx->last_blk)
     60  rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
     61  rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);
     67  req->src = rctx->src_orig;
     68  req->nbytes = rctx->nbytes_orig;
     69  rctx->last_blk = false;
  [all …]
/linux/drivers/crypto/qce/skcipher.c
  qce_skcipher_done():
     31  struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);  (local)
     50  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
     51  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
     53  sg_free_table(&rctx->dst_tbl);
     59  memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
  qce_skcipher_async_req_handle():
     67  struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);  (local)
     77  rctx->iv = req->iv;
     78  rctx->ivsize = crypto_skcipher_ivsize(skcipher);
     79  rctx->cryptlen = req->cryptlen;
     85  rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
  [all …]
/linux/drivers/crypto/qce/common.c
  qce_setup_regs_ahash():
    151  struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);  (local)
    162  if (!rctx->last_blk && req->nbytes % blocksize)
    167  if (IS_CMAC(rctx->flags)) {
    175  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
    178  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
    179  u32 authkey_words = rctx->authklen / sizeof(u32);
    181  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
    186  if (IS_CMAC(rctx->flags))
    189  if (rctx->first_blk)
    190  memcpy(auth, rctx->digest, digestsize);
  [all …]
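The HMAC/CMAC branch above programs the MAC key into the engine as big-endian 32-bit words (qce_cpu_to_be32p_array(), then authkey_words register writes). A sketch of what such a repacking helper does, assuming the key is a whole number of 32-bit words; mackey_to_be32_words() is a made-up name, not the qce helper:

    #include <linux/types.h>
    #include <linux/string.h>
    #include <asm/byteorder.h>

    /* Repack a byte-string MAC key as big-endian 32-bit words, the layout
     * most engines expect when the key is written word-by-word into
     * AUTH_KEYn registers.  Partial trailing words are not handled here.
     */
    static void mackey_to_be32_words(__be32 *dst, const u8 *key, unsigned int keylen)
    {
            unsigned int i;
            u32 tmp;

            for (i = 0; i < keylen / sizeof(u32); i++) {
                    memcpy(&tmp, key + i * sizeof(u32), sizeof(u32)); /* avoid unaligned loads */
                    dst[i] = cpu_to_be32(tmp);
            }
    }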
/linux/drivers/crypto/intel/keembay/keembay-ocs-hcu-core.c
  kmb_get_total_data():
    115  static inline unsigned int kmb_get_total_data(struct ocs_hcu_rctx *rctx)  (argument)
    117  return rctx->sg_data_total + rctx->buf_cnt;
  flush_sg_to_ocs_buffer():
    121  static int flush_sg_to_ocs_buffer(struct ocs_hcu_rctx *rctx)  (argument)
    125  if (rctx->sg_data_total > (sizeof(rctx->buffer) - rctx->buf_cnt)) {
    130  while (rctx->sg_data_total) {
    131  if (!rctx->sg) {
    139  if (rctx->sg_data_offset == rctx->sg->length) {
    140  rctx->sg = sg_next(rctx->sg);
    141  rctx->sg_data_offset = 0;
    149  count = min(rctx->sg->length - rctx->sg_data_offset,
  [all …]
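flush_sg_to_ocs_buffer() walks the request's scatterlist and copies the pending bytes into a linear, block-sized staging buffer before the data is programmed into the OCS engine. A simplified sketch of that staging step built on the generic scatterlist helpers; struct my_hcu_rctx and its fields only mirror the names visible above, they are not the driver's layout:

    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    struct my_hcu_rctx {
            struct scatterlist *sg;         /* current position in req->src   */
            unsigned int sg_data_total;     /* bytes still pending in the sg  */
            unsigned int sg_data_offset;    /* bytes already consumed from sg */
            unsigned int buf_cnt;           /* bytes already staged           */
            u8 buffer[64];                  /* block-sized staging buffer     */
    };

    /* Copy all pending scatterlist data into the staging buffer, failing if
     * it would not fit (the real driver then leaves the tail in the sg).
     */
    static int flush_sg_to_buffer(struct my_hcu_rctx *rctx)
    {
            size_t copied;

            if (rctx->sg_data_total > sizeof(rctx->buffer) - rctx->buf_cnt)
                    return -EINVAL;

            copied = sg_pcopy_to_buffer(rctx->sg, sg_nents(rctx->sg),
                                        rctx->buffer + rctx->buf_cnt,
                                        rctx->sg_data_total,
                                        rctx->sg_data_offset);
            if (copied != rctx->sg_data_total)
                    return -EINVAL;

            rctx->buf_cnt += copied;
            rctx->sg_data_total = 0;

            return 0;
    }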
/linux/drivers/crypto/intel/keembay/keembay-ocs-aes-core.c
  ocs_aes_init_rctx():
    240  static void ocs_aes_init_rctx(struct ocs_aes_rctx *rctx)  (argument)
    243  memset(rctx, 0, sizeof(*rctx));
    246  rctx->src_dll.dma_addr = DMA_MAPPING_ERROR;
    247  rctx->dst_dll.dma_addr = DMA_MAPPING_ERROR;
    248  rctx->aad_src_dll.dma_addr = DMA_MAPPING_ERROR;
    249  rctx->aad_dst_dll.dma_addr = DMA_MAPPING_ERROR;
  kmb_ocs_sk_common():
    314  struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);  (local)
    357  ocs_aes_init_rctx(rctx);
    358  rctx->instruction = instruction;
    359  rctx->mode = mode;
  [all …]
/linux/drivers/crypto/ccp/ccp-crypto-aes-cmac.c
  ccp_aes_cmac_complete():
     28  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);  (local)
     34  if (rctx->hash_rem) {
     36  unsigned int offset = rctx->nbytes - rctx->hash_rem;
     38  scatterwalk_map_and_copy(rctx->buf, rctx->src,
     39  offset, rctx->hash_rem, 0);
     40  rctx->buf_count = rctx->hash_rem;
     42  rctx->buf_count = 0;
     46  if (req->result && rctx->final)
     47  memcpy(req->result, rctx->iv, digest_size);
     50  sg_free_table(&rctx->data_sg);
  [all …]
/linux/drivers/crypto/ccp/ccp-crypto-sha.c
  ccp_sha_complete():
     31  struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req);  (local)
     37  if (rctx->hash_rem) {
     39  unsigned int offset = rctx->nbytes - rctx->hash_rem;
     41  scatterwalk_map_and_copy(rctx->buf, rctx->src,
     42  offset, rctx->hash_rem, 0);
     43  rctx->buf_count = rctx->hash_rem;
     45  rctx->buf_count = 0;
     49  if (req->result && rctx->final)
     50  memcpy(req->result, rctx->ctx, digest_size);
     53  sg_free_table(&rctx->data_sg);
  [all …]
/linux/drivers/crypto/ccp/ccp-crypto-aes-xts.c
  ccp_aes_xts_complete():
     65  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);  (local)
     70  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
  ccp_aes_xts_crypt():
    109  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);  (local)
    151  skcipher_request_set_tfm(&rctx->fallback_req,
    153  skcipher_request_set_callback(&rctx->fallback_req,
    157  skcipher_request_set_crypt(&rctx->fallback_req, req->src,
    159  ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
    160  crypto_skcipher_decrypt(&rctx->fallback_req);
    164  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
    165  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);
  [all …]
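ccp_aes_xts_crypt() hands requests the CCP cannot process to a software skcipher, reusing the caller's src/dst/iv; the same delegation shows up in the Rockchip, sun8i and Sahara entries below. A sketch of that pattern, assuming a fallback tfm allocated at init time with CRYPTO_ALG_NEED_FALLBACK; the my_cipher_* names are placeholders, not the CCP driver's:

    #include <crypto/skcipher.h>
    #include <crypto/internal/skcipher.h>

    struct my_cipher_reqctx {
            struct skcipher_request fallback_req;   /* keep last: fallback's req ctx trails it */
    };

    /* Forward a request to the software fallback tfm, preserving the
     * caller's completion callback and flags.
     */
    static int my_cipher_fallback(struct skcipher_request *req,
                                  struct crypto_skcipher *fallback, bool encrypt)
    {
            struct my_cipher_reqctx *rctx = skcipher_request_ctx(req);

            skcipher_request_set_tfm(&rctx->fallback_req, fallback);
            skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
                                          req->base.complete, req->base.data);
            skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
                                       req->cryptlen, req->iv);

            return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
                             crypto_skcipher_decrypt(&rctx->fallback_req);
    }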
/linux/drivers/crypto/ccp/ccp-crypto-aes-galois.c
  ccp_aes_gcm_crypt():
     80  struct ccp_aes_req_ctx *rctx = aead_request_ctx_dma(req);  (local)
    105  memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
    107  rctx->iv[i + GCM_AES_IV_SIZE] = 0;
    108  rctx->iv[AES_BLOCK_SIZE - 1] = 1;
    111  iv_sg = &rctx->iv_sg;
    113  sg_init_one(iv_sg, rctx->iv, iv_len);
    116  memset(&rctx->cmd, 0, sizeof(rctx->cmd));
    117  INIT_LIST_HEAD(&rctx->cmd.entry);
    118  rctx->cmd.engine = CCP_ENGINE_AES;
    119  rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
  [all …]
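The GCM hits build the initial counter block the standard way: the 12-byte nonce is copied into rctx->iv, the remaining bytes are zeroed, and the last byte is set to 1 (the 32-bit big-endian block counter). A standalone sketch of that construction; the two #defines duplicate the kernel constants (12 and 16 bytes) for illustration:

    #include <linux/types.h>
    #include <linux/string.h>

    #define GCM_AES_IV_SIZE 12
    #define AES_BLOCK_SIZE  16

    /* Initial GCM counter block J0 for a 96-bit nonce: nonce || 0x00000001
     * (NIST SP 800-38D).  This mirrors the rctx->iv[] setup above.
     */
    static void gcm_build_j0(u8 j0[AES_BLOCK_SIZE], const u8 nonce[GCM_AES_IV_SIZE])
    {
            memcpy(j0, nonce, GCM_AES_IV_SIZE);
            memset(j0 + GCM_AES_IV_SIZE, 0, AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1);
            j0[AES_BLOCK_SIZE - 1] = 1;     /* 32-bit big-endian counter = 1 */
    }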
/linux/drivers/crypto/cavium/nitrox/nitrox_aead.c
  nitrox_set_creq():
    151  static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)  (argument)
    153  struct se_crypto_request *creq = &rctx->nkreq.creq;
    157  creq->flags = rctx->flags;
    158  creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
    163  creq->ctrl.s.arg = rctx->ctrl_arg;
    165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
    166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
    167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
    169  param3.auth_offset = rctx->ivsize;
    172  creq->ctx_handle = rctx->ctx_handle;
  [all …]
/linux/drivers/crypto/starfive/jh7110-rsa.c
  starfive_rsa_montgomery_form():
     75  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  (local)
     76  int count = (ALIGN(rctx->total, 4) / 4) - 1;
     82  rctx->csr.pka.v = 0;
     84  writel(rctx->csr.pka.v, cryp->base + STARFIVE_PKA_CACR_OFFSET);
     90  rctx->csr.pka.v = 0;
     91  rctx->csr.pka.cln_done = 1;
     92  rctx->csr.pka.opsize = opsize;
     93  rctx->csr.pka.exposize = opsize;
     94  rctx->csr.pka.cmd = CRYPTO_CMD_PRE;
     95  rctx->csr.pka.start = 1;
  [all …]
/linux/drivers/crypto/starfive/jh7110-hash.c
  starfive_hash_hmac_key():
     66  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  (local)
     74  rctx->csr.hash.hmac = 1;
     75  rctx->csr.hash.key_flag = 1;
     77  writel(rctx->csr.hash.v, cryp->base + STARFIVE_HASH_SHACSR);
  starfive_hash_copy_hash():
    167  struct starfive_cryp_request_ctx *rctx = ahash_request_ctx(req);  (local)
    175  mlen = rctx->digsize / sizeof(u32);
  starfive_hash_one_request():
    200  struct starfive_cryp_request_ctx *rctx = ctx->rctx;  (local)
    210  rctx->csr.hash.v = 0;
    211  rctx->csr.hash.mode = ctx->hash_mode;
    218  rctx->csr.hash.start = 1;
  [all …]
/linux/drivers/crypto/allwinner/sun8i-ss/sun8i-ss-hash.c
  sun8i_ss_hash_init():
    146  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
    150  memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));
    152  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
    153  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
    155  return crypto_ahash_init(&rctx->fallback_req);
  sun8i_ss_hash_export():
    160  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
    164  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
    165  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
    167  return crypto_ahash_export(&rctx->fallback_req, out);
  sun8i_ss_hash_import():
    172  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
  [all …]
/linux/drivers/crypto/bcm/cipher.c
  spu_skcipher_rx_sg_create():
    134  struct iproc_reqctx_s *rctx,  (argument)
    140  struct iproc_ctx_s *ctx = rctx->ctx;
    144  rctx->gfp);
    151  sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
    156  sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,
    160  datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
    161  rctx->dst_nents, chunksize);
    169  sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
    171  memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);
    172  sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
  [all …]
/linux/crypto/chacha20poly1305.c
  async_done_continue():
     74  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  (local)
     76  rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  poly_verify_tag():
     97  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  (local)
     98  u8 tag[sizeof(rctx->tag)];
    101  req->assoclen + rctx->cryptlen,
    103  if (crypto_memneq(tag, rctx->tag, sizeof(tag)))
  poly_copy_tag():
    110  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  (local)
    112  scatterwalk_map_and_copy(rctx->tag, req->dst,
    113  req->assoclen + rctx->cryptlen,
    114  sizeof(rctx->tag), 1);
  [all …]
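poly_verify_tag() reads the 16-byte tag stored right after assoclen + cryptlen bytes of the source and compares it against the computed tag with crypto_memneq(), so the comparison time does not depend on how many bytes match; poly_copy_tag() writes the tag to the same offset on encryption. A sketch of the verification half; verify_trailing_tag() and CHACHAPOLY_TAG_SIZE are illustrative names, not the functions in chacha20poly1305.c:

    #include <crypto/algapi.h>              /* crypto_memneq()            */
    #include <crypto/scatterwalk.h>         /* scatterwalk_map_and_copy() */
    #include <linux/errno.h>

    #define CHACHAPOLY_TAG_SIZE 16          /* Poly1305 digest size */

    /* Compare the computed tag against the one stored in the scatterlist
     * right after assoclen + cryptlen bytes, in constant time.
     */
    static int verify_trailing_tag(struct scatterlist *src, unsigned int assoclen,
                                   unsigned int cryptlen,
                                   const u8 computed[CHACHAPOLY_TAG_SIZE])
    {
            u8 tag[CHACHAPOLY_TAG_SIZE];

            scatterwalk_map_and_copy(tag, src, assoclen + cryptlen,
                                     sizeof(tag), 0 /* read from sg */);
            return crypto_memneq(tag, computed, sizeof(tag)) ? -EBADMSG : 0;
    }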
/linux/drivers/crypto/rockchip/rk3288_crypto_ahash.c
  rk_ahash_digest_fb():
     46  struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);  (local)
     54  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     55  rctx->fallback_req.base.flags = areq->base.flags &
     58  rctx->fallback_req.nbytes = areq->nbytes;
     59  rctx->fallback_req.src = areq->src;
     60  rctx->fallback_req.result = areq->result;
     62  return crypto_ahash_digest(&rctx->fallback_req);
  rk_ahash_reg_init():
     90  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);  (local)
    110  CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
  rk_ahash_init():
    122  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);  (local)
  [all …]
/linux/drivers/crypto/rockchip/rk3288_crypto_skcipher.c
  rk_cipher_fallback():
     72  struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);  (local)
     79  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
     80  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
     82  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
     84  if (rctx->mode & RK_CRYPTO_DEC)
     85  err = crypto_skcipher_decrypt(&rctx->fallback_req);
     87  err = crypto_skcipher_encrypt(&rctx->fallback_req);
  rk_cipher_handle_req():
     93  struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);  (local)
    103  rctx->dev = rkc;
  rk_aes_ecb_encrypt():
    157  struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);  (local)
  [all …]
/linux/drivers/crypto/stm32/stm32-hash.c
  stm32_hash_write_ctrl():
    307  struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);  (local)
    310  struct stm32_hash_state *state = &rctx->state;
    326  reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);
    346  rctx->state.blocklen -= sizeof(u32);
  stm32_hash_append_sg():
    352  static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)  (argument)
    354  struct stm32_hash_state *state = &rctx->state;
    357  while ((state->bufcnt < state->blocklen) && rctx->total) {
    358  count = min(rctx->sg->length - rctx->offset, rctx->total);
    362  if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
    363  rctx->sg = sg_next(rctx->sg);
  [all …]
/linux/drivers/crypto/sahara.c
  sahara_aes_cbc_update_iv():
    543  struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);  (local)
    547  if (rctx->mode & FLAGS_ENCRYPT) {
    551  memcpy(req->iv, rctx->iv_out, ivsize);
  sahara_aes_process():
    560  struct sahara_aes_reqctx *rctx;  (local)
    574  rctx = skcipher_request_ctx(req);
    576  rctx->mode &= FLAGS_MODE_MASK;
    577  dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
    586  rctx->iv_out, ivsize,
  sahara_aes_fallback():
    646  struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);  (local)
    650  skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
  [all …]
/linux/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-hash.c
  sun8i_ce_hash_init():
     74  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
     78  memset(rctx, 0, sizeof(struct sun8i_ce_hash_reqctx));
     80  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     81  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     83  return crypto_ahash_init(&rctx->fallback_req);
  sun8i_ce_hash_export():
     88  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
     92  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     93  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     95  return crypto_ahash_export(&rctx->fallback_req, out);
  sun8i_ce_hash_import():
    100  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);  (local)
  [all …]
/linux/drivers/crypto/allwinner/sun8i-ce/sun8i-ce-cipher.c
  sun8i_ce_cipher_fallback():
     93  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);  (local)
    108  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
    109  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
    111  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
    113  if (rctx->op_dir & CE_DECRYPTION)
    114  err = crypto_skcipher_decrypt(&rctx->fallback_req);
    116  err = crypto_skcipher_encrypt(&rctx->fallback_req);
  sun8i_ce_cipher_prepare():
    126  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);  (local)
    146  rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
    153  flow = rctx->flow;
  [all …]