
Searched refs:rctx (Results 1 – 25 of 71) sorted by relevance


/linux/drivers/crypto/aspeed/
aspeed-hace-hash.c
78 struct aspeed_sham_reqctx *rctx) in aspeed_ahash_fill_padding() argument
83 AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags); in aspeed_ahash_fill_padding()
85 switch (rctx->flags & SHA_FLAGS_MASK) { in aspeed_ahash_fill_padding()
89 bits[0] = cpu_to_be64(rctx->digcnt[0] << 3); in aspeed_ahash_fill_padding()
90 index = rctx->bufcnt & 0x3f; in aspeed_ahash_fill_padding()
92 *(rctx->buffer + rctx->bufcnt) = 0x80; in aspeed_ahash_fill_padding()
93 memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1); in aspeed_ahash_fill_padding()
94 memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8); in aspeed_ahash_fill_padding()
95 rctx->bufcnt += padlen + 8; in aspeed_ahash_fill_padding()
98 bits[1] = cpu_to_be64(rctx->digcnt[0] << 3); in aspeed_ahash_fill_padding()
[all …]
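The aspeed excerpt above implements the classic MD-style padding rule: append 0x80, zero-fill until 8 bytes remain in the final 64-byte block, then store the message length in bits as a big-endian 64-bit value (the driver uses cpu_to_be64). A minimal userspace sketch of that rule; sha_fill_padding is an illustrative name, not the driver's:

```c
#include <stdint.h>
#include <string.h>

/* Pad buf (message tail of bufcnt bytes, digcnt total bytes hashed)
 * to a multiple of 64 bytes, returning the new buffer count. */
static size_t sha_fill_padding(uint8_t *buf, size_t bufcnt, uint64_t digcnt)
{
	uint64_t bits = digcnt << 3;		/* byte count -> bit count */
	size_t index = bufcnt & 0x3f;		/* bytes in the final block */
	size_t padlen = (index < 56) ? (56 - index) : (120 - index);
	int i;

	buf[bufcnt] = 0x80;			/* mandatory 1 bit */
	memset(buf + bufcnt + 1, 0, padlen - 1);
	for (i = 0; i < 8; i++)			/* length, big-endian */
		buf[bufcnt + padlen + i] = (uint8_t)(bits >> (56 - 8 * i));

	return bufcnt + padlen + 8;
}
```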
/linux/drivers/crypto/qce/
aead.c
27 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); in qce_aead_done() local
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_done()
51 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_aead_done()
53 if (IS_CCM(rctx->flags)) { in qce_aead_done()
55 sg_free_table(&rctx->src_tbl); in qce_aead_done()
57 sg_free_table(&rctx->dst_tbl); in qce_aead_done()
59 if (!(IS_DECRYPT(rctx->flags) && !diff_dst)) in qce_aead_done()
60 sg_free_table(&rctx->dst_tbl); in qce_aead_done()
63 sg_free_table(&rctx->dst_tbl); in qce_aead_done()
70 if (IS_ENCRYPT(rctx->flags)) { in qce_aead_done()
[all …]
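The qce_aead_done() lines show the teardown half of an async DMA request: recover the request context, unmap the source/destination scatterlists with the directions chosen at map time, then free the driver-built sg table. A condensed sketch of that shape, with an illustrative struct my_rctx standing in for qce_aead_reqctx (the CCM and decrypt special cases from the excerpt are omitted):

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct my_rctx {			/* illustrative request context */
	struct scatterlist *src_sg, *dst_sg;
	int src_nents, dst_nents;
	struct sg_table dst_tbl;	/* built by the driver at submit */
};

static void example_aead_done(struct device *dev, struct my_rctx *rctx,
			      bool diff_dst)
{
	/* Mirror the directions used when the buffers were mapped. */
	enum dma_data_direction dir_src = diff_dst ? DMA_TO_DEVICE
						   : DMA_BIDIRECTIONAL;

	dma_unmap_sg(dev, rctx->src_sg, rctx->src_nents, dir_src);
	if (diff_dst)			/* dst mapped separately only if != src */
		dma_unmap_sg(dev, rctx->dst_sg, rctx->dst_nents,
			     DMA_FROM_DEVICE);

	sg_free_table(&rctx->dst_tbl);
}
```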
sha.c
41 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); in qce_ahash_done() local
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
54 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE); in qce_ahash_done()
56 memcpy(rctx->digest, result->auth_iv, digestsize); in qce_ahash_done()
57 if (req->result && rctx->last_blk) in qce_ahash_done()
60 rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]); in qce_ahash_done()
61 rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]); in qce_ahash_done()
67 req->src = rctx->src_orig; in qce_ahash_done()
68 req->nbytes = rctx->nbytes_orig; in qce_ahash_done()
69 rctx->last_blk = false; in qce_ahash_done()
[all …]
skcipher.c
31 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); in qce_skcipher_done() local
50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
51 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_done()
53 sg_free_table(&rctx->dst_tbl); in qce_skcipher_done()
59 memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize); in qce_skcipher_done()
67 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); in qce_skcipher_async_req_handle() local
77 rctx->iv = req->iv; in qce_skcipher_async_req_handle()
78 rctx->ivsize = crypto_skcipher_ivsize(skcipher); in qce_skcipher_async_req_handle()
79 rctx->cryptlen = req->cryptlen; in qce_skcipher_async_req_handle()
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
[all …]
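Before mapping anything, qce_skcipher_async_req_handle() snapshots the request parameters into the context and counts how many sg entries cover the payload; sg_nents_for_len() returns a negative errno if the list is shorter than the requested length. A sketch of that capture step, with an illustrative context struct:

```c
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

struct my_cipher_rctx {			/* illustrative request context */
	u8 *iv;
	unsigned int ivsize;
	unsigned int cryptlen;
	int src_nents;
};

static int example_req_handle(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->iv = req->iv;
	rctx->ivsize = crypto_skcipher_ivsize(tfm);
	rctx->cryptlen = req->cryptlen;

	rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
	return rctx->src_nents < 0 ? rctx->src_nents : 0;
}
```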
common.c
151 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); in qce_setup_regs_ahash() local
162 if (!rctx->last_blk && req->nbytes % blocksize) in qce_setup_regs_ahash()
167 if (IS_CMAC(rctx->flags)) { in qce_setup_regs_ahash()
175 auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize); in qce_setup_regs_ahash()
178 if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) { in qce_setup_regs_ahash()
179 u32 authkey_words = rctx->authklen / sizeof(u32); in qce_setup_regs_ahash()
181 qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen); in qce_setup_regs_ahash()
186 if (IS_CMAC(rctx->flags)) in qce_setup_regs_ahash()
189 if (rctx->first_blk) in qce_setup_regs_ahash()
190 memcpy(auth, rctx->digest, digestsize); in qce_setup_regs_ahash()
[all …]
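qce_setup_regs_ahash() loads the HMAC/CMAC key into the MAC-key registers as big-endian 32-bit words (authklen / sizeof(u32) of them, per the excerpt). A userspace sketch of that repacking, assuming the key bytes are to be loaded most-significant byte first; the helper name is illustrative, not qce's:

```c
#include <stdint.h>
#include <stddef.h>

/* Repack a byte-array key into big-endian 32-bit register words. */
static void key_to_be32_words(uint32_t *dst, const uint8_t *src, size_t len)
{
	size_t i, words = len / sizeof(uint32_t);

	for (i = 0; i < words; i++)	/* pack 4 bytes, MSB first */
		dst[i] = ((uint32_t)src[4 * i] << 24) |
			 ((uint32_t)src[4 * i + 1] << 16) |
			 ((uint32_t)src[4 * i + 2] << 8) |
			  (uint32_t)src[4 * i + 3];
}
```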
/linux/drivers/crypto/intel/keembay/
keembay-ocs-hcu-core.c
115 static inline unsigned int kmb_get_total_data(struct ocs_hcu_rctx *rctx) in kmb_get_total_data() argument
117 return rctx->sg_data_total + rctx->buf_cnt; in kmb_get_total_data()
121 static int flush_sg_to_ocs_buffer(struct ocs_hcu_rctx *rctx) in flush_sg_to_ocs_buffer() argument
125 if (rctx->sg_data_total > (sizeof(rctx->buffer) - rctx->buf_cnt)) { in flush_sg_to_ocs_buffer()
130 while (rctx->sg_data_total) { in flush_sg_to_ocs_buffer()
131 if (!rctx->sg) { in flush_sg_to_ocs_buffer()
139 if (rctx->sg_data_offset == rctx->sg->length) { in flush_sg_to_ocs_buffer()
140 rctx->sg = sg_next(rctx->sg); in flush_sg_to_ocs_buffer()
141 rctx->sg_data_offset = 0; in flush_sg_to_ocs_buffer()
149 count = min(rctx->sg->length - rctx->sg_data_offset, in flush_sg_to_ocs_buffer()
[all …]
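flush_sg_to_ocs_buffer() drains the pending scatterlist bytes into the request's linear staging buffer, advancing a per-sg offset and hopping to the next entry when one is exhausted. A condensed sketch under the same assumptions the excerpt suggests (each sg entry kernel-mapped, so sg_virt() is usable; the struct is illustrative):

```c
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <linux/minmax.h>

struct my_hash_rctx {			/* illustrative request context */
	struct scatterlist *sg;
	unsigned int sg_data_total, sg_data_offset;
	unsigned int buf_cnt;
	u8 buffer[256];
};

static int example_flush_sg(struct my_hash_rctx *rctx)
{
	if (rctx->sg_data_total > sizeof(rctx->buffer) - rctx->buf_cnt)
		return -EINVAL;		/* pending data must fit */

	while (rctx->sg_data_total) {
		unsigned int count;

		if (!rctx->sg)
			return -EINVAL;	/* sg list ended too early */
		if (rctx->sg_data_offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			rctx->sg_data_offset = 0;
			continue;
		}
		count = min(rctx->sg->length - rctx->sg_data_offset,
			    rctx->sg_data_total);
		memcpy(rctx->buffer + rctx->buf_cnt,
		       sg_virt(rctx->sg) + rctx->sg_data_offset, count);
		rctx->sg_data_offset += count;
		rctx->sg_data_total -= count;
		rctx->buf_cnt += count;
	}
	return 0;
}
```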
/linux/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
28 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); in ccp_aes_cmac_complete() local
34 if (rctx->hash_rem) { in ccp_aes_cmac_complete()
36 unsigned int offset = rctx->nbytes - rctx->hash_rem; in ccp_aes_cmac_complete()
38 scatterwalk_map_and_copy(rctx->buf, rctx->src, in ccp_aes_cmac_complete()
39 offset, rctx->hash_rem, 0); in ccp_aes_cmac_complete()
40 rctx->buf_count = rctx->hash_rem; in ccp_aes_cmac_complete()
42 rctx->buf_count = 0; in ccp_aes_cmac_complete()
46 if (req->result && rctx->final) in ccp_aes_cmac_complete()
47 memcpy(req->result, rctx->iv, digest_size); in ccp_aes_cmac_complete()
50 sg_free_table(&rctx->data_sg); in ccp_aes_cmac_complete()
[all …]
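ccp_aes_cmac_complete() and ccp_sha_complete() (next result) share a tail-buffering step: any bytes the engine did not consume are copied out of the source scatterlist into the context buffer so the next update can prepend them. A sketch with an illustrative context struct:

```c
#include <crypto/scatterwalk.h>

struct my_req_ctx {			/* illustrative request context */
	struct scatterlist *src;
	unsigned int nbytes, hash_rem, buf_count;
	u8 buf[128];
};

static void example_save_remainder(struct my_req_ctx *rctx)
{
	if (rctx->hash_rem) {
		/* Unhashed tail starts where the engine stopped. */
		unsigned int offset = rctx->nbytes - rctx->hash_rem;

		scatterwalk_map_and_copy(rctx->buf, rctx->src, offset,
					 rctx->hash_rem, 0);
		rctx->buf_count = rctx->hash_rem;
	} else {
		rctx->buf_count = 0;
	}
}
```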
ccp-crypto-sha.c
31 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); in ccp_sha_complete() local
37 if (rctx->hash_rem) { in ccp_sha_complete()
39 unsigned int offset = rctx->nbytes - rctx->hash_rem; in ccp_sha_complete()
41 scatterwalk_map_and_copy(rctx->buf, rctx->src, in ccp_sha_complete()
42 offset, rctx->hash_rem, 0); in ccp_sha_complete()
43 rctx->buf_count = rctx->hash_rem; in ccp_sha_complete()
45 rctx->buf_count = 0; in ccp_sha_complete()
49 if (req->result && rctx->final) in ccp_sha_complete()
50 memcpy(req->result, rctx->ctx, digest_size); in ccp_sha_complete()
53 sg_free_table(&rctx->data_sg); in ccp_sha_complete()
[all …]
ccp-crypto-aes-xts.c
65 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_aes_xts_complete() local
70 memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_complete()
109 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_aes_xts_crypt() local
151 skcipher_request_set_tfm(&rctx->fallback_req, in ccp_aes_xts_crypt()
153 skcipher_request_set_callback(&rctx->fallback_req, in ccp_aes_xts_crypt()
157 skcipher_request_set_crypt(&rctx->fallback_req, req->src, in ccp_aes_xts_crypt()
159 ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) : in ccp_aes_xts_crypt()
160 crypto_skcipher_decrypt(&rctx->fallback_req); in ccp_aes_xts_crypt()
164 memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
165 sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
[all …]
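The fallback delegation in ccp_aes_xts_crypt() recurs almost verbatim in the sun8i-ss, sun8i-ce, rk3288, and sl3516 results below: point the pre-allocated subrequest at the software tfm, mirror the caller's callback and buffers, then invoke encrypt or decrypt. A generic sketch of the pattern:

```c
#include <crypto/skcipher.h>

static int example_fallback_crypt(struct skcipher_request *req,
				  struct crypto_skcipher *fallback_tfm,
				  struct skcipher_request *subreq,
				  bool encrypt)
{
	skcipher_request_set_tfm(subreq, fallback_tfm);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return encrypt ? crypto_skcipher_encrypt(subreq)
		       : crypto_skcipher_decrypt(subreq);
}
```

In these drivers, subreq is typically the fallback_req field embedded in the request context, with room reserved at init time via crypto_skcipher_set_reqsize().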
ccp-crypto-aes-galois.c
80 struct ccp_aes_req_ctx *rctx = aead_request_ctx_dma(req); in ccp_aes_gcm_crypt() local
105 memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE); in ccp_aes_gcm_crypt()
107 rctx->iv[i + GCM_AES_IV_SIZE] = 0; in ccp_aes_gcm_crypt()
108 rctx->iv[AES_BLOCK_SIZE - 1] = 1; in ccp_aes_gcm_crypt()
111 iv_sg = &rctx->iv_sg; in ccp_aes_gcm_crypt()
113 sg_init_one(iv_sg, rctx->iv, iv_len); in ccp_aes_gcm_crypt()
116 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); in ccp_aes_gcm_crypt()
117 INIT_LIST_HEAD(&rctx->cmd.entry); in ccp_aes_gcm_crypt()
118 rctx->cmd.engine = CCP_ENGINE_AES; in ccp_aes_gcm_crypt()
119 rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm); in ccp_aes_gcm_crypt()
[all …]
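Lines 105-108 of ccp_aes_gcm_crypt() build the GCM counter block J0 for a 96-bit IV: the 12 IV bytes followed by a 32-bit counter initialized to 1, exactly as NIST SP 800-38D specifies. A userspace sketch (the two size macros match the kernel's definitions):

```c
#include <stdint.h>
#include <string.h>

#define GCM_AES_IV_SIZE	12
#define AES_BLOCK_SIZE	16

static void gcm_build_j0(uint8_t j0[AES_BLOCK_SIZE],
			 const uint8_t iv[GCM_AES_IV_SIZE])
{
	memcpy(j0, iv, GCM_AES_IV_SIZE);
	memset(j0 + GCM_AES_IV_SIZE, 0,
	       AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1);
	j0[AES_BLOCK_SIZE - 1] = 1;	/* 32-bit counter starts at 1 */
}
```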
ccp-crypto-des3.c
26 struct ccp_des3_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_des3_complete() local
32 memcpy(req->iv, rctx->iv, DES3_EDE_BLOCK_SIZE); in ccp_des3_complete()
65 struct ccp_des3_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_des3_crypt() local
81 memcpy(rctx->iv, req->iv, DES3_EDE_BLOCK_SIZE); in ccp_des3_crypt()
82 iv_sg = &rctx->iv_sg; in ccp_des3_crypt()
84 sg_init_one(iv_sg, rctx->iv, iv_len); in ccp_des3_crypt()
87 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); in ccp_des3_crypt()
88 INIT_LIST_HEAD(&rctx->cmd.entry); in ccp_des3_crypt()
89 rctx->cmd.engine = CCP_ENGINE_DES3; in ccp_des3_crypt()
90 rctx->cmd.u.des3.type = ctx->u.des3.type; in ccp_des3_crypt()
[all …]
ccp-crypto-aes.c
27 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_aes_complete() local
33 memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_complete()
70 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); in ccp_aes_crypt() local
86 memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE); in ccp_aes_crypt()
87 iv_sg = &rctx->iv_sg; in ccp_aes_crypt()
89 sg_init_one(iv_sg, rctx->iv, iv_len); in ccp_aes_crypt()
92 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); in ccp_aes_crypt()
93 INIT_LIST_HEAD(&rctx->cmd.entry); in ccp_aes_crypt()
94 rctx->cmd.engine = CCP_ENGINE_AES; in ccp_aes_crypt()
95 rctx->cmd.u.aes.type = ctx->u.aes.type; in ccp_aes_crypt()
[all …]
/linux/drivers/crypto/tegra/
tegra-se-hash.c
114 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_init() local
118 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_init()
119 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_init()
122 return crypto_ahash_init(&rctx->fallback_req); in tegra_sha_fallback_init()
127 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_update() local
131 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_update()
132 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_update()
134 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_update()
135 rctx->fallback_req.src = req->src; in tegra_sha_fallback_update()
137 return crypto_ahash_update(&rctx->fallback_req); in tegra_sha_fallback_update()
[all …]
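The tegra results show the ahash flavor of the fallback pattern, which also appears in the sun8i-ss, rk3288, and sun8i-ce results below: re-target the embedded subrequest at the software tfm, propagate only the MAY_SLEEP flag, copy the I/O fields, and call the matching ahash operation. A sketch of the update step:

```c
#include <crypto/hash.h>

static int example_fallback_update(struct ahash_request *req,
				   struct crypto_ahash *fallback_tfm,
				   struct ahash_request *subreq)
{
	ahash_request_set_tfm(subreq, fallback_tfm);
	/* Keep only the sleepability hint from the caller's flags. */
	subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	subreq->nbytes = req->nbytes;
	subreq->src = req->src;

	return crypto_ahash_update(subreq);
}
```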
/linux/drivers/crypto/cavium/nitrox/
nitrox_aead.c
151 static int nitrox_set_creq(struct nitrox_aead_rctx *rctx) in nitrox_set_creq() argument
153 struct se_crypto_request *creq = &rctx->nkreq.creq; in nitrox_set_creq()
157 creq->flags = rctx->flags; in nitrox_set_creq()
158 creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : in nitrox_set_creq()
163 creq->ctrl.s.arg = rctx->ctrl_arg; in nitrox_set_creq()
165 creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
166 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
167 creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen); in nitrox_set_creq()
169 param3.auth_offset = rctx->ivsize; in nitrox_set_creq()
172 creq->ctx_handle = rctx->ctx_handle; in nitrox_set_creq()
[all …]
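One idiom in nitrox_set_creq() is worth pulling out because it recurs across async drivers: the allocation mode is derived from the request flags, so sleepable callers get GFP_KERNEL while atomic ones get GFP_ATOMIC. A minimal sketch:

```c
#include <linux/crypto.h>
#include <linux/gfp.h>

/* Pick an allocation mode matching the caller's context. */
static gfp_t example_req_gfp(u32 req_flags)
{
	return (req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL
						      : GFP_ATOMIC;
}
```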
/linux/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-hash.c
146 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ss_hash_init() local
150 memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx)); in sun8i_ss_hash_init()
152 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm); in sun8i_ss_hash_init()
153 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in sun8i_ss_hash_init()
155 return crypto_ahash_init(&rctx->fallback_req); in sun8i_ss_hash_init()
160 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ss_hash_export() local
164 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm); in sun8i_ss_hash_export()
165 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in sun8i_ss_hash_export()
167 return crypto_ahash_export(&rctx->fallback_req, out); in sun8i_ss_hash_export()
172 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ss_hash_import() local
[all …]
sun8i-ss-cipher.c
93 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ss_cipher_fallback() local
108 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in sun8i_ss_cipher_fallback()
109 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in sun8i_ss_cipher_fallback()
111 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in sun8i_ss_cipher_fallback()
113 if (rctx->op_dir & SS_DECRYPTION) in sun8i_ss_cipher_fallback()
114 err = crypto_skcipher_decrypt(&rctx->fallback_req); in sun8i_ss_cipher_fallback()
116 err = crypto_skcipher_encrypt(&rctx->fallback_req); in sun8i_ss_cipher_fallback()
125 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ss_setup_ivs() local
130 struct sun8i_ss_flow *sf = &ss->flows[rctx->flow]; in sun8i_ss_setup_ivs()
135 rctx->ivlen = ivsize; in sun8i_ss_setup_ivs()
[all …]
/linux/crypto/
chacha20poly1305.c
74 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in async_done_continue() local
76 rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in async_done_continue()
97 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in poly_verify_tag() local
98 u8 tag[sizeof(rctx->tag)]; in poly_verify_tag()
101 req->assoclen + rctx->cryptlen, in poly_verify_tag()
103 if (crypto_memneq(tag, rctx->tag, sizeof(tag))) in poly_verify_tag()
110 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in poly_copy_tag() local
112 scatterwalk_map_and_copy(rctx->tag, req->dst, in poly_copy_tag()
113 req->assoclen + rctx->cryptlen, in poly_copy_tag()
114 sizeof(rctx->tag), 1); in poly_copy_tag()
[all …]
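poly_verify_tag() is the decrypt-side tag check: read the transmitted tag out of the source scatterlist, then compare against the computed one with crypto_memneq(), whose constant-time behaviour avoids leaking how many tag bytes matched. A sketch of that shape (crypto_memneq() has been declared in crypto/algapi.h for most of this file's history; Poly1305 tags are 16 bytes):

```c
#include <crypto/algapi.h>	/* crypto_memneq() */
#include <crypto/scatterwalk.h>
#include <linux/errno.h>

static int example_verify_tag(struct scatterlist *src,
			      unsigned int tag_offset,
			      const u8 computed[16])
{
	u8 tag[16];

	/* Pull the transmitted tag out of the scatterlist ... */
	scatterwalk_map_and_copy(tag, src, tag_offset, sizeof(tag), 0);
	/* ... and compare without early exit on mismatch. */
	if (crypto_memneq(tag, computed, sizeof(tag)))
		return -EBADMSG;
	return 0;
}
```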
adiantum.c
223 struct adiantum_request_ctx *rctx = skcipher_request_ctx(req); in adiantum_hash_header() local
243 poly1305_core_emit(&state, NULL, &rctx->header_hash); in adiantum_hash_header()
251 struct adiantum_request_ctx *rctx = skcipher_request_ctx(req); in adiantum_hash_message() local
253 struct shash_desc *hash_desc = &rctx->u.hash_desc; in adiantum_hash_message()
282 struct adiantum_request_ctx *rctx = skcipher_request_ctx(req); in adiantum_finish() local
290 if (!rctx->enc) in adiantum_finish()
291 crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes, in adiantum_finish()
292 rctx->rbuf.bytes); in adiantum_finish()
299 rctx->u.hash_desc.tfm = tctx->hash; in adiantum_finish()
300 le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &rctx->header_hash); in adiantum_finish()
[all …]
/linux/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
46 struct rk_ahash_rctx *rctx = ahash_request_ctx(areq); in rk_ahash_digest_fb() local
54 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm); in rk_ahash_digest_fb()
55 rctx->fallback_req.base.flags = areq->base.flags & in rk_ahash_digest_fb()
58 rctx->fallback_req.nbytes = areq->nbytes; in rk_ahash_digest_fb()
59 rctx->fallback_req.src = areq->src; in rk_ahash_digest_fb()
60 rctx->fallback_req.result = areq->result; in rk_ahash_digest_fb()
62 return crypto_ahash_digest(&rctx->fallback_req); in rk_ahash_digest_fb()
90 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); in rk_ahash_reg_init() local
110 CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode | in rk_ahash_reg_init()
122 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); in rk_ahash_init() local
[all …]
rk3288_crypto_skcipher.c
72 struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq); in rk_cipher_fallback() local
79 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in rk_cipher_fallback()
80 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in rk_cipher_fallback()
82 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in rk_cipher_fallback()
84 if (rctx->mode & RK_CRYPTO_DEC) in rk_cipher_fallback()
85 err = crypto_skcipher_decrypt(&rctx->fallback_req); in rk_cipher_fallback()
87 err = crypto_skcipher_encrypt(&rctx->fallback_req); in rk_cipher_fallback()
93 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); in rk_cipher_handle_req() local
103 rctx->dev = rkc; in rk_cipher_handle_req()
157 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); in rk_aes_ecb_encrypt() local
[all …]
/linux/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-hash.c
74 struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ce_hash_init() local
78 memset(rctx, 0, sizeof(struct sun8i_ce_hash_reqctx)); in sun8i_ce_hash_init()
80 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm); in sun8i_ce_hash_init()
81 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in sun8i_ce_hash_init()
83 return crypto_ahash_init(&rctx->fallback_req); in sun8i_ce_hash_init()
88 struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ce_hash_export() local
92 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm); in sun8i_ce_hash_export()
93 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in sun8i_ce_hash_export()
95 return crypto_ahash_export(&rctx->fallback_req, out); in sun8i_ce_hash_export()
100 struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq); in sun8i_ce_hash_import() local
[all …]
sun8i-ce-cipher.c
93 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_cipher_fallback() local
108 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in sun8i_ce_cipher_fallback()
109 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in sun8i_ce_cipher_fallback()
111 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in sun8i_ce_cipher_fallback()
113 if (rctx->op_dir & CE_DECRYPTION) in sun8i_ce_cipher_fallback()
114 err = crypto_skcipher_decrypt(&rctx->fallback_req); in sun8i_ce_cipher_fallback()
116 err = crypto_skcipher_encrypt(&rctx->fallback_req); in sun8i_ce_cipher_fallback()
126 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_cipher_prepare() local
146 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm), in sun8i_ce_cipher_prepare()
153 flow = rctx->flow; in sun8i_ce_cipher_prepare()
[all …]
/linux/drivers/crypto/starfive/
jh7110-hash.c
66 struct starfive_cryp_request_ctx *rctx = ctx->rctx; in starfive_hash_hmac_key() local
74 rctx->csr.hash.hmac = 1; in starfive_hash_hmac_key()
75 rctx->csr.hash.key_flag = 1; in starfive_hash_hmac_key()
77 writel(rctx->csr.hash.v, cryp->base + STARFIVE_HASH_SHACSR); in starfive_hash_hmac_key()
167 struct starfive_cryp_request_ctx *rctx = ahash_request_ctx(req); in starfive_hash_copy_hash() local
175 mlen = rctx->digsize / sizeof(u32); in starfive_hash_copy_hash()
200 struct starfive_cryp_request_ctx *rctx = ctx->rctx; in starfive_hash_one_request() local
210 rctx->csr.hash.v = 0; in starfive_hash_one_request()
211 rctx->csr.hash.mode = ctx->hash_mode; in starfive_hash_one_request()
218 rctx->csr.hash.start = 1; in starfive_hash_one_request()
[all …]
/linux/drivers/crypto/gemini/
sl3516-ce-cipher.c
107 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sl3516_ce_cipher_fallback() local
115 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in sl3516_ce_cipher_fallback()
116 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in sl3516_ce_cipher_fallback()
118 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in sl3516_ce_cipher_fallback()
120 if (rctx->op_dir == CE_DECRYPTION) in sl3516_ce_cipher_fallback()
121 err = crypto_skcipher_decrypt(&rctx->fallback_req); in sl3516_ce_cipher_fallback()
123 err = crypto_skcipher_encrypt(&rctx->fallback_req); in sl3516_ce_cipher_fallback()
132 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sl3516_ce_cipher() local
148 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm), in sl3516_ce_cipher()
185 rctx->t_src[i].addr = sg_dma_address(sg); in sl3516_ce_cipher()
[all …]
/linux/drivers/crypto/
omap-aes-gcm.c
49 struct omap_aes_reqctx *rctx; in omap_aes_gcm_done_task() local
53 rctx = aead_request_ctx(dd->aead_req); in omap_aes_gcm_done_task()
68 scatterwalk_map_and_copy(rctx->auth_tag, in omap_aes_gcm_done_task()
80 tag = (u8 *)rctx->auth_tag; in omap_aes_gcm_done_task()
188 struct omap_aes_reqctx *rctx; in omap_aes_gcm_dma_out_callback() local
197 rctx = aead_request_ctx(dd->aead_req); in omap_aes_gcm_dma_out_callback()
198 auth_tag = (u32 *)rctx->auth_tag; in omap_aes_gcm_dma_out_callback()
221 struct omap_aes_reqctx *rctx = aead_request_ctx(req); in omap_aes_gcm_prepare_req() local
227 rctx->mode &= FLAGS_MODE_MASK; in omap_aes_gcm_prepare_req()
228 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; in omap_aes_gcm_prepare_req()
[all …]
