/linux/drivers/crypto/tegra/

tegra-se-aes.c
  106  struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req);    in tegra_cbc_iv_copyback() local
  111  if (rctx->encrypt)    in tegra_cbc_iv_copyback()
  112  memcpy(req->iv, rctx->datbuf.buf + offset, ctx->ivsize);    in tegra_cbc_iv_copyback()
  209  struct tegra_aes_reqctx *rctx)    in tegra_aes_prep_cmd() argument
  214  dma_addr_t addr = rctx->datbuf.addr;    in tegra_aes_prep_cmd()
  216  data_count = rctx->len / AES_BLOCK_SIZE;    in tegra_aes_prep_cmd()
  217  res_bits = (rctx->len % AES_BLOCK_SIZE) * 8;    in tegra_aes_prep_cmd()
  226  if (rctx->iv) {    in tegra_aes_prep_cmd()
  230  cpuvaddr[i++] = rctx->iv[j];    in tegra_aes_prep_cmd()
  238  cpuvaddr[i++] = rctx->config;    in tegra_aes_prep_cmd()
  [all …]
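The tegra hits split the request length into whole AES blocks plus a residual
bit count before the command buffer is built. A minimal standalone sketch of
that arithmetic (hypothetical helper name, not the driver's):

#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Mirror of the data_count/res_bits computation in tegra_aes_prep_cmd():
 * the engine is programmed with whole 16-byte blocks plus any tail, in bits. */
static void split_len(unsigned int len,
		      unsigned int *data_count, unsigned int *res_bits)
{
	*data_count = len / AES_BLOCK_SIZE;	/* whole AES blocks */
	*res_bits = (len % AES_BLOCK_SIZE) * 8;	/* tail bytes, in bits */
}

int main(void)
{
	unsigned int blocks, bits;

	split_len(100, &blocks, &bits);
	printf("blocks=%u res_bits=%u\n", blocks, bits); /* blocks=6 res_bits=32 */
	return 0;
}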
|
/linux/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
  28  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req);    in ccp_aes_cmac_complete() local
  34  if (rctx->hash_rem) {    in ccp_aes_cmac_complete()
  36  unsigned int offset = rctx->nbytes - rctx->hash_rem;    in ccp_aes_cmac_complete()
  38  scatterwalk_map_and_copy(rctx->buf, rctx->src,    in ccp_aes_cmac_complete()
  39  offset, rctx->hash_rem, 0);    in ccp_aes_cmac_complete()
  40  rctx->buf_count = rctx->hash_rem;    in ccp_aes_cmac_complete()
  42  rctx->buf_count = 0;    in ccp_aes_cmac_complete()
  46  if (req->result && rctx->final)    in ccp_aes_cmac_complete()
  47  memcpy(req->result, rctx->iv, digest_size);    in ccp_aes_cmac_complete()
  50  sg_free_table(&rctx->data_sg);    in ccp_aes_cmac_complete()
  [all …]
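This completion handler and ccp_sha_complete() in the next entry share one
idiom: whatever tail the engine did not consume is copied out of the source
scatterlist into a per-request buffer so the next update can prepend it. A
hedged kernel-style sketch of just that step, with the struct layout assumed
from the field names in the hits:

#include <linux/types.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

struct demo_carry_ctx {
	struct scatterlist *src;	/* source of the current update */
	unsigned int nbytes;		/* total bytes submitted */
	unsigned int hash_rem;		/* bytes the engine left unhashed */
	unsigned int buf_count;		/* valid bytes staged in buf */
	u8 buf[64];			/* one block of carry space (assumed size) */
};

static void demo_carry_remainder(struct demo_carry_ctx *rctx)
{
	if (rctx->hash_rem) {
		/* the tail starts where the hashed portion ended */
		unsigned int offset = rctx->nbytes - rctx->hash_rem;

		scatterwalk_map_and_copy(rctx->buf, rctx->src,
					 offset, rctx->hash_rem, 0);
		rctx->buf_count = rctx->hash_rem;
	} else {
		rctx->buf_count = 0;
	}
}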
|
ccp-crypto-sha.c
  31  struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req);    in ccp_sha_complete() local
  37  if (rctx->hash_rem) {    in ccp_sha_complete()
  39  unsigned int offset = rctx->nbytes - rctx->hash_rem;    in ccp_sha_complete()
  41  scatterwalk_map_and_copy(rctx->buf, rctx->src,    in ccp_sha_complete()
  42  offset, rctx->hash_rem, 0);    in ccp_sha_complete()
  43  rctx->buf_count = rctx->hash_rem;    in ccp_sha_complete()
  45  rctx->buf_count = 0;    in ccp_sha_complete()
  49  if (req->result && rctx->final)    in ccp_sha_complete()
  50  memcpy(req->result, rctx->ctx, digest_size);    in ccp_sha_complete()
  53  sg_free_table(&rctx->data_sg);    in ccp_sha_complete()
  [all …]
|
ccp-crypto-aes-xts.c
  65  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);    in ccp_aes_xts_complete() local
  70  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);    in ccp_aes_xts_complete()
  109  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req);    in ccp_aes_xts_crypt() local
  151  skcipher_request_set_tfm(&rctx->fallback_req,    in ccp_aes_xts_crypt()
  153  skcipher_request_set_callback(&rctx->fallback_req,    in ccp_aes_xts_crypt()
  157  skcipher_request_set_crypt(&rctx->fallback_req, req->src,    in ccp_aes_xts_crypt()
  159  ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :    in ccp_aes_xts_crypt()
  160  crypto_skcipher_decrypt(&rctx->fallback_req);    in ccp_aes_xts_crypt()
  164  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);    in ccp_aes_xts_crypt()
  165  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);    in ccp_aes_xts_crypt()
  [all …]
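These hits show the standard fallback idiom for hardware skciphers: the
request context embeds a sub-request aimed at a software tfm allocated at
init time, and the whole operation is simply re-targeted when the hardware
cannot handle it. The same shape appears in the rockchip, gemini, and
amlogic entries further down. A sketch with illustrative type names; only
the crypto API calls are real:

#include <crypto/skcipher.h>

struct demo_cipher_ctx {
	struct crypto_skcipher *fallback_tfm;	/* from crypto_alloc_skcipher() */
};

struct demo_cipher_reqctx {
	struct skcipher_request fallback_req;	/* must be last: variable size */
};

static int demo_fallback_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct demo_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct demo_cipher_reqctx *rctx = skcipher_request_ctx(req);

	/* forward the caller's flags and completion so async behavior
	 * is preserved end to end */
	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
				   req->cryptlen, req->iv);

	return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
			 crypto_skcipher_decrypt(&rctx->fallback_req);
}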
|
ccp-crypto-aes-galois.c
  80  struct ccp_aes_req_ctx *rctx = aead_request_ctx_dma(req);    in ccp_aes_gcm_crypt() local
  105  memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);    in ccp_aes_gcm_crypt()
  107  rctx->iv[i + GCM_AES_IV_SIZE] = 0;    in ccp_aes_gcm_crypt()
  108  rctx->iv[AES_BLOCK_SIZE - 1] = 1;    in ccp_aes_gcm_crypt()
  111  iv_sg = &rctx->iv_sg;    in ccp_aes_gcm_crypt()
  113  sg_init_one(iv_sg, rctx->iv, iv_len);    in ccp_aes_gcm_crypt()
  116  memset(&rctx->cmd, 0, sizeof(rctx->cmd));    in ccp_aes_gcm_crypt()
  117  INIT_LIST_HEAD(&rctx->cmd.entry);    in ccp_aes_gcm_crypt()
  118  rctx->cmd.engine = CCP_ENGINE_AES;    in ccp_aes_gcm_crypt()
  119  rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);    in ccp_aes_gcm_crypt()
  [all …]
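Hits 105-108 are the GCM counter-block setup: with a 96-bit IV, the initial
counter block J0 is the IV followed by a 32-bit big-endian counter starting
at 1 (NIST SP 800-38D). A standalone sketch of that step:

#include <string.h>

#define GCM_AES_IV_SIZE 12
#define AES_BLOCK_SIZE  16

/* Build J0 = IV || 0x00000001, as in ccp_aes_gcm_crypt() above. */
static void gcm_build_j0(unsigned char j0[AES_BLOCK_SIZE],
			 const unsigned char iv[GCM_AES_IV_SIZE])
{
	int i;

	memcpy(j0, iv, GCM_AES_IV_SIZE);
	for (i = 0; i < 3; i++)
		j0[i + GCM_AES_IV_SIZE] = 0;	/* counter high bytes */
	j0[AES_BLOCK_SIZE - 1] = 1;		/* counter low byte = 1 */
}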
|
ccp-crypto-rsa.c
  47  struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx_dma(req);    in ccp_rsa_complete() local
  52  req->dst_len = rctx->cmd.u.rsa.key_size >> 3;    in ccp_rsa_complete()
  68  struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx_dma(req);    in ccp_rsa_crypt() local
  71  memset(&rctx->cmd, 0, sizeof(rctx->cmd));    in ccp_rsa_crypt()
  72  INIT_LIST_HEAD(&rctx->cmd.entry);    in ccp_rsa_crypt()
  73  rctx->cmd.engine = CCP_ENGINE_RSA;    in ccp_rsa_crypt()
  75  rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */    in ccp_rsa_crypt()
  77  rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;    in ccp_rsa_crypt()
  78  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;    in ccp_rsa_crypt()
  80  rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;    in ccp_rsa_crypt()
  [all …]
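The driver tracks the RSA key size in bits (see the comment on hit 75) and
derives the output length with a shift on hit 52; hits 77-80 then pick the
public exponent e or private exponent d by direction. A one-function sketch
of the sizing rule, helper name ours:

/* key_size is kept in bits, so the result length in bytes is key_size >> 3 */
static unsigned int demo_rsa_output_bytes(unsigned int key_size_bits)
{
	return key_size_bits >> 3;	/* e.g. a 2048-bit key -> 256 bytes */
}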
|
/linux/drivers/crypto/cavium/nitrox/

nitrox_aead.c
  151  static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)    in nitrox_set_creq() argument
  153  struct se_crypto_request *creq = &rctx->nkreq.creq;    in nitrox_set_creq()
  157  creq->flags = rctx->flags;    in nitrox_set_creq()
  158  creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :    in nitrox_set_creq()
  163  creq->ctrl.s.arg = rctx->ctrl_arg;    in nitrox_set_creq()
  165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);    in nitrox_set_creq()
  166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);    in nitrox_set_creq()
  167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);    in nitrox_set_creq()
  169  param3.auth_offset = rctx->ivsize;    in nitrox_set_creq()
  172  creq->ctx_handle = rctx->ctx_handle;    in nitrox_set_creq()
  [all …]
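Hit 158 is a common driver rule worth isolating: the allocation class is
derived from the request flags, because a caller that set
CRYPTO_TFM_REQ_MAY_SLEEP tolerates blocking while anyone else (softirq
callers, for instance) must get atomic allocations. As a helper, name ours:

#include <linux/crypto.h>
#include <linux/gfp.h>

/* GFP_KERNEL only when the submitter said it may sleep; else GFP_ATOMIC. */
static inline gfp_t demo_req_gfp(u32 flags)
{
	return (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
}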
|
/linux/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss-hash.c
  146  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);    in sun8i_ss_hash_init() local
  150  memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));    in sun8i_ss_hash_init()
  152  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);    in sun8i_ss_hash_init()
  153  ahash_request_set_callback(&rctx->fallback_req,    in sun8i_ss_hash_init()
  157  return crypto_ahash_init(&rctx->fallback_req);    in sun8i_ss_hash_init()
  162  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);    in sun8i_ss_hash_export() local
  166  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);    in sun8i_ss_hash_export()
  167  ahash_request_set_callback(&rctx->fallback_req,    in sun8i_ss_hash_export()
  171  return crypto_ahash_export(&rctx->fallback_req, out);    in sun8i_ss_hash_export()
  176  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);    in sun8i_ss_hash_import() local
  [all …]
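This is the ahash flavor of the fallback idiom: every entry point re-targets
the operation at a software tfm kept in the tfm context, with the sub-request
at the end of the request context (img-hash.c further down uses the same
shape for update/final). A hedged sketch; type names are illustrative, only
the crypto API calls are real:

#include <crypto/hash.h>
#include <linux/string.h>

struct demo_hash_tfmctx {
	struct crypto_ahash *fallback_tfm;	/* from crypto_alloc_ahash() */
};

struct demo_hash_reqctx {
	struct ahash_request fallback_req;	/* must be last: variable size */
};

static int demo_hash_init(struct ahash_request *areq)
{
	struct demo_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct demo_hash_tfmctx *tfmctx = crypto_ahash_ctx(tfm);

	memset(rctx, 0, sizeof(*rctx));

	/* delegate to the software implementation, keeping the caller's
	 * flags and completion callback */
	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req, areq->base.flags,
				   areq->base.complete, areq->base.data);

	return crypto_ahash_init(&rctx->fallback_req);
}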
|
/linux/drivers/crypto/qce/

common.c
  151  struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);    in qce_setup_regs_ahash() local
  162  if (!rctx->last_blk && req->nbytes % blocksize)    in qce_setup_regs_ahash()
  167  if (IS_CMAC(rctx->flags)) {    in qce_setup_regs_ahash()
  175  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);    in qce_setup_regs_ahash()
  178  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {    in qce_setup_regs_ahash()
  179  u32 authkey_words = rctx->authklen / sizeof(u32);    in qce_setup_regs_ahash()
  181  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);    in qce_setup_regs_ahash()
  186  if (IS_CMAC(rctx->flags))    in qce_setup_regs_ahash()
  189  if (rctx->first_blk)    in qce_setup_regs_ahash()
  190  memcpy(auth, rctx->digest, digestsize);    in qce_setup_regs_ahash()
  [all …]
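Hit 181 converts the HMAC/CMAC key into big-endian 32-bit words before it is
written to hardware registers. A hedged sketch of what
qce_cpu_to_be32p_array() implies; the helper name and the memcpy-based
unaligned load are ours:

#include <asm/byteorder.h>
#include <linux/string.h>
#include <linux/types.h>

/* Swap each 32-bit word to big-endian on little-endian hosts. */
static void demo_cpu_to_be32_array(__be32 *dst, const u8 *src, unsigned int len)
{
	unsigned int i;
	u32 w;

	for (i = 0; i < len / sizeof(u32); i++) {
		memcpy(&w, src + i * sizeof(u32), sizeof(w)); /* alignment-safe */
		dst[i] = cpu_to_be32(w);
	}
}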
|
/linux/drivers/crypto/rockchip/

rk3288_crypto_skcipher.c
  72  struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);    in rk_cipher_fallback() local
  79  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);    in rk_cipher_fallback()
  80  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,    in rk_cipher_fallback()
  82  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,    in rk_cipher_fallback()
  84  if (rctx->mode & RK_CRYPTO_DEC)    in rk_cipher_fallback()
  85  err = crypto_skcipher_decrypt(&rctx->fallback_req);    in rk_cipher_fallback()
  87  err = crypto_skcipher_encrypt(&rctx->fallback_req);    in rk_cipher_fallback()
  93  struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);    in rk_cipher_handle_req() local
  103  rctx->dev = rkc;    in rk_cipher_handle_req()
  157  struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);    in rk_aes_ecb_encrypt() local
  [all …]
|
/linux/drivers/crypto/starfive/

jh7110-hash.c
  66  struct starfive_cryp_request_ctx *rctx = ctx->rctx;    in starfive_hash_hmac_key() local
  74  rctx->csr.hash.hmac = 1;    in starfive_hash_hmac_key()
  75  rctx->csr.hash.key_flag = 1;    in starfive_hash_hmac_key()
  77  writel(rctx->csr.hash.v, cryp->base + STARFIVE_HASH_SHACSR);    in starfive_hash_hmac_key()
  167  struct starfive_cryp_request_ctx *rctx = ahash_request_ctx(req);    in starfive_hash_copy_hash() local
  175  mlen = rctx->digsize / sizeof(u32);    in starfive_hash_copy_hash()
  200  struct starfive_cryp_request_ctx *rctx = ctx->rctx;    in starfive_hash_one_request() local
  210  rctx->csr.hash.v = 0;    in starfive_hash_one_request()
  211  rctx->csr.hash.mode = ctx->hash_mode;    in starfive_hash_one_request()
  218  rctx->csr.hash.start = 1;    in starfive_hash_one_request()
  [all …]
|
/linux/drivers/crypto/gemini/

sl3516-ce-cipher.c
  107  struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sl3516_ce_cipher_fallback() local
  115  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);    in sl3516_ce_cipher_fallback()
  116  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,    in sl3516_ce_cipher_fallback()
  118  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,    in sl3516_ce_cipher_fallback()
  120  if (rctx->op_dir == CE_DECRYPTION)    in sl3516_ce_cipher_fallback()
  121  err = crypto_skcipher_decrypt(&rctx->fallback_req);    in sl3516_ce_cipher_fallback()
  123  err = crypto_skcipher_encrypt(&rctx->fallback_req);    in sl3516_ce_cipher_fallback()
  132  struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sl3516_ce_cipher() local
  148  rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),    in sl3516_ce_cipher()
  185  rctx->t_src[i].addr = sg_dma_address(sg);    in sl3516_ce_cipher()
  [all …]
|
/linux/crypto/
hctr2.c
  138  struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);    in hctr2_hash_tweak() local
  139  struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;    in hctr2_hash_tweak()
  149  polyval_export_blkaligned(poly_ctx, &rctx->hashed_tweak);    in hctr2_hash_tweak()
  157  struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);    in hctr2_hash_message() local
  158  struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;    in hctr2_hash_message()
  182  struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);    in hctr2_finish() local
  183  struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;    in hctr2_finish()
  189  &rctx->hashed_tweak);    in hctr2_finish()
  190  hctr2_hash_message(req, rctx->bulk_part_dst, digest);    in hctr2_finish()
  191  crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);    in hctr2_finish()
  [all …]
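Hit 191 is the masking step of HCTR2's finish path: the POLYVAL digest of
tweak and bulk message is folded into the first block. A sketch of that
single step, with the surrounding HCTR2 state machine omitted:

#include <crypto/algapi.h>

/* crypto_xor() does dst[i] ^= src[i] over the given length, in place. */
static void demo_mask_first_block(u8 *first_block, const u8 *digest,
				  unsigned int blocksize)
{
	crypto_xor(first_block, digest, blocksize);
}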
|
essiv.c
  172  struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);    in essiv_aead_done() local
  177  kfree(rctx->assoc);    in essiv_aead_done()
  187  struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);    in essiv_aead_crypt() local
  188  struct aead_request *subreq = &rctx->aead_req;    in essiv_aead_crypt()
  204  rctx->assoc = NULL;    in essiv_aead_crypt()
  217  sg_init_table(rctx->sg, 4);    in essiv_aead_crypt()
  224  rctx->assoc = kmalloc(ssize, GFP_ATOMIC);    in essiv_aead_crypt()
  225  if (!rctx->assoc)    in essiv_aead_crypt()
  228  scatterwalk_map_and_copy(rctx->assoc, req->src, 0,    in essiv_aead_crypt()
  230  sg_set_buf(rctx->sg, rctx->assoc, ssize);    in essiv_aead_crypt()
  [all …]
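The essiv hits show associated data being linearized into a kmalloc'd buffer
that heads a rebuilt 4-entry scatterlist, with the pointer stashed in the
request context so essiv_aead_done() can kfree() it. A hedged sketch of that
staging step; entries 1-3 of the table are omitted:

#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <crypto/scatterwalk.h>

static int demo_stage_assoc(struct scatterlist sg[4], struct scatterlist *src,
			    unsigned int ssize, void **assoc_out)
{
	void *assoc = kmalloc(ssize, GFP_ATOMIC); /* may run in softirq */

	if (!assoc)
		return -ENOMEM;

	sg_init_table(sg, 4);
	scatterwalk_map_and_copy(assoc, src, 0, ssize, 0); /* sg -> linear */
	sg_set_buf(&sg[0], assoc, ssize);

	*assoc_out = assoc; /* freed by the completion handler */
	return 0;
}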
|
/linux/drivers/crypto/amlogic/

amlogic-gxl-cipher.c
  62  struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in meson_cipher_do_fallback() local
  71  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);    in meson_cipher_do_fallback()
  72  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,    in meson_cipher_do_fallback()
  74  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,    in meson_cipher_do_fallback()
  77  if (rctx->op_dir == MESON_DECRYPT)    in meson_cipher_do_fallback()
  78  err = crypto_skcipher_decrypt(&rctx->fallback_req);    in meson_cipher_do_fallback()
  80  err = crypto_skcipher_encrypt(&rctx->fallback_req);    in meson_cipher_do_fallback()
  88  struct meson_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in meson_cipher() local
  92  int flow = rctx->flow;    in meson_cipher()
  109  rctx->op_dir, crypto_skcipher_ivsize(tfm),    in meson_cipher()
  [all …]
|
/linux/drivers/crypto/allwinner/sun4i-ss/

sun4i-ss-cipher.c
  401  struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sun4i_ss_cbc_aes_encrypt() local
  403  rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |    in sun4i_ss_cbc_aes_encrypt()
  412  struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sun4i_ss_cbc_aes_decrypt() local
  414  rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_DECRYPTION |    in sun4i_ss_cbc_aes_decrypt()
  424  struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sun4i_ss_ecb_aes_encrypt() local
  426  rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |    in sun4i_ss_ecb_aes_encrypt()
  435  struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sun4i_ss_ecb_aes_decrypt() local
  437  rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_DECRYPTION |    in sun4i_ss_ecb_aes_decrypt()
  447  struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);    in sun4i_ss_cbc_des_encrypt() local
  449  rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |    in sun4i_ss_cbc_des_encrypt()
  [all …]
|
/linux/drivers/crypto/marvell/octeontx/

otx_cptvf_algs.c
  101  struct otx_cpt_req_ctx *rctx;    in validate_hmac_cipher_null() local
  107  rctx = aead_request_ctx_dma(req);    in validate_hmac_cipher_null()
  108  if (memcmp(rctx->fctx.hmac.s.hmac_calc,    in validate_hmac_cipher_null()
  109  rctx->fctx.hmac.s.hmac_recv,    in validate_hmac_cipher_null()
  150  struct otx_cpt_req_ctx *rctx;    in output_iv_copyback() local
  159  rctx = skcipher_request_ctx_dma(sreq);    in output_iv_copyback()
  160  req_info = &rctx->cpt_req;    in output_iv_copyback()
  237  struct otx_cpt_req_ctx *rctx = skcipher_request_ctx_dma(req);    in create_ctx_hdr() local
  238  struct otx_cpt_req_info *req_info = &rctx->cpt_req;    in create_ctx_hdr()
  241  struct otx_cpt_fc_ctx *fctx = &rctx->fctx;    in create_ctx_hdr()
  [all …]
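Hits 108-109 compare the engine-computed HMAC against the one received in
the frame. A hedged sketch of that tag check; note it substitutes
crypto_memneq(), the kernel's constant-time comparator, for the memcmp()
visible in the hit, and the length parameter is an assumption:

#include <crypto/algapi.h>	/* crypto_memneq() */
#include <linux/errno.h>
#include <linux/types.h>

/* Reject the request with -EBADMSG when the tags differ. */
static int demo_check_icv(const u8 *calc, const u8 *recv, unsigned int len)
{
	return crypto_memneq(calc, recv, len) ? -EBADMSG : 0;
}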
|
/linux/drivers/crypto/ti/

dthev2-aes.c
  181  struct dthe_aes_req_ctx *rctx,    in dthe_aes_set_ctrl_key() argument
  220  if (rctx->enc)    in dthe_aes_set_ctrl_key()
  257  struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);    in dthe_aes_dma_in_callback() local
  259  complete(&rctx->aes_compl);    in dthe_aes_dma_in_callback()
  267  struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);    in dthe_aes_run() local
  349  init_completion(&rctx->aes_compl);    in dthe_aes_run()
  352  dthe_aes_set_ctrl_key(ctx, rctx, NULL);    in dthe_aes_run()
  354  dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv);    in dthe_aes_run()
  366  ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS));    in dthe_aes_run()
  403  struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);    in dthe_aes_crypt() local
  [all …]
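Hits 259, 349, and 366 are the DMA synchronization pattern: the request
context carries a completion, the DMA callback fires it, and the submitter
bounds the wait with a timeout so a wedged engine cannot hang the queue. A
hedged sketch; names and the timeout value are ours, not the driver's:

#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/jiffies.h>

#define DEMO_DMA_TIMEOUT_MS 2000	/* assumed; the driver defines its own */

struct demo_aes_reqctx {
	struct completion aes_compl;
};

/* DMA-done callback: just signal the waiter. */
static void demo_dma_in_callback(void *data)
{
	struct demo_aes_reqctx *rctx = data;

	complete(&rctx->aes_compl);
}

/* Submit side: init_completion(&rctx->aes_compl) must have run before the
 * DMA was started; then wait, but never forever. */
static int demo_wait_for_dma(struct demo_aes_reqctx *rctx)
{
	if (!wait_for_completion_timeout(&rctx->aes_compl,
					 msecs_to_jiffies(DEMO_DMA_TIMEOUT_MS)))
		return -ETIMEDOUT;

	return 0;
}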
|
/linux/drivers/crypto/
img-hash.c
  490  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);    in img_hash_init() local
  493  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);    in img_hash_init()
  494  ahash_request_set_callback(&rctx->fallback_req,    in img_hash_init()
  498  return crypto_ahash_init(&rctx->fallback_req);    in img_hash_init()
  554  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);    in img_hash_update() local
  558  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);    in img_hash_update()
  559  ahash_request_set_callback(&rctx->fallback_req,    in img_hash_update()
  562  ahash_request_set_crypt(&rctx->fallback_req, req->src, NULL, req->nbytes);    in img_hash_update()
  564  return crypto_ahash_update(&rctx->fallback_req);    in img_hash_update()
  569  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);    in img_hash_final() local
  [all …]
|
hifn_795x.c
  1095  struct hifn_context *ctx, struct hifn_request_context *rctx,    in hifn_setup_cmd_desc() argument
  1107  switch (rctx->op) {    in hifn_setup_cmd_desc()
  1124  if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {    in hifn_setup_cmd_desc()
  1129  if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)    in hifn_setup_cmd_desc()
  1132  switch (rctx->mode) {    in hifn_setup_cmd_desc()
  1149  switch (rctx->type) {    in hifn_setup_cmd_desc()
  1184  rctx->iv, rctx->ivsize, md);    in hifn_setup_cmd_desc()
  1303  struct hifn_context *ctx, struct hifn_request_context *rctx,    in hifn_setup_dma() argument
  1324  t = &rctx->walk.cache[0];    in hifn_setup_dma()
  1327  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {    in hifn_setup_dma()
  [all …]
|
/linux/kernel/events/

internal.h
  215  unsigned char rctx = interrupt_context_level();    in DEFINE_OUTPUT_COPY() local
  217  if (recursion[rctx])    in DEFINE_OUTPUT_COPY()
  220  recursion[rctx]++;    in DEFINE_OUTPUT_COPY()
  223  return rctx;    in DEFINE_OUTPUT_COPY()
  226  static inline void put_recursion_context(u8 *recursion, unsigned char rctx)    in put_recursion_context() argument
  229  recursion[rctx]--;    in put_recursion_context()
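These hits are perf's per-CPU recursion guard: one flag per context level
(task, softirq, hardirq, NMI), so a second event firing at the same level on
the same CPU sees the flag set and bails out, which bounds nesting. A
paraphrased sketch; interrupt_context_level() is the real kernel helper,
the function names here are ours:

#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/types.h>

static inline int demo_get_recursion_context(u8 *recursion)
{
	unsigned char rctx = interrupt_context_level();

	if (recursion[rctx])
		return -1;		/* already active at this level */

	recursion[rctx]++;
	barrier();			/* order flag vs. the protected work */

	return rctx;
}

static inline void demo_put_recursion_context(u8 *recursion, unsigned char rctx)
{
	barrier();
	recursion[rctx]--;
}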
|
/linux/drivers/crypto/cavium/cpt/

cptvf_algs.c
  100  struct cvm_req_ctx *rctx = skcipher_request_ctx_dma(req);    in create_ctx_hdr() local
  101  struct fc_context *fctx = &rctx->fctx;    in create_ctx_hdr()
  103  struct cpt_request_info *req_info = &rctx->cpt_req;    in create_ctx_hdr()
  132  offset_control = (__be64 *)&rctx->control_word;    in create_ctx_hdr()
  154  struct cvm_req_ctx *rctx = skcipher_request_ctx_dma(req);    in create_input_list() local
  155  struct cpt_request_info *req_info = &rctx->cpt_req;    in create_input_list()
  176  struct cvm_req_ctx *rctx = skcipher_request_ctx_dma(req);    in create_output_list() local
  177  struct cpt_request_info *req_info = &rctx->cpt_req;    in create_output_list()
  196  struct cvm_req_ctx *rctx = skcipher_request_ctx_dma(req);    in cvm_enc_dec() local
  198  struct fc_context *fctx = &rctx->fctx;    in cvm_enc_dec()
  [all …]
|
/linux/include/trace/

perf.h
  29  int rctx; \
  43  entry = perf_trace_buf_alloc(__entry_size, &__regs, &rctx); \
  53  perf_trace_run_bpf_submit(entry, __entry_size, rctx, \
|
/linux/drivers/crypto/caam/

caamalg.c
  1050  struct caam_aead_req_ctx *rctx = aead_request_ctx(req);    in aead_crypt_done() local
  1058  edesc = rctx->edesc;    in aead_crypt_done()
  1090  struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);    in skcipher_crypt_done() local
  1099  edesc = rctx->edesc;    in skcipher_crypt_done()
  1392  struct caam_aead_req_ctx *rctx = aead_request_ctx(req);    in aead_edesc_alloc() local
  1493  rctx->edesc = edesc;    in aead_edesc_alloc()
  1528  struct caam_aead_req_ctx *rctx = aead_request_ctx(req);    in aead_enqueue_req() local
  1529  struct aead_edesc *edesc = rctx->edesc;    in aead_enqueue_req()
  1546  kfree(rctx->edesc);    in aead_enqueue_req()
  1624  struct caam_aead_req_ctx *rctx = aead_request_ctx(req);    in aead_do_one_req() local
  [all …]
|
/linux/drivers/infiniband/sw/siw/

siw.h
  473  #define rx_wqe(rctx) (&(rctx)->wqe_active)    argument
  474  #define rx_mem(rctx) ((rctx)->wqe_active.mem[0])    argument
|