/linux/drivers/crypto/qce/
  core.c
    qce_handle_request():
       70  static int qce_handle_request(struct crypto_async_request *async_req)   (argument)
       74  u32 type = crypto_tfm_alg_type(async_req->tfm);
       80  ret = ops->async_req_handle(async_req);
    qce_handle_queue():
       90  struct crypto_async_request *async_req, *backlog;   (local)
      102  async_req = crypto_dequeue_request(&qce->queue);
      103  if (async_req)
      104  qce->req = async_req;
      107  if (!async_req)
      115  err = qce_handle_request(async_req);
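The qce_handle_queue() hits above are the classic crypto_queue dispatch pattern: enqueue under a lock, let exactly one request own the hardware, and signal a backlogged submitter with -EINPROGRESS once its request is dequeued. A minimal sketch of that pattern, assuming a hypothetical struct my_device and my_handle_queue(); only the crypto_queue helpers are the real <crypto/algapi.h> API.

/* Sketch of the dispatch loop visible in qce_handle_queue(). */
#include <crypto/algapi.h>
#include <linux/spinlock.h>

struct my_device {
	spinlock_t lock;
	struct crypto_queue queue;
	struct crypto_async_request *req;	/* request owning the hw */
};

static int my_handle_queue(struct my_device *dev,
			   struct crypto_async_request *new_req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&dev->lock, flags);
	if (new_req)
		ret = crypto_enqueue_request(&dev->queue, new_req);
	if (dev->req) {				/* hw busy, stay queued */
		spin_unlock_irqrestore(&dev->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	if (async_req)
		dev->req = async_req;
	spin_unlock_irqrestore(&dev->lock, flags);

	if (!async_req)
		return ret;

	if (backlog)
		/* older kernels: backlog->complete(backlog, -EINPROGRESS) */
		crypto_request_complete(backlog, -EINPROGRESS);

	/* Program the hardware for async_req here; on failure, complete
	 * the request with the error and re-run the queue. */
	return ret;
}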
  common.c
    qce_setup_regs_ahash():
      147  static int qce_setup_regs_ahash(struct crypto_async_request *async_req)   (argument)
      149  struct ahash_request *req = ahash_request_cast(async_req);
      150  struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
      152  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
      155  unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
    qce_setup_regs_skcipher():
      314  static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)   (argument)
      316  struct skcipher_request *req = skcipher_request_cast(async_req);
      318  struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
    qce_setup_regs_aead():
      420  static int qce_setup_regs_aead(struct crypto_async_request *async_req)   (argument)
      422  struct aead_request *req = aead_request_cast(async_req);
    [all …]
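All three qce_setup_regs_*() hits start the same way: the generic crypto_async_request identifies its algorithm type through async_req->tfm, and the driver casts it to the matching concrete request. A hedged sketch of that dispatch; my_start() is an illustrative name, and note that aead_request_cast() lives in an internal crypto header on current kernels.

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/internal/aead.h>	/* aead_request_cast() */
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/printk.h>

static int my_start(struct crypto_async_request *async_req)
{
	u32 type = crypto_tfm_alg_type(async_req->tfm);

	switch (type) {
	case CRYPTO_ALG_TYPE_AHASH: {
		struct ahash_request *req = ahash_request_cast(async_req);

		pr_debug("hash, %u bytes\n", req->nbytes);
		return 0;
	}
	case CRYPTO_ALG_TYPE_SKCIPHER: {
		struct skcipher_request *req = skcipher_request_cast(async_req);

		pr_debug("cipher, %u bytes\n", req->cryptlen);
		return 0;
	}
	case CRYPTO_ALG_TYPE_AEAD: {
		struct aead_request *req = aead_request_cast(async_req);

		pr_debug("aead, %u bytes\n", req->cryptlen);
		return 0;
	}
	default:
		return -EINVAL;
	}
}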
  common.h
      102  int qce_start(struct crypto_async_request *async_req, u32 type);
/linux/drivers/net/usb/
  rtl8150.c
      149  struct async_req {   (struct)
    async_set_reg_cb():
      177  struct async_req *req = (struct async_req *)urb->context;
    async_set_registers():
      190  struct async_req *req;
      192  req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);
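rtl8150's struct async_req exists so a register write can be issued from atomic context: the setup packet and payload are kmalloc'ed with GFP_ATOMIC, a control URB is submitted, and the completion callback frees the lot. A sketch under those assumptions; my_async_write(), my_async_cb(), and the vendor request number are illustrative, not the driver's actual values.

#include <linux/slab.h>
#include <linux/usb.h>

struct my_async_req {
	struct usb_ctrlrequest dr;	/* must outlive the URB */
	__le16 value;			/* payload written to the device */
};

static void my_async_cb(struct urb *urb)
{
	struct my_async_req *req = urb->context;

	if (urb->status)
		dev_dbg(&urb->dev->dev, "async write failed: %d\n",
			urb->status);
	kfree(req);
	usb_free_urb(urb);
}

static int my_async_write(struct usb_device *udev, u16 reg, u16 value)
{
	struct my_async_req *req;
	struct urb *urb;
	int ret;

	req = kmalloc(sizeof(*req), GFP_ATOMIC);	/* may run in softirq */
	if (!req)
		return -ENOMEM;

	urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!urb) {
		kfree(req);
		return -ENOMEM;
	}

	req->value = cpu_to_le16(value);
	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;		/* hypothetical vendor request */
	req->dr.wValue = cpu_to_le16(reg);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(sizeof(req->value));

	usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr,
			     &req->value, sizeof(req->value),
			     my_async_cb, req);

	ret = usb_submit_urb(urb, GFP_ATOMIC);
	if (ret) {
		kfree(req);
		usb_free_urb(urb);
	}
	return ret;
}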
/linux/drivers/crypto/ccp/
  ccp-crypto-aes-xts.c
    ccp_aes_xts_complete():
       62  static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)   (argument)
       64  struct skcipher_request *req = skcipher_request_cast(async_req);
  ccp-crypto-rsa.c
    ccp_rsa_complete():
       44  static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)   (argument)
       46  struct akcipher_request *req = akcipher_request_cast(async_req);
  ccp-crypto-aes-cmac.c
    ccp_aes_cmac_complete():
       23  static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,   (argument)
       26  struct ahash_request *req = ahash_request_cast(async_req);
  ccp-crypto-sha.c
    ccp_sha_complete():
       27  static int ccp_sha_complete(struct crypto_async_request *async_req, int ret)   (argument)
       29  struct ahash_request *req = ahash_request_cast(async_req);
  ccp-crypto-aes-galois.c
    ccp_aes_gcm_complete():
       24  static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)   (argument)
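The five ccp completion hooks above all have the same shape: cast the generic request back to its concrete type, then publish per-request results. A minimal ahash-flavoured sketch; struct my_req_ctx, the staged digest, and my_complete() are assumptions, while the cast and tfm helpers are the real crypto API (ahash_request_ctx() is in an internal header).

#include <crypto/hash.h>
#include <crypto/internal/hash.h>	/* ahash_request_ctx() */
#include <linux/string.h>

struct my_req_ctx {
	u8 digest[64];			/* staged by hw before completion */
};

static int my_complete(struct crypto_async_request *async_req, int ret)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct my_req_ctx *rctx = ahash_request_ctx(req);

	if (ret)
		return ret;		/* nothing to publish on error */

	if (req->result)
		memcpy(req->result, rctx->digest,
		       crypto_ahash_digestsize(tfm));
	return 0;
}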
/linux/drivers/nvme/host/
  tcp.c
      198  struct nvme_tcp_request async_req;   (member)
    nvme_tcp_async_req():
      294  return req == &req->queue->ctrl->async_req;
    nvme_tcp_free_async_req():
     1415  struct nvme_tcp_request *async = &ctrl->async_req;
    nvme_tcp_alloc_async_req():
     1423  struct nvme_tcp_request *async = &ctrl->async_req;
    nvme_tcp_free_admin_queue():
     2020  if (to_tcp_ctrl(ctrl)->async_req.pdu) {
     2023  to_tcp_ctrl(ctrl)->async_req.pdu = NULL;
    nvme_tcp_submit_async_event():
     2604  struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu;
     2620  ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;
     2621  ctrl->async_req.offset = 0;
     2622  ctrl->async_req.curr_bio = NULL;
    [all …]
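The tcp.c hits trace one idea: the Async Event Request has no block-layer request or tag behind it, so the driver embeds a single dedicated nvme_tcp_request in the controller, allocates its PDU separately, and recognizes it by address (line 294). A pared-down sketch with illustrative struct names; the real definitions live in drivers/nvme/host/tcp.c.

#include <linux/types.h>

struct my_request {
	void *pdu;			/* preallocated command PDU */
	int state;
	size_t offset;
};

struct my_ctrl {
	struct my_request async_req;	/* the single AER slot */
};

/* No blk-mq tag exists for the AER, so identity is by address. */
static bool my_is_async(struct my_ctrl *ctrl, struct my_request *req)
{
	return req == &ctrl->async_req;
}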
/linux/drivers/crypto/
  img-hash.c
    img_hash_handle_queue():
      504  struct crypto_async_request *async_req, *backlog;   (local)
      520  async_req = crypto_dequeue_request(&hdev->queue);
      521  if (async_req)
      526  if (!async_req)
      532  req = ahash_request_cast(async_req);
  hifn_795x.c
    hifn_flush():
     1897  struct crypto_async_request *async_req;   (local)
     1913  while ((async_req = crypto_dequeue_request(&dev->queue))) {
     1914  req = skcipher_request_cast(async_req);
    hifn_process_queue():
     2024  struct crypto_async_request *async_req, *backlog;   (local)
     2032  async_req = crypto_dequeue_request(&dev->queue);
     2035  if (!async_req)
     2041  req = skcipher_request_cast(async_req);
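hifn_flush() shows the teardown counterpart of the queue pattern: pop every pending request and complete it so no submitter is left blocked. A sketch, assuming the hypothetical my_drain(); real drivers also unmap DMA and hardware state per request before completing it.

#include <crypto/algapi.h>
#include <crypto/skcipher.h>

static void my_drain(struct crypto_queue *queue, int err)
{
	struct crypto_async_request *async_req;

	while ((async_req = crypto_dequeue_request(queue))) {
		struct skcipher_request *req =
			skcipher_request_cast(async_req);

		/* crypto_request_complete() on recent kernels; older code
		 * calls async_req->complete(async_req, err) directly. */
		crypto_request_complete(&req->base, err);
	}
}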
/linux/drivers/crypto/rockchip/
  rk3288_crypto_skcipher.c
    rk_cipher_run():
      299  static int rk_cipher_run(struct crypto_engine *engine, void *async_req)   (argument)
      301  struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
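The rk3288 hit documents the crypto_engine contract: the engine hands the do_one_request hook an opaque void * that is really the base member of the concrete request, so container_of() recovers the skcipher_request. A minimal sketch; my_run() is an illustrative name and the immediate finalize stands in for real hardware programming.

#include <crypto/engine.h>
#include <crypto/skcipher.h>
#include <linux/container_of.h>

static int my_run(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq =
		container_of(async_req, struct skcipher_request, base);

	/* Program the hardware from areq->src / areq->dst / areq->cryptlen,
	 * then report the result back to the engine: */
	crypto_finalize_skcipher_request(engine, areq, 0);
	return 0;
}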
/linux/drivers/md/
  dm-crypt.c
     1463  static void kcryptd_async_done(void *async_req, int error);
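The dm-crypt hit is the forward declaration of its skcipher completion callback. On recent kernels the crypto completion signature is void (*)(void *data, int err), where data is whatever was passed to skcipher_request_set_callback(); older kernels passed the crypto_async_request itself. A hedged sketch with illustrative names (my_async_done, my_submit):

#include <crypto/skcipher.h>
#include <linux/printk.h>

static void my_async_done(void *data, int error)
{
	struct skcipher_request *req = data;

	/* A backlogged request just entered the hardware queue; the real
	 * completion will follow later. */
	if (error == -EINPROGRESS)
		return;

	/* finish per-request bookkeeping here */
	pr_debug("req %p done: %d\n", req, error);
}

static void my_submit(struct skcipher_request *req)
{
	int ret;

	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      my_async_done, req);

	ret = crypto_skcipher_encrypt(req);
	if (ret != -EINPROGRESS && ret != -EBUSY)
		my_async_done(req, ret);	/* completed synchronously */
}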