/linux/fs/netfs/

read_pgpriv2.c
  in netfs_pgpriv2_copy_folio():
    19  static void netfs_pgpriv2_copy_folio(struct netfs_io_request *creq, struct folio *folio)   (argument)
    21  struct netfs_io_stream *cache = &creq->io_streams[1];
    32  i_size = i_size_read(creq->inode);
    41  if (fpos + fsize > creq->i_size)
    42  creq->i_size = i_size;
    56  if (rolling_buffer_append(&creq->buffer, folio, 0) < 0) {
    57  clear_bit(NETFS_RREQ_FOLIO_COPY_TO_CACHE, &creq->flags);
    73  creq->buffer.iter.iov_offset = cache->submit_off;
    75  atomic64_set(&creq->issued_to, fpos + cache->submit_off);
    77  part = netfs_advance_write(creq, cache, fpos + cache->submit_off,
  [all …]

/linux/drivers/crypto/marvell/cesa/

hash.c
  in mv_cesa_ahash_req_iter_init():
    30  struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);   (local)
    31  unsigned int len = req->nbytes + creq->cache_ptr;
    33  if (!creq->last_req)
    38  iter->src.op_offset = creq->cache_ptr;
  in mv_cesa_ahash_dma_last_cleanup():
    96  struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);   (local)
    98  mv_cesa_ahash_dma_free_padding(&creq->req.dma);
  in mv_cesa_ahash_dma_cleanup():
    103  struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);   (local)
    105  dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);
    106  mv_cesa_ahash_dma_free_cache(&creq->req.dma);
    107  mv_cesa_dma_cleanup(&creq->base);
  [all …]

cesa.h
    731  struct mv_cesa_req *creq);

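A note on the mv_cesa_ahash_dma_cleanup() hits above: they undo the DMA mapping of the request's source scatterlist. The sketch below shows only the generic dma_map_sg()/dma_unmap_sg() pairing that such a cleanup relies on; the device, scatterlist and nents are assumed to come from a surrounding request context and are not taken from the driver.

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    /* Minimal sketch: map a source scatterlist for device reads and undo
     * it on the cleanup path, mirroring the dma_unmap_sg() call seen in
     * mv_cesa_ahash_dma_cleanup(). "dev", "sg" and "nents" are assumed
     * to be provided by the caller. */
    static int example_map_src(struct device *dev, struct scatterlist *sg,
                               int nents)
    {
        int mapped = dma_map_sg(dev, sg, nents, DMA_TO_DEVICE);

        if (!mapped)
            return -ENOMEM;
        return mapped;
    }

    static void example_unmap_src(struct device *dev, struct scatterlist *sg,
                                  int nents)
    {
        /* Always unmap with the original nents, not the mapped count. */
        dma_unmap_sg(dev, sg, nents, DMA_TO_DEVICE);
    }
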
/linux/drivers/infiniband/hw/bnxt_re/

qplib_rcfw.c
  in __wait_for_resp():
    168  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);
  in __block_for_resp():
    209  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);
  in __poll_for_resp():
    405  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);
  in bnxt_qplib_process_func_event():
    614  rc = rcfw->creq.aeq_handler(rcfw, (void *)func_event, NULL);
  in bnxt_qplib_process_qp_event():
    646  rc = rcfw->creq.aeq_handler(rcfw, qp_event, qp);
  in bnxt_qplib_service_creq():
    733  struct bnxt_qplib_rcfw *rcfw = from_tasklet(rcfw, t, creq.creq_tasklet);
    734  struct bnxt_qplib_creq_ctx *creq = &rcfw->creq;   (local)
    736  struct bnxt_qplib_hwq *hwq = &creq->hwq;
    745  if (!CREQ_CMP_VALID(creqe, creq->creq_db.dbinfo.flags))
    759  creq->stats.creq_qp_event_processed++;
  [all …]

main.c
  in bnxt_re_dev_uninit():
    2047  bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type);
  in bnxt_re_dev_init():
    2077  struct bnxt_qplib_creq_ctx *creq;   (local)
    2142  creq = &rdev->rcfw.creq;
    2143  rattr.dma_arr = creq->hwq.pbl[PBL_LVL_0].pg_map_arr;
    2144  rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count;
    2149  rc = bnxt_re_net_ring_alloc(rdev, &rattr, &creq->ring_id);
    2259  bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type);

/linux/drivers/crypto/cavium/nitrox/

nitrox_req.h
    213  struct se_crypto_request creq;   (member)
  in alloc_src_req_buf():
    623  struct se_crypto_request *creq = &nkreq->creq;   (local)
    625  nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp);
  in nitrox_creq_set_src_sg():
    648  struct se_crypto_request *creq = &nkreq->creq;   (local)
    650  creq->src = nitrox_creq_src_sg(iv, ivsize);
    651  sg = creq->src;
  in alloc_dst_req_buf():
    670  struct se_crypto_request *creq = &nkreq->creq;   (local)
    672  nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp);
  in nitrox_creq_set_orh():
    681  struct se_crypto_request *creq = &nkreq->creq;   (local)
    683  creq->orh = (u64 *)(nkreq->dst);
  [all …]

nitrox_aead.c
  in nitrox_set_creq():
    153  struct se_crypto_request *creq = &rctx->nkreq.creq;   (local)
    157  creq->flags = rctx->flags;
    158  creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
    161  creq->ctrl.value = 0;
    162  creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
    163  creq->ctrl.s.arg = rctx->ctrl_arg;
    165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
    166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
    167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
    170  creq->gph.param3 = cpu_to_be16(param3.param);
  [all …]

nitrox_skcipher.c
  in nitrox_cbc_cipher_callback():
    92  if (nkreq->creq.ctrl.s.arg == ENCRYPT) {
  in nitrox_skcipher_crypt():
    253  struct se_crypto_request *creq;   (local)
    256  creq = &nkreq->creq;
    257  creq->flags = skreq->base.flags;
    258  creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
    262  creq->ctrl.value = 0;
    263  creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
    264  creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
    266  creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
    267  creq->gph.param1 = 0;
  [all …]

nitrox_reqmgr.c
  in softreq_map_iobuf():
    214  struct se_crypto_request *creq)   (argument)
    218  ret = dma_map_inbufs(sr, creq);
    222  ret = dma_map_outbufs(sr, creq);

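The nitrox_aead.c and nitrox_skcipher.c hits fill the driver's struct se_crypto_request the same way before submission. Below is a rough sketch of that fill sequence, reconstructed only from the fragments listed above: the struct layout and the FLEXI_CRYPTO_ENCRYPT_HMAC / ENCRYPT / DECRYPT constants belong to the driver's nitrox_req.h, and everything not visible in the hits is an assumption.

    #include <linux/crypto.h>
    #include <linux/gfp.h>
    #include <linux/types.h>
    #include <asm/byteorder.h>
    #include "nitrox_req.h"    /* driver-local definitions, assumed */

    /* Hypothetical helper mirroring the creq setup seen in
     * nitrox_skcipher_crypt(); not the driver's own code. */
    static void example_set_creq(struct se_crypto_request *creq,
                                 u32 flags, bool enc, unsigned int cryptlen)
    {
        creq->flags = flags;
        creq->gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
                                                         GFP_ATOMIC;

        creq->ctrl.value = 0;
        creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
        creq->ctrl.s.arg = enc ? ENCRYPT : DECRYPT;

        /* General-purpose header parameters go out big-endian. */
        creq->gph.param0 = cpu_to_be16(cryptlen);
        creq->gph.param1 = 0;
    }
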
/linux/crypto/

chacha20poly1305.c
  in chacha_decrypt():
    127  struct chacha_req *creq = &rctx->u.chacha;   (local)
    134  chacha_iv(creq->iv, req, 1);
    141  skcipher_request_set_callback(&creq->req, rctx->flags,
    143  skcipher_request_set_tfm(&creq->req, ctx->chacha);
    144  skcipher_request_set_crypt(&creq->req, src, dst,
    145  rctx->cryptlen, creq->iv);
    146  err = crypto_skcipher_decrypt(&creq->req);
  in poly_genkey():
    365  struct chacha_req *creq = &rctx->u.chacha;   (local)
    377  sg_init_one(creq->src, rctx->key, sizeof(rctx->key));
    379  chacha_iv(creq->iv, req, 0);
  [all …]

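The chacha_decrypt() hits show the usual pattern for driving a nested skcipher request: point the sub-request at a tfm, set its completion callback and crypt parameters, then invoke the cipher. Below is a stand-alone sketch of that pattern, simplified to a synchronous tfm with no callback; the tfm, scatterlists, length and IV are assumed to be supplied by the caller, and this is not the chacha20poly1305 code itself.

    #include <crypto/skcipher.h>
    #include <linux/scatterlist.h>

    /* Minimal sketch of the sub-request pattern seen in chacha_decrypt().
     * A synchronous tfm and an on-stack request are used purely for
     * illustration; the real code embeds an async request in its rctx. */
    static int example_subreq_decrypt(struct crypto_sync_skcipher *tfm,
                                      struct scatterlist *src,
                                      struct scatterlist *dst,
                                      unsigned int len, u8 *iv)
    {
        SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);

        skcipher_request_set_sync_tfm(req, tfm);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, len, iv);

        return crypto_skcipher_decrypt(req);
    }
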
/linux/tools/net/ynl/samples/

ethtool.c
  in main():
    13  struct ethtool_channels_get_req_dump creq = {};   (local)
    23  creq._present.header = 1; /* ethtool needs an empty nest, sigh */
    24  channels = ethtool_channels_get_dump(ys, &creq);

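These three hits are from the YNL ethtool sample, which dumps per-device channel counts through the generated ethtool client. A slightly fuller sketch of how such a dump request is driven follows; the generated header name and the list-free/destroy helpers follow YNL user-library conventions and are assumptions, not shown in the hits.

    #include <stdio.h>
    #include <ynl.h>
    #include "ethtool-user.h"    /* generated by ynl-gen-c, assumed name */

    int main(void)
    {
        struct ethtool_channels_get_req_dump creq = {};
        struct ethtool_channels_get_list *channels;
        struct ynl_error yerr;
        struct ynl_sock *ys;

        ys = ynl_sock_create(&ynl_ethtool_family, &yerr);
        if (!ys) {
            fprintf(stderr, "YNL: %s\n", yerr.msg);
            return 1;
        }

        creq._present.header = 1;    /* ethtool needs an empty nest, sigh */
        channels = ethtool_channels_get_dump(ys, &creq);
        if (!channels) {
            fprintf(stderr, "YNL: %s\n", ys->err.msg);
            ynl_sock_destroy(ys);
            return 2;
        }

        /* ... walk the returned list of per-device channel counts ... */

        ethtool_channels_get_list_free(channels);
        ynl_sock_destroy(ys);
        return 0;
    }
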
/linux/drivers/crypto/ccree/

cc_request_mgr.c
    47  struct cc_crypto_req creq;   (member)
  in cc_proc_backlog():
    348  struct cc_crypto_req *creq;   (local)
    362  creq = &bli->creq;
    363  req = creq->user_arg;
    370  creq->user_cb(dev, req, -EINPROGRESS);
    387  cc_do_send_request(drvdata, &bli->creq, bli->desc, bli->len,
  in cc_send_request():
    435  memcpy(&bli->creq, cc_req, sizeof(*cc_req));

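The cc_request_mgr.c hits outline the driver's backlog path: when the queue is full, cc_send_request() copies the whole cc_crypto_req into a backlog item, and cc_proc_backlog() later replays it and reports -EINPROGRESS through the stored callback. The sketch below shows only that generic copy-into-a-backlog-element idea, with hypothetical stand-in structures; none of it is the ccree code.

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/string.h>
    #include <linux/errno.h>

    /* Hypothetical stand-ins for cc_crypto_req and the driver's backlog
     * item ("bli"); only the copy-and-queue idea comes from the hits. */
    struct example_req {
        void (*user_cb)(void *arg, int err);
        void *user_arg;
    };

    struct example_backlog_item {
        struct list_head list;
        struct example_req creq;    /* full copy, so the submitter's
                                     * struct need not stay around */
    };

    static int example_backlog_request(struct list_head *backlog,
                                       const struct example_req *req,
                                       gfp_t gfp)
    {
        struct example_backlog_item *bli = kzalloc(sizeof(*bli), gfp);

        if (!bli)
            return -ENOMEM;

        memcpy(&bli->creq, req, sizeof(*req));
        list_add_tail(&bli->list, backlog);
        return -EBUSY;    /* signal that the request was backlogged */
    }
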
/linux/drivers/gpu/drm/vmwgfx/

vmwgfx_surface.c
  in vmw_gb_surface_reference_ioctl():
    1482  rep->creq = rep_ext.creq.base;
  in vmw_gb_surface_reference_internal():
    1749  rep->creq.base.svga3d_flags = SVGA3D_FLAGS_LOWER_32(metadata->flags);
    1750  rep->creq.base.format = metadata->format;
    1751  rep->creq.base.mip_levels = metadata->mip_levels[0];
    1752  rep->creq.base.drm_surface_flags = 0;
    1753  rep->creq.base.multisample_count = metadata->multisample_count;
    1754  rep->creq.base.autogen_filter = metadata->autogen_filter;
    1755  rep->creq.base.array_size = metadata->array_size;
    1756  rep->creq.base.buffer_handle = backup_handle;
    1757  rep->creq.base.base_size = metadata->base_size;
  [all …]

/linux/drivers/crypto/inside-secure/

safexcel_cipher.c
  in safexcel_aead_encrypt():
    1630  struct safexcel_cipher_req *creq = aead_request_ctx(req);   (local)
    1632  return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
  in safexcel_aead_decrypt():
    1637  struct safexcel_cipher_req *creq = aead_request_ctx(req);   (local)
    1639  return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
  in safexcel_ccm_encrypt():
    2685  struct safexcel_cipher_req *creq = aead_request_ctx(req);   (local)
    2690  return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
  in safexcel_ccm_decrypt():
    2695  struct safexcel_cipher_req *creq = aead_request_ctx(req);   (local)
    2700  return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
  in safexcel_aead_chachapoly_crypt():
    2824  struct safexcel_cipher_req *creq = aead_request_ctx(req);   (local)
    2840  return safexcel_queue_req(&req->base, creq, dir);
  [all …]

/linux/include/uapi/drm/

vmwgfx_drm.h
    1011  struct drm_vmw_gb_surface_create_req creq;   (member)
    1218  struct drm_vmw_gb_surface_create_ext_req creq;   (member)

/linux/drivers/usb/gadget/function/

f_fs.c
  in ffs_func_setup():
    3791  const struct usb_ctrlrequest *creq)   (argument)
    3798  pr_vdebug("creq->bRequestType = %02x\n", creq->bRequestType);
    3799  pr_vdebug("creq->bRequest = %02x\n", creq->bRequest);
    3800  pr_vdebug("creq->wValue = %04x\n", le16_to_cpu(creq->wValue));
    3801  pr_vdebug("creq->wIndex = %04x\n", le16_to_cpu(creq->wIndex));
    3802  pr_vdebug("creq->wLength = %04x\n", le16_to_cpu(creq->wLength));
    3817  switch (creq->bRequestType & USB_RECIP_MASK) {
    3819  ret = ffs_func_revmap_intf(func, le16_to_cpu(creq->wIndex));
    3825  ret = ffs_func_revmap_ep(func, le16_to_cpu(creq->wIndex));
    3834  ret = le16_to_cpu(creq->wIndex);
  [all …]

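The f_fs.c hits decode an incoming USB control request: the 16-bit fields of struct usb_ctrlrequest are little-endian on the wire, so they pass through le16_to_cpu(), and the recipient is picked out of bRequestType with USB_RECIP_MASK. A small illustrative sketch of that decoding follows; the return values here are placeholders, not the f_fs revmap logic.

    #include <linux/usb/ch9.h>
    #include <asm/byteorder.h>
    #include <linux/printk.h>
    #include <linux/errno.h>

    /* Sketch: convert the wire-format fields and branch on the recipient,
     * as the ffs_func_setup() hits do. */
    static int example_decode_ctrlrequest(const struct usb_ctrlrequest *creq)
    {
        u16 w_value  = le16_to_cpu(creq->wValue);
        u16 w_index  = le16_to_cpu(creq->wIndex);
        u16 w_length = le16_to_cpu(creq->wLength);

        pr_debug("bRequestType=%02x bRequest=%02x wValue=%04x wIndex=%04x wLength=%04x\n",
                 creq->bRequestType, creq->bRequest, w_value, w_index, w_length);

        switch (creq->bRequestType & USB_RECIP_MASK) {
        case USB_RECIP_INTERFACE:
            return w_index;           /* interface number */
        case USB_RECIP_ENDPOINT:
            return w_index & 0x0f;    /* endpoint number, low nibble */
        default:
            return -EOPNOTSUPP;
        }
    }
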
/linux/fs/smb/client/

smb2ops.c
  in crypt_message():
    4305  void *creq;   (local)
    4332  creq = smb2_get_aead_req(tfm, rqst, num_rqst, sign, &iv, &req, &sg,
    4334  if (IS_ERR(creq))
    4335  return PTR_ERR(creq);
    4359  kvfree_sensitive(creq, sensitive_size);

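The crypt_message() hits show the lifecycle of the AEAD request buffer: smb2_get_aead_req() hands back either a pointer or an ERR_PTR, and the buffer is later released with kvfree_sensitive() so any key material is zeroed before the memory is freed. Below is a generic sketch of that allocate / IS_ERR-check / free-sensitive pattern, with a stand-in allocator rather than the SMB helper.

    #include <linux/err.h>
    #include <linux/mm.h>
    #include <linux/slab.h>

    /* Stand-in allocator that reports failure as an ERR_PTR, the way the
     * smb2_get_aead_req() return value is treated in the hits above. */
    static void *example_get_buf(size_t len)
    {
        void *buf = kvzalloc(len, GFP_KERNEL);

        return buf ? buf : ERR_PTR(-ENOMEM);
    }

    static int example_use_buf(size_t len)
    {
        void *buf = example_get_buf(len);

        if (IS_ERR(buf))
            return PTR_ERR(buf);

        /* ... fill and use the buffer; it may hold key material ... */

        /* Zero before freeing so secrets don't linger in freed memory. */
        kvfree_sensitive(buf, len);
        return 0;
    }
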