/linux/fs/netfs/

read_pgpriv2.c
    19  static void netfs_pgpriv2_copy_folio(struct netfs_io_request *creq, struct folio *folio)  [in netfs_pgpriv2_copy_folio(), argument]
    21  struct netfs_io_stream *cache = &creq->io_streams[1];  [in netfs_pgpriv2_copy_folio()]
    32  i_size = i_size_read(creq->inode);  [in netfs_pgpriv2_copy_folio()]
    41  if (fpos + fsize > creq->i_size)  [in netfs_pgpriv2_copy_folio()]
    42  creq->i_size = i_size;  [in netfs_pgpriv2_copy_folio()]
    56  if (rolling_buffer_append(&creq->buffer, folio, 0) < 0) {  [in netfs_pgpriv2_copy_folio()]
    57  clear_bit(NETFS_RREQ_FOLIO_COPY_TO_CACHE, &creq->flags);  [in netfs_pgpriv2_copy_folio()]
    73  creq->buffer.iter.iov_offset = cache->submit_off;  [in netfs_pgpriv2_copy_folio()]
    75  atomic64_set(&creq->issued_to, fpos + cache->submit_off);  [in netfs_pgpriv2_copy_folio()]
    77  part = netfs_advance_write(creq, cache, fpos + cache->submit_off,  [in netfs_pgpriv2_copy_folio()]
    [all …]
|
/linux/drivers/infiniband/hw/bng_re/

bng_fw.c
    57   bng_re_free_hwq(rcfw->res, &rcfw->creq.hwq);  [in bng_re_free_rcfw_channel()]
    67   struct bng_re_creq_ctx *creq;  [in bng_re_alloc_fw_channel(), local]
    71   creq = &rcfw->creq;  [in bng_re_alloc_fw_channel()]
    83   if (bng_re_alloc_init_hwq(&creq->hwq, &hwq_attr)) {  [in bng_re_alloc_fw_channel()]
    218  struct bng_re_rcfw *rcfw = from_tasklet(rcfw, t, creq.creq_tasklet);  [in bng_re_service_creq()]
    219  struct bng_re_creq_ctx *creq = &rcfw->creq;  [in bng_re_service_creq(), local]
    221  struct bng_re_hwq *hwq = &creq->hwq;  [in bng_re_service_creq()]
    230  if (!BNG_FW_CREQ_CMP_VALID(creqe, creq->creq_db.dbinfo.flags))  [in bng_re_service_creq()]
    243  creq->stats.creq_qp_event_processed++;  [in bng_re_service_creq()]
    248  creq->stats.creq_func_event_processed++;  [in bng_re_service_creq()]
    [all …]
|
bng_dev.c
    280  bng_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id,  [in bng_re_dev_uninit()]
    295  struct bng_re_creq_ctx *creq;  [in bng_re_dev_init(), local]
    353  creq = &rdev->rcfw.creq;  [in bng_re_dev_init()]
    354  rattr.dma_arr = creq->hwq.pbl[BNG_PBL_LVL_0].pg_map_arr;  [in bng_re_dev_init()]
    355  rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count;  [in bng_re_dev_init()]
    360  rc = bng_re_net_ring_alloc(rdev, &rattr, &creq->ring_id);  [in bng_re_dev_init()]
    412  bng_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type);  [in bng_re_dev_init()]
|
bng_fw.h
    121  struct bng_re_creq_ctx creq;  [member]
|
/linux/drivers/infiniband/hw/bnxt_re/

qplib_rcfw.c
    168  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);  [in __wait_for_resp()]
    209  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);  [in __block_for_resp()]
    406  bnxt_qplib_service_creq(&rcfw->creq.creq_tasklet);  [in __poll_for_resp()]
    615  rc = rcfw->creq.aeq_handler(rcfw, (void *)func_event, NULL);  [in bnxt_qplib_process_func_event()]
    648  rc = rcfw->creq.aeq_handler(rcfw, qp_event, qp);  [in bnxt_qplib_process_qp_event()]
    735  struct bnxt_qplib_rcfw *rcfw = from_tasklet(rcfw, t, creq.creq_tasklet);  [in bnxt_qplib_service_creq()]
    736  struct bnxt_qplib_creq_ctx *creq = &rcfw->creq;  [in bnxt_qplib_service_creq(), local]
    738  struct bnxt_qplib_hwq *hwq = &creq->hwq;  [in bnxt_qplib_service_creq()]
    747  if (!CREQ_CMP_VALID(creqe, creq->creq_db.dbinfo.flags))  [in bnxt_qplib_service_creq()]
    761  creq->stats.creq_qp_event_processed++;  [in bnxt_qplib_service_creq()]
    [all …]
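The qplib_rcfw.c hits above (like the bng_re ones before them) follow the usual firmware-event pattern: a tasklet walks the CREQ completion ring, only consumes entries whose valid bit matches the phase expected for the current pass over the ring (CREQ_CMP_VALID against creq_db.dbinfo.flags), and bumps a per-event-type counter. Below is a minimal userspace sketch of that consume-on-valid-bit idea; the struct layouts, bit positions and ring size are invented for illustration and are not the bnxt_re definitions.

/* Toy model of a valid-bit completion ring consumer; all names are stand-ins. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE	8
#define EV_VALID	0x80	/* sense of this bit flips on every wrap */
#define EV_TYPE_MASK	0x7f
#define EV_QP		1
#define EV_FUNC		2

struct toy_creqe {
	uint8_t flags;		/* valid bit + event type */
};

struct toy_creq {
	struct toy_creqe ring[RING_SIZE];
	unsigned int cons;	/* free-running consumer index */
	bool phase;		/* expected sense of EV_VALID */
	unsigned long qp_events;
	unsigned long func_events;
};

/* an entry is consumable only when its valid bit matches the expected phase */
static bool creqe_valid(const struct toy_creqe *e, bool phase)
{
	return !!(e->flags & EV_VALID) == phase;
}

static void service_creq(struct toy_creq *creq)
{
	for (;;) {
		struct toy_creqe *e = &creq->ring[creq->cons % RING_SIZE];

		if (!creqe_valid(e, creq->phase))
			break;

		switch (e->flags & EV_TYPE_MASK) {
		case EV_QP:
			creq->qp_events++;
			break;
		case EV_FUNC:
			creq->func_events++;
			break;
		}

		creq->cons++;
		if (creq->cons % RING_SIZE == 0)
			creq->phase = !creq->phase;	/* wrapped: flip expected sense */
	}
}

int main(void)
{
	struct toy_creq creq = { .phase = true };

	/* "firmware" posts three events with the valid bit set for this phase */
	creq.ring[0].flags = EV_VALID | EV_QP;
	creq.ring[1].flags = EV_VALID | EV_QP;
	creq.ring[2].flags = EV_VALID | EV_FUNC;

	service_creq(&creq);
	printf("qp events %lu, func events %lu\n", creq.qp_events, creq.func_events);
	return 0;
}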
|
main.c
    2165  bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type);  [in bnxt_re_dev_uninit()]
    2185  struct bnxt_qplib_creq_ctx *creq;  [in bnxt_re_dev_init(), local]
    2248  creq = &rdev->rcfw.creq;  [in bnxt_re_dev_init()]
    2249  rattr.dma_arr = creq->hwq.pbl[PBL_LVL_0].pg_map_arr;  [in bnxt_re_dev_init()]
    2250  rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count;  [in bnxt_re_dev_init()]
    2255  rc = bnxt_re_net_ring_alloc(rdev, &rattr, &creq->ring_id);  [in bnxt_re_dev_init()]
    2365  bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type);  [in bnxt_re_dev_init()]
|
/linux/drivers/crypto/cavium/nitrox/

nitrox_req.h
    213  struct se_crypto_request creq;  [member]
    623  struct se_crypto_request *creq = &nkreq->creq;  [in alloc_src_req_buf(), local]
    625  nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp);  [in alloc_src_req_buf()]
    648  struct se_crypto_request *creq = &nkreq->creq;  [in nitrox_creq_set_src_sg(), local]
    650  creq->src = nitrox_creq_src_sg(iv, ivsize);  [in nitrox_creq_set_src_sg()]
    651  sg = creq->src;  [in nitrox_creq_set_src_sg()]
    670  struct se_crypto_request *creq = &nkreq->creq;  [in alloc_dst_req_buf(), local]
    672  nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp);  [in alloc_dst_req_buf()]
    681  struct se_crypto_request *creq = &nkreq->creq;  [in nitrox_creq_set_orh(), local]
    683  creq->orh = (u64 *)(nkreq->dst);  [in nitrox_creq_set_orh()]
    [all …]
|
nitrox_aead.c
    153  struct se_crypto_request *creq = &rctx->nkreq.creq;  [in nitrox_set_creq(), local]
    157  creq->flags = rctx->flags;  [in nitrox_set_creq()]
    158  creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :  [in nitrox_set_creq()]
    161  creq->ctrl.value = 0;  [in nitrox_set_creq()]
    162  creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;  [in nitrox_set_creq()]
    163  creq->ctrl.s.arg = rctx->ctrl_arg;  [in nitrox_set_creq()]
    165  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);  [in nitrox_set_creq()]
    166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  [in nitrox_set_creq()]
    167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);  [in nitrox_set_creq()]
    170  creq->gph.param3 = cpu_to_be16(param3.param);  [in nitrox_set_creq()]
    [all …]
|
nitrox_skcipher.c
    92   if (nkreq->creq.ctrl.s.arg == ENCRYPT) {  [in nitrox_cbc_cipher_callback()]
    253  struct se_crypto_request *creq;  [in nitrox_skcipher_crypt(), local]
    256  creq = &nkreq->creq;  [in nitrox_skcipher_crypt()]
    257  creq->flags = skreq->base.flags;  [in nitrox_skcipher_crypt()]
    258  creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?  [in nitrox_skcipher_crypt()]
    262  creq->ctrl.value = 0;  [in nitrox_skcipher_crypt()]
    263  creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;  [in nitrox_skcipher_crypt()]
    264  creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);  [in nitrox_skcipher_crypt()]
    266  creq->gph.param0 = cpu_to_be16(skreq->cryptlen);  [in nitrox_skcipher_crypt()]
    267  creq->gph.param1 = 0;  [in nitrox_skcipher_crypt()]
    [all …]
|
nitrox_reqmgr.c
    214  struct se_crypto_request *creq)  [in softreq_map_iobuf(), argument]
    218  ret = dma_map_inbufs(sr, creq);  [in softreq_map_iobuf()]
    222  ret = dma_map_outbufs(sr, creq);  [in softreq_map_iobuf()]
|
/linux/tools/net/ynl/samples/

ethtool.c
    13  struct ethtool_channels_get_req_dump creq = {};  [in main(), local]
    23  creq._present.header = 1; /* ethtool needs an empty nest, sigh */  [in main()]
    24  channels = ethtool_channels_get_dump(ys, &creq);  [in main()]
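Only three lines of that sample mention creq; the sketch below shows roughly how they fit together. The surrounding calls (ynl_sock_create(), ynl_dump_foreach(), the generated *_list_free() helper) and the printed field names are assumptions about the generated YNL C API, not verbatim from the sample.

#include <stdio.h>
#include <ynl.h>
#include "ethtool-user.h"	/* assumed: generated from the ethtool netlink spec */

int main(void)
{
	struct ethtool_channels_get_req_dump creq = {};
	struct ethtool_channels_get_list *channels;
	struct ynl_sock *ys;

	ys = ynl_sock_create(&ynl_ethtool_family, NULL);
	if (!ys)
		return 1;

	creq._present.header = 1;	/* ethtool needs an empty nest, sigh */
	channels = ethtool_channels_get_dump(ys, &creq);
	if (!channels) {
		ynl_sock_destroy(ys);
		return 2;
	}

	/* one entry per device in the dump; field names assumed */
	ynl_dump_foreach(channels, dev) {
		if (dev->_present.combined_count)
			printf("%s: combined %u\n",
			       dev->header.dev_name, dev->combined_count);
	}

	ethtool_channels_get_list_free(channels);
	ynl_sock_destroy(ys);
	return 0;
}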
|
/linux/drivers/crypto/ccree/

cc_request_mgr.c
    47   struct cc_crypto_req creq;  [member]
    348  struct cc_crypto_req *creq;  [in cc_proc_backlog(), local]
    362  creq = &bli->creq;  [in cc_proc_backlog()]
    363  req = creq->user_arg;  [in cc_proc_backlog()]
    370  creq->user_cb(dev, req, -EINPROGRESS);  [in cc_proc_backlog()]
    387  cc_do_send_request(drvdata, &bli->creq, bli->desc, bli->len,  [in cc_proc_backlog()]
    435  memcpy(&bli->creq, cc_req, sizeof(*cc_req));  [in cc_send_request()]
|
/linux/drivers/net/wireless/marvell/libertas/

cfg.c
    1278  struct cfg80211_scan_request *creq = NULL;  [in _new_connect_scan_req(), local]
    1282  creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) +  [in _new_connect_scan_req()]
    1285  if (!creq)  [in _new_connect_scan_req()]
    1289  creq->ssids = (void *)&creq->channels[n_channels];  [in _new_connect_scan_req()]
    1290  creq->n_channels = n_channels;  [in _new_connect_scan_req()]
    1291  creq->n_ssids = 1;  [in _new_connect_scan_req()]
    1307  creq->channels[i] = &wiphy->bands[band]->channels[j];  [in _new_connect_scan_req()]
    1313  creq->n_channels = i;  [in _new_connect_scan_req()]
    1316  memcpy(creq->ssids[0].ssid, sme->ssid, sme->ssid_len);  [in _new_connect_scan_req()]
    1317  creq->ssids[0].ssid_len = sme->ssid_len;  [in _new_connect_scan_req()]
    [all …]
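The cfg.c hits show a pattern worth calling out: the scan request, its channel pointer array and its single SSID entry all come from one kzalloc(), and creq->ssids is then pointed just past the channel array. A small userspace sketch of that single-allocation layout, using simplified stand-in structs rather than the real cfg80211 types:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct toy_ssid {			/* stand-in for struct cfg80211_ssid */
	char ssid[32];
	size_t ssid_len;
};

struct toy_scan_request {		/* stand-in for struct cfg80211_scan_request */
	struct toy_ssid *ssids;
	int n_ssids;
	int n_channels;
	int *channels[];		/* trailing flexible array */
};

int main(void)
{
	int n_channels = 14;
	struct toy_scan_request *creq;

	/* one allocation: the struct, the channel pointers, and one SSID entry */
	creq = calloc(1, sizeof(*creq) +
			 n_channels * sizeof(creq->channels[0]) +
			 sizeof(struct toy_ssid));
	if (!creq)
		return 1;

	/* the SSID array lives immediately after the channel pointer array */
	creq->ssids = (void *)&creq->channels[n_channels];
	creq->n_channels = n_channels;
	creq->n_ssids = 1;

	memcpy(creq->ssids[0].ssid, "example", 7);
	creq->ssids[0].ssid_len = 7;

	printf("%d channels, ssid \"%.*s\"\n", creq->n_channels,
	       (int)creq->ssids[0].ssid_len, creq->ssids[0].ssid);
	free(creq);
	return 0;
}

The cfg80211_wext_siwscan() hits in the next entry do the same thing, using struct_size() to compute the allocation length.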
|
/linux/net/wireless/

scan.c
    3505  struct cfg80211_scan_request_int *creq;  [in cfg80211_get_dev_from_ifindex()]
    3517  struct cfg80211_scan_request_int *creq;  [in cfg80211_wext_siwscan(), local]
    3525  /* Determine number of channels, needed to allocate creq */  [in cfg80211_wext_siwscan()]
    3535  creq = kzalloc(struct_size(creq, req.channels, n_channels) +  [in cfg80211_wext_siwscan()]
    3538  if (!creq)  [in cfg80211_wext_siwscan()]
    3541  creq->req.wiphy = wiphy;  [in cfg80211_wext_siwscan()]
    3542  creq->req.wdev = dev->ieee80211_ptr;  [in cfg80211_wext_siwscan()]
    3544  creq->req.ssids = (void *)creq +  [in cfg80211_wext_siwscan()]
    3545  struct_size(creq, re…  [in cfg80211_wext_siwscan()]
    [all …]
/linux/include/trace/events/

netfs.h
    565  const struct netfs_io_request *creq),
    567  TP_ARGS(rreq, creq),
    571  __field(unsigned int, creq)
    580  __entry->creq = creq->debug_id;
    587  __entry->creq,
|
/linux/drivers/crypto/marvell/cesa/

cesa.c
    170  struct mv_cesa_req *creq)  [in mv_cesa_queue_req(), argument]
    173  struct mv_cesa_engine *engine = creq->engine;  [in mv_cesa_queue_req()]
    177  if ((mv_cesa_req_get_type(creq) == CESA_DMA_REQ) &&  [in mv_cesa_queue_req()]
    179  mv_cesa_tdma_chain(engine, creq);  [in mv_cesa_queue_req()]
|
/linux/drivers/crypto/inside-secure/

safexcel_cipher.c
    1630  struct safexcel_cipher_req *creq = aead_request_ctx(req);  [in safexcel_aead_encrypt(), local]
    1632  return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);  [in safexcel_aead_encrypt()]
    1637  struct safexcel_cipher_req *creq = aead_request_ctx(req);  [in safexcel_aead_decrypt(), local]
    1639  return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);  [in safexcel_aead_decrypt()]
    2685  struct safexcel_cipher_req *creq = aead_request_ctx(req);  [in safexcel_ccm_encrypt(), local]
    2690  return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);  [in safexcel_ccm_encrypt()]
    2695  struct safexcel_cipher_req *creq = aead_request_ctx(req);  [in safexcel_ccm_decrypt(), local]
    2700  return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);  [in safexcel_ccm_decrypt()]
    2824  struct safexcel_cipher_req *creq = aead_request_ctx(req);  [in safexcel_aead_chachapoly_crypt(), local]
    2840  return safexcel_queue_req(&req->base, creq, dir);  [in safexcel_aead_chachapoly_crypt()]
    [all …]
|
/linux/include/uapi/drm/

vmwgfx_drm.h
    1011  struct drm_vmw_gb_surface_create_req creq;  [member]
    1218  struct drm_vmw_gb_surface_create_ext_req creq;  [member]
|
/linux/drivers/usb/gadget/function/

f_fs.c
    3812  const struct usb_ctrlrequest *creq)  [in ffs_func_setup(), argument]
    3819  pr_vdebug("creq->bRequestType = %02x\n", creq->bRequestType);  [in ffs_func_setup()]
    3820  pr_vdebug("creq->bRequest = %02x\n", creq->bRequest);  [in ffs_func_setup()]
    3821  pr_vdebug("creq->wValue = %04x\n", le16_to_cpu(creq->wValue));  [in ffs_func_setup()]
    3822  pr_vdebug("creq->wIndex = %04x\n", le16_to_cpu(creq->wIndex));  [in ffs_func_setup()]
    3823  pr_vdebug("creq->wLength = %04x\n", le16_to_cpu(creq->wLength));  [in ffs_func_setup()]
    3838  switch (creq->bRequestType & USB_RECIP_MASK) {  [in ffs_func_setup()]
    3840  ret = ffs_func_revmap_intf(func, le16_to_cpu(creq->wIndex));  [in ffs_func_setup()]
    3846  ret = ffs_func_revmap_ep(func, le16_to_cpu(creq->wIndex));  [in ffs_func_setup()]
    3855  ret = le16_to_cpu(creq->wIndex);  [in ffs_func_setup()]
    [all …]
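The f_fs.c hits are standard USB SETUP-packet handling: dump the control request fields, convert the wire-order 16-bit fields with le16_to_cpu(), and dispatch on the recipient bits of bRequestType. A self-contained sketch of the same decoding in plain userspace C; the struct mirrors the 8-byte layout of struct usb_ctrlrequest, and le16toh() stands in for le16_to_cpu():

#include <endian.h>
#include <stdint.h>
#include <stdio.h>

#define USB_RECIP_MASK		0x1f	/* values from the USB spec */
#define USB_RECIP_DEVICE	0x00
#define USB_RECIP_INTERFACE	0x01
#define USB_RECIP_ENDPOINT	0x02

struct setup_packet {			/* same layout as struct usb_ctrlrequest */
	uint8_t  bRequestType;
	uint8_t  bRequest;
	uint16_t wValue;		/* little-endian on the wire */
	uint16_t wIndex;
	uint16_t wLength;
} __attribute__((packed));

int main(void)
{
	/* GET_DESCRIPTOR(device), as it would arrive on the wire */
	struct setup_packet creq = {
		.bRequestType = 0x80,
		.bRequest     = 0x06,
		.wValue       = htole16(0x0100),
		.wIndex       = htole16(0),
		.wLength      = htole16(18),
	};

	printf("bRequestType = %02x\n", creq.bRequestType);
	printf("bRequest     = %02x\n", creq.bRequest);
	printf("wValue       = %04x\n", le16toh(creq.wValue));
	printf("wIndex       = %04x\n", le16toh(creq.wIndex));
	printf("wLength      = %04x\n", le16toh(creq.wLength));

	/* route on the recipient, as ffs_func_setup() does */
	switch (creq.bRequestType & USB_RECIP_MASK) {
	case USB_RECIP_INTERFACE:
		printf("recipient: interface %u\n", (unsigned)le16toh(creq.wIndex));
		break;
	case USB_RECIP_ENDPOINT:
		printf("recipient: endpoint %u\n", (unsigned)le16toh(creq.wIndex));
		break;
	default:
		printf("recipient: device/other\n");
		break;
	}
	return 0;
}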
|
/linux/drivers/net/ethernet/marvell/octeontx2/nic/

otx2_tc.c
    369  struct nix_mcast_grp_create_req *creq;  [in otx2_tc_update_mcast(), local]
    375  creq = otx2_mbox_alloc_msg_nix_mcast_grp_create(&nic->mbox);  [in otx2_tc_update_mcast()]
    376  if (!creq) {  [in otx2_tc_update_mcast()]
    381  creq->dir = NIX_MCAST_INGRESS;  [in otx2_tc_update_mcast()]
    391  &creq->hdr);  [in otx2_tc_update_mcast()]
|
/linux/fs/smb/client/

smb2inode.c
    1186  struct smb2_create_req *creq;  [in smb2_unlink(), local]
    1232  creq = rqst[0].rq_iov[0].iov_base;  [in smb2_unlink()]
    1233  creq->ShareAccess = FILE_SHARE_DELETE_LE;  [in smb2_unlink()]
|
smb2ops.c
    4441  void *creq;  [in crypt_message(), local]
    4467  creq = smb2_get_aead_req(tfm, rqst, num_rqst, sign, &iv, &req, &sg);  [in crypt_message()]
    4468  if (IS_ERR(creq))  [in crypt_message()]
    4469  return PTR_ERR(creq);  [in crypt_message()]
    4497  kfree_sensitive(creq);  [in crypt_message()]
|