Searched refs:niov (Results 1 – 7 of 7) sorted by relevance

/linux/net/core/
devmem.h
78 net_iov_owner(const struct net_iov *niov) in net_iov_owner() argument
80 return niov->owner; in net_iov_owner()
83 static inline unsigned int net_iov_idx(const struct net_iov *niov) in net_iov_idx() argument
85 return niov - net_iov_owner(niov)->niovs; in net_iov_idx()
89 net_iov_binding(const struct net_iov *niov) in net_iov_binding() argument
91 return net_iov_owner(niov)->binding; in net_iov_binding()
94 static inline unsigned long net_iov_virtual_addr(const struct net_iov *niov) in net_iov_virtual_addr() argument
96 struct dmabuf_genpool_chunk_owner *owner = net_iov_owner(niov); in net_iov_virtual_addr()
99 ((unsigned long)net_iov_idx(niov) << PAGE_SHIFT); in net_iov_virtual_addr()
102 static inline u32 net_iov_binding_id(const struct net_iov *niov) in net_iov_binding_id() argument
[all …]
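
The devmem.h hits above are the accessor helpers for net_iov: the index of a niov is recovered by pointer subtraction from its owner's contiguous niovs array, and its virtual address is the owner's base address plus that index shifted by PAGE_SHIFT. Below is a minimal userspace sketch of the same arithmetic; struct chunk_owner and net_iov_sketch are simplified stand-ins for the kernel's dmabuf_genpool_chunk_owner and net_iov, not the real definitions.

/* Index/address arithmetic sketch; stand-in types, not kernel code. */
#include <stdio.h>

#define PAGE_SHIFT 12

struct chunk_owner;

struct net_iov_sketch {
	struct chunk_owner *owner;	/* back-pointer to the owning chunk */
};

struct chunk_owner {
	unsigned long base_virtual;	/* first virtual address of the chunk */
	struct net_iov_sketch *niovs;	/* contiguous array, one per page */
};

/* Index = offset within the owner's array, i.e. the
 * `niov - net_iov_owner(niov)->niovs` pointer subtraction above. */
static unsigned int iov_idx(const struct net_iov_sketch *niov)
{
	return (unsigned int)(niov - niov->owner->niovs);
}

/* Virtual address = chunk base plus the index scaled to a page offset. */
static unsigned long iov_virtual_addr(const struct net_iov_sketch *niov)
{
	return niov->owner->base_virtual +
	       ((unsigned long)iov_idx(niov) << PAGE_SHIFT);
}

int main(void)
{
	struct net_iov_sketch niovs[4];
	struct chunk_owner owner = { .base_virtual = 0x100000, .niovs = niovs };
	int i;

	for (i = 0; i < 4; i++)
		niovs[i].owner = &owner;

	/* Index 2 -> 0x100000 + (2 << 12) = 0x102000. */
	printf("idx=%u addr=0x%lx\n", iov_idx(&niovs[2]),
	       iov_virtual_addr(&niovs[2]));
	return 0;
}
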
devmem.c
40 static dma_addr_t net_devmem_get_dma_addr(const struct net_iov *niov) in net_devmem_get_dma_addr() argument
42 struct dmabuf_genpool_chunk_owner *owner = net_iov_owner(niov); in net_devmem_get_dma_addr()
45 ((dma_addr_t)net_iov_idx(niov) << PAGE_SHIFT); in net_devmem_get_dma_addr()
75 struct net_iov *niov; in net_devmem_alloc_dmabuf() local
86 niov = &owner->niovs[index]; in net_devmem_alloc_dmabuf()
88 niov->pp_magic = 0; in net_devmem_alloc_dmabuf()
89 niov->pp = NULL; in net_devmem_alloc_dmabuf()
90 atomic_long_set(&niov->pp_ref_count, 0); in net_devmem_alloc_dmabuf()
92 return niov; in net_devmem_alloc_dmabuf()
95 void net_devmem_free_dmabuf(struct net_iov *niov) in net_devmem_free_dmabuf() argument
[all …]
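
net_devmem_get_dma_addr() mirrors the virtual-address helper, offsetting the owner's base DMA address by the same idx << PAGE_SHIFT; net_devmem_alloc_dmabuf() then takes an index out of the pool and clears the niov's page-pool fields before handing it out. A toy sketch of that allocation-time reset follows, with a trivial bump allocator standing in for the kernel's genpool and a stand-in struct, not the real net_iov.

/* Allocation-time reset sketch; toy allocator, stand-in types. */
#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

struct niov_sketch {
	unsigned long pp_magic;		/* page-pool ownership magic */
	void *pp;			/* owning page pool, if any */
	atomic_long pp_ref_count;	/* page-pool reference count */
};

static struct niov_sketch pool[8];
static size_t next_free;		/* toy allocator cursor */

static struct niov_sketch *alloc_niov(void)
{
	struct niov_sketch *niov;

	if (next_free >= 8)
		return NULL;		/* pool exhausted */
	niov = &pool[next_free++];

	/* Same reset sequence as the devmem.c snippet above. */
	niov->pp_magic = 0;
	niov->pp = NULL;
	atomic_store(&niov->pp_ref_count, 0);
	return niov;
}

int main(void)
{
	struct niov_sketch *niov = alloc_niov();

	if (niov)
		printf("allocated niov, refcount=%ld\n",
		       atomic_load(&niov->pp_ref_count));
	return 0;
}
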
/linux/drivers/infiniband/hw/qib/
qib_user_sdma.c
560 unsigned long niov) in qib_user_sdma_coalesce() argument
576 for (i = 0; i < niov; i++) { in qib_user_sdma_coalesce()
714 unsigned long niov) in qib_user_sdma_pin_pkt() argument
719 for (idx = 0; idx < niov; idx++) { in qib_user_sdma_pin_pkt()
756 unsigned long niov, int npages) in qib_user_sdma_init_payload() argument
762 ret = qib_user_sdma_coalesce(dd, pq, pkt, iov, niov); in qib_user_sdma_init_payload()
764 ret = qib_user_sdma_pin_pkt(dd, pq, pkt, iov, niov); in qib_user_sdma_init_payload()
801 unsigned long niov, in qib_user_sdma_queue_pkts() argument
816 while (idx < niov && npkts < *maxpkts) { in qib_user_sdma_queue_pkts()
871 while (pktnwc < pktnw && idx < niov) { in qib_user_sdma_queue_pkts()
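All three qib_user_sdma hits share one idiom: niov is the entry count of a caller-supplied iovec, walked with for (i = 0; i < niov; i++), either to coalesce the pieces into one buffer or to pin their pages. Here is a userspace sketch of the coalescing variant; it is a simplified stand-in for qib_user_sdma_coalesce(), not its actual logic.

/* Walk niov iovec entries and coalesce them into one buffer. */
#include <stdio.h>
#include <string.h>
#include <sys/uio.h>

static size_t coalesce(char *dst, size_t cap,
		       const struct iovec *iov, unsigned long niov)
{
	size_t off = 0;
	unsigned long i;

	for (i = 0; i < niov; i++) {	/* same bound as the kernel loops */
		if (off + iov[i].iov_len > cap)
			break;		/* destination buffer full */
		memcpy(dst + off, iov[i].iov_base, iov[i].iov_len);
		off += iov[i].iov_len;
	}
	return off;
}

int main(void)
{
	char a[] = "hello ", b[] = "world";
	struct iovec iov[2] = {
		{ .iov_base = a, .iov_len = sizeof(a) - 1 },
		{ .iov_base = b, .iov_len = sizeof(b) - 1 },
	};
	char buf[64];
	size_t n = coalesce(buf, sizeof(buf), iov, 2);

	printf("%.*s (%zu bytes)\n", (int)n, buf, n);
	return 0;
}
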
/linux/drivers/target/iscsi/
iscsi_target.c
503 u32 niov = 0, tx_size = ISCSI_HDR_LEN; in iscsit_xmit_nondatain_pdu() local
507 iov[niov].iov_base = cmd->pdu; in iscsit_xmit_nondatain_pdu()
508 iov[niov++].iov_len = ISCSI_HDR_LEN; in iscsit_xmit_nondatain_pdu()
525 iov[niov].iov_base = (void *)data_buf; in iscsit_xmit_nondatain_pdu()
526 iov[niov++].iov_len = data_buf_len; in iscsit_xmit_nondatain_pdu()
530 iov[niov].iov_base = &cmd->pad_bytes; in iscsit_xmit_nondatain_pdu()
531 iov[niov++].iov_len = padding; in iscsit_xmit_nondatain_pdu()
541 iov[niov].iov_base = &cmd->data_crc; in iscsit_xmit_nondatain_pdu()
542 iov[niov++].iov_len = ISCSI_CRC_LEN; in iscsit_xmit_nondatain_pdu()
550 cmd->iov_misc_count = niov; in iscsit_xmit_nondatain_pdu()
[all …]
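
In this hit niov is a running cursor rather than an input count: each iov[niov++] appends one PDU segment (header, payload, pad, digest) while tx_size accumulates the total, and the final count lands in cmd->iov_misc_count. Below is a userspace version of the same append idiom, flushed with writev(2); HDR_LEN and the segment contents are illustrative, not the iSCSI target's actual values.

/* iov[niov++] append idiom, userspace edition. */
#include <stdio.h>
#include <sys/uio.h>
#include <unistd.h>

#define HDR_LEN 48	/* stand-in for ISCSI_HDR_LEN */

int main(void)
{
	char hdr[HDR_LEN] = "hdr";
	char payload[] = "payload bytes";
	char pad[4] = { 0 };
	struct iovec iov[4];
	unsigned int niov = 0;
	size_t tx_size = 0, padding;
	ssize_t n;

	/* Header segment always goes first. */
	iov[niov].iov_base = hdr;
	iov[niov++].iov_len = HDR_LEN;
	tx_size += HDR_LEN;

	/* Optional payload segment. */
	iov[niov].iov_base = payload;
	iov[niov++].iov_len = sizeof(payload) - 1;
	tx_size += sizeof(payload) - 1;

	/* Pad the payload to a 4-byte boundary, as the PDU code does. */
	padding = (4 - (sizeof(payload) - 1) % 4) % 4;
	if (padding) {
		iov[niov].iov_base = pad;
		iov[niov++].iov_len = padding;
		tx_size += padding;
	}

	n = writev(STDOUT_FILENO, iov, niov);
	fprintf(stderr, "sent %zd of %zu bytes in %u segments\n",
		n, tx_size, niov);
	return 0;
}
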
/linux/fs/smb/server/
connection.h
123 int (*writev)(struct ksmbd_transport *t, struct kvec *iovs, int niov,
transport_rdma.c
228 struct kvec *iov, int niov,
1163 struct kvec *iov, int niov, in smb_direct_post_send_data() argument
1176 for (i = 0; i < niov; i++) in smb_direct_post_send_data()
1186 for (i = 0; i < niov; i++) { in smb_direct_post_send_data()
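
In ksmbd the vector count is part of the transport abstraction: connection.h declares a writev op taking the kvec array plus niov, and the RDMA transport's smb_direct_post_send_data() iterates over those niov entries. A sketch of that ops-table shape with userspace types follows; struct transport is a stand-in for ksmbd_transport, and the backend is plain writev(2) on a file descriptor.

/* Transport ops-table sketch; stand-in types. */
#include <stdio.h>
#include <sys/uio.h>
#include <unistd.h>

struct transport;

struct transport_ops {
	int (*writev)(struct transport *t, struct iovec *iovs, int niov,
		      int total_size);
};

struct transport {
	const struct transport_ops *ops;
	int fd;
};

/* One possible backend: hand the whole vector to writev(2). */
static int fd_writev(struct transport *t, struct iovec *iovs, int niov,
		     int total_size)
{
	ssize_t n = writev(t->fd, iovs, niov);

	return n == total_size ? 0 : -1;
}

static const struct transport_ops fd_ops = { .writev = fd_writev };

int main(void)
{
	struct transport t = { .ops = &fd_ops, .fd = STDOUT_FILENO };
	char msg[] = "via transport writev\n";
	struct iovec iov = { .iov_base = msg, .iov_len = sizeof(msg) - 1 };

	return t.ops->writev(&t, &iov, 1, (int)(sizeof(msg) - 1));
}
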
/linux/net/ipv4/
tcp.c
2478 struct net_iov *niov; in tcp_recvmsg_dmabuf() local
2495 niov = skb_frag_net_iov(frag); in tcp_recvmsg_dmabuf()
2502 frag_offset = net_iov_virtual_addr(niov) + in tcp_recvmsg_dmabuf()
2514 dmabuf_cmsg.dmabuf_id = net_iov_binding_id(niov); in tcp_recvmsg_dmabuf()
2526 atomic_long_inc(&niov->pp_ref_count); in tcp_recvmsg_dmabuf()
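
The receive side ties the helpers together: tcp_recvmsg_dmabuf() pulls the niov out of each skb fragment, derives the user-visible offset from net_iov_virtual_addr(), reports the binding id through a dmabuf cmsg, and bumps pp_ref_count so the pages stay live while userspace holds them. Below is a hedged userspace sketch of that per-fragment bookkeeping; both structs are simplified stand-ins, and dmabuf_cmsg_sketch only mimics the fields visible in the snippet above, not the kernel's uapi struct.

/* Per-fragment devmem bookkeeping sketch; stand-in types. */
#include <stdatomic.h>
#include <stdio.h>

struct niov_frag {
	unsigned long virtual_addr;	/* as net_iov_virtual_addr() returns */
	unsigned int offset;		/* fragment offset within the page */
	unsigned int size;		/* fragment length */
	unsigned int binding_id;	/* as net_iov_binding_id() returns */
	atomic_long pp_ref_count;
};

struct dmabuf_cmsg_sketch {
	unsigned long frag_offset;	/* where the payload sits in the buffer */
	unsigned int frag_size;
	unsigned int dmabuf_id;
};

static struct dmabuf_cmsg_sketch report_frag(struct niov_frag *f)
{
	struct dmabuf_cmsg_sketch c = {
		.frag_offset = f->virtual_addr + f->offset,
		.frag_size = f->size,
		.dmabuf_id = f->binding_id,
	};

	/* Hold the fragment for userspace, mirroring
	 * atomic_long_inc(&niov->pp_ref_count) above. */
	atomic_fetch_add(&f->pp_ref_count, 1);
	return c;
}

int main(void)
{
	struct niov_frag f = {
		.virtual_addr = 0x2000, .offset = 0x80,
		.size = 256, .binding_id = 7,
	};
	struct dmabuf_cmsg_sketch c;

	atomic_init(&f.pp_ref_count, 0);
	c = report_frag(&f);
	printf("offset=0x%lx size=%u id=%u refs=%ld\n",
	       c.frag_offset, c.frag_size, c.dmabuf_id,
	       atomic_load(&f.pp_ref_count));
	return 0;
}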