
Searched refs:ents (Results 1 – 21 of 21) sorted by relevance

/linux/drivers/gpu/drm/virtio/
virtgpu_object.c
161 struct virtio_gpu_mem_entry **ents, in virtio_gpu_object_shmem_init() argument
178 *ents = kvmalloc_array(*nents, in virtio_gpu_object_shmem_init()
181 if (!(*ents)) { in virtio_gpu_object_shmem_init()
188 (*ents)[si].addr = cpu_to_le64(sg_dma_address(sg)); in virtio_gpu_object_shmem_init()
189 (*ents)[si].length = cpu_to_le32(sg_dma_len(sg)); in virtio_gpu_object_shmem_init()
190 (*ents)[si].padding = 0; in virtio_gpu_object_shmem_init()
194 (*ents)[si].addr = cpu_to_le64(sg_phys(sg)); in virtio_gpu_object_shmem_init()
195 (*ents)[si].length = cpu_to_le32(sg->length); in virtio_gpu_object_shmem_init()
196 (*ents)[si].padding = 0; in virtio_gpu_object_shmem_init()
211 struct virtio_gpu_mem_entry *ents = NULL; in virtio_gpu_object_create() local
[all …]
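
The fragments above show virtio_gpu_object_shmem_init() allocating the ents array with kvmalloc_array() and filling one virtio_gpu_mem_entry per scatterlist element, using DMA addresses when the buffer is DMA-mapped and physical addresses otherwise. A minimal sketch of that loop, reconstructed from the lines shown; the use_dma_api flag, the nents derivation and the for_each_sgtable_*() iterators are assumptions, not confirmed by the listing:

    /* Sketch: build a virtio_gpu_mem_entry array from an sg_table.
     * use_dma_api and the sg_table iterators below are assumptions. */
    static int shmem_init_ents_sketch(struct sg_table *pages, bool use_dma_api,
                                      struct virtio_gpu_mem_entry **ents,
                                      unsigned int *nents)
    {
            struct scatterlist *sg;
            unsigned int si;

            *nents = use_dma_api ? pages->nents : pages->orig_nents;
            *ents = kvmalloc_array(*nents, sizeof(struct virtio_gpu_mem_entry),
                                   GFP_KERNEL);
            if (!(*ents))
                    return -ENOMEM;

            if (use_dma_api) {
                    for_each_sgtable_dma_sg(pages, sg, si) {
                            (*ents)[si].addr = cpu_to_le64(sg_dma_address(sg));
                            (*ents)[si].length = cpu_to_le32(sg_dma_len(sg));
                            (*ents)[si].padding = 0;
                    }
            } else {
                    for_each_sgtable_sg(pages, sg, si) {
                            (*ents)[si].addr = cpu_to_le64(sg_phys(sg));
                            (*ents)[si].length = cpu_to_le32(sg->length);
                            (*ents)[si].padding = 0;
                    }
            }
            return 0;
    }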
virtgpu_prime.c
147 int virtgpu_dma_buf_import_sgt(struct virtio_gpu_mem_entry **ents, in virtgpu_dma_buf_import_sgt() argument
168 *ents = kvmalloc_array(sgt->nents, in virtgpu_dma_buf_import_sgt()
171 if (!(*ents)) { in virtgpu_dma_buf_import_sgt()
178 (*ents)[i].addr = cpu_to_le64(sg_dma_address(sl)); in virtgpu_dma_buf_import_sgt()
179 (*ents)[i].length = cpu_to_le32(sg_dma_len(sl)); in virtgpu_dma_buf_import_sgt()
180 (*ents)[i].padding = 0; in virtgpu_dma_buf_import_sgt()
225 struct virtio_gpu_mem_entry *ents = NULL; in virtgpu_dma_buf_init_obj() local
241 ret = virtgpu_dma_buf_import_sgt(&ents, &nents, bo, attach); in virtgpu_dma_buf_init_obj()
251 ents, nents); in virtgpu_dma_buf_init_obj()
virtgpu_drv.h
355 struct virtio_gpu_mem_entry *ents,
427 struct virtio_gpu_mem_entry *ents,
480 int virtgpu_dma_buf_import_sgt(struct virtio_gpu_mem_entry **ents,
virtgpu_vq.c
628 struct virtio_gpu_mem_entry *ents, in virtio_gpu_cmd_resource_attach_backing() argument
642 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_attach_backing()
643 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_attach_backing()
1120 struct virtio_gpu_mem_entry *ents, in virtio_gpu_object_attach() argument
1127 ents, nents, NULL); in virtio_gpu_object_attach()
1280 struct virtio_gpu_mem_entry *ents, in virtio_gpu_cmd_resource_create_blob() argument
1298 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_create_blob()
1299 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_create_blob()
virtgpu_plane.c
271 struct virtio_gpu_mem_entry *ents = NULL; in virtio_gpu_prepare_imported_obj() local
284 ret = virtgpu_dma_buf_import_sgt(&ents, &nents, in virtio_gpu_prepare_imported_obj()
289 virtio_gpu_object_attach(vgdev, bo, ents, nents); in virtio_gpu_prepare_imported_obj()
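
Across virtgpu_prime.c, virtgpu_vq.c and virtgpu_plane.c the same ents array is threaded from the dma-buf import helper into the virtqueue commands, which store it as the command's data buffer (vbuf->data_buf, sized sizeof(*ents) * nents). A rough sketch of that flow; the surrounding error handling and the ownership comment are assumptions based only on the fragments above:

    /* Sketch: import a dma-buf's sg_table as mem entries, then attach them. */
    static int prepare_imported_obj_sketch(struct virtio_gpu_device *vgdev,
                                           struct virtio_gpu_object *bo,
                                           struct dma_buf_attachment *attach)
    {
            struct virtio_gpu_mem_entry *ents = NULL;
            unsigned int nents;
            int ret;

            ret = virtgpu_dma_buf_import_sgt(&ents, &nents, bo, attach);
            if (ret)
                    return ret;

            /* Ownership appears to pass to the command path, which keeps the
             * array as vbuf->data_buf; the caller does not kvfree() it here
             * (assumption based on the vq fragments above). */
            virtio_gpu_object_attach(vgdev, bo, ents, nents);
            return 0;
    }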
/linux/arch/riscv/kvm/
vm.c
94 struct kvm_irq_routing_entry *ents; in kvm_riscv_setup_default_irq_routing() local
97 ents = kcalloc(lines, sizeof(*ents), GFP_KERNEL); in kvm_riscv_setup_default_irq_routing()
98 if (!ents) in kvm_riscv_setup_default_irq_routing()
102 ents[i].gsi = i; in kvm_riscv_setup_default_irq_routing()
103 ents[i].type = KVM_IRQ_ROUTING_IRQCHIP; in kvm_riscv_setup_default_irq_routing()
104 ents[i].u.irqchip.irqchip = 0; in kvm_riscv_setup_default_irq_routing()
105 ents[i].u.irqchip.pin = i; in kvm_riscv_setup_default_irq_routing()
107 rc = kvm_set_irq_routing(kvm, ents, lines, 0); in kvm_riscv_setup_default_irq_routing()
108 kfree(ents); in kvm_riscv_setup_default_irq_routing()
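
The vm.c hit is a self-contained pattern: allocate one kvm_irq_routing_entry per IRQ line, point each at pin i of irqchip 0, hand the table to kvm_set_irq_routing(), and free the temporary array. A sketch assembled from the lines above; the function signature is an assumption, the body follows the snippets:

    int kvm_riscv_setup_default_irq_routing(struct kvm *kvm, u32 lines)
    {
            struct kvm_irq_routing_entry *ents;
            int i, rc;

            ents = kcalloc(lines, sizeof(*ents), GFP_KERNEL);
            if (!ents)
                    return -ENOMEM;

            for (i = 0; i < lines; i++) {
                    ents[i].gsi = i;
                    ents[i].type = KVM_IRQ_ROUTING_IRQCHIP;
                    ents[i].u.irqchip.irqchip = 0;
                    ents[i].u.irqchip.pin = i;
            }
            rc = kvm_set_irq_routing(kvm, ents, lines, 0);
            kfree(ents);

            return rc;
    }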
/linux/net/can/j1939/
bus.c
58 ent = &priv->ents[ecu->addr]; in j1939_ecu_map_locked()
85 ent = &priv->ents[ecu->addr]; in j1939_ecu_unmap_locked()
103 for (i = 0; i < ARRAY_SIZE(priv->ents); i++) in j1939_ecu_unmap_all()
104 if (priv->ents[i].ecu) in j1939_ecu_unmap_all()
105 j1939_ecu_unmap_locked(priv->ents[i].ecu); in j1939_ecu_unmap_all()
177 return priv->ents[addr].ecu; in j1939_ecu_find_by_addr_locked()
284 priv->ents[sa].nusers++; in j1939_local_ecu_get()
300 priv->ents[ecu->addr].nusers++; in j1939_local_ecu_get()
315 priv->ents[sa].nusers--; in j1939_local_ecu_put()
328 priv->ents[ecu->addr].nusers--; in j1939_local_ecu_put()
main.c
98 priv->ents[skcb->addr.sa].nusers) in j1939_can_recv()
101 priv->ents[skcb->addr.da].nusers) in j1939_can_recv()
j1939-priv.h
71 } ents[256]; member
transport.c
362 priv->ents[skcb->addr.da].nusers) in j1939_session_skb_queue()
2015 priv->ents[skcb->addr.da].nusers) in j1939_tp_send()
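
The j1939 hits all revolve around the ents[256] table declared in j1939-priv.h: one slot per possible 8-bit ECU address, holding the mapped ecu pointer plus a nusers count that the rx/tx paths check before accepting traffic for that address. A compressed sketch of the structure and the lookup; only the fields visible in the hits are shown, everything else about struct j1939_priv is omitted:

    /* Sketch of the per-address table (not the real struct j1939_priv). */
    struct j1939_priv_sketch {
            struct {
                    struct j1939_ecu *ecu;  /* currently mapped ECU, or NULL */
                    int nusers;             /* local users claiming this address */
            } ents[256];                    /* indexed by the J1939 address */
    };

    /* Lookup is a direct index, as in j1939_ecu_find_by_addr_locked(): */
    static struct j1939_ecu *ecu_by_addr(struct j1939_priv_sketch *priv, u8 addr)
    {
            return priv->ents[addr].ecu;
    }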
/linux/include/net/
rps.h
62 u32 ents[] ____cacheline_aligned_in_smp; member
64 #define RPS_SOCK_FLOW_TABLE_SIZE(_num) (offsetof(struct rps_sock_flow_table, ents[_num]))
80 if (READ_ONCE(table->ents[index]) != val) in rps_record_sock_flow()
81 WRITE_ONCE(table->ents[index], val); in rps_record_sock_flow()
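
rps.h shows the flexible-array-member sizing idiom: ents[] is the trailing array of the struct, so the allocation size is computed with offsetof(..., ents[_num]), and updates go through READ_ONCE/WRITE_ONCE because the table is read locklessly from the receive path (see the dev.c hit further down). An illustration built from the hits; the mask field comes from the dev.c snippet, the helper name below is made up:

    /* Sizing idiom: header plus _num trailing ents. */
    struct rps_sock_flow_table {
            u32 mask;                                 /* power-of-two size - 1 */
            u32 ents[] ____cacheline_aligned_in_smp;  /* flexible array member */
    };
    #define RPS_SOCK_FLOW_TABLE_SIZE(_num) \
            (offsetof(struct rps_sock_flow_table, ents[_num]))

    /* Lockless update: only dirty the cacheline when the value changed. */
    static inline void record_flow_sketch(struct rps_sock_flow_table *table,
                                          u32 index, u32 val)
    {
            if (READ_ONCE(table->ents[index]) != val)
                    WRITE_ONCE(table->ents[index], val);
    }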
/linux/kernel/dma/
mapping.c
205 int ents; in __dma_map_sg_attrs() local
214 ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
216 ents = iommu_dma_map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
218 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
220 if (ents > 0) { in __dma_map_sg_attrs()
222 trace_dma_map_sg(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
223 debug_dma_map_sg(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
224 } else if (WARN_ON_ONCE(ents != -EINVAL && ents != -ENOMEM && in __dma_map_sg_attrs()
225 ents != -EIO && ents != -EREMOTEIO)) { in __dma_map_sg_attrs()
226 trace_dma_map_sg_err(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
[all …]
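
In __dma_map_sg_attrs() the ents variable carries the return value of whichever scatter/gather mapping backend is used: the number of mapped DMA segments on success, or one of a small set of negative errno values on failure; anything else triggers a warning. Roughly, the dispatch looks like this; the backend selection conditions are paraphrased and the error normalization is an assumption, only the three backend calls and the checks come from the hits:

    int ents;

    if (dma_map_direct(dev, ops))            /* condition paraphrased */
            ents = dma_direct_map_sg(dev, sg, nents, dir, attrs);
    else if (use_dma_iommu(dev))             /* condition paraphrased */
            ents = iommu_dma_map_sg(dev, sg, nents, dir, attrs);
    else
            ents = ops->map_sg(dev, sg, nents, dir, attrs);

    if (ents > 0) {
            trace_dma_map_sg(dev, sg, nents, ents, dir, attrs);
            debug_dma_map_sg(dev, sg, nents, ents, dir, attrs);
    } else if (WARN_ON_ONCE(ents != -EINVAL && ents != -ENOMEM &&
                            ents != -EIO && ents != -EREMOTEIO)) {
            trace_dma_map_sg_err(dev, sg, nents, ents, dir, attrs);
            ents = -EIO;                     /* assumption: errors normalized */
    }
    return ents;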
/linux/lib/crypto/mpi/
mpicoder.c
334 int x, j, z, lzeros, ents; in mpi_read_raw_from_sgl() local
340 ents = sg_nents_for_len(sgl, nbytes); in mpi_read_raw_from_sgl()
341 if (ents < 0) in mpi_read_raw_from_sgl()
344 sg_miter_start(&miter, sgl, ents, SG_MITER_ATOMIC | SG_MITER_FROM_SG); in mpi_read_raw_from_sgl()
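
mpi_read_raw_from_sgl() first asks the scatterlist how many entries cover the requested byte length, then walks exactly that many with an atomic sg_miter. The two calls shown above combine roughly like this; the error return and the loop body are illustrative, not the real MPI parsing:

    struct sg_mapping_iter miter;
    size_t copied = 0;
    int ents;

    ents = sg_nents_for_len(sgl, nbytes);   /* entries covering nbytes, or -errno */
    if (ents < 0)
            return NULL;                    /* assumption: callers get NULL on error */

    sg_miter_start(&miter, sgl, ents, SG_MITER_ATOMIC | SG_MITER_FROM_SG);
    while (sg_miter_next(&miter)) {
            /* miter.addr / miter.length describe one temporarily mapped chunk */
            copied += miter.length;
    }
    sg_miter_stop(&miter);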
/linux/fs/xfs/libxfs/
xfs_dir2_priv.h
26 struct xfs_dir2_leaf_entry *ents; member
108 struct xfs_dir2_leaf_entry *ents, int *indexp,
126 struct xfs_dir2_leaf_entry *ents, int index, int compact,
xfs_da_btree.c
775 size = (int)((char *)&leafhdr.ents[leafhdr.count] - in xfs_da3_root_split()
2472 struct xfs_dir2_leaf_entry *ents; in xfs_da3_swap_lastblock() local
2477 ents = leafhdr.ents; in xfs_da3_swap_lastblock()
2479 dead_hash = be32_to_cpu(ents[leafhdr.count - 1].hashval); in xfs_da3_swap_lastblock()
/linux/include/trace/events/
dma.h
280 int ents, enum dma_data_direction dir, unsigned long attrs),
281 TP_ARGS(dev, sgl, nents, ents, dir, attrs),
286 __dynamic_array(u64, dma_addrs, ents)
287 __dynamic_array(unsigned int, lengths, ents)
299 for_each_sg(sgl, sg, ents, i) {
/linux/arch/sparc/mm/
init_64.c
115 int ents, ret, i; in read_obp_memory() local
117 ents = prop_size / sizeof(struct linux_prom64_registers); in read_obp_memory()
118 if (ents > MAX_BANKS) { in read_obp_memory()
135 for (i = 0; i < ents; i++) { in read_obp_memory()
156 (ents - i - 1) * sizeof(regs[0])); in read_obp_memory()
158 ents--; in read_obp_memory()
165 *num_ents = ents; in read_obp_memory()
167 sort(regs, ents, sizeof(struct linux_prom64_registers), in read_obp_memory()
600 int n, node, ents, first, last, i; in read_obp_translations() local
622 ents in read_obp_translations()
[all …]
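
read_obp_memory() derives ents from the property size, drops banks that fail validation by shifting the tail of the array down over them, and sorts what remains. The deletion idiom visible at lines 135-158 is the standard "remove element i from an array of ents entries" shuffle, roughly as below; the memmove, the predicate name and the comparator name are assumptions based on the fragments:

    ents = prop_size / sizeof(struct linux_prom64_registers);

    for (i = 0; i < ents; i++) {
            if (!bank_is_usable(&regs[i])) {        /* hypothetical predicate */
                    /* Close the gap: shift the remaining ents - i - 1 down. */
                    memmove(&regs[i], &regs[i + 1],
                            (ents - i - 1) * sizeof(regs[0]));
                    ents--;
                    i--;                            /* re-examine the refilled slot */
            }
    }
    *num_ents = ents;
    sort(regs, ents, sizeof(struct linux_prom64_registers),
         cmp_p64, NULL);                            /* comparator name assumed */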
/linux/drivers/gpu/drm/qxl/
qxl_dev.h
822 uint32_t ents[]; member
/linux/net/core/
sysctl_net_core.c
190 sock_table->ents[i] = RPS_NO_CPU; in rps_sock_flow_sysctl()
dev.c
4869 ident = READ_ONCE(sock_flow_table->ents[hash & sock_flow_table->mask]); in get_rps_cpu()
/linux/fs/proc/
base.c
2745 const struct pid_entry *ents, unsigned int nents) in proc_pident_readdir() argument
2759 for (p = ents + (ctx->pos - 2); p < ents + nents; p++) { in proc_pident_readdir()
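
The proc hit shows a readdir helper taking a caller-supplied array of pid_entry descriptors plus its length: positions 0 and 1 of ctx->pos are "." and "..", so iteration starts at ents + (ctx->pos - 2) and emits one directory entry per remaining descriptor. A stripped-down sketch of that loop; the dir_emit() arguments, the inode placeholder and the omitted instantiation logic are assumptions:

    static int pident_readdir_sketch(struct dir_context *ctx,
                                     const struct pid_entry *ents,
                                     unsigned int nents)
    {
            const struct pid_entry *p;

            if (ctx->pos < 2)               /* assumption: dots handled elsewhere */
                    return 0;

            for (p = ents + (ctx->pos - 2); p < ents + nents; p++) {
                    if (!dir_emit(ctx, p->name, p->len,
                                  1 /* placeholder ino */, DT_UNKNOWN))
                            break;
                    ctx->pos++;
            }
            return 0;
    }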