
Searched refs:ring_size (Results 1 – 25 of 158) sorted by relevance


/linux/tools/virtio/ringtest/
virtio_ring_0_9.c
70 ret = posix_memalign(&p, 0x1000, vring_size(ring_size, 0x1000)); in alloc_ring()
75 memset(p, 0, vring_size(ring_size, 0x1000)); in alloc_ring()
76 vring_init(&ring, ring_size, p, 0x1000); in alloc_ring()
85 for (i = 0; i < ring_size - 1; i++) in alloc_ring()
89 guest.num_free = ring_size; in alloc_ring()
90 data = malloc(ring_size * sizeof *data); in alloc_ring()
95 memset(data, 0, ring_size * sizeof *data); in alloc_ring()
111 head = (ring_size - 1) & (guest.avail_idx++); in add_inbuf()
136 ring.avail->ring[avail & (ring_size - 1)] = in add_inbuf()
137 (head | (avail & ~(ring_size - 1))) ^ 0x8000; in add_inbuf()
[all …]
ring.c
82 ret = posix_memalign((void **)&ring, 0x1000, ring_size * sizeof *ring); in alloc_ring()
97 for (i = 0; i < ring_size; ++i) { in alloc_ring()
103 guest.num_free = ring_size; in alloc_ring()
104 data = calloc(ring_size, sizeof(*data)); in alloc_ring()
120 head = (ring_size - 1) & (guest.avail_idx++); in add_inbuf()
145 unsigned head = (ring_size - 1) & guest.last_used_idx; in get_buf()
154 index = ring[head].index & (ring_size - 1); in get_buf()
166 unsigned head = (ring_size - 1) & guest.last_used_idx; in used_empty()
221 unsigned head = (ring_size - 1) & host.used_idx; in avail_empty()
228 unsigned head = (ring_size - 1) & host.used_idx; in use_buf()
main.c
29 unsigned ring_size = 256; variable
288 ring_size, in help()
322 ring_size = strtol(optarg, &endptr, 0); in main()
323 assert(ring_size && !(ring_size & (ring_size - 1))); in main()
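
The ringtest hits above all lean on ring_size being a power of two: main.c asserts ring_size && !(ring_size & (ring_size - 1)), and virtio_ring_0_9.c and ring.c then reduce a free-running index to a slot with a single AND against ring_size - 1 instead of a modulo. A minimal userspace sketch of that pattern, with illustrative names not copied from the ringtest sources:

#include <assert.h>
#include <stdio.h>

#define RING_SIZE 256u   /* must be a power of two, like the ringtest default */

int main(void)
{
	/* same power-of-two check as main.c: non-zero, no bit shared with size-1 */
	assert(RING_SIZE && !(RING_SIZE & (RING_SIZE - 1)));

	unsigned avail_idx = 0;   /* free-running producer index, never wrapped by hand */

	for (int i = 0; i < 1000; i++) {
		/* the AND replaces "% RING_SIZE" because the size is a power of two */
		unsigned head = (RING_SIZE - 1) & avail_idx++;

		if (i >= 997)
			printf("idx %u -> slot %u\n", avail_idx - 1, head);
	}
	return 0;
}

This is also what the assert protects: with any non-power-of-two size, idx & (ring_size - 1) would skip some slots entirely.
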
/linux/include/xen/interface/io/
ring.h
357 static inline RING_IDX name##_mask(RING_IDX idx, RING_IDX ring_size) \
359 return idx & (ring_size - 1); \
364 RING_IDX ring_size) \
366 return buf + name##_mask(idx, ring_size); \
374 RING_IDX ring_size) \
377 size <= ring_size - *masked_cons) { \
380 memcpy(opaque, buf + *masked_cons, ring_size - *masked_cons); \
381 memcpy((unsigned char *)opaque + ring_size - *masked_cons, buf, \
382 size - (ring_size - *masked_cons)); \
384 *masked_cons = name##_mask(*masked_cons + size, ring_size); \
[all …]
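
The generated Xen ring helpers above handle a copy that crosses the end of the ring: when size bytes do not fit between the masked consumer index and the end of the buffer, the copy is split into two memcpy calls, one up to the end and one continuing from the start, and the consumer index is then re-masked. A self-contained sketch of that split, assuming a power-of-two ring_size as the name##_mask() helper does (function names here are illustrative, not the generated ones, and the producer-index checks are omitted):

#include <string.h>

/* ring_size must be a power of two, as in the generated name##_mask() helper */
static unsigned ring_mask(unsigned idx, unsigned ring_size)
{
	return idx & (ring_size - 1);
}

/* Copy size bytes out of buf starting at *masked_cons, wrapping if needed. */
static void ring_read(void *dst, const unsigned char *buf,
		      unsigned *masked_cons, unsigned size, unsigned ring_size)
{
	if (size <= ring_size - *masked_cons) {
		/* contiguous case: the whole region fits before the end of the ring */
		memcpy(dst, buf + *masked_cons, size);
	} else {
		/* wrapped case: tail of the ring first, then the rest from the start */
		unsigned tail = ring_size - *masked_cons;

		memcpy(dst, buf + *masked_cons, tail);
		memcpy((unsigned char *)dst + tail, buf, size - tail);
	}
	*masked_cons = ring_mask(*masked_cons + size, ring_size);
}
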
/linux/tools/testing/selftests/kvm/
coalesced_io_test.c
17 uint32_t ring_size; member
47 for (i = 0; i < io->ring_size - 1; i++) { in guest_code()
97 *(uint64_t *)run->mmio.data == io->mmio_gpa + io->ring_size - 1)) || in vcpu_run_and_verify_io_exit()
100 pio_value == io->pio_port + io->ring_size - 1)), in vcpu_run_and_verify_io_exit()
106 (want_pio ? io->pio_port : io->mmio_gpa) + io->ring_size - 1, run->exit_reason, in vcpu_run_and_verify_io_exit()
122 TEST_ASSERT((ring->last + 1) % io->ring_size == ring->first, in vcpu_run_and_verify_coalesced_io()
124 ring->first, ring->last, io->ring_size, ring_start); in vcpu_run_and_verify_coalesced_io()
126 for (i = 0; i < io->ring_size - 1; i++) { in vcpu_run_and_verify_coalesced_io()
127 uint32_t idx = (ring->first + i) % io->ring_size; in vcpu_run_and_verify_coalesced_io()
214 .ring_size = (getpagesize() - sizeof(struct kvm_coalesced_mmio_ring)) / in main()
[all …]
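
The coalesced I/O test above uses the classic one-slot-reserve convention: the ring counts as full when advancing last by one (modulo ring_size) would collide with first, so at most ring_size - 1 entries are ever outstanding, which is also why the guest loops only ring_size - 1 times. A small standalone sketch of that convention (the struct below is illustrative, not the KVM ABI struct):

#include <stdbool.h>
#include <stdio.h>

struct io_ring {
	unsigned first;     /* consumer index */
	unsigned last;      /* producer index */
	unsigned ring_size; /* number of slots */
};

static bool ring_empty(const struct io_ring *r)
{
	return r->first == r->last;
}

static bool ring_full(const struct io_ring *r)
{
	/* full when the producer is one step behind the consumer, modulo size */
	return (r->last + 1) % r->ring_size == r->first;
}

int main(void)
{
	struct io_ring r = { .first = 0, .last = 0, .ring_size = 8 };

	/* only ring_size - 1 entries fit before the ring reports full */
	while (!ring_full(&r))
		r.last = (r.last + 1) % r.ring_size;
	printf("full after %u entries, empty=%d\n", r.last, ring_empty(&r));
	return 0;
}
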
/linux/drivers/crypto/ccp/
tee-dev.c
25 static int tee_alloc_ring(struct psp_tee_device *tee, int ring_size) in tee_alloc_ring() argument
30 if (!ring_size) in tee_alloc_ring()
36 start_addr = (void *)__get_free_pages(GFP_KERNEL, get_order(ring_size)); in tee_alloc_ring()
40 memset(start_addr, 0x0, ring_size); in tee_alloc_ring()
42 rb_mgr->ring_size = ring_size; in tee_alloc_ring()
57 get_order(rb_mgr->ring_size)); in tee_free_ring()
60 rb_mgr->ring_size = 0; in tee_free_ring()
76 cmd->size = tee->rb_mgr.ring_size; in tee_alloc_cmd_buffer()
91 int ring_size = MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd); in tee_init_ring() local
98 ret = tee_alloc_ring(tee, ring_size); in tee_init_ring()
[all …]
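
tee_alloc_ring() above sizes the ring as MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd) and then allocates whole pages with __get_free_pages(GFP_KERNEL, get_order(ring_size)), later freeing the same order. Those are kernel-only APIs; the sketch below only illustrates the get_order()-style rounding in userspace, with an assumed 4 KiB page size and made-up entry counts:

#include <stdio.h>

#define PAGE_SHIFT 12                 /* 4 KiB pages assumed for illustration */
#define PAGE_SIZE  (1u << PAGE_SHIFT)

/* smallest order n such that (PAGE_SIZE << n) >= size, like the kernel's get_order() */
static unsigned int order_for_size(unsigned long size)
{
	unsigned int order = 0;

	while (((unsigned long)PAGE_SIZE << order) < size)
		order++;
	return order;
}

int main(void)
{
	unsigned long entries = 32, entry_size = 1024;   /* illustrative numbers */
	unsigned long ring_size = entries * entry_size;  /* 32 KiB */
	unsigned int order = order_for_size(ring_size);

	/* the allocation is rounded up to a power-of-two number of pages */
	printf("ring_size=%lu -> order %u (%lu bytes allocated)\n",
	       ring_size, order, (unsigned long)PAGE_SIZE << order);
	return 0;
}
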
/linux/tools/hv/
hv_fcopy_uio_daemon.c
59 uint32_t ring_size = 0; in get_ring_buffer_size() local
85 ring_size = (uint32_t)st.st_size / 2; in get_ring_buffer_size()
87 ring_path, ring_size); in get_ring_buffer_size()
93 if (!ring_size && retry_count == 0) { in get_ring_buffer_size()
102 if (!ring_size) in get_ring_buffer_size()
105 return ring_size; in get_ring_buffer_size()
456 uint32_t ring_size, len; in main() local
487 ring_size = get_ring_buffer_size(); in main()
488 if (!ring_size) { in main()
493 desc = malloc(ring_size * sizeof(unsigned char)); in main()
[all …]
/linux/drivers/net/ethernet/intel/idpf/
idpf_controlq.c
39 idpf_mbx_wr32(hw, cq->reg.tail, (u32)(cq->ring_size - 1)); in idpf_ctlq_init_regs()
51 idpf_mbx_wr32(hw, cq->reg.len, (cq->ring_size | cq->reg.len_ena_mask)); in idpf_ctlq_init_regs()
65 for (i = 0; i < cq->ring_size; i++) { in idpf_ctlq_init_rxq_bufs()
105 cq->ring_size = 0; in idpf_ctlq_shutdown()
137 cq->ring_size = qinfo->len; in idpf_ctlq_add()
141 cq->next_to_post = cq->ring_size - 1; in idpf_ctlq_add()
322 if (cq->next_to_use == cq->ring_size) in idpf_ctlq_send()
363 if (*clean_count > cq->ring_size) in idpf_ctlq_clean_sq()
393 if (ntc == cq->ring_size) in idpf_ctlq_clean_sq()
431 if (*buff_count > cq->ring_size) in idpf_ctlq_post_rx_buffs()
[all …]
idpf_controlq_setup.c
14 size_t size = cq->ring_size * sizeof(struct idpf_ctlq_desc); in idpf_ctlq_alloc_desc_ring()
43 cq->bi.rx_buff = kcalloc(cq->ring_size, sizeof(struct idpf_dma_mem *), in idpf_ctlq_alloc_bufs()
49 for (i = 0; i < cq->ring_size - 1; i++) { in idpf_ctlq_alloc_bufs()
112 for (i = 0; i < cq->ring_size; i++) { in idpf_ctlq_free_bufs()
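
Unlike the masking rings elsewhere in these results, the idpf control queue hits wrap by comparison: next_to_use and the clean/post cursors are incremented and reset to zero once they reach ring_size, so the ring length does not have to be a power of two. A tiny sketch of that wrap style (struct and function names are illustrative, not the driver's):

#include <stdio.h>

struct ctlq {
	unsigned next_to_use;
	unsigned ring_size;   /* any length works; no power-of-two requirement */
};

/* advance the producer index, wrapping explicitly instead of masking */
static unsigned ctlq_advance(struct ctlq *cq)
{
	unsigned idx = cq->next_to_use++;

	if (cq->next_to_use == cq->ring_size)
		cq->next_to_use = 0;
	return idx;
}

int main(void)
{
	struct ctlq cq = { .next_to_use = 0, .ring_size = 6 };

	for (int i = 0; i < 14; i++)
		printf("%u ", ctlq_advance(&cq));
	printf("\n");   /* 0 1 2 3 4 5 0 1 2 3 4 5 0 1 */
	return 0;
}
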
/linux/drivers/gpu/drm/radeon/
H A Dradeon_ring.c88 ring->ring_free_dw = rptr + (ring->ring_size / 4); in radeon_ring_free_size()
93 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_free_size()
114 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
316 size = ring->wptr + (ring->ring_size / 4); in radeon_ring_backup()
383 int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, in radeon_ring_init() argument
388 ring->ring_size = ring_size; in radeon_ring_init()
394 r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true, in radeon_ring_init()
420 ring->ptr_mask = (ring->ring_size / 4) - 1; in radeon_ring_init()
421 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_init()
476 count = (ring->ring_size / 4) - ring->ring_free_dw; in radeon_debugfs_ring_info_show()
ni.c
1668 rb_cntl = order_base_2(ring->ring_size / 8); in cayman_cp_resume()
2021 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in cayman_uvd_start()
2029 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in cayman_uvd_resume()
2033 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in cayman_uvd_resume()
2101 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in cayman_vce_start()
2102 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in cayman_vce_start()
2110 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in cayman_vce_resume()
2114 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2120 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2228 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in cayman_startup()
[all …]
/linux/drivers/xen/
evtchn.c
67 unsigned int ring_size; member
99 return idx & (u->ring_size - 1); in evtchn_ring_offset()
183 if ((prod - cons) < u->ring_size) { in evtchn_interrupt()
240 if (((c ^ p) & u->ring_size) != 0) { in evtchn_read()
241 bytes1 = (u->ring_size - evtchn_ring_offset(u, c)) * in evtchn_read()
327 if (u->nr_evtchns <= u->ring_size) in evtchn_resize_ring()
330 if (u->ring_size == 0) in evtchn_resize_ring()
333 new_size = 2 * u->ring_size; in evtchn_resize_ring()
359 memcpy(new_ring, old_ring, u->ring_size * sizeof(*u->ring)); in evtchn_resize_ring()
360 memcpy(new_ring + u->ring_size, old_ring, in evtchn_resize_ring()
[all …]
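
evtchn_resize_ring() above doubles the ring and copies the old contents into the new buffer twice, back to back. Because the producer and consumer indices are free-running and only masked by ring_size - 1, that double copy keeps every unread entry at the same masked position after the size doubles, so neither index has to be rewritten. A userspace sketch of just that trick (names are simplified; the event-channel locking and size limits are omitted):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Double a power-of-two ring while free-running indices stay valid:
 * copying the old contents twice makes new[idx & (2n - 1)] == old[idx & (n - 1)]
 * for every index, so nothing needs re-indexing after the switch. */
static unsigned *resize_ring(unsigned *old_ring, unsigned *ring_size)
{
	unsigned old = *ring_size;
	unsigned new_size = 2 * old;
	unsigned *new_ring = malloc(new_size * sizeof(*new_ring));

	if (!new_ring)
		return NULL;
	memcpy(new_ring, old_ring, old * sizeof(*old_ring));
	memcpy(new_ring + old, old_ring, old * sizeof(*old_ring));
	*ring_size = new_size;
	free(old_ring);
	return new_ring;
}

int main(void)
{
	unsigned size = 4, cons = 6;               /* free-running consumer index */
	unsigned *ring = malloc(size * sizeof(*ring));

	for (unsigned i = 0; i < size; i++)
		ring[i] = 100 + i;
	unsigned before = ring[cons & (size - 1)]; /* slot 2 */

	ring = resize_ring(ring, &size);
	printf("before=%u after=%u (size now %u)\n",
	       before, ring[cons & (size - 1)], size); /* same value, now slot 6 */
	free(ring);
	return 0;
}
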
/linux/drivers/net/ethernet/amazon/ena/
ena_netdev.h
89 #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
91 #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
92 #define ENA_RX_RING_IDX_ADD(idx, n, ring_size) \ argument
93 (((idx) + (n)) & ((ring_size) - 1))
268 int ring_size; member
ena_netdev.c
162 ring->ring_size); in ena_xmit_common()
200 txr->ring_size = adapter->requested_tx_ring_size; in ena_init_io_rings()
215 rxr->ring_size = adapter->requested_rx_ring_size; in ena_init_io_rings()
246 size = sizeof(struct ena_tx_buffer) * tx_ring->ring_size; in ena_setup_tx_resources()
256 size = sizeof(u16) * tx_ring->ring_size; in ena_setup_tx_resources()
273 for (i = 0; i < tx_ring->ring_size; i++) in ena_setup_tx_resources()
383 size = sizeof(struct ena_rx_buffer) * (rx_ring->ring_size + 1); in ena_setup_rx_resources()
393 size = sizeof(u16) * rx_ring->ring_size; in ena_setup_rx_resources()
405 for (i = 0; i < rx_ring->ring_size; i++) in ena_setup_rx_resources()
600 rx_ring->ring_size); in ena_refill_rx_bufs()
[all …]
/linux/drivers/crypto/intel/qat/qat_common/
adf_transport.c
93 ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) { in adf_send_message()
103 ADF_RING_SIZE_MODULO(ring->ring_size)); in adf_send_message()
124 ADF_RING_SIZE_MODULO(ring->ring_size)); in adf_handle_response()
139 u32 ring_config = BUILD_RING_CONFIG(ring->ring_size); in adf_configure_tx_ring()
151 BUILD_RESP_RING_CONFIG(ring->ring_size, in adf_configure_rx_ring()
168 ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size); in adf_init_ring()
194 ring->ring_size); in adf_init_ring()
206 ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size); in adf_cleanup_ring()
272 ring->ring_size = adf_verify_ring_size(msg_size, num_msgs); in adf_create_ring()
275 max_inflights = ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size); in adf_create_ring()
/linux/drivers/bus/mhi/ep/
ring.c
26 ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size; in mhi_ep_ring_inc_index()
55 buf_info.size = (ring->ring_size - start) * sizeof(struct mhi_ring_element); in __mhi_ep_cache_ring()
125 num_free_elem = ((ring->ring_size - ring->rd_offset) + ring->wr_offset) - 1; in mhi_ep_ring_add_element()
187 ring->ring_size = mhi_ep_ring_num_elems(ring); in mhi_ep_ring_start()
208 ring->ring_cache = kcalloc(ring->ring_size, sizeof(struct mhi_ring_element), GFP_KERNEL); in mhi_ep_ring_start()
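
The MHI endpoint ring above advances rd_offset with an explicit modulo and derives the number of free elements from the read and write offsets while keeping one element in reserve. The sketch below shows generic versions of those two calculations; the names are simplified, and a modulo is added to the free-space formula here so it holds for any ordering of the two offsets:

#include <stdio.h>

struct ep_ring {
	unsigned rd_offset;  /* consumer position, in elements */
	unsigned wr_offset;  /* producer position, in elements */
	unsigned ring_size;  /* total elements */
};

/* advance the read pointer one element, wrapping with a modulo */
static void ring_inc_rd(struct ep_ring *ring)
{
	ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size;
}

/* free elements between producer and consumer, keeping one slot in reserve */
static unsigned ring_free_elems(const struct ep_ring *ring)
{
	return (ring->rd_offset + ring->ring_size - ring->wr_offset - 1) %
	       ring->ring_size;
}

int main(void)
{
	struct ep_ring ring = { .rd_offset = 3, .wr_offset = 7, .ring_size = 16 };

	printf("free before consume: %u\n", ring_free_elems(&ring)); /* 11 */
	ring_inc_rd(&ring);
	printf("free after consume:  %u\n", ring_free_elems(&ring)); /* 12 */
	return 0;
}
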
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring.c
354 ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission); in amdgpu_ring_init()
356 ring->ring_size = roundup_pow_of_two(max_dw * 4); in amdgpu_ring_init()
357 ring->count_dw = (ring->ring_size - 4) >> 2; in amdgpu_ring_init()
362 ring->buf_mask = (ring->ring_size / 4) - 1; in amdgpu_ring_init()
369 ring->ring_backup = kvzalloc(ring->ring_size, GFP_KERNEL); in amdgpu_ring_init()
376 r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_bytes, in amdgpu_ring_init()
605 if (*pos >= (ring->ring_size + 12)) in amdgpu_debugfs_ring_read()
622 size = ring->ring_size - (early[0] - early[1]); in amdgpu_debugfs_ring_read()
724 ring->ring_size + 12); in amdgpu_debugfs_ring_init()
728 ring->ring_size + 12); in amdgpu_debugfs_ring_init()
[all …]
amdgpu_ring_mux.c
66 amdgpu_ring_alloc(real_ring, (ring->ring_size >> 2) + end - start); in amdgpu_ring_mux_copy_pkt_from_sw_ring()
68 (ring->ring_size >> 2) - start); in amdgpu_ring_mux_copy_pkt_from_sw_ring()
299 readp += mux->real_ring->ring_size >> 2; in amdgpu_ring_mux_get_rptr()
300 end += mux->real_ring->ring_size >> 2; in amdgpu_ring_mux_get_rptr()
/linux/drivers/net/ethernet/atheros/
ag71xx.c
774 int ring_mask, ring_size; in ag71xx_tx_packets() local
778 ring_size = BIT(ring->order); in ag71xx_tx_packets()
831 if ((ring->curr - ring->dirty) < (ring_size * 3) / 4) in ag71xx_tx_packets()
1153 int ring_size = BIT(ring->order); in ag71xx_ring_tx_init() local
1154 int ring_mask = ring_size - 1; in ag71xx_ring_tx_init()
1157 for (i = 0; i < ring_size; i++) { in ag71xx_ring_tx_init()
1178 int ring_size = BIT(ring->order); in ag71xx_ring_rx_clean() local
1184 for (i = 0; i < ring_size; i++) in ag71xx_ring_rx_clean()
1230 int ring_size = BIT(ring->order); in ag71xx_ring_rx_init() local
1235 for (i = 0; i < ring_size; i++) { in ag71xx_ring_rx_init()
[all …]
/linux/arch/powerpc/platforms/pasemi/
dma_lib.c
242 int pasemi_dma_alloc_ring(struct pasemi_dmachan *chan, int ring_size) in pasemi_dma_alloc_ring() argument
246 chan->ring_size = ring_size; in pasemi_dma_alloc_ring()
249 ring_size * sizeof(u64), in pasemi_dma_alloc_ring()
268 dma_free_coherent(&dma_pdev->dev, chan->ring_size * sizeof(u64), in pasemi_dma_free_ring()
271 chan->ring_size = 0; in pasemi_dma_free_ring()
/linux/tools/testing/selftests/bpf/progs/
test_ringbuf.c
32 long ring_size = 0; variable
72 ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE); in test_ringbuf()
/linux/tools/testing/selftests/bpf/prog_tests/
ringbuf.c
152 unsigned long avail_data, ring_size, cons_pos, prod_pos; in ringbuf_subtest()
235 CHECK(skel->bss->ring_size != page_size, in ringbuf_subtest()
237 (long)page_size, skel->bss->ring_size); in ringbuf_subtest()
250 ring_size = ring__size(ring); in ringbuf_subtest()
251 ASSERT_EQ(ring_size, page_size, "ring_ring_size"); in ringbuf_subtest()
545 ASSERT_EQ(skel->bss->ring_size, size, "check_ring_size");
151 unsigned long avail_data, ring_size, cons_pos, prod_pos; ringbuf_subtest() local
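
Both BPF hits revolve around bpf_ringbuf_query(): the program in test_ringbuf.c stores the ring size into a global, and ringbuf.c on the prog_tests side compares it against the page size (and against ring__size()). A minimal BPF-side sketch of that query using libbpf conventions; the map size and the attach point below are assumptions for illustration, not values taken from the selftest:

// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_RINGBUF);
	__uint(max_entries, 4096); /* must be a power-of-two multiple of the page size */
} ringbuf SEC(".maps");

long ring_size = 0; /* read from user space after the program has run */

SEC("tp/syscalls/sys_enter_getpgid") /* illustrative attach point */
int query_ring_size(void *ctx)
{
	/* BPF_RB_RING_SIZE asks the kernel for the size of the ring's data area */
	ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE);
	return 0;
}

char LICENSE[] SEC("license") = "GPL";
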
/linux/tools/testing/selftests/drivers/net/
netpoll_basic.py
83 def ethtool_set_ringsize(interface_name: str, ring_size: tuple[int, int]) -> bool:
85 rxs = ring_size[0]
86 txs = ring_size[1]
/linux/include/linux/
pipe_fs_i.h
91 unsigned int ring_size; member
237 return &pipe->bufs[slot & (pipe->ring_size - 1)]; in pipe_buf()
/linux/drivers/dma/amd/qdma/
qdma.h
215 u32 ring_size; member
229 u32 ring_size; member
