Lines matching refs: batch_size
254 sock_opt = xsk->batch_size; in enable_busy_poll()
504 ifobj->xsk_arr[j].batch_size = DEFAULT_BATCH_SIZE; in __test_spec_init()
1091 static int complete_pkts(struct xsk_socket_info *xsk, int batch_size) in complete_pkts() argument
1103 rcvd = xsk_ring_cons__peek(&xsk->umem->cq, batch_size, &idx); in complete_pkts()
1157 rcvd = xsk_ring_cons__peek(&xsk->rx, xsk->batch_size, &idx_rx); in __receive_pkts()
1309 if (pkts_in_flight >= (int)((umem_size(umem) - xsk->batch_size * buffer_len) / in __send_pkts()
1320 while (xsk_ring_prod__reserve(&xsk->tx, xsk->batch_size, &idx) < xsk->batch_size) { in __send_pkts()
1340 complete_pkts(xsk, xsk->batch_size); in __send_pkts()
1343 for (i = 0; i < xsk->batch_size; i++) { in __send_pkts()
1351 if (nb_frags > xsk->batch_size - i) { in __send_pkts()
1353 xsk_ring_prod__cancel(&xsk->tx, xsk->batch_size - i); in __send_pkts()
1441 complete_pkts(xsk, xsk->batch_size); in wait_for_tx_completion()
2478 test->ifobj_tx->xsk->batch_size = 1; in testapp_hw_sw_min_ring_size()
2479 test->ifobj_rx->xsk->batch_size = 1; in testapp_hw_sw_min_ring_size()
2485 test->ifobj_tx->xsk->batch_size = DEFAULT_BATCH_SIZE - 1; in testapp_hw_sw_min_ring_size()
2486 test->ifobj_rx->xsk->batch_size = DEFAULT_BATCH_SIZE - 1; in testapp_hw_sw_min_ring_size()
2502 test->ifobj_tx->xsk->batch_size = XSK_RING_PROD__DEFAULT_NUM_DESCS; in testapp_hw_sw_max_ring_size()
2503 test->ifobj_rx->xsk->batch_size = XSK_RING_PROD__DEFAULT_NUM_DESCS; in testapp_hw_sw_max_ring_size()
2512 test->ifobj_tx->xsk->batch_size = test->ifobj_tx->ring.tx_max_pending - 8; in testapp_hw_sw_max_ring_size()
2513 test->ifobj_rx->xsk->batch_size = test->ifobj_tx->ring.tx_max_pending - 8; in testapp_hw_sw_max_ring_size()
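Taken together, these references show how xskxceiver.c uses batch_size to bound each loop iteration: complete at most batch_size TX completions (lines 1091-1103, 1340, 1441), reserve batch_size TX descriptors and cancel the unused tail when a multi-frag packet does not fit (lines 1320-1353), and peek at most batch_size RX descriptors (line 1157). The sketch below condenses the TX side of that pattern; it uses only the libxsk ring helpers implied by the listing (xsk_ring_cons__peek, xsk_ring_cons__release, xsk_ring_prod__reserve, xsk_ring_prod__tx_desc, xsk_ring_prod__submit), but the xsk_socket_info/xsk_umem_info structs are simplified stand-ins for the selftest's own definitions in xskxceiver.h, and fill_desc() is a hypothetical callback, not a function from the test.

/* Minimal sketch of the batch_size-bounded TX pattern behind the lines above
 * (complete_pkts() at 1091/1103 and the reserve/submit loop in __send_pkts()
 * around 1320-1343). Assumes the selftests' local "xsk.h" ring helpers; the
 * *_info structs are simplified stand-ins for the real ones in xskxceiver.h,
 * and fill_desc() is a hypothetical caller-supplied helper.
 */
#include <linux/types.h>
#include <linux/if_xdp.h>	/* struct xdp_desc */
#include "xsk.h"		/* xsk_ring_prod__reserve() and friends */

struct xsk_umem_info {
	struct xsk_ring_cons cq;	/* TX completion ring */
};

struct xsk_socket_info {
	struct xsk_ring_prod tx;	/* TX descriptor ring */
	struct xsk_umem_info *umem;
	__u32 batch_size;		/* per-iteration cap, e.g. DEFAULT_BATCH_SIZE */
};

/* Reclaim up to batch_size completed TX descriptors (simplified complete_pkts()). */
static __u32 complete_tx(struct xsk_socket_info *xsk)
{
	__u32 idx, rcvd;

	rcvd = xsk_ring_cons__peek(&xsk->umem->cq, xsk->batch_size, &idx);
	if (rcvd)
		xsk_ring_cons__release(&xsk->umem->cq, rcvd);
	return rcvd;
}

/* Reserve a full batch of TX descriptors, fill them, then submit them.
 * xsk_ring_prod__reserve() returns either batch_size or 0, so the loop spins,
 * draining completions, until the ring has room (cf. line 1320 above).
 */
static void send_batch(struct xsk_socket_info *xsk,
		       void (*fill_desc)(struct xdp_desc *desc, __u32 nr))
{
	__u32 idx, i;

	while (xsk_ring_prod__reserve(&xsk->tx, xsk->batch_size, &idx) <
	       xsk->batch_size)
		complete_tx(xsk);

	for (i = 0; i < xsk->batch_size; i++)
		fill_desc(xsk_ring_prod__tx_desc(&xsk->tx, idx + i), i);

	xsk_ring_prod__submit(&xsk->tx, xsk->batch_size);
}

Because the loop itself never hard-codes a count, the ring-size tests at lines 2478-2513 can stress the same path with batches from 1 descriptor up to tx_max_pending - 8 simply by adjusting xsk->batch_size.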