
Searched refs:batch (Results 1 – 25 of 146) sorted by relevance


/linux/mm/
mmu_gather.c
20 struct mmu_gather_batch *batch; in tlb_next_batch() local
26 batch = tlb->active; in tlb_next_batch()
27 if (batch->next) { in tlb_next_batch()
28 tlb->active = batch->next; in tlb_next_batch()
35 batch = (void *)__get_free_page(GFP_NOWAIT); in tlb_next_batch()
36 if (!batch) in tlb_next_batch()
40 batch->next = NULL; in tlb_next_batch()
41 batch->nr = 0; in tlb_next_batch()
42 batch->max = MAX_GATHER_BATCH; in tlb_next_batch()
44 tlb->active->next = batch; in tlb_next_batch()
[all …]
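
The tlb_next_batch() excerpt shows the grow-on-demand shape of the mmu_gather batch list: keep filling the active batch while it has room, advance to an already-linked spare, and only then allocate a fresh page-sized batch. A minimal userspace sketch of that shape, with malloc standing in for __get_free_page(GFP_NOWAIT) and every name here (struct gather, next_batch, BATCH_MAX) invented for illustration:

    #include <stdlib.h>

    #define BATCH_MAX 64                 /* stand-in for MAX_GATHER_BATCH */

    struct batch {
        struct batch *next;
        unsigned int nr, max;
        void *items[BATCH_MAX];
    };

    struct gather {
        struct batch *active;            /* batch currently being filled */
        struct batch first;              /* embedded, so one batch always exists */
    };

    /* Return a batch with room, reusing or extending the list. */
    static struct batch *next_batch(struct gather *g)
    {
        struct batch *b = g->active;

        if (b->nr < b->max)
            return b;                    /* current batch still has room */

        if (b->next) {
            g->active = b->next;         /* reuse a spare kept from earlier */
            return g->active;
        }

        b = malloc(sizeof(*b));          /* kernel: __get_free_page(GFP_NOWAIT) */
        if (!b)
            return NULL;                 /* caller must flush now instead */

        b->next = NULL;
        b->nr = 0;
        b->max = BATCH_MAX;
        g->active->next = b;
        g->active = b;
        return b;
    }

The NULL return matters: a GFP_NOWAIT allocation may fail, and the caller is expected to flush what it already gathered rather than block.
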
/linux/arch/powerpc/mm/book3s64/
hash_tlb.c
44 struct ppc64_tlb_batch *batch = &get_cpu_var(ppc64_tlb_batch); in hpte_need_flush() local
51 i = batch->index; in hpte_need_flush()
103 if (!batch->active) { in hpte_need_flush()
119 if (i != 0 && (mm != batch->mm || batch->psize != psize || in hpte_need_flush()
120 batch->ssize != ssize)) { in hpte_need_flush()
121 __flush_tlb_pending(batch); in hpte_need_flush()
125 batch->mm = mm; in hpte_need_flush()
126 batch->psize = psize; in hpte_need_flush()
127 batch->ssize = ssize; in hpte_need_flush()
129 batch->pte[i] = rpte; in hpte_need_flush()
[all …]
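
hpte_need_flush() accumulates PTE invalidations only while they share the batch's mm, page size, and segment size; any mismatch flushes the pending entries first. A sketch of that homogeneity rule, with stand-in types (the 192-slot array and flush_pending() are illustrative, not the powerpc definitions):

    /* All types and sizes are illustrative, not the powerpc definitions. */
    struct tlb_batch {
        void *mm;                        /* owning address space */
        int psize, ssize;                /* page size, segment size */
        unsigned int index;              /* entries accumulated so far */
        unsigned long pte[192];
    };

    static void flush_pending(struct tlb_batch *b)
    {
        /* one flush would cover b->pte[0..index) here */
        b->index = 0;
    }

    static void batch_add(struct tlb_batch *b, void *mm, int psize,
                          int ssize, unsigned long rpte)
    {
        unsigned int i = b->index;

        /* A batch must stay homogeneous: same mm and sizes throughout. */
        if (i != 0 && (mm != b->mm || b->psize != psize ||
                       b->ssize != ssize)) {
            flush_pending(b);
            i = 0;
        }
        if (i == 0) {
            b->mm = mm;
            b->psize = psize;
            b->ssize = ssize;
        }
        b->pte[i] = rpte;
        b->index = i + 1;
        if (b->index == 192)             /* full batch: flush eagerly */
            flush_pending(b);
    }
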
/linux/drivers/iommu/iommufd/
pages.c
286 static void batch_clear(struct pfn_batch *batch) in batch_clear() argument
288 batch->total_pfns = 0; in batch_clear()
289 batch->end = 0; in batch_clear()
290 batch->pfns[0] = 0; in batch_clear()
291 batch->npfns[0] = 0; in batch_clear()
298 static void batch_clear_carry(struct pfn_batch *batch, unsigned int keep_pfns) in batch_clear_carry() argument
301 return batch_clear(batch); in batch_clear_carry()
304 WARN_ON(!batch->end || in batch_clear_carry()
305 batch->npfns[batch->end - 1] < keep_pfns); in batch_clear_carry()
307 batch->total_pfns = keep_pfns; in batch_clear_carry()
[all …]
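
batch_clear_carry() resets the pfn batch but lets the caller keep the tail of the last contiguous run, so a mapping that straddles a flush can keep growing from slot 0. A sketch under the assumption that pfns[]/npfns[] hold run starts and run lengths, which is what the WARN_ON in the excerpt implies; the array sizes and the end = 1 convention here are illustrative:

    #include <assert.h>

    struct pfn_batch {
        unsigned long total_pfns;
        unsigned int end;                /* number of runs in use */
        unsigned long pfns[32];          /* start pfn of each run */
        unsigned int npfns[32];          /* length of each run */
    };

    static void batch_clear(struct pfn_batch *b)
    {
        b->total_pfns = 0;
        b->end = 0;
        b->pfns[0] = 0;
        b->npfns[0] = 0;
    }

    /* Reset, but carry the last keep_pfns of the final run into slot 0. */
    static void batch_clear_carry(struct pfn_batch *b, unsigned int keep_pfns)
    {
        if (!keep_pfns) {
            batch_clear(b);
            return;
        }

        /* the carried pfns must exist inside the last run */
        assert(b->end && b->npfns[b->end - 1] >= keep_pfns);

        b->pfns[0] = b->pfns[b->end - 1] +
                     (b->npfns[b->end - 1] - keep_pfns);
        b->npfns[0] = keep_pfns;
        b->total_pfns = keep_pfns;
        b->end = 1;
    }
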
/linux/include/trace/events/
intel_ifs.h
13 TP_PROTO(int batch, int start, int stop, u64 status),
15 TP_ARGS(batch, start, stop, status),
18 __field( int, batch )
25 __entry->batch = batch;
32 __entry->batch,
40 TP_PROTO(int batch, union ifs_sbaf activate, union ifs_sbaf_status status),
42 TP_ARGS(batch, activate, status),
46 __field( int, batch )
53 __entry->batch = batch;
59 __entry->batch,
/linux/tools/testing/selftests/bpf/progs/
test_bpf_ma.c
56 static __always_inline void batch_alloc(struct bpf_map *map, unsigned int batch, unsigned int idx) in batch_alloc() argument
62 for (i = 0; i < batch; i++) { in batch_alloc()
83 static __always_inline void batch_free(struct bpf_map *map, unsigned int batch, unsigned int idx) in batch_free() argument
89 for (i = 0; i < batch; i++) { in batch_free()
105 static __always_inline void batch_percpu_alloc(struct bpf_map *map, unsigned int batch, in batch_percpu_alloc() argument
112 for (i = 0; i < batch; i++) { in batch_percpu_alloc()
133 static __always_inline void batch_percpu_free(struct bpf_map *map, unsigned int batch, in batch_percpu_free() argument
140 for (i = 0; i < batch; i++) { in batch_percpu_free()
154 #define CALL_BATCH_ALLOC(size, batch, idx) \ argument
155 batch_alloc((struct bpf_map *)(&array_##size), batch, idx)
[all …]
/linux/drivers/xen/
gntdev.c
623 struct gntdev_copy_batch *batch; in gntdev_release() local
637 while (priv->batch) { in gntdev_release()
638 batch = priv->batch; in gntdev_release()
639 priv->batch = batch->next; in gntdev_release()
640 kfree(batch); in gntdev_release()
809 static int gntdev_get_page(struct gntdev_copy_batch *batch, void __user *virt, in gntdev_get_page() argument
817 ret = pin_user_pages_fast(addr, 1, batch->writeable ? FOLL_WRITE : 0, &page); in gntdev_get_page()
821 batch->pages[batch->nr_pages++] = page; in gntdev_get_page()
829 static void gntdev_put_pages(struct gntdev_copy_batch *batch) in gntdev_put_pages() argument
831 unpin_user_pages_dirty_lock(batch->pages, batch->nr_pages, batch->writeable); in gntdev_put_pages()
[all …]
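
The gntdev pair gntdev_get_page()/gntdev_put_pages() keeps one invariant: every page pinned for a copy is recorded in the batch, so a single call can release them all on any exit path. A loose userspace analogy with FILE handles standing in for pinned pages (the kernel uses pin_user_pages_fast() and unpin_user_pages_dirty_lock(); everything else here is invented):

    #include <stdio.h>

    struct copy_batch {
        unsigned int nr_pages;
        FILE *pages[16];                 /* stand-in for struct page pointers */
    };

    /* Acquire one resource and record it immediately in the batch. */
    static int batch_get(struct copy_batch *b, const char *path)
    {
        FILE *f = fopen(path, "r");
        if (!f)
            return -1;
        b->pages[b->nr_pages++] = f;     /* recorded before anything can fail */
        return 0;
    }

    /* Release everything acquired so far, mirroring gntdev_put_pages(). */
    static void batch_put(struct copy_batch *b)
    {
        for (unsigned int i = 0; i < b->nr_pages; i++)
            fclose(b->pages[i]);
        b->nr_pages = 0;
    }
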
/linux/arch/powerpc/include/asm/book3s/64/
tlbflush-hash.h
25 extern void __flush_tlb_pending(struct ppc64_tlb_batch *batch);
31 struct ppc64_tlb_batch *batch; in arch_enter_lazy_mmu_mode() local
40 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_enter_lazy_mmu_mode()
41 batch->active = 1; in arch_enter_lazy_mmu_mode()
46 struct ppc64_tlb_batch *batch; in arch_leave_lazy_mmu_mode() local
50 batch = this_cpu_ptr(&ppc64_tlb_batch); in arch_leave_lazy_mmu_mode()
52 if (batch->index) in arch_leave_lazy_mmu_mode()
53 __flush_tlb_pending(batch); in arch_leave_lazy_mmu_mode()
54 batch->active = 0; in arch_leave_lazy_mmu_mode()
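
The lazy-MMU excerpt is an arm/drain/disarm pair: entering lazy mode marks the per-CPU batch active so updates accumulate, and leaving flushes anything pending before disarming. A sketch with a plain global standing in for this_cpu_ptr(&ppc64_tlb_batch); all names are illustrative:

    struct lazy_batch {
        int active;
        unsigned int index;
    };

    /* Stand-in for the per-CPU ppc64_tlb_batch variable. */
    static struct lazy_batch cpu_batch;

    static void flush_tlb_pending(struct lazy_batch *b)
    {
        /* a real flush of b->index pending entries would go here */
        b->index = 0;
    }

    static void enter_lazy_mmu_mode(void)
    {
        cpu_batch.active = 1;            /* updates now accumulate */
    }

    static void leave_lazy_mmu_mode(void)
    {
        if (cpu_batch.index)
            flush_tlb_pending(&cpu_batch); /* drain before disarming */
        cpu_batch.active = 0;
    }
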
/linux/drivers/iommu/intel/
cache.c
293 static void qi_batch_flush_descs(struct intel_iommu *iommu, struct qi_batch *batch) in qi_batch_flush_descs() argument
295 if (!iommu || !batch->index) in qi_batch_flush_descs()
298 qi_submit_sync(iommu, batch->descs, batch->index, 0); in qi_batch_flush_descs()
301 memset(batch, 0, sizeof(*batch)); in qi_batch_flush_descs()
304 static void qi_batch_increment_index(struct intel_iommu *iommu, struct qi_batch *batch) in qi_batch_increment_index() argument
306 if (++batch->index == QI_MAX_BATCHED_DESC_COUNT) in qi_batch_increment_index()
307 qi_batch_flush_descs(iommu, batch); in qi_batch_increment_index()
312 struct qi_batch *batch) in qi_batch_add_iotlb() argument
314 qi_desc_iotlb(iommu, did, addr, size_order, type, &batch->descs[batch->index]); in qi_batch_add_iotlb()
315 qi_batch_increment_index(iommu, batch); in qi_batch_add_iotlb()
[all …]
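
qi_batch_increment_index() is the classic flush-when-full step: append a descriptor, bump the index, and submit the whole batch the moment it reaches capacity. A self-contained sketch of that step; the descriptor layout, the MAX_BATCHED stand-in for QI_MAX_BATCHED_DESC_COUNT, and qi_submit() are all placeholders:

    #include <string.h>

    #define MAX_BATCHED 16

    struct qi_desc { unsigned long long qw[4]; };

    struct qi_batch {
        struct qi_desc descs[MAX_BATCHED];
        unsigned int index;
    };

    static void qi_submit(struct qi_desc *descs, unsigned int n)
    {
        /* hardware submission would happen here */
        (void)descs;
        (void)n;
    }

    static void qi_flush(struct qi_batch *b)
    {
        if (!b->index)
            return;                      /* nothing pending */
        qi_submit(b->descs, b->index);
        memset(b, 0, sizeof(*b));
    }

    static void qi_add(struct qi_batch *b, struct qi_desc d)
    {
        b->descs[b->index] = d;
        if (++b->index == MAX_BATCHED)
            qi_flush(b);                 /* auto-flush on a full batch */
    }

Callers still need a final explicit flush (the excerpt's qi_batch_flush_descs()) for a partially filled batch.
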
/linux/drivers/gpu/drm/i915/gt/
gen8_engine_cs.h
53 __gen8_emit_pipe_control(u32 *batch, u32 bit_group_0, in __gen8_emit_pipe_control() argument
56 memset(batch, 0, 6 * sizeof(u32)); in __gen8_emit_pipe_control()
58 batch[0] = GFX_OP_PIPE_CONTROL(6) | bit_group_0; in __gen8_emit_pipe_control()
59 batch[1] = bit_group_1; in __gen8_emit_pipe_control()
60 batch[2] = offset; in __gen8_emit_pipe_control()
62 return batch + 6; in __gen8_emit_pipe_control()
65 static inline u32 *gen8_emit_pipe_control(u32 *batch, in gen8_emit_pipe_control() argument
68 return __gen8_emit_pipe_control(batch, 0, bit_group_1, offset); in gen8_emit_pipe_control()
71 static inline u32 *gen12_emit_pipe_control(u32 *batch, u32 bit_group_0, in gen12_emit_pipe_control() argument
74 return __gen8_emit_pipe_control(batch, bit_group_0, in gen12_emit_pipe_control()
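
__gen8_emit_pipe_control() follows the emit-and-advance convention: write a fixed-length command into the batch buffer and return the pointer just past it, so successive emits chain without extra bookkeeping. A sketch with a placeholder opcode (the real GFX_OP_PIPE_CONTROL encoding is not reproduced here):

    #include <stdint.h>
    #include <string.h>

    #define CMD_LEN 6
    /* Placeholder encoding, invented for illustration. */
    #define OP_PIPE_CONTROL(len) (0x7A000000u | ((len) - 2))

    static uint32_t *emit_pipe_control(uint32_t *batch, uint32_t flags,
                                       uint32_t offset)
    {
        memset(batch, 0, CMD_LEN * sizeof(uint32_t));
        batch[0] = OP_PIPE_CONTROL(CMD_LEN);
        batch[1] = flags;
        batch[2] = offset;
        return batch + CMD_LEN;          /* caller keeps writing from here */
    }

Chaining then reads naturally: cs = emit_pipe_control(cs, flags, addr); each helper consumes and returns the write cursor.
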
selftest_workarounds.c
504 struct i915_vma *batch; in check_dirty_whitelist() local
513 batch = create_batch(ce->vm); in check_dirty_whitelist()
514 if (IS_ERR(batch)) { in check_dirty_whitelist()
515 err = PTR_ERR(batch); in check_dirty_whitelist()
542 err = i915_gem_object_lock(batch->obj, &ww); in check_dirty_whitelist()
548 cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); in check_dirty_whitelist()
614 i915_gem_object_flush_map(batch->obj); in check_dirty_whitelist()
615 i915_gem_object_unpin_map(batch->obj); in check_dirty_whitelist()
631 err = i915_vma_move_to_active(batch, rq, 0); in check_dirty_whitelist()
641 i915_vma_offset(batch), PAGE_SIZE, in check_dirty_whitelist()
[all …]
selftest_engine_cs.c
147 struct i915_vma *batch; in perf_mi_bb_start() local
156 batch = create_empty_batch(ce); in perf_mi_bb_start()
157 if (IS_ERR(batch)) { in perf_mi_bb_start()
158 err = PTR_ERR(batch); in perf_mi_bb_start()
163 err = i915_vma_sync(batch); in perf_mi_bb_start()
166 i915_vma_put(batch); in perf_mi_bb_start()
184 i915_vma_offset(batch), 8, in perf_mi_bb_start()
205 i915_vma_put(batch); in perf_mi_bb_start()
/linux/drivers/gpu/drm/i915/gem/selftests/
igt_gem_utils.c
116 struct i915_vma *batch; in igt_gpu_fill_dw() local
123 batch = igt_emit_store_dw(vma, offset, count, val); in igt_gpu_fill_dw()
124 if (IS_ERR(batch)) in igt_gpu_fill_dw()
125 return PTR_ERR(batch); in igt_gpu_fill_dw()
133 err = igt_vma_move_to_active_unlocked(batch, rq, 0); in igt_gpu_fill_dw()
146 i915_vma_offset(batch), in igt_gpu_fill_dw()
147 i915_vma_size(batch), in igt_gpu_fill_dw()
155 i915_vma_unpin_and_release(&batch, 0); in igt_gpu_fill_dw()
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
26 int batch, i; in mlx5e_xsk_alloc_rx_mpwqe() local
35 batch = xsk_buff_alloc_batch(rq->xsk_pool, xsk_buffs, in mlx5e_xsk_alloc_rx_mpwqe()
44 for (; batch < rq->mpwqe.pages_per_wqe; batch++) { in mlx5e_xsk_alloc_rx_mpwqe()
45 xsk_buffs[batch] = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_alloc_rx_mpwqe()
46 if (unlikely(!xsk_buffs[batch])) in mlx5e_xsk_alloc_rx_mpwqe()
55 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
65 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
78 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
105 for (i = 0; i < batch; i++) { in mlx5e_xsk_alloc_rx_mpwqe()
152 while (--batch >= 0) in mlx5e_xsk_alloc_rx_mpwqe()
[all …]
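
mlx5e_xsk_alloc_rx_mpwqe() tries a bulk allocation first, tops up one buffer at a time, and on failure unwinds everything with the same counter. A sketch of that try-bulk-then-singles pattern; alloc_bulk()/alloc_one()/free_one() are toy stand-ins for the xsk_buff_* helpers:

    #include <stdlib.h>

    /* Toy stand-ins: a bulk allocator that may come up short, plus singles. */
    static int alloc_bulk(void **bufs, int n)
    {
        int i;
        for (i = 0; i < n / 2; i++)      /* pretend bulk only half-fills */
            bufs[i] = malloc(64);
        return i;
    }
    static void *alloc_one(void) { return malloc(64); }
    static void free_one(void *buf) { free(buf); }

    static int fill_batch(void **bufs, int want)
    {
        int batch = alloc_bulk(bufs, want);  /* may return fewer than want */

        for (; batch < want; batch++) {
            bufs[batch] = alloc_one();       /* top up one at a time */
            if (!bufs[batch])
                goto err;
        }
        return want;

    err:
        while (--batch >= 0)                 /* unwind everything taken */
            free_one(bufs[batch]);
        return -1;
    }
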
/linux/drivers/vfio/
vfio_iommu_type1.c
494 static void __vfio_batch_init(struct vfio_batch *batch, bool single) in __vfio_batch_init() argument
496 batch->size = 0; in __vfio_batch_init()
497 batch->offset = 0; in __vfio_batch_init()
502 batch->pages = (struct page **) __get_free_page(GFP_KERNEL); in __vfio_batch_init()
503 if (!batch->pages) in __vfio_batch_init()
506 batch->capacity = VFIO_BATCH_MAX_CAPACITY; in __vfio_batch_init()
510 batch->pages = &batch->fallback_page; in __vfio_batch_init()
511 batch->capacity = 1; in __vfio_batch_init()
514 static void vfio_batch_init(struct vfio_batch *batch) in vfio_batch_init() argument
516 __vfio_batch_init(batch, false); in vfio_batch_init()
[all …]
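
__vfio_batch_init() degrades gracefully: it prefers a page-sized array of page pointers, but if that allocation fails (or the caller asked for single mode) it falls back to one embedded slot, so batching becomes a no-op rather than an error. A sketch with calloc() in place of __get_free_page() and an illustrative capacity:

    #include <stdlib.h>

    #define BATCH_CAPACITY 512           /* stand-in for a page of pointers */

    struct vbatch {
        int size, offset;
        unsigned int capacity;
        void **pages;
        void *fallback_page;             /* embedded single-entry storage */
    };

    static void vbatch_init(struct vbatch *b, int single)
    {
        b->size = 0;
        b->offset = 0;

        if (!single) {
            b->pages = calloc(BATCH_CAPACITY, sizeof(*b->pages));
            if (b->pages) {
                b->capacity = BATCH_CAPACITY;
                return;
            }
        }

        /* Degrade gracefully: batching off, correctness preserved. */
        b->pages = &b->fallback_page;
        b->capacity = 1;
    }
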
/linux/tools/testing/selftests/bpf/map_tests/
htab_map_batch_ops.c
79 __u32 batch, count, total, total_success; in __test_map_lookup_and_delete_batch() local
109 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
119 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
127 err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &batch, keys, in __test_map_lookup_and_delete_batch()
153 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
154 &batch, keys + total, in __test_map_lookup_and_delete_batch()
216 total ? &batch : NULL, in __test_map_lookup_and_delete_batch()
217 &batch, keys + total, in __test_map_lookup_and_delete_batch()
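
The selftest drives the batched-ops cursor protocol: pass NULL as the in-cursor to start, feed the returned cursor back in, and treat ENOENT as end-of-map (the final call can still return entries). A self-contained toy of that protocol, where drain_some() stands in for bpf_map_lookup_and_delete_batch() and the "opaque" cursor is just an array index:

    #include <errno.h>
    #include <stdint.h>

    static int entries[10] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };

    /* Toy producer: the final call returns remaining entries *and*
     * fails with ENOENT, matching how the selftest treats the real
     * syscall wrapper. */
    static int drain_some(const uint64_t *in_cur, uint64_t *out_cur,
                          int *keys, int *count)
    {
        uint64_t pos = in_cur ? *in_cur : 0;
        int n = 0;

        while (pos < 10 && n < *count)
            keys[n++] = entries[pos++];
        *out_cur = pos;
        *count = n;
        if (pos >= 10) {
            errno = ENOENT;
            return -1;
        }
        return 0;
    }

    static int drain_all(int *keys, int max)
    {
        uint64_t batch = 0;
        int total = 0, err;

        do {
            int count = max - total;
            err = drain_some(total ? &batch : NULL,  /* NULL restarts */
                             &batch, keys + total, &count);
            if (err && errno != ENOENT)
                return -1;
            total += count;
            if (!err && count == 0)
                break;                   /* keys[] is full */
        } while (!err);

        return total;                    /* ENOENT means fully drained */
    }

Usage: with int keys[16], drain_all(keys, 16) returns 10 here, collected across however many cursor steps the batch size allows.
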
array_map_batch_ops.c
75 __u64 batch = 0; in __test_map_lookup_and_update_batch() local
105 batch = 0; in __test_map_lookup_and_update_batch()
113 total ? &batch : NULL, in __test_map_lookup_and_update_batch()
114 &batch, keys + total, in __test_map_lookup_and_update_batch()
/linux/lib/
percpu_counter.c
93 void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch) in percpu_counter_add_batch() argument
100 if (unlikely(abs(count + amount) >= batch)) { in percpu_counter_add_batch()
120 void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch) in percpu_counter_add_batch() argument
127 if (abs(count) >= batch) { in percpu_counter_add_batch()
292 int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch) in __percpu_counter_compare() argument
298 if (abs(count - rhs) > (batch * num_online_cpus())) { in __percpu_counter_compare()
328 s64 limit, s64 amount, s32 batch) in __percpu_counter_limited_add() argument
339 unknown = batch * num_online_cpus(); in __percpu_counter_limited_add()
343 if (abs(count + amount) <= batch && in __percpu_counter_limited_add()
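
percpu_counter_add_batch() is the canonical batched counter: each CPU accumulates a private delta and folds it into the shared count only once its magnitude reaches the batch threshold, so the global value is approximate but cheap to update. A sketch in which a single "local" field stands in for the real per-CPU storage and locking:

    struct pcounter {
        long long count;                 /* shared, approximate */
        long long local;                 /* stand-in for this CPU's delta */
    };

    static void pcounter_add_batch(struct pcounter *c, long long amount,
                                   int batch)
    {
        long long count = c->local + amount;

        if (count >= batch || count <= -batch) {
            c->count += count;           /* kernel takes fbc->lock here */
            c->local = 0;                /* fold and reset the local delta */
        } else {
            c->local = count;            /* cheap, contention-free path */
        }
    }

This is also why __percpu_counter_compare() in the excerpt widens its comparison by batch * num_online_cpus(): that is the most the shared count can lag the true total.
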
/linux/tools/virtio/
virtio_test.c
170 bool delayed, int batch, int reset_n, int bufs) in run_test() argument
178 const bool random_batch = batch == RANDOM_BATCH; in run_test()
193 batch = (random() % vq->vring.num) + 1; in run_test()
196 (started - completed) < batch) { in run_test()
349 long batch = 1, reset = 0; in main() local
376 batch = RANDOM_BATCH; in main()
378 batch = strtol(optarg, NULL, 10); in main()
379 assert(batch > 0); in main()
380 assert(batch < (long)INT_MAX + 1); in main()
401 run_test(&dev, &dev.vqs[0], delayed, batch, reset, 0x100000); in main()
/linux/arch/riscv/mm/
tlbflush.c
230 void arch_tlbbatch_add_pending(struct arch_tlbflush_unmap_batch *batch, in arch_tlbbatch_add_pending() argument
233 cpumask_or(&batch->cpumask, &batch->cpumask, mm_cpumask(mm)); in arch_tlbbatch_add_pending()
237 void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch) in arch_tlbbatch_flush() argument
239 __flush_tlb_range(NULL, &batch->cpumask, in arch_tlbbatch_flush()
241 cpumask_clear(&batch->cpumask); in arch_tlbbatch_flush()
/linux/drivers/net/ethernet/netronome/nfp/flower/
lag_conf.c
234 unsigned int member_cnt, enum nfp_fl_lag_batch *batch) in nfp_fl_lag_config_group() argument
254 if (*batch == NFP_FL_LAG_BATCH_FIRST) { in nfp_fl_lag_config_group()
257 *batch = NFP_FL_LAG_BATCH_MEMBER; in nfp_fl_lag_config_group()
263 *batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_config_group()
269 if (*batch == NFP_FL_LAG_BATCH_FINISHED) { in nfp_fl_lag_config_group()
296 enum nfp_fl_lag_batch batch = NFP_FL_LAG_BATCH_FIRST; in nfp_fl_lag_do_work() local
318 &batch); in nfp_fl_lag_do_work()
391 active_count, &batch); in nfp_fl_lag_do_work()
405 if (batch == NFP_FL_LAG_BATCH_MEMBER) { in nfp_fl_lag_do_work()
406 batch = NFP_FL_LAG_BATCH_FINISHED; in nfp_fl_lag_do_work()
[all …]
/linux/tools/virtio/ringtest/
main.c
22 int batch = 1; variable
116 int tokick = batch; in run_guest()
129 tokick = batch; in run_guest()
348 batch = c; in main()
372 if (batch > max_outstanding) in main()
373 batch = max_outstanding; in main()
/linux/net/core/
netclassid_cgroup.c
66 unsigned int batch; member
78 if (--ctx->batch == 0) { in update_classid_sock()
79 ctx->batch = UPDATE_CLASSID_BATCH; in update_classid_sock()
89 .batch = UPDATE_CLASSID_BATCH in update_classid_task()
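
update_classid_sock() budgets its walk: a per-context countdown yields every UPDATE_CLASSID_BATCH items so one cgroup update cannot hog the CPU. A sketch with sched_yield() standing in for the kernel's rescheduling point; all names here are illustrative:

    #include <sched.h>

    #define UPDATE_BATCH 1000            /* stand-in for UPDATE_CLASSID_BATCH */

    struct walk_ctx {
        unsigned int batch;              /* countdown until the next yield */
    };

    static void visit_item(struct walk_ctx *ctx, int item)
    {
        (void)item;                      /* per-item work would go here */
        if (--ctx->batch == 0) {
            ctx->batch = UPDATE_BATCH;
            sched_yield();               /* kernel: drop locks / resched */
        }
    }

Usage: initialize struct walk_ctx ctx = { .batch = UPDATE_BATCH }; and call visit_item() once per socket in the walk.
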
/linux/drivers/net/ethernet/freescale/dpaa2/
dpaa2-xsk.c
401 int batch, i, err; in dpaa2_xsk_tx() local
410 batch = xsk_tx_peek_release_desc_batch(ch->xsk_pool, budget); in dpaa2_xsk_tx()
411 if (!batch) in dpaa2_xsk_tx()
415 for (i = 0; i < batch; i++) { in dpaa2_xsk_tx()
418 batch = i; in dpaa2_xsk_tx()
426 max_retries = batch * DPAA2_ETH_ENQUEUE_RETRIES; in dpaa2_xsk_tx()
430 while (total_enqueued < batch && retries < max_retries) { in dpaa2_xsk_tx()
432 batch - total_enqueued, &enqueued); in dpaa2_xsk_tx()
446 for (i = total_enqueued; i < batch; i++) { in dpaa2_xsk_tx()
/linux/include/linux/
percpu_counter.h
57 s32 batch);
59 int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch);
61 s64 amount, s32 batch);
182 __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch) in __percpu_counter_compare() argument
226 percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch) in percpu_counter_add_batch() argument
/linux/tools/testing/selftests/drivers/net/mlxsw/
fib_offload.sh
284 >> $batch_dir/add.batch
286 >> $batch_dir/del.batch
291 ip -batch $batch_dir/add.batch
307 ip -batch $batch_dir/del.batch
