Searched refs:udma_to_device_barrier (Results 1 – 10 of 10) sorted by relevance
84 #define udma_to_device_barrier() asm volatile("" ::: "memory") macro
86 #define udma_to_device_barrier() asm volatile("" ::: "memory") macro
88 #define udma_to_device_barrier() asm volatile("sync" ::: "memory") macro
90 #define udma_to_device_barrier() asm volatile("sync" ::: "memory") macro
92 #define udma_to_device_barrier() asm volatile("mf" ::: "memory") macro
94 #define udma_to_device_barrier() asm volatile("membar #StoreStore" ::: "memory") macro
96 #define udma_to_device_barrier() asm volatile("dsb st" ::: "memory"); macro
98 #define udma_to_device_barrier() asm volatile("" ::: "memory") macro
102 #define udma_to_device_barrier() mips_sync() macro
106 #define udma_to_device_barrier() dmb() macro
[all …]
205 udma_to_device_barrier(); in set_data_seg()
401 udma_to_device_barrier(); /* see comment below */ in mlx4_post_send()
429 udma_to_device_barrier(); in mlx4_post_send()
451 udma_to_device_barrier(); in mlx4_post_send()
497 udma_to_device_barrier(); in mlx4_post_send()
565 udma_to_device_barrier(); in mlx4_post_recv()
699 udma_to_device_barrier(); in mlx4_arm_cq()
765 udma_to_device_barrier(); in __mlx4_cq_clean()
115 udma_to_device_barrier(); in mlx4_post_srq_recv()
121 udma_to_device_barrier(); in irdma_nop_1()
399 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_rdma_write()
490 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_rdma_read()
587 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_send()
811 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_inline_rdma_write()
885 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_inline_send()
936 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_stag_local_invalidate()
988 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_mw_bind()
1045 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_post_receive()
1128 udma_to_device_barrier(); /* make sure WQE is populated before valid bit is set */ in irdma_uk_cq_request_notification()
[all …]
139 udma_to_device_barrier(); in mlx5_post_srq_recv()
928 udma_to_device_barrier(); in _mlx5_post_send()
1125 udma_to_device_barrier(); in mlx5_post_wq_recv()
1199 udma_to_device_barrier(); in mlx5_post_recv()
1412 udma_to_device_barrier(); in __mlx5_cq_clean()
496 udma_to_device_barrier(); in t4_ring_sq_db()
545 udma_to_device_barrier(); in t4_ring_rq_db()
297 udma_to_device_barrier(); in ring_kernel_db()