/linux/drivers/dma-buf/
dma-fence.c
    114: static const char *dma_fence_stub_get_name(struct dma_fence *fence)
    in dma_fence_allocate_private_stub():
        157: struct dma_fence *fence;
        159: fence = kzalloc(sizeof(*fence), GFP_KERNEL);
        160: if (fence == NULL)
        163: dma_fence_init(fence,
        169:                &fence->flags);
        171: dma_fence_signal_timestamp(fence, timestamp);
        173: return fence;
    365: int dma_fence_signal_timestamp_locked(struct dma_fence *fence,
    371:     lockdep_assert_held(fence->lock);
    [all …]
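
The hits above trace the canonical provider pattern: embed a struct dma_fence, back it with a spinlock, initialize it with dma_fence_init(), and signal it when the work completes. A minimal sketch of that pattern (the my_* names are hypothetical, not taken from dma-fence.c; the default .release path frees the fence with kfree_rcu(), which is safe here because base is the first member):

```c
#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_fence {
	struct dma_fence base;	/* must stay first for the default release */
	spinlock_t lock;
};

static const char *my_fence_get_driver_name(struct dma_fence *fence)
{
	return "my-driver";
}

static const char *my_fence_get_timeline_name(struct dma_fence *fence)
{
	return "my-timeline";
}

static const struct dma_fence_ops my_fence_ops = {
	.get_driver_name = my_fence_get_driver_name,
	.get_timeline_name = my_fence_get_timeline_name,
};

static struct dma_fence *my_fence_create(u64 context, u64 seqno)
{
	struct my_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

	if (!fence)
		return NULL;

	spin_lock_init(&fence->lock);
	dma_fence_init(&fence->base, &my_fence_ops, &fence->lock,
		       context, seqno);
	return &fence->base;	/* returned with one reference held */
}
```

A consumer would dma_fence_wait() on the result, the producer calls dma_fence_signal() when the work finishes, and each side drops its reference with dma_fence_put().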
|
dma-resv.c
    in dma_resv_list_entry():
        71:  struct dma_resv *resv, struct dma_fence **fence,
        78:  *fence = (struct dma_fence *)(tmp & ~DMA_RESV_LIST_MASK);
    in dma_resv_list_set():
        86:  struct dma_fence *fence,
        89:  long tmp = ((long)fence) | usage;
    in dma_resv_list_free():
        126: struct dma_fence *fence;
        128: dma_resv_list_entry(list, i, NULL, &fence, NULL);
        129: dma_fence_put(fence);
    in dma_resv_reserve_fences():
        217: struct dma_fence *fence;
        219: dma_resv_list_entry(old, i, obj, &fence, &usage);
        220: if (dma_fence_is_signaled(fence))
    [all …]
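
These internals back the public dma_resv API: a caller locks the reservation object, reserves a slot up front (dma_resv_add_fence() itself must not allocate), and adds the fence tagged with a usage class. A hedged sketch of typical driver-side usage (my_publish_write_fence is hypothetical):

```c
#include <linux/dma-resv.h>

/* Sketch: publish 'fence' as the writer of the buffer behind 'resv',
 * then list every fence a reader would have to wait for. */
static int my_publish_write_fence(struct dma_resv *resv,
				  struct dma_fence *fence)
{
	struct dma_resv_iter cursor;
	struct dma_fence *f;
	int ret;

	ret = dma_resv_lock(resv, NULL);
	if (ret)
		return ret;

	ret = dma_resv_reserve_fences(resv, 1);	/* make room first */
	if (!ret)
		dma_resv_add_fence(resv, fence, DMA_RESV_USAGE_WRITE);

	/* the locked iterator; USAGE_READ also returns write fences */
	dma_resv_for_each_fence(&cursor, resv, DMA_RESV_USAGE_READ, f)
		pr_info("blocking fence: ctx %llu seqno %llu\n",
			f->context, f->seqno);

	dma_resv_unlock(resv);
	return ret;
}
```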
|
st-dma-fence-chain.c
    in mock_chain():
        62:  struct dma_fence *fence,
        71:  dma_fence_chain_init(f, dma_fence_get(prev), dma_fence_get(fence),
    in find_seqno():
        184: struct dma_fence *fence;
        192: fence = dma_fence_get(fc.tail);
        193: err = dma_fence_chain_find_seqno(&fence, 0);
        194: dma_fence_put(fence);
        201: fence = dma_fence_get(fc.tail);
        202: err = dma_fence_chain_find_seqno(&fence, i + 1);
        203: dma_fence_put(fence);
        209: if (fence != fc.chains[i]) {
    [all …]
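
The selftest exercises the two halves of the chain API: dma_fence_chain_init() links a new element onto a timeline, consuming the references it is handed (hence the dma_fence_get() calls in mock_chain() above), and dma_fence_chain_find_seqno() rewinds a head reference to the element covering a sequence number. A sketch under those assumptions (my_* names are hypothetical):

```c
#include <linux/dma-fence-chain.h>

/* Append 'fence' to the timeline whose head is '*head' at 'seqno'. */
static int my_timeline_append(struct dma_fence **head,
			      struct dma_fence *fence, u64 seqno)
{
	struct dma_fence_chain *link = dma_fence_chain_alloc();

	if (!link)
		return -ENOMEM;

	/* chain_init() takes over both references passed in */
	dma_fence_chain_init(link, *head, dma_fence_get(fence), seqno);
	*head = &link->base;
	return 0;
}

/* Wait until the timeline has reached 'seqno'. */
static int my_timeline_wait(struct dma_fence *head, u64 seqno)
{
	struct dma_fence *fence = dma_fence_get(head);
	int err;

	err = dma_fence_chain_find_seqno(&fence, seqno);
	/* a NULL result means the timeline already passed 'seqno' */
	if (!err && fence)
		err = dma_fence_wait(fence, false);
	dma_fence_put(fence);
	return err;
}
```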
|
sw_sync.c
    53:  __s32 fence;	/* fd of new fence */
    86:  static inline struct sync_pt *dma_fence_to_sync_pt(struct dma_fence *fence)
    88:      if (fence->ops != &timeline_fence_ops)
    90:      return container_of(fence, struct sync_pt, base);
    141: static const char *timeline_fence_get_driver_name(struct dma_fence *fence)
    146: static const char *timeline_fence_get_timeline_name(struct dma_fence *fence)
    148:     struct sync_timeline *parent = dma_fence_parent(fence);
    153: static void timeline_fence_release(struct dma_fence *fence)
    155:     struct sync_pt *pt = dma_fence_to_sync_pt(fence);
    156:     struct sync_timeline *parent = dma_fence_parent(fence);
    [all …]
|
/linux/drivers/gpu/drm/i915/
i915_sw_fence.c
    49:  static inline void debug_fence_init(struct i915_sw_fence *fence)
    51:      debug_object_init(fence, &i915_sw_fence_debug_descr);
    54:  static inline __maybe_unused void debug_fence_init_onstack(struct i915_sw_fence *fence)
    56:      debug_object_init_on_stack(fence, &i915_sw_fence_debug_descr);
    59:  static inline void debug_fence_activate(struct i915_sw_fence *fence)
    61:      debug_object_activate(fence, &i915_sw_fence_debug_descr);
    64:  static inline void debug_fence_set_state(struct i915_sw_fence *fence,
    67:      debug_object_active_state(fence, &i915_sw_fence_debug_descr, old, new);
    70:  static inline void debug_fence_deactivate(struct i915_sw_fence *fence)
    72:      debug_object_deactivate(fence, &i915_sw_fence_debug_descr);
    [all …]
|
i915_sw_fence.h
    42:  void __i915_sw_fence_init(struct i915_sw_fence *fence,
    47:  #define i915_sw_fence_init(fence, fn) \
    52:      __i915_sw_fence_init((fence), (fn), #fence, &__key); \
    55:  #define i915_sw_fence_init(fence, fn) \
    58:      __i915_sw_fence_init((fence), (fn), NULL, NULL); \
    62:  void i915_sw_fence_reinit(struct i915_sw_fence *fence);
    65:  void i915_sw_fence_fini(struct i915_sw_fence *fence);
    67:  static inline void i915_sw_fence_fini(struct i915_sw_fence *fence) {}
    70:  void i915_sw_fence_commit(struct i915_sw_fence *fence);
    72:  int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence,
    [all …]
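
Taken together these declarations sketch the lifecycle of i915's software fence: initialize with a notify callback, queue dependencies with the await helpers declared later in this header, then commit. A hedged illustration, assuming the i915-internal API behaves as the prototypes suggest (my_notify, my_use, and dep are hypothetical):

```c
static int my_notify(struct i915_sw_fence *fence,
		     enum i915_sw_fence_notify state)
{
	switch (state) {
	case FENCE_COMPLETE:	/* all awaited fences have signaled */
		break;
	case FENCE_FREE:	/* last reference dropped */
		break;
	}
	return NOTIFY_DONE;
}

static void my_use(struct dma_fence *dep)
{
	struct i915_sw_fence fence;

	i915_sw_fence_init(&fence, my_notify);
	/* completes only once 'dep' has signaled */
	i915_sw_fence_await_dma_fence(&fence, dep, 0, GFP_KERNEL);
	i915_sw_fence_commit(&fence);
	/* ... */
	i915_sw_fence_fini(&fence);
}
```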
|
i915_active.h
    48:  void i915_active_noop(struct dma_fence *fence, struct dma_fence_cb *cb);
    in __i915_active_fence_init():
        64:  void *fence,
        67:  RCU_INIT_POINTER(active->fence, fence);
    76:  struct dma_fence *fence);
    in i915_active_fence_get():
        103: struct dma_fence *fence;
        106: fence = dma_fence_get_rcu_safe(&active->fence);
        109: return fence;
    in i915_active_fence_isset():
        123: return rcu_access_pointer(active->fence);
    178: int i915_sw_fence_await_active(struct i915_sw_fence *fence,
    in __i915_request_await_exclusive():
        222: struct dma_fence *fence;
    [all …]
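
i915_active_fence_get() shows the standard pattern for an RCU-published fence pointer: dma_fence_get_rcu_safe() retries until it has both a consistent pointer and a live reference. A sketch of the same pattern outside i915 (struct my_slot is hypothetical):

```c
#include <linux/dma-fence.h>
#include <linux/rcupdate.h>

struct my_slot {
	struct dma_fence __rcu *fence;
};

/* Writer side: publish a new fence (serialized by the caller). */
static void my_slot_set(struct my_slot *slot, struct dma_fence *fence)
{
	rcu_assign_pointer(slot->fence, fence);
}

/* Reader side: take a reference without blocking writers. */
static struct dma_fence *my_slot_get(struct my_slot *slot)
{
	struct dma_fence *fence;

	rcu_read_lock();
	fence = dma_fence_get_rcu_safe(&slot->fence);
	rcu_read_unlock();

	return fence;	/* NULL, or a reference the caller must put */
}
```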
|
i915_deps.c
    75:  static int i915_deps_grow(struct i915_deps *deps, struct dma_fence *fence,
    96:      deps->fences[deps->num_deps++] = dma_fence_get(fence);
    100:     if (ctx->no_wait_gpu && !dma_fence_is_signaled(fence)) {
    105:     ret = dma_fence_wait(fence, ctx->interruptible);
    109:     ret = fence->error;
    in i915_deps_add_dependency():
        177: struct dma_fence *fence,
        183: if (!fence)
        186: if (dma_fence_is_signaled(fence)) {
        187: ret = fence->error;
        196: if (!entry->context || entry->context != fence->context)
    [all …]
|
/linux/drivers/gpu/drm/nouveau/
nouveau_fence.c
    42:  nouveau_fctx(struct nouveau_fence *fence)
    44:      return container_of(fence->base.lock, struct nouveau_fence_chan, lock);
    48:  nouveau_fence_signal(struct nouveau_fence *fence)
    52:      dma_fence_signal_locked(&fence->base);
    53:      list_del(&fence->head);
    54:      rcu_assign_pointer(fence->channel, NULL);
    56:      if (test_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags)) {
    57:          struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
    63:      dma_fence_put(&fence->base);
    68:  nouveau_local_fence(struct dma_fence *fence, struct nouveau_drm *drm)
    [all …]
|
nv04_fence.c
    40:  nv04_fence_emit(struct nouveau_fence *fence)
    42:      struct nvif_push *push = &unrcu_pointer(fence->channel)->chan.push;
    45:      PUSH_NVSQ(push, NV_SW, 0x0150, fence->base.seqno);
    52:  nv04_fence_sync(struct nouveau_fence *fence,
    in nv04_fence_context_del():
        70:  struct nv04_fence_chan *fctx = chan->fence;
        72:  chan->fence = NULL;
    in nv04_fence_context_new():
        85:  chan->fence = fctx;
    in nv04_fence_destroy():
        94:  struct nv04_fence_priv *priv = drm->fence;
        95:  drm->fence = NULL;
    in nv04_fence_create():
        104: priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL);
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_fence.c
    in amdgpu_amdkfd_fence_create():
        67:  struct amdgpu_amdkfd_fence *fence;
        69:  fence = kzalloc(sizeof(*fence), GFP_KERNEL);
        70:  if (fence == NULL)
        75:  fence->mm = mm;
        76:  get_task_comm(fence->timeline_name, current);
        77:  spin_lock_init(&fence->lock);
        78:  fence->svm_bo = svm_bo;
        79:  dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock,
        82:  return fence;
    in to_amdgpu_amdkfd_fence():
        87:  struct amdgpu_amdkfd_fence *fence;
    [all …]
|
amdgpu_fence.c
    in amdgpu_fence_emit():
        106: struct dma_fence *fence;
        111: fence = &af->base;
        115: dma_fence_init(fence, &amdgpu_fence_ops,
        139: to_amdgpu_fence(fence)->start_timestamp = ktime_get();
        144: rcu_assign_pointer(*ptr, dma_fence_get(fence));
    in amdgpu_fence_process():
        231: struct dma_fence *fence, **ptr;
        239: fence = rcu_dereference_protected(*ptr, 1);
        242: if (!fence)
        249: am_fence = container_of(fence, struct amdgpu_fence, base);
        251: dma_fence_signal(fence);
    [all …]
|
/linux/drivers/gpu/drm/i915/selftests/
lib_sw_fence.c
    30:  nop_fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state)
    35:  void __onstack_fence_init(struct i915_sw_fence *fence,
    39:      debug_fence_init_onstack(fence);
    41:      __init_waitqueue_head(&fence->wait, name, key);
    42:      atomic_set(&fence->pending, 1);
    43:      fence->error = 0;
    44:      fence->fn = nop_fence_notify;
    47:  void onstack_fence_fini(struct i915_sw_fence *fence)
    49:      if (!fence->fn)
    52:      i915_sw_fence_commit(fence);
    [all …]
|
lib_sw_fence.h
    16:  #define onstack_fence_init(fence) \
    20:      __onstack_fence_init((fence), #fence, &__key); \
    23:  #define onstack_fence_init(fence) \
    24:      __onstack_fence_init((fence), NULL, NULL)
    27:  void __onstack_fence_init(struct i915_sw_fence *fence,
    30:  void onstack_fence_fini(struct i915_sw_fence *fence);
    33:  struct i915_sw_fence fence;
    41:  void heap_fence_put(struct i915_sw_fence *fence);
|
i915_sw_fence.c
    32:  fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state)
    in alloc_fence():
        48:  struct i915_sw_fence *fence;
        50:  fence = kmalloc(sizeof(*fence), GFP_KERNEL);
        51:  if (!fence)
        54:  i915_sw_fence_init(fence, fence_notify);
        55:  return fence;
    58:  static void free_fence(struct i915_sw_fence *fence)
    60:      i915_sw_fence_fini(fence);
    61:      kfree(fence);
    64:  static int __test_self(struct i915_sw_fence *fence)
    [all …]
|
/linux/drivers/gpu/drm/radeon/
radeon_fence.c
    in radeon_fence_emit():
        134: struct radeon_fence **fence,
        140: *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL);
        141: if ((*fence) == NULL)
        144: (*fence)->rdev = rdev;
        145: (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];
        146: (*fence)->ring = ring;
        147: (*fence)->is_vm_update = false;
        148: dma_fence_init(&(*fence)->base, &radeon_fence_ops,
        152: radeon_fence_ring_emit(rdev, ring, *fence);
        153: trace_radeon_fence_emit(rdev_to_drm(rdev), ring, (*fence)->seq);
    [all …]
|
radeon_sync.c
    in radeon_sync_fence():
        63:  struct radeon_fence *fence)
        67:  if (!fence)
        70:  other = sync->sync_to[fence->ring];
        71:  sync->sync_to[fence->ring] = radeon_fence_later(fence, other);
        73:  if (fence->is_vm_update) {
        75:  sync->last_vm_update = radeon_fence_later(fence, other);
    in radeon_sync_resv():
        95:  struct radeon_fence *fence;
        100: fence = to_radeon_fence(f);
        101: if (fence && fence->rdev == rdev)
        102: radeon_sync_fence(sync, fence);
    [all …]
|
/linux/drivers/gpu/host1x/
intr.c
    in host1x_intr_add_fence_to_list():
        15:  struct host1x_syncpt_fence *fence)
        20:  if ((s32)(fence_in_list->threshold - fence->threshold) <= 0) {
        22:  list_add(&fence->list, &fence_in_list->list);
        28:  list_add(&fence->list, &list->list);
    in host1x_intr_update_hw_state():
        33:  struct host1x_syncpt_fence *fence;
        36:  fence = list_first_entry(&sp->fences.list, struct host1x_syncpt_fence, list);
        38:  host1x_hw_intr_set_syncpt_threshold(host, sp->id, fence->threshold);
    45:  void host1x_intr_add_fence_locked(struct host1x *host, struct host1x_syncpt_fence *fence)
    47:      struct host1x_fence_list *fence_list = &fence->sp->fences;
    49:      INIT_LIST_HEAD(&fence->list);
    [all …]
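
The `(s32)(a - b) <= 0` test on line 20 is the standard wraparound-safe ordering check for 32-bit syncpoint thresholds: the unsigned subtraction is reduced modulo 2^32, so the sign of the reinterpreted result tells you which value came first, as long as the two are within 2^31 of each other. A standalone sketch of the idiom:

```c
#include <stdbool.h>
#include <stdint.h>

/* True if threshold 'a' was issued at or before 'b', even across a
 * counter wrap: a = 0xffffffff, b = 2 gives (int32_t)0xfffffffd,
 * which is negative, so 'a' correctly sorts before 'b'. */
static bool threshold_at_or_before(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) <= 0;
}
```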
|
/linux/drivers/gpu/drm/v3d/
v3d_fence.c
    in v3d_fence_create():
        9:   struct v3d_fence *fence;
        11:  fence = kzalloc(sizeof(*fence), GFP_KERNEL);
        12:  if (!fence)
        15:  fence->dev = &v3d->drm;
        16:  fence->queue = q;
        17:  fence->seqno = ++queue->emit_seqno;
        18:  dma_fence_init(&fence->base, &v3d_fence_ops, &queue->fence_lock,
        19:                 queue->fence_context, fence->seqno);
        21:  return &fence->base;
    24:  static const char *v3d_fence_get_driver_name(struct dma_fence *fence)
    [all …]
|
/linux/tools/testing/selftests/sync/
sync_stress_consumer.c
    40:  static int busy_wait_on_fence(int fence)
    45:      error = sync_fence_count_with_status(fence, FENCE_STATUS_ERROR);
    47:      active = sync_fence_count_with_status(fence,
    in mpsc_producer_thread():
        66:  int fence, valid, i;
        72:  fence = sw_sync_fence_create(consumer_timeline, "fence", i);
        73:  valid = sw_sync_fence_is_valid(fence);
        82:  ASSERT(sync_wait(fence, -1) > 0,
        85:  ASSERT(busy_wait_on_fence(fence) == 0,
        100: sw_sync_fence_destroy(fence);
    in mpcs_consumer_thread():
        108: int fence, merged, tmp, valid, it, i;
    [all …]
|
sync_stress_merge.c
    in test_merge_stress_random_merge():
        43:  int fence, tmpfence, merged, valid;
        51:  fence = sw_sync_fence_create(timelines[0], "fence", 0);
        52:  valid = sw_sync_fence_is_valid(fence);
        76:  merged = sync_merge("merge", tmpfence, fence);
        78:  sw_sync_fence_destroy(fence);
        79:  fence = merged;
        91:  ASSERT(sync_fence_size(fence) == size,
        97:  ret = sync_wait(fence, 0);
        106: ret = sync_wait(fence, 0);
        109: sw_sync_fence_destroy(fence);
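
The stress loop keeps folding new sync points into one fd with sync_merge(); the merged fence signals only when every merged point has been reached. A reduced sketch using the same selftest helpers (sync.h and sw_sync.h from this directory, not a public libc API; timeline setup is elided):

```c
/* Assumes two timelines already made with sw_sync_timeline_create(). */
static int merge_example(int timeline_a, int timeline_b)
{
	int a = sw_sync_fence_create(timeline_a, "a", 5);
	int b = sw_sync_fence_create(timeline_b, "b", 3);
	int both = sync_merge("a|b", a, b);

	/* the component fds can be closed once merged */
	sw_sync_fence_destroy(a);
	sw_sync_fence_destroy(b);

	/* not signaled yet: sync_wait() with a zero timeout returns 0 */
	if (sync_wait(both, 0) != 0)
		return -1;

	sw_sync_timeline_inc(timeline_a, 5);
	sw_sync_timeline_inc(timeline_b, 3);

	/* both components have signaled, so the merge has too */
	if (sync_wait(both, -1) <= 0)
		return -1;

	sw_sync_fence_destroy(both);
	return 0;
}
```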
|
sync_alloc.c
    in test_alloc_fence():
        46:  int timeline, fence, valid;
        52:  fence = sw_sync_fence_create(timeline, "allocFence", 1);
        53:  valid = sw_sync_fence_is_valid(fence);
        56:  sw_sync_fence_destroy(fence);
    in test_alloc_fence_negative():
        63:  int fence, timeline;
        68:  fence = sw_sync_fence_create(-1, "fence", 1);
        69:  ASSERT(fence < 0, "Success allocating negative fence\n");
        71:  sw_sync_fence_destroy(fence);
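
For orientation, the complete sw_sync lifecycle these tests slice up looks roughly like this, again using the selftest's local helpers:

```c
static int lifecycle_example(void)
{
	int timeline, fence;

	timeline = sw_sync_timeline_create();
	if (timeline < 0)
		return -1;

	/* fence fd that signals once the timeline reaches value 1 */
	fence = sw_sync_fence_create(timeline, "example", 1);

	sw_sync_timeline_inc(timeline, 1);	/* signal it */
	sync_wait(fence, -1);			/* returns immediately now */

	sw_sync_fence_destroy(fence);
	sw_sync_timeline_destroy(timeline);
	return 0;
}
```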
|
/linux/include/linux/
dma-fence-chain.h
    29:  struct dma_fence *fence;
    61:  to_dma_fence_chain(struct dma_fence *fence)
    63:      if (!fence || !dma_fence_is_chain(fence))
    66:      return container_of(fence, struct dma_fence_chain, base);
    77:  dma_fence_chain_contained(struct dma_fence *fence)
    79:      struct dma_fence_chain *chain = to_dma_fence_chain(fence);
    81:      return chain ? chain->fence : fence;
    124: struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence);
    128: struct dma_fence *fence,
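
dma_fence_chain_walk() is the primitive behind the dma_fence_chain_for_each() iterator defined in this header; dma_fence_chain_contained() then strips the chain wrapper to reach the payload. A sketch of walking a chain, assuming a head reference obtained elsewhere (my_dump_chain is hypothetical):

```c
#include <linux/dma-fence-chain.h>

static void my_dump_chain(struct dma_fence *head)
{
	struct dma_fence *iter;

	/* the iterator takes and drops references as it walks */
	dma_fence_chain_for_each(iter, head) {
		struct dma_fence *payload = dma_fence_chain_contained(iter);

		pr_info("seqno %llu signaled %d\n", payload->seqno,
			dma_fence_is_signaled(payload));
	}
}
```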
|
dma-fence-array.h
    59:  to_dma_fence_array(struct dma_fence *fence)
    61:      if (!fence || !dma_fence_is_array(fence))
    64:      return container_of(fence, struct dma_fence_array, base);
    78:  #define dma_fence_array_for_each(fence, index, head) \
    79:      for (index = 0, fence = dma_fence_array_first(head); fence; \
    80:          ++(index), fence = dma_fence_array_next(head, index))
    93:  bool dma_fence_match_context(struct dma_fence *fence, u64 context);
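
A dma_fence_array bundles many fences behind the plain dma_fence interface. A sketch of creating one and iterating it with the macro above; on success the array takes ownership of the kmalloc'd pointer array and its references (my_* names are hypothetical):

```c
#include <linux/dma-fence-array.h>
#include <linux/slab.h>

/* Wrap 'count' fences (a kmalloc'd array of owned references) into a
 * single fence that signals once all of them have signaled. */
static struct dma_fence *my_bundle(struct dma_fence **fences,
				   unsigned int count)
{
	struct dma_fence_array *array;

	array = dma_fence_array_create(count, fences,
				       dma_fence_context_alloc(1),
				       1, false /* signal on all */);
	return array ? &array->base : NULL;
}

static void my_dump_bundle(struct dma_fence *bundle)
{
	struct dma_fence *fence;
	unsigned int index;

	dma_fence_array_for_each(fence, index, bundle)
		pr_info("member %u: ctx %llu seqno %llu\n",
			index, fence->context, fence->seqno);
}
```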
|
/linux/tools/memory-model/
linux-kernel.cat
    53:  * smp_mb__after_unlock_lock() fence only affects a single lock handover.
    58:  * to the CPU that executes the fence. Therefore, all those stores are
    59:  * also affected by the fence.
    65:  let strong-fence = mb | gp
    67:  let nonrw-fence = strong-fence | po-rel | acq-po
    68:  let fence = nonrw-fence | wmb | rmb
    95:  let ppo = to-r | to-w | (fence & int) | (po-unlock-lock-po & int)
    100: let cumul-fence
    [all …]
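
These definitions feed the herd7 model under tools/memory-model/: mb (smp_mb()) is in strong-fence, which enters ppo and forbids the classic store-buffering reordering. The stock litmus test exercising exactly that, SB+fencembonceonces.litmus from the litmus-tests/ directory alongside this file, looks essentially like this (reconstructed from memory, so treat the exact text as approximate):

```
C SB+fencembonceonces

{}

P0(int *x, int *y)
{
	int r0;

	WRITE_ONCE(*x, 1);
	smp_mb();
	r0 = READ_ONCE(*y);
}

P1(int *x, int *y)
{
	int r0;

	WRITE_ONCE(*y, 1);
	smp_mb();
	r0 = READ_ONCE(*x);
}

exists (0:r0=0 /\ 1:r0=0)
```

Run through herd7 with linux-kernel.cat, the exists clause is never satisfied: both smp_mb() calls are strong fences, so at least one of the reads must observe the other CPU's store.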