/linux/drivers/gpu/drm/
drm_exec.c
    54    static void drm_exec_unlock_all(struct drm_exec *exec)    in drm_exec_unlock_all()
    59    drm_exec_for_each_locked_object_reverse(exec, index, obj) {    in drm_exec_unlock_all()
    64    drm_gem_object_put(exec->prelocked);    in drm_exec_unlock_all()
    65    exec->prelocked = NULL;    in drm_exec_unlock_all()
    79    void drm_exec_init(struct drm_exec *exec, u32 flags, unsigned nr)    in drm_exec_init()
    84    exec->flags = flags;    in drm_exec_init()
    85    exec->objects = kvmalloc_array(nr, sizeof(void *), GFP_KERNEL);    in drm_exec_init()
    88    exec->max_objects = exec->objects ? nr : 0;    in drm_exec_init()
    89    exec->num_objects = 0;    in drm_exec_init()
    90    exec->contended = DRM_EXEC_DUMMY;    in drm_exec_init()
    [all …]
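The matches above are the core of the drm_exec state machine: unlock-all on contention and lazy array allocation in drm_exec_init(). For reference, the driver-facing pattern built on top of it looks roughly like the sketch below; lock_two_objects() is a made-up helper name and error handling is trimmed to the minimum.

    #include <drm/drm_exec.h>
    #include <drm/drm_gem.h>

    /* Lock two already-referenced GEM objects, restarting the whole sequence
     * whenever ww-mutex contention is signalled via -EDEADLK. */
    static int lock_two_objects(struct drm_gem_object *a, struct drm_gem_object *b)
    {
            struct drm_exec exec;
            int ret = 0;

            drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
            drm_exec_until_all_locked(&exec) {
                    ret = drm_exec_lock_obj(&exec, a);
                    drm_exec_retry_on_contention(&exec);
                    if (ret)
                            break;

                    ret = drm_exec_lock_obj(&exec, b);
                    drm_exec_retry_on_contention(&exec);
                    if (ret)
                            break;
            }

            /* ... on success, use the objects while both are still locked ... */

            drm_exec_fini(&exec);
            return ret;
    }

The loop body re-runs from the top whenever drm_exec_retry_on_contention() observes -EDEADLK, after drm_exec_cleanup() has dropped all locks taken so far; that is the ww-mutex backoff visible in drm_exec_unlock_all() above.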
drm_gpuvm.c
    1089    exec_prepare_obj(struct drm_exec *exec, struct drm_gem_object *obj,    in exec_prepare_obj()
    1092    return num_fences ? drm_exec_prepare_obj(exec, obj, num_fences) :    in exec_prepare_obj()
    1093    drm_exec_lock_obj(exec, obj);    in exec_prepare_obj()
    1112    struct drm_exec *exec,    in drm_gpuvm_prepare_vm()
    1115    return exec_prepare_obj(exec, gpuvm->r_obj, num_fences);    in drm_gpuvm_prepare_vm()
    1121    struct drm_exec *exec,    in __drm_gpuvm_prepare_objects()
    1129    ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);    in __drm_gpuvm_prepare_objects()
    1142    struct drm_exec *exec,    in drm_gpuvm_prepare_objects_locked()
    1150    ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);    in drm_gpuvm_prepare_objects_locked()
    1186    struct drm_exec *exec,    in drm_gpuvm_prepare_objects()
    [all …]
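exec_prepare_obj() above falls back to a plain drm_exec_lock_obj() when no fence slots are requested. The exported drm_gpuvm_prepare_*() wrappers are normally called from inside a drm_exec retry loop; a hedged sketch using the signatures shown in this listing (the caller is assumed to own drm_exec_init()/drm_exec_fini(), and the helper name is illustrative):

    #include <drm/drm_exec.h>
    #include <drm/drm_gpuvm.h>

    /* Reserve one fence slot on the VM reservation object and on every object
     * managed by the GPUVM, retrying from scratch on contention. */
    static int prepare_vm_and_objects(struct drm_gpuvm *gpuvm, struct drm_exec *exec)
    {
            int ret = 0;

            drm_exec_until_all_locked(exec) {
                    ret = drm_gpuvm_prepare_vm(gpuvm, exec, 1);
                    drm_exec_retry_on_contention(exec);
                    if (ret)
                            break;

                    ret = drm_gpuvm_prepare_objects(gpuvm, exec, 1);
                    drm_exec_retry_on_contention(exec);
                    if (ret)
                            break;
            }

            return ret;
    }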
/linux/drivers/gpu/drm/tests/
drm_exec_test.c
    47    struct drm_exec exec;    in sanitycheck()
    49    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in sanitycheck()
    50    drm_exec_fini(&exec);    in sanitycheck()
    58    struct drm_exec exec;    in test_lock()
    63    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in test_lock()
    64    drm_exec_until_all_locked(&exec) {    in test_lock()
    65    ret = drm_exec_lock_obj(&exec, &gobj);    in test_lock()
    66    drm_exec_retry_on_contention(&exec);    in test_lock()
    71    drm_exec_fini(&exec);    in test_lock()
    78    struct drm_exec exec;    in test_lock_unlock()
    [all …]
/linux/drivers/gpu/drm/vc4/
vc4_gem.c
    157    struct vc4_exec_info *exec[2];    in vc4_save_hang_state()
    169    exec[0] = vc4_first_bin_job(vc4);    in vc4_save_hang_state()
    170    exec[1] = vc4_first_render_job(vc4);    in vc4_save_hang_state()
    171    if (!exec[0] && !exec[1]) {    in vc4_save_hang_state()
    179    if (!exec[i])    in vc4_save_hang_state()
    183    list_for_each_entry(bo, &exec[i]->unref_list, unref_head)    in vc4_save_hang_state()
    185    state->bo_count += exec[i]->bo_count + unref_list_count;    in vc4_save_hang_state()
    198    if (!exec[i])    in vc4_save_hang_state()
    201    for (j = 0; j < exec[i]->bo_count; j++) {    in vc4_save_hang_state()
    202    bo = to_vc4_bo(exec[i]->bo[j]);    in vc4_save_hang_state()
    [all …]
vc4_validate.c
    51    struct vc4_exec_info *exec, \
    106    vc4_use_bo(struct vc4_exec_info *exec, uint32_t hindex)    in vc4_use_bo()
    108    struct vc4_dev *vc4 = exec->dev;    in vc4_use_bo()
    115    if (hindex >= exec->bo_count) {    in vc4_use_bo()
    117    hindex, exec->bo_count);    in vc4_use_bo()
    120    obj = to_drm_gem_dma_obj(exec->bo[hindex]);    in vc4_use_bo()
    133    vc4_use_handle(struct vc4_exec_info *exec, uint32_t gem_handles_packet_index)    in vc4_use_handle()
    135    return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]);    in vc4_use_handle()
    139    validate_bin_pos(struct vc4_exec_info *exec, void *untrusted, uint32_t pos)    in validate_bin_pos()
    144    return (untrusted - 1 == exec->bin_u + pos);    in validate_bin_pos()
    [all …]
vc4_irq.c
    67    struct vc4_exec_info *exec;    in vc4_overflow_mem_work()
    92    exec = vc4_first_bin_job(vc4);    in vc4_overflow_mem_work()
    93    if (!exec)    in vc4_overflow_mem_work()
    94    exec = vc4_last_render_job(vc4);    in vc4_overflow_mem_work()
    95    if (exec) {    in vc4_overflow_mem_work()
    96    exec->bin_slots |= vc4->bin_alloc_overflow;    in vc4_overflow_mem_work()
    120    struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4);    in vc4_irq_finish_bin_job()
    122    if (!exec)    in vc4_irq_finish_bin_job()
    125    trace_vc4_bcl_end_irq(dev, exec->seqno);    in vc4_irq_finish_bin_job()
    127    vc4_move_job_to_render(dev, exec);    in vc4_irq_finish_bin_job()
    [all …]
/linux/include/drm/
drm_exec.h
    63    drm_exec_obj(struct drm_exec *exec, unsigned long index)    in drm_exec_obj()
    65    return index < exec->num_objects ? exec->objects[index] : NULL;    in drm_exec_obj()
    76    #define drm_exec_for_each_locked_object(exec, index, obj) \
    77    for ((index) = 0; ((obj) = drm_exec_obj(exec, index)); ++(index))
    90    #define drm_exec_for_each_locked_object_reverse(exec, index, obj) \
    91    for ((index) = (exec)->num_objects - 1; \
    92    ((obj) = drm_exec_obj(exec, index)); --(index))
    105    #define drm_exec_until_all_locked(exec) \
    110    drm_exec_cleanup(exec); \
    120    #define drm_exec_retry_on_contention(exec) \
    [all …]
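Once every buffer is locked, the iterators above let a driver walk the locked set, typically to attach the job's fence to each reservation object. A minimal sketch, assuming one fence slot per object was reserved earlier with drm_exec_prepare_obj():

    #include <linux/dma-resv.h>
    #include <drm/drm_exec.h>
    #include <drm/drm_gem.h>

    /* Attach @fence as a writer to every object currently locked by @exec. */
    static void add_fence_to_locked_objects(struct drm_exec *exec,
                                            struct dma_fence *fence)
    {
            struct drm_gem_object *obj;
            unsigned long index;

            drm_exec_for_each_locked_object(exec, index, obj)
                    dma_resv_add_fence(obj->resv, fence, DMA_RESV_USAGE_WRITE);
    }

The reverse iterator exists for teardown paths such as drm_exec_unlock_all() in the first entry of this listing.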
drm_gpuvm.h
    516    struct drm_exec exec;
    553    struct drm_exec *exec,
    557    struct drm_exec *exec,
    561    struct drm_exec *exec,
    586    drm_exec_fini(&vm_exec->exec);    in drm_gpuvm_exec_unlock()
    589    int drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec);
    591    struct drm_exec *exec,
    611    drm_gpuvm_resv_add_fence(vm_exec->vm, &vm_exec->exec, fence,    in drm_gpuvm_exec_resv_add_fence()
    626    return drm_gpuvm_validate(vm_exec->vm, &vm_exec->exec);    in drm_gpuvm_exec_validate()
    1155    struct drm_exec *exec);
/linux/include/uapi/linux/
a.out.h
    44    #define N_MAGIC(exec) ((exec).a_info & 0xffff)
    46    #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff))
    47    #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff)
    48    #define N_SET_INFO(exec, magic, type, flags) \
    49    ((exec).a_info = ((magic) & 0xffff) \
    52    #define N_SET_MAGIC(exec, magic) \
    53    ((exec).a_info = (((exec).a_info & 0xffff0000) | ((magic) & 0xffff)))
    55    #define N_SET_MACHTYPE(exec, machtype) \
    56    ((exec).a_info = \
    57    ((exec).a_info&0xff00ffff) | ((((int)(machtype))&0xff) << 16))
    [all …]
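These macros pack three fields into the single a_info word: the magic number in bits 0-15, the machine type in bits 16-23 and the flags in bits 24-31. A small user-space illustration of that layout; the constants 0413 (ZMAGIC) and 100 (M_386) are the conventional values from the same header and are hard-coded here only for the example:

    #include <stdio.h>

    int main(void)
    {
            unsigned int magic = 0413, machtype = 100, flags = 0;
            unsigned int a_info = (magic & 0xffff)
                                | ((machtype & 0xff) << 16)
                                | ((flags & 0xff) << 24);

            printf("a_info     = %#010x\n", a_info);
            printf("N_MAGIC    = %#o\n", a_info & 0xffff);
            printf("N_MACHTYPE = %u\n", (a_info >> 16) & 0xff);
            printf("N_FLAGS    = %#x\n", (a_info >> 24) & 0xff);
            return 0;
    }

N_SET_MAGIC() and N_SET_MACHTYPE() update only their own field by first masking the others out, as the 0xffff0000 and 0xff00ffff masks above show.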
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_csa.c
    69    struct drm_exec exec;    in amdgpu_map_static_csa()
    72    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in amdgpu_map_static_csa()
    73    drm_exec_until_all_locked(&exec) {    in amdgpu_map_static_csa()
    74    r = amdgpu_vm_lock_pd(vm, &exec, 0);    in amdgpu_map_static_csa()
    76    r = drm_exec_lock_obj(&exec, &bo->tbo.base);    in amdgpu_map_static_csa()
    77    drm_exec_retry_on_contention(&exec);    in amdgpu_map_static_csa()
    101    drm_exec_fini(&exec);    in amdgpu_map_static_csa()
    109    struct drm_exec exec;    in amdgpu_unmap_static_csa()
    112    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in amdgpu_unmap_static_csa()
    113    drm_exec_until_all_locked(&exec) {    in amdgpu_unmap_static_csa()
    [all …]
amdgpu_seq64.c
    67    struct drm_exec exec;    in amdgpu_seq64_map()
    75    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in amdgpu_seq64_map()
    76    drm_exec_until_all_locked(&exec) {    in amdgpu_seq64_map()
    77    r = amdgpu_vm_lock_pd(vm, &exec, 0);    in amdgpu_seq64_map()
    79    r = drm_exec_lock_obj(&exec, &bo->tbo.base);    in amdgpu_seq64_map()
    80    drm_exec_retry_on_contention(&exec);    in amdgpu_seq64_map()
    108    drm_exec_fini(&exec);    in amdgpu_seq64_map()
    124    struct drm_exec exec;    in amdgpu_seq64_unmap()
    136    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in amdgpu_seq64_unmap()
    137    drm_exec_until_all_locked(&exec) {    in amdgpu_seq64_unmap()
    [all …]
amdgpu_gem.c
    234    struct drm_exec exec;    in amdgpu_gem_object_close()
    237    drm_exec_init(&exec, DRM_EXEC_IGNORE_DUPLICATES, 0);    in amdgpu_gem_object_close()
    238    drm_exec_until_all_locked(&exec) {    in amdgpu_gem_object_close()
    239    r = drm_exec_prepare_obj(&exec, &bo->tbo.base, 1);    in amdgpu_gem_object_close()
    240    drm_exec_retry_on_contention(&exec);    in amdgpu_gem_object_close()
    244    r = amdgpu_vm_lock_pd(vm, &exec, 0);    in amdgpu_gem_object_close()
    245    drm_exec_retry_on_contention(&exec);    in amdgpu_gem_object_close()
    272    drm_exec_fini(&exec);    in amdgpu_gem_object_close()
    716    struct drm_exec exec;    in amdgpu_gem_va_ioctl()
    777    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT |    in amdgpu_gem_va_ioctl()
    [all …]
amdgpu_umsch_mm.c
    87    struct drm_exec exec;    in map_ring_data()
    92    drm_exec_init(&exec, 0, 0);    in map_ring_data()
    93    drm_exec_until_all_locked(&exec) {    in map_ring_data()
    94    r = drm_exec_lock_obj(&exec, &bo->tbo.base);    in map_ring_data()
    95    drm_exec_retry_on_contention(&exec);    in map_ring_data()
    99    r = amdgpu_vm_lock_pd(vm, &exec, 0);    in map_ring_data()
    100    drm_exec_retry_on_contention(&exec);    in map_ring_data()
    132    drm_exec_fini(&exec);    in map_ring_data()
    143    drm_exec_fini(&exec);    in map_ring_data()
    152    struct drm_exec exec;    in unmap_ring_data()
    [all …]
/linux/tools/testing/selftests/exec/
null-argv.c
    10    #define FORK(exec) \
    15    exec; /* Some kind of exec */ \
    16    perror("# " #exec); \
    19    check_result(pid, #exec); \
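The FORK() wrapper forks, runs "some kind of exec" in the child and lets the parent check the result. A standalone sketch of the same idea, with the empty argv this selftest exercises; the program path and variable names are illustrative, not taken from the test:

    #include <stdio.h>
    #include <sys/types.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void)
    {
            char *empty_argv[] = { NULL };
            char *empty_envp[] = { NULL };
            pid_t pid = fork();

            if (pid < 0) {
                    perror("fork");
                    return 1;
            }
            if (pid == 0) {
                    /* argv[0] == NULL: the case the selftest exercises */
                    execve("/bin/true", empty_argv, empty_envp);
                    perror("# execve");     /* reached only if the exec failed */
                    _exit(1);
            }

            int status;
            waitpid(pid, &status, 0);
            printf("child exit status: %d\n", WEXITSTATUS(status));
            return 0;
    }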
/linux/drivers/sbus/char/
oradax.c
    861    ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT;    in dax_ccb_exec()
    868    ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;    in dax_ccb_exec()
    878    ctx->result.exec.status = DAX_SUBMIT_ERR_CCB_ARR_MMU_MISS;    in dax_ccb_exec()
    886    ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;    in dax_ccb_exec()
    892    ctx->result.exec.status = dax_preprocess_usr_ccbs(ctx, idx, nccbs);    in dax_ccb_exec()
    893    if (ctx->result.exec.status != DAX_SUBMIT_OK)    in dax_ccb_exec()
    896    ctx->result.exec.status = dax_lock_pages(ctx, idx, nccbs,    in dax_ccb_exec()
    897    &ctx->result.exec.status_data);    in dax_ccb_exec()
    898    if (ctx->result.exec.status != DAX_SUBMIT_OK)    in dax_ccb_exec()
    906    &accepted_len, &ctx->result.exec.status_data);    in dax_ccb_exec()
    [all …]
/linux/drivers/gpu/drm/xe/
xe_gt_pagefault.c
    96    static int xe_pf_begin(struct drm_exec *exec, struct xe_vma *vma,    in xe_pf_begin()
    103    err = xe_vm_lock_vma(exec, vma);    in xe_pf_begin()
    131    struct drm_exec exec;    in handle_vma_pagefault()
    155    drm_exec_init(&exec, 0, 0);    in handle_vma_pagefault()
    156    drm_exec_until_all_locked(&exec) {    in handle_vma_pagefault()
    157    err = xe_pf_begin(&exec, vma, atomic, tile->id);    in handle_vma_pagefault()
    158    drm_exec_retry_on_contention(&exec);    in handle_vma_pagefault()
    159    if (xe_vm_validate_should_retry(&exec, err, &end))    in handle_vma_pagefault()
    169    if (xe_vm_validate_should_retry(&exec, err, &end))    in handle_vma_pagefault()
    180    drm_exec_fini(&exec);    in handle_vma_pagefault()
    [all …]
xe_exec.c
    102    return xe_vm_validate_rebind(vm, &vm_exec->exec, 1);    in xe_exec_fn()
    116    struct drm_exec *exec = &vm_exec.exec;    in xe_exec_ioctl()
    243    drm_exec_init(exec, vm_exec.flags, 0);    in xe_exec_ioctl()
    247    if (xe_vm_validate_should_retry(exec, err, &end))    in xe_exec_ioctl()
    306    drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, &job->drm.s_fence->finished,    in xe_exec_ioctl()
    338    drm_exec_fini(exec);    in xe_exec_ioctl()
xe_vm.c
    204    struct drm_exec *exec)    in resume_and_reinstall_preempt_fences()
    214    drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, q->lr.pfence,    in resume_and_reinstall_preempt_fences()
    226    struct drm_exec *exec = &vm_exec.exec;    in xe_vm_add_compute_exec_queue()
    251    drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, pfence,    in xe_vm_add_compute_exec_queue()
    266    drm_exec_fini(exec);    in xe_vm_add_compute_exec_queue()
    365    bool xe_vm_validate_should_retry(struct drm_exec *exec, int err, ktime_t *end)    in xe_vm_validate_should_retry()
    381    static int xe_gpuvm_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)    in xe_gpuvm_validate()
    415    int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec,    in xe_vm_validate_rebind()
    423    ret = drm_gpuvm_validate(&vm->gpuvm, exec);    in xe_vm_validate_rebind()
    432    drm_exec_for_each_locked_object(exec, index, obj) {    in xe_vm_validate_rebind()
    [all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/timer/
base.c
    73    LIST_HEAD(exec);    in nvkm_timer_alarm_trigger()
    90    list_add(&alarm->exec, &exec);    in nvkm_timer_alarm_trigger()
    99    list_for_each_entry_safe(alarm, atemp, &exec, exec) {    in nvkm_timer_alarm_trigger()
    100    list_del(&alarm->exec);    in nvkm_timer_alarm_trigger()
/linux/tools/perf/util/
comm.h
    15    bool exec;
    23    struct comm *comm__new(const char *str, u64 timestamp, bool exec);
    26    bool exec);
comm.c
    188    struct comm *comm__new(const char *str, u64 timestamp, bool exec)    in comm__new()
    196    comm->exec = exec;    in comm__new()
    207    int comm__override(struct comm *comm, const char *str, u64 timestamp, bool exec)    in comm__override()
    218    if (exec)    in comm__override()
    219    comm->exec = true;    in comm__override()
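comm__new() records a thread's initial name and comm__override() replaces it, latching comm->exec when the change came from an exec. A hedged sketch of how a caller inside the perf tools might chain the two (not a standalone program; comm_for_exec() is a made-up name):

    #include "comm.h"

    /* Create the comm for a freshly forked task, then overwrite it when the
     * task execs so that comm->exec ends up true. comm__override() returns a
     * negative value on failure; a real caller would propagate that. */
    static struct comm *comm_for_exec(const char *name, u64 fork_time, u64 exec_time)
    {
            struct comm *comm = comm__new(name, fork_time, false);

            if (comm)
                    (void)comm__override(comm, name, exec_time, true);

            return comm;
    }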
/linux/tools/testing/selftests/bpf/progs/
test_overhead.c
    13    int BPF_KPROBE(prog1, struct task_struct *tsk, const char *buf, bool exec)
    31    int BPF_PROG(prog4, struct task_struct *tsk, const char *buf, bool exec)
    37    int BPF_PROG(prog5, struct task_struct *tsk, const char *buf, bool exec)
/linux/drivers/gpu/drm/panthor/
panthor_mmu.h
    47    int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
    88    int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
    90    void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
    92    void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
/linux/drivers/gpu/drm/qxl/
qxl_release.c
    221    drm_exec_init(&release->exec, no_intr ? 0 :    in qxl_release_reserve_list()
    223    drm_exec_until_all_locked(&release->exec) {    in qxl_release_reserve_list()
    225    ret = drm_exec_prepare_obj(&release->exec,    in qxl_release_reserve_list()
    228    drm_exec_retry_on_contention(&release->exec);    in qxl_release_reserve_list()
    241    drm_exec_fini(&release->exec);    in qxl_release_reserve_list()
    252    drm_exec_fini(&release->exec);    in qxl_release_backoff_reserve_list()
    440    drm_exec_fini(&release->exec);    in qxl_release_fence_buffer_objects()
/linux/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c
    41    struct drm_i915_gem_exec_object2 *exec;
    249    struct drm_i915_gem_exec_object2 *exec; /** ioctl execobj[] */
    567    struct drm_i915_gem_exec_object2 *entry = &eb->exec[i];    in eb_add_vma()
    571    ev->exec = entry;    in eb_add_vma()
    659    struct drm_i915_gem_exec_object2 *entry = ev->exec;    in eb_reserve_vma()
    951    vma = eb_lookup_vma(eb, eb->exec[i].handle);    in eb_lookup_vmas()
    957    err = eb_validate_vma(eb, &eb->exec[i], vma);    in eb_lookup_vmas()
    1024    struct drm_i915_gem_exec_object2 *entry = &eb->exec[i];    in eb_validate_vmas()
    1054    eb_vma_misplaced(&eb->exec[i], vma, ev->flags));    in eb_validate_vmas()
    1529    const struct drm_i915_gem_exec_object2 *entry = ev->exec;    in eb_relocate_vma()
    [all …]