/linux/include/drm/
drm_exec.h
     63  drm_exec_obj(struct drm_exec *exec, unsigned long index)    in drm_exec_obj()
     65          return index < exec->num_objects ? exec->objects[index] : NULL;    in drm_exec_obj()
     76  #define drm_exec_for_each_locked_object(exec, index, obj) \
     77          for ((index) = 0; ((obj) = drm_exec_obj(exec, index)); ++(index))
     90  #define drm_exec_for_each_locked_object_reverse(exec, index, obj) \
     91          for ((index) = (exec)->num_objects - 1; \
     92               ((obj) = drm_exec_obj(exec, index)); --(index))
    105  #define drm_exec_until_all_locked(exec) \
    110          drm_exec_cleanup(exec); \
    120  #define drm_exec_retry_on_contention(exec) \
    [all …]
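Taken together, these macros form drm_exec's retry loop: drm_exec_until_all_locked() re-runs its body whenever a ww-mutex deadlock is detected, and drm_exec_retry_on_contention() performs the rollback and restart. A minimal sketch of the usual call pattern, with two hypothetical GEM objects obj_a and obj_b standing in for a driver's buffers:

```c
/*
 * Minimal sketch of the canonical drm_exec retry loop, assuming two
 * hypothetical GEM objects obj_a and obj_b that must be locked together.
 */
struct drm_exec exec;
struct drm_gem_object *obj;
unsigned long index;
int ret = 0;

drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
drm_exec_until_all_locked(&exec) {
	ret = drm_exec_lock_obj(&exec, obj_a);
	drm_exec_retry_on_contention(&exec);	/* rolls back and restarts on contention */
	if (ret)
		goto out;

	ret = drm_exec_lock_obj(&exec, obj_b);
	drm_exec_retry_on_contention(&exec);
	if (ret)
		goto out;
}

/* All reservations are held from here on. */
drm_exec_for_each_locked_object(&exec, index, obj) {
	/* e.g. validate obj or add fences to its reservation */
}
out:
	drm_exec_fini(&exec);
```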
|
drm_gpuvm.h
    537  struct drm_exec exec;
    574  struct drm_exec *exec,
    578  struct drm_exec *exec,
    582  struct drm_exec *exec,
    607          drm_exec_fini(&vm_exec->exec);    in drm_gpuvm_exec_unlock()
    610  int drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec);
    612  struct drm_exec *exec,
    632          drm_gpuvm_resv_add_fence(vm_exec->vm, &vm_exec->exec, fence,    in drm_gpuvm_exec_resv_add_fence()
    647          return drm_gpuvm_validate(vm_exec->vm, &vm_exec->exec);    in drm_gpuvm_exec_validate()
   1206  struct drm_exec *exec);
   [all …]
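struct drm_gpuvm_exec (the member at line 537) bundles a drm_exec with the GPU-VM it operates on, so a driver can lock the VM reservation plus all external objects in one call. A hedged sketch of the wrapper's intended use, assuming an existing gpuvm pointer and a fence to install (names and usage values illustrative):

```c
/*
 * Sketch of the drm_gpuvm_exec convenience wrapper; gpuvm and fence
 * are assumed to exist in the surrounding driver code.
 */
struct drm_gpuvm_exec vm_exec = {
	.vm = gpuvm,
	.flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
	.num_fences = 1,
};
int ret;

ret = drm_gpuvm_exec_lock(&vm_exec);		/* VM resv + all extobj resvs */
if (ret)
	return ret;

ret = drm_gpuvm_exec_validate(&vm_exec);	/* revalidate evicted BOs */
if (!ret)
	drm_gpuvm_exec_resv_add_fence(&vm_exec, fence,
				      DMA_RESV_USAGE_BOOKKEEP,
				      DMA_RESV_USAGE_BOOKKEEP);

drm_gpuvm_exec_unlock(&vm_exec);		/* drm_exec_fini() under the hood */
```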
|
/linux/include/uapi/linux/
a.out.h
     44  #define N_MAGIC(exec) ((exec).a_info & 0xffff)
     46  #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff))
     47  #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff)
     48  #define N_SET_INFO(exec, magic, type, flags) \
     49          ((exec).a_info = ((magic) & 0xffff) \
     52  #define N_SET_MAGIC(exec, magic) \
     53          ((exec).a_info = (((exec).a_info & 0xffff0000) | ((magic) & 0xffff)))
     55  #define N_SET_MACHTYPE(exec, machtype) \
     56          ((exec).a_info = \
     57           ((exec).a_info&0xff00ffff) | ((((int)(machtype))&0xff) << 16))
    [all …]
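These macros pack three fields into the 32-bit a_info word of an a.out header: magic in bits 0-15, machine type in bits 16-23, flags in bits 24-31. A small userspace demo of the packing, assuming <linux/a.out.h> provides struct exec, ZMAGIC, and M_386 (as the historical UAPI header does):

```c
/* Demo of the a_info layout: bits 0-15 magic, 16-23 machine type,
 * 24-31 flags. ZMAGIC (0413 octal) is the classic demand-paged magic;
 * values here are purely illustrative. */
#include <stdio.h>
#include <linux/a.out.h>

int main(void)
{
	struct exec e = { 0 };

	N_SET_INFO(e, ZMAGIC, M_386, 0);
	printf("a_info=%#x magic=%#x machtype=%d flags=%#x\n",
	       e.a_info, N_MAGIC(e), N_MACHTYPE(e), N_FLAGS(e));
	return 0;
}
```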
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_csa.c
     69  struct drm_exec exec;    in amdgpu_map_static_csa()
     72  drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);    in amdgpu_map_static_csa()
     73  drm_exec_until_all_locked(&exec) {    in amdgpu_map_static_csa()
     74          r = amdgpu_vm_lock_pd(vm, &exec, 0);    in amdgpu_map_static_csa()
     76          r = drm_exec_lock_obj(&exec, &bo->tbo.base);    in amdgpu_map_static_csa()
     77          drm_exec_retry_on_contention(&exec);    in amdgpu_map_static_csa()
    101  drm_exec_fini(&exec);    in amdgpu_map_static_csa()
    109  struct drm_exec exec;    in amdgpu_unmap_static_csa()
    112  drm_exec_init(&exec, 0, 0);    in amdgpu_unmap_static_csa()
    113  drm_exec_until_all_locked(&exec) {    in amdgpu_unmap_static_csa()
    [all …]
|
amdgpu_userq_fence.c
    473  struct drm_exec exec;    in amdgpu_userq_signal_ioctl()
    576  drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT,    in amdgpu_userq_signal_ioctl()
    580  drm_exec_until_all_locked(&exec) {    in amdgpu_userq_signal_ioctl()
    581          r = drm_exec_prepare_array(&exec, gobj_read, num_read_bo_handles, 1);    in amdgpu_userq_signal_ioctl()
    582          drm_exec_retry_on_contention(&exec);    in amdgpu_userq_signal_ioctl()
    588          r = drm_exec_prepare_array(&exec, gobj_write, num_write_bo_handles, 1);    in amdgpu_userq_signal_ioctl()
    589          drm_exec_retry_on_contention(&exec);    in amdgpu_userq_signal_ioctl()
    620  drm_exec_fini(&exec);    in amdgpu_userq_signal_ioctl()
    659  struct drm_exec exec;    in amdgpu_userq_wait_ioctl()
    729  drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT,    in amdgpu_userq_wait_ioctl()
    [all …]
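drm_exec_prepare_array() locks every object in an array and reserves fence slots on each, which is why the signal ioctl above can handle its read and write handle sets with two calls. Reassembled from the hits, the loop is roughly as follows; gobj_read/gobj_write are the GEM object arrays already resolved from user handles, and the error paths are abbreviated:

```c
/* Sketch reassembled from the excerpt above, not the verbatim ioctl. */
struct drm_exec exec;
int r;

drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT,
	      num_read_bo_handles + num_write_bo_handles);
drm_exec_until_all_locked(&exec) {
	r = drm_exec_prepare_array(&exec, gobj_read, num_read_bo_handles, 1);
	drm_exec_retry_on_contention(&exec);
	if (r)
		break;

	r = drm_exec_prepare_array(&exec, gobj_write, num_write_bo_handles, 1);
	drm_exec_retry_on_contention(&exec);
	if (r)
		break;
}
/* ... attach the user fence to the locked objects ... */
drm_exec_fini(&exec);
```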
|
/linux/drivers/gpu/drm/xe/
xe_dma_buf.c
     55  struct drm_exec *exec = XE_VALIDATION_UNSUPPORTED;    in xe_dma_buf_pin()
     77  ret = xe_bo_migrate(bo, XE_PL_TT, NULL, exec);    in xe_dma_buf_pin()
     87  ret = xe_bo_pin_external(bo, !allow_vram, exec);    in xe_dma_buf_pin()
    107  struct drm_exec *exec = XE_VALIDATION_UNSUPPORTED;    in xe_dma_buf_map()
    116  r = xe_bo_migrate(bo, XE_PL_TT, NULL, exec);    in xe_dma_buf_map()
    118  r = xe_bo_validate(bo, NULL, false, exec);    in xe_dma_buf_map()
    178  struct drm_exec exec;    in xe_dma_buf_begin_cpu_access()
    185  xe_validation_guard(&ctx, &xe_bo_device(bo)->val, &exec, (struct xe_val_flags) {}, ret) {    in xe_dma_buf_begin_cpu_access()
    186          ret = drm_exec_lock_obj(&exec, &bo->ttm.base);    in xe_dma_buf_begin_cpu_access()
    187          drm_exec_retry_on_contention(&exec);    in xe_dma_buf_begin_cpu_access()
    [all …]
|
xe_bo.c
   1197  struct drm_exec exec;    in xe_bo_notifier_prepare_pinned()
   1201  xe_validation_guard(&ctx, &xe->val, &exec, (struct xe_val_flags) {.exclusive = true}, ret) {    in xe_bo_notifier_prepare_pinned()
   1202          ret = drm_exec_lock_obj(&exec, &bo->ttm.base);    in xe_bo_notifier_prepare_pinned()
   1203          drm_exec_retry_on_contention(&exec);    in xe_bo_notifier_prepare_pinned()
   1224          XE_BO_FLAG_PINNED, &exec);    in xe_bo_notifier_prepare_pinned()
   1226          drm_exec_retry_on_contention(&exec);    in xe_bo_notifier_prepare_pinned()
   1334  struct drm_exec exec;    in xe_bo_evict_pinned()
   1339  xe_validation_guard(&ctx, &xe->val, &exec, (struct xe_val_flags) {.exclusive = true}, ret) {    in xe_bo_evict_pinned()
   1340          ret = drm_exec_lock_obj(&exec, &bo->ttm.base);    in xe_bo_evict_pinned()
   1341          drm_exec_retry_on_contention(&exec);    in xe_bo_evict_pinned()
    [all …]
|
xe_pagefault.c
     48  static int xe_pagefault_begin(struct drm_exec *exec, struct xe_vma *vma,    in xe_pagefault_begin()
     55          err = xe_vm_lock_vma(exec, vma);    in xe_pagefault_begin()
     62          return need_vram_move ? xe_bo_migrate(bo, vram->placement, NULL, exec) :    in xe_pagefault_begin()
     63                  xe_bo_validate(bo, vm, true, exec);    in xe_pagefault_begin()
     72  struct drm_exec exec;    in xe_pagefault_handle_vma()
    104  xe_validation_ctx_init(&ctx, &vm->xe->val, &exec, (struct xe_val_flags) {});    in xe_pagefault_handle_vma()
    105  drm_exec_until_all_locked(&exec) {    in xe_pagefault_handle_vma()
    106          err = xe_pagefault_begin(&exec, vma, tile->mem.vram,    in xe_pagefault_handle_vma()
    108          drm_exec_retry_on_contention(&exec);    in xe_pagefault_handle_vma()
    115          xe_vm_set_validation_exec(vm, &exec);    in xe_pagefault_handle_vma()
|
xe_vm.c
     59  int xe_vm_drm_exec_lock(struct xe_vm *vm, struct drm_exec *exec)    in xe_vm_drm_exec_lock()
     61          return drm_exec_lock_obj(exec, xe_vm_obj(vm));    in xe_vm_drm_exec_lock()
    200  struct drm_exec *exec)    in resume_and_reinstall_preempt_fences()
    210          drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, q->lr.pfence,    in resume_and_reinstall_preempt_fences()
    222  struct drm_exec *exec = &vm_exec.exec;    in xe_vm_add_compute_exec_queue()
    248          drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, pfence,    in xe_vm_add_compute_exec_queue()
    327  static int xe_gpuvm_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)    in xe_gpuvm_validate()
    341          ret = xe_bo_validate(gem_to_xe_bo(vm_bo->obj), vm, false, exec);    in xe_gpuvm_validate()
    364  int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec,    in xe_vm_validate_rebind()
    372          ret = drm_gpuvm_validate(&vm->gpuvm, exec);    in xe_vm_validate_rebind()
    [all …]
|
xe_vm.h
    263  int xe_vm_lock_vma(struct drm_exec *exec, struct xe_vma *vma);
    265  int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec,
    293  int xe_vm_drm_exec_lock(struct xe_vm *vm, struct drm_exec *exec);
    374  static inline void xe_vm_set_validation_exec(struct xe_vm *vm, struct drm_exec *exec)    in xe_vm_set_validation_exec()
    377          xe_assert(vm->xe, !!exec ^ !!vm->validation._exec);    in xe_vm_set_validation_exec()
    378          vm->validation._exec = exec;    in xe_vm_set_validation_exec()
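The XOR assert at line 377 encodes a strict set/clear pairing: exactly one of the incoming pointer and the stored pointer may be non-NULL, so the helper can only install an exec context when none is set and only clear one that is. The implied call pattern (a sketch inferred from the assert, not verbatim Xe code):

```c
/* Pairing implied by the assert; not verbatim Xe code. */
xe_vm_set_validation_exec(vm, &exec);	/* ok: stored pointer was NULL */
/* ... validation work that may consult vm->validation._exec ... */
xe_vm_set_validation_exec(vm, NULL);	/* ok: clears the stored pointer */
```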
|
/linux/drivers/gpu/drm/
drm_gpuvm.c
   1189  exec_prepare_obj(struct drm_exec *exec, struct drm_gem_object *obj,    in exec_prepare_obj()
   1192          return num_fences ? drm_exec_prepare_obj(exec, obj, num_fences) :    in exec_prepare_obj()
   1193                  drm_exec_lock_obj(exec, obj);    in exec_prepare_obj()
   1212  struct drm_exec *exec,    in drm_gpuvm_prepare_vm()
   1215          return exec_prepare_obj(exec, gpuvm->r_obj, num_fences);    in drm_gpuvm_prepare_vm()
   1221  struct drm_exec *exec,    in __drm_gpuvm_prepare_objects()
   1229          ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);    in __drm_gpuvm_prepare_objects()
   1242  struct drm_exec *exec,    in drm_gpuvm_prepare_objects_locked()
   1253          ret = exec_prepare_obj(exec, vm_bo->obj, num_fences);    in drm_gpuvm_prepare_objects_locked()
   1289  struct drm_exec *exec,    in drm_gpuvm_prepare_objects()
   [all …]
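exec_prepare_obj() is the small dispatcher behind all of these paths: with a non-zero num_fences it calls drm_exec_prepare_obj() (lock plus fence-slot reservation), otherwise it degrades to a plain drm_exec_lock_obj(). For drivers that run the drm_exec loop themselves instead of using struct drm_gpuvm_exec, the prepare functions slot in like this (a sketch under that assumption, mirroring the msm submit path further down):

```c
/* Sketch of the lower-level gpuvm prepare API inside a driver-managed
 * drm_exec loop; gpuvm is assumed to exist in the surrounding code. */
struct drm_exec exec;
int ret;

drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
drm_exec_until_all_locked(&exec) {
	ret = drm_gpuvm_prepare_vm(gpuvm, &exec, 1);	/* the VM's own resv */
	drm_exec_retry_on_contention(&exec);
	if (ret)
		break;

	ret = drm_gpuvm_prepare_objects(gpuvm, &exec, 1); /* all external BOs */
	drm_exec_retry_on_contention(&exec);
	if (ret)
		break;
}
```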
|
/linux/tools/testing/selftests/exec/
check-exec.c
    269  const int exec = variant->mount_exec && variant->file_exec;    in TEST_F()
    271  test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES);    in TEST_F()
    274  test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES);    in TEST_F()
    279  const int exec = variant->mount_exec && variant->file_exec;    in TEST_F()
    283  test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES);    in TEST_F()
    286  test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES);    in TEST_F()
    292  const int exec = variant->file_exec;    in TEST_F()
    294  test_exec_fd(_metadata, self->memfd, exec ? 0 : EACCES);    in TEST_F()
    297  test_exec_fd(_metadata, self->memfd, exec ? 0 : EACCES);    in TEST_F()
    302  const int exec = variant->file_exec;    in TEST_F()
    [all …]
|
null-argv.c
     10  #define FORK(exec) \
     15          exec; /* Some kind of exec */ \
     16          perror("# " #exec); \
     19          check_result(pid, #exec); \
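The FORK() macro wraps the fork/exec/report boilerplate so each test line reads as FORK(execve(...)). A self-contained sketch of the same pattern, with the harness's check_result() replaced by a plain waitpid(); the NULL argv/envp is deliberate, since that is the kernel behaviour this test exercises:

```c
/* Standalone sketch of the pattern FORK() wraps: fork, exec in the
 * child, report exec failure, collect the result in the parent. */
#include <stdio.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

int main(void)
{
	pid_t pid = fork();

	if (pid == 0) {
		execve("/bin/true", NULL, NULL);  /* NULL argv/envp on purpose */
		perror("# execve");               /* only reached on failure */
		_exit(1);
	}
	waitpid(pid, NULL, 0);
	return 0;
}
```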
|
.gitignore
     12  /check-exec
     18  /set-exec
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/timer/
base.c
     73  LIST_HEAD(exec);    in nvkm_timer_alarm_trigger()
     90          list_add(&alarm->exec, &exec);    in nvkm_timer_alarm_trigger()
     99  list_for_each_entry_safe(alarm, atemp, &exec, exec) {    in nvkm_timer_alarm_trigger()
    100          list_del(&alarm->exec);    in nvkm_timer_alarm_trigger()
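The trigger path gathers due alarms onto a local exec list under the timer lock, then runs their callbacks after dropping it, so a callback can re-arm or free its alarm without deadlocking. A generic sketch of that splice-then-execute pattern (struct alarm and its fields here are illustrative, not nvkm's actual types):

```c
/* Generic kernel-style sketch: collect due items onto a local list
 * under the lock, then run callbacks safely outside it. */
#include <linux/list.h>
#include <linux/spinlock.h>

struct alarm {
	struct list_head exec;		/* linkage, illustrative */
	void (*func)(struct alarm *);
};

static void trigger(struct list_head *pending, spinlock_t *lock)
{
	struct alarm *alarm, *atemp;
	LIST_HEAD(exec);

	spin_lock(lock);
	list_for_each_entry_safe(alarm, atemp, pending, exec)
		list_move(&alarm->exec, &exec);	/* collect due alarms */
	spin_unlock(lock);

	/* callbacks run without the lock held */
	list_for_each_entry_safe(alarm, atemp, &exec, exec) {
		list_del_init(&alarm->exec);
		alarm->func(alarm);
	}
}
```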
|
/linux/drivers/gpu/drm/msm/
msm_gem_submit.c
    277  struct drm_exec *exec = &submit->exec;    in submit_lock_objects_vmbind()
    280  drm_exec_init(&submit->exec, flags, submit->nr_bos);    in submit_lock_objects_vmbind()
    282  drm_exec_until_all_locked (&submit->exec) {    in submit_lock_objects_vmbind()
    283          ret = drm_gpuvm_prepare_vm(submit->vm, exec, 1);    in submit_lock_objects_vmbind()
    284          drm_exec_retry_on_contention(exec);    in submit_lock_objects_vmbind()
    288          ret = drm_gpuvm_prepare_objects(submit->vm, exec, 1);    in submit_lock_objects_vmbind()
    289          drm_exec_retry_on_contention(exec);    in submit_lock_objects_vmbind()
    306  drm_exec_init(&submit->exec, flags, submit->nr_bos);    in submit_lock_objects()
    308  drm_exec_until_all_locked (&submit->exec) {    in submit_lock_objects()
    309          ret = drm_exec_lock_obj(&submit->exec,    in submit_lock_objects()
    [all …]
|
msm_gem.c
     55  struct drm_exec exec;    in msm_gem_close()
     83  msm_gem_lock_vm_and_obj(&exec, obj, ctx->vm);    in msm_gem_close()
     85  drm_exec_fini(&exec); /* drop locks */    in msm_gem_close()
    108  struct drm_exec exec;    in msm_gem_vma_put()
    110  msm_gem_lock_vm_and_obj(&exec, obj, priv->kms->vm);    in msm_gem_vma_put()
    112  drm_exec_fini(&exec); /* drop locks */    in msm_gem_vma_put()
    584  struct drm_exec exec;    in msm_gem_get_and_pin_iova_range()
    587  msm_gem_lock_vm_and_obj(&exec, obj, vm);    in msm_gem_get_and_pin_iova_range()
    589  drm_exec_fini(&exec); /* drop locks */    in msm_gem_get_and_pin_iova_range()
    609  struct drm_exec exec;    in msm_gem_get_iova()
    [all …]
|
msm_gem.h
    361  msm_gem_lock_vm_and_obj(struct drm_exec *exec,    in msm_gem_lock_vm_and_obj()
    367          drm_exec_init(exec, 0, 2);    in msm_gem_lock_vm_and_obj()
    368          drm_exec_until_all_locked (exec) {    in msm_gem_lock_vm_and_obj()
    369                  ret = drm_exec_lock_obj(exec, drm_gpuvm_resv_obj(vm));    in msm_gem_lock_vm_and_obj()
    371                  ret = drm_exec_lock_obj(exec, obj);    in msm_gem_lock_vm_and_obj()
    372                  drm_exec_retry_on_contention(exec);    in msm_gem_lock_vm_and_obj()
    439  struct drm_exec exec;
|
msm_gem_vma.c
    439  msm_gem_vm_bo_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec)    in msm_gem_vm_bo_validate()
    899  struct drm_exec exec;    in msm_gem_vm_close()
    916  drm_exec_init(&exec, 0, 2);    in msm_gem_vm_close()
    917  drm_exec_until_all_locked (&exec) {    in msm_gem_vm_close()
    918          drm_exec_lock_obj(&exec, drm_gpuvm_resv_obj(gpuvm));    in msm_gem_vm_close()
    919          drm_exec_retry_on_contention(&exec);    in msm_gem_vm_close()
    932          drm_exec_lock_obj(&exec, obj);    in msm_gem_vm_close()
    933          drm_exec_retry_on_contention(&exec);    in msm_gem_vm_close()
    940          drm_exec_unlock_obj(&exec, obj);    in msm_gem_vm_close()
    944  drm_exec_fini(&exec);    in msm_gem_vm_close()
    [all …]
|
/linux/tools/perf/util/
comm.h
     15  bool exec;
     23  struct comm *comm__new(const char *str, u64 timestamp, bool exec);
     26  bool exec);
|
comm.c
    192  struct comm *comm__new(const char *str, u64 timestamp, bool exec)    in comm__new()
    200          comm->exec = exec;    in comm__new()
    211  int comm__override(struct comm *comm, const char *str, u64 timestamp, bool exec)    in comm__override()
    222          if (exec)    in comm__override()
    223                  comm->exec = true;    in comm__override()
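comm__new() records a thread name together with its timestamp and whether the rename came from exec(2); note that comm__override() only ever raises the exec flag (lines 222-223), so once a comm is marked exec it stays that way. Hypothetical usage, with plain integers standing in for perf sample times:

```c
/* Hypothetical usage; timestamps stand in for perf sample times. */
struct comm *comm = comm__new("bash", /*timestamp=*/1000, /*exec=*/true);

/* A later prctl(PR_SET_NAME)-style rename: comm->exec stays true
 * because comm__override() never clears the flag. */
comm__override(comm, "sh", /*timestamp=*/2000, /*exec=*/false);
```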
|
/linux/tools/testing/selftests/bpf/progs/
test_overhead.c
     10  int BPF_KPROBE(prog1, struct task_struct *tsk, const char *buf, bool exec)
     28  int BPF_PROG(prog4, struct task_struct *tsk, const char *buf, bool exec)
     34  int BPF_PROG(prog5, struct task_struct *tsk, const char *buf, bool exec)
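All three programs share the argument list of __set_task_comm(struct task_struct *tsk, const char *buf, bool exec), the kernel function this overhead benchmark attaches to. A minimal buildable sketch of the fentry flavour, assuming libbpf's bpf_tracing.h and a vmlinux.h generated by bpftool:

```c
/* Sketch of an fentry program with the same shape as prog4 above. */
#include "vmlinux.h"
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

SEC("fentry/__set_task_comm")
int BPF_PROG(on_set_task_comm, struct task_struct *tsk, const char *buf, bool exec)
{
	return !tsk;	/* trivial body, as in the benchmark programs */
}

char _license[] SEC("license") = "GPL";
```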
|
/linux/drivers/sbus/char/
oradax.c
    861  ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT;    in dax_ccb_exec()
    868  ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;    in dax_ccb_exec()
    878  ctx->result.exec.status = DAX_SUBMIT_ERR_CCB_ARR_MMU_MISS;    in dax_ccb_exec()
    886  ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;    in dax_ccb_exec()
    892  ctx->result.exec.status = dax_preprocess_usr_ccbs(ctx, idx, nccbs);    in dax_ccb_exec()
    893  if (ctx->result.exec.status != DAX_SUBMIT_OK)    in dax_ccb_exec()
    896  ctx->result.exec.status = dax_lock_pages(ctx, idx, nccbs,    in dax_ccb_exec()
    897          &ctx->result.exec.status_data);    in dax_ccb_exec()
    898  if (ctx->result.exec.status != DAX_SUBMIT_OK)    in dax_ccb_exec()
    906          &accepted_len, &ctx->result.exec.status_data);    in dax_ccb_exec()
    [all …]
|
/linux/drivers/gpu/drm/xe/tests/
xe_bo.c
     26  struct kunit *test, struct drm_exec *exec)    in ccs_test_migrate()
     38  ret = xe_bo_validate(bo, NULL, false, exec);    in ccs_test_migrate()
     63  ret = xe_bo_evict(bo, exec);    in ccs_test_migrate()
    135  struct drm_exec *exec = XE_VALIDATION_OPT_OUT;    in ccs_test_run_tile()
    143          bo_flags, exec);    in ccs_test_run_tile()
    153          test, exec);    in ccs_test_run_tile()
    159          0xdeadbeefdeadbeefULL, test, exec);    in ccs_test_run_tile()
    164  ret = ccs_test_migrate(tile, bo, true, 0ULL, 0ULL, test, exec);    in ccs_test_run_tile()
    214  struct drm_exec *exec = XE_VALIDATION_OPT_OUT;    in evict_test_run_tile()
    225          bo_flags, exec);    in evict_test_run_tile()
    [all …]
|
/linux/drivers/gpu/drm/panthor/
panthor_mmu.h
     49  int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
     90  int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
     92  void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
     94  void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
|