
Searched refs:exec (Results 1 – 25 of 205) sorted by relevance


/linux/include/drm/
drm_exec.h
63 drm_exec_obj(struct drm_exec *exec, unsigned long index) in drm_exec_obj() argument
65 return index < exec->num_objects ? exec->objects[index] : NULL; in drm_exec_obj()
76 #define drm_exec_for_each_locked_object(exec, index, obj) \ argument
77 for ((index) = 0; ((obj) = drm_exec_obj(exec, index)); ++(index))
90 #define drm_exec_for_each_locked_object_reverse(exec, index, obj) \ argument
91 for ((index) = (exec)->num_objects - 1; \
92 ((obj) = drm_exec_obj(exec, index)); --(index))
105 #define drm_exec_until_all_locked(exec) \ argument
110 drm_exec_cleanup(exec); \
120 #define drm_exec_retry_on_contention(exec) \ argument
[all …]
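
The matches above (together with the amdgpu, msm and xe hits further down this page) outline the usual drm_exec locking loop: init, retry-on-contention lock acquisition, iteration over the locked objects, fini. The sketch below pieces that loop together; the two GEM objects and the helper name are placeholders, not code from any listed file.

#include <drm/drm_exec.h>

/* Hypothetical helper: lock the reservations of two GEM objects. */
static int lock_two_objects(struct drm_gem_object *a, struct drm_gem_object *b)
{
    struct drm_gem_object *obj;
    struct drm_exec exec;
    unsigned long index;
    int ret = 0;

    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 2);
    drm_exec_until_all_locked(&exec) {
        ret = drm_exec_lock_obj(&exec, a);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            goto out;

        ret = drm_exec_lock_obj(&exec, b);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            goto out;
    }

    /* All reservations are held here; walk them with the iterator macro. */
    drm_exec_for_each_locked_object(&exec, index, obj) {
        /* e.g. add fences or validate the object */
    }

out:
    drm_exec_fini(&exec);    /* drops every lock taken above */
    return ret;
}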
drm_gpuvm.h
537 struct drm_exec exec; member
574 struct drm_exec *exec,
578 struct drm_exec *exec,
582 struct drm_exec *exec,
607 drm_exec_fini(&vm_exec->exec); in drm_gpuvm_exec_unlock()
610 int drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec);
612 struct drm_exec *exec,
632 drm_gpuvm_resv_add_fence(vm_exec->vm, &vm_exec->exec, fence, in drm_gpuvm_exec_resv_add_fence()
647 return drm_gpuvm_validate(vm_exec->vm, &vm_exec->exec); in drm_gpuvm_exec_validate()
1206 struct drm_exec *exec);
[all …]
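
The drm_gpuvm_exec helpers matched above wrap a drm_exec instance together with a GPU VM. A rough sketch of how they might be combined is below; only drm_gpuvm_exec_validate(), drm_gpuvm_exec_resv_add_fence() and drm_gpuvm_exec_unlock() appear in the matches, while drm_gpuvm_exec_lock() and the .vm/.flags/.num_fences initializers are assumptions about the rest of the API, not taken from this page.

#include <drm/drm_exec.h>
#include <drm/drm_gpuvm.h>
#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/* Hypothetical submission path using the drm_gpuvm_exec wrapper. */
static int submit_with_gpuvm(struct drm_gpuvm *gpuvm, struct dma_fence *fence)
{
    struct drm_gpuvm_exec vm_exec = {
        .vm = gpuvm,                           /* assumed field name */
        .flags = DRM_EXEC_INTERRUPTIBLE_WAIT,  /* assumed field name */
        .num_fences = 1,                       /* assumed field name */
    };
    int ret;

    ret = drm_gpuvm_exec_lock(&vm_exec);       /* assumed helper */
    if (ret)
        return ret;

    ret = drm_gpuvm_exec_validate(&vm_exec);   /* revalidate evicted BOs */
    if (!ret)
        drm_gpuvm_exec_resv_add_fence(&vm_exec, fence,
                                      DMA_RESV_USAGE_BOOKKEEP,
                                      DMA_RESV_USAGE_BOOKKEEP);

    drm_gpuvm_exec_unlock(&vm_exec);           /* drm_exec_fini() underneath */
    return ret;
}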
/linux/include/uapi/linux/
a.out.h
44 #define N_MAGIC(exec) ((exec).a_info & 0xffff) argument
46 #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff)) argument
47 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) argument
48 #define N_SET_INFO(exec, magic, type, flags) \ argument
49 ((exec).a_info = ((magic) & 0xffff) \
52 #define N_SET_MAGIC(exec, magic) \ argument
53 ((exec).a_info = (((exec).a_info & 0xffff0000) | ((magic) & 0xffff)))
55 #define N_SET_MACHTYPE(exec, machtype) \ argument
56 ((exec).a_info = \
57 ((exec).a_info&0xff00ffff) | ((((int)(machtype))&0xff) << 16))
[all …]
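
The macros above pack an a.out header's magic number, machine type and flags into the single a_info word: magic in bits 0-15, machine type in bits 16-23, flags in bits 24-31. The standalone snippet below re-declares a minimal struct and the read accessors rather than including <linux/a.out.h>; the values 0x10b (the historical ZMAGIC) and 100 (M_386) are used purely for illustration.

#include <stdio.h>

struct exec { unsigned int a_info; };   /* minimal stand-in for the real header */

#define N_MAGIC(exec)    ((exec).a_info & 0xffff)
#define N_MACHTYPE(exec) (((exec).a_info >> 16) & 0xff)
#define N_FLAGS(exec)    (((exec).a_info >> 24) & 0xff)

int main(void)
{
    struct exec e = { 0 };

    /* Equivalent of N_SET_INFO(e, 0x10b, 100, 0). */
    e.a_info = (0x10b & 0xffff) | ((100 & 0xff) << 16) | ((0 & 0xff) << 24);

    printf("magic=0x%x machtype=%u flags=0x%x\n",
           N_MAGIC(e), N_MACHTYPE(e), N_FLAGS(e));
    return 0;
}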
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_csa.c
69 struct drm_exec exec; in amdgpu_map_static_csa() local
72 drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0); in amdgpu_map_static_csa()
73 drm_exec_until_all_locked(&exec) { in amdgpu_map_static_csa()
74 r = amdgpu_vm_lock_pd(vm, &exec, 0); in amdgpu_map_static_csa()
76 r = drm_exec_lock_obj(&exec, &bo->tbo.base); in amdgpu_map_static_csa()
77 drm_exec_retry_on_contention(&exec); in amdgpu_map_static_csa()
101 drm_exec_fini(&exec); in amdgpu_map_static_csa()
109 struct drm_exec exec; in amdgpu_unmap_static_csa() local
112 drm_exec_init(&exec, 0, 0); in amdgpu_unmap_static_csa()
113 drm_exec_until_all_locked(&exec) { in amdgpu_unmap_static_csa()
[all …]
amdgpu_amdkfd_gpuvm.c
1139 struct drm_exec exec; member
1169 drm_exec_init(&ctx->exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0); in reserve_bo_and_vm()
1170 drm_exec_until_all_locked(&ctx->exec) { in reserve_bo_and_vm()
1171 ret = amdgpu_vm_lock_pd(vm, &ctx->exec, 2); in reserve_bo_and_vm()
1172 drm_exec_retry_on_contention(&ctx->exec); in reserve_bo_and_vm()
1176 ret = drm_exec_prepare_obj(&ctx->exec, &bo->tbo.base, 1); in reserve_bo_and_vm()
1177 drm_exec_retry_on_contention(&ctx->exec); in reserve_bo_and_vm()
1185 drm_exec_fini(&ctx->exec); in reserve_bo_and_vm()
1208 drm_exec_init(&ctx->exec, DRM_EXEC_INTERRUPTIBLE_WAIT | in reserve_bo_and_cond_vms()
1210 drm_exec_until_all_locked(&ctx->exec) { in reserve_bo_and_cond_vms()
[all …]
/linux/tools/testing/selftests/exec/
check-exec.c
269 const int exec = variant->mount_exec && variant->file_exec; in TEST_F() local
271 test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES); in TEST_F()
274 test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES); in TEST_F()
279 const int exec = variant->mount_exec && variant->file_exec; in TEST_F() local
283 test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES); in TEST_F()
286 test_exec_path(_metadata, reg_file_path, exec ? 0 : EACCES); in TEST_F()
292 const int exec = variant->file_exec; in TEST_F() local
294 test_exec_fd(_metadata, self->memfd, exec ? 0 : EACCES); in TEST_F()
297 test_exec_fd(_metadata, self->memfd, exec ? 0 : EACCES); in TEST_F()
302 const int exec = variant->file_exec; in TEST_F() local
[all …]
null-argv.c
10 #define FORK(exec) \ argument
15 exec; /* Some kind of exec */ \
16 perror("# " #exec); \
19 check_result(pid, #exec); \
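
The FORK() macro above wraps the usual fork-then-exec test pattern: the child performs some exec call (the test passes an empty argv, as the file name suggests), prints an error if the exec returns, and the parent checks the result. A standalone sketch of that pattern, with check_result() replaced by a plain waitpid() and /bin/true as an arbitrary target, looks roughly like this.

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

int main(void)
{
    char * const argv[] = { NULL };     /* empty argv: argv[0] is NULL */
    char * const envp[] = { NULL };
    pid_t pid = fork();
    int status;

    if (pid == 0) {
        execve("/bin/true", argv, envp);
        perror("# execve");             /* reached only if the exec failed */
        _exit(127);
    }
    if (pid < 0 || waitpid(pid, &status, 0) != pid)
        return 1;
    printf("child exited with status %d\n", WEXITSTATUS(status));
    return 0;
}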
.gitignore
12 /check-exec
18 /set-exec
/linux/drivers/gpu/drm/msm/
msm_gem_submit.c
277 struct drm_exec *exec = &submit->exec; in submit_lock_objects_vmbind() local
280 drm_exec_init(&submit->exec, flags, submit->nr_bos); in submit_lock_objects_vmbind()
282 drm_exec_until_all_locked (&submit->exec) { in submit_lock_objects_vmbind()
283 ret = drm_gpuvm_prepare_vm(submit->vm, exec, 1); in submit_lock_objects_vmbind()
284 drm_exec_retry_on_contention(exec); in submit_lock_objects_vmbind()
288 ret = drm_gpuvm_prepare_objects(submit->vm, exec, 1); in submit_lock_objects_vmbind()
289 drm_exec_retry_on_contention(exec); in submit_lock_objects_vmbind()
306 drm_exec_init(&submit->exec, flags, submit->nr_bos); in submit_lock_objects()
308 drm_exec_until_all_locked (&submit->exec) { in submit_lock_objects()
309 ret = drm_exec_lock_obj(&submit->exec, in submit_lock_objects()
[all …]
msm_gem.c
55 struct drm_exec exec; in msm_gem_close() local
83 msm_gem_lock_vm_and_obj(&exec, obj, ctx->vm); in msm_gem_close()
85 drm_exec_fini(&exec); /* drop locks */ in msm_gem_close()
108 struct drm_exec exec; in msm_gem_vma_put() local
110 msm_gem_lock_vm_and_obj(&exec, obj, priv->kms->vm); in msm_gem_vma_put()
112 drm_exec_fini(&exec); /* drop locks */ in msm_gem_vma_put()
584 struct drm_exec exec; in msm_gem_get_and_pin_iova_range() local
587 msm_gem_lock_vm_and_obj(&exec, obj, vm); in msm_gem_get_and_pin_iova_range()
589 drm_exec_fini(&exec); /* drop locks */ in msm_gem_get_and_pin_iova_range()
609 struct drm_exec exec; in msm_gem_get_iova() local
[all …]
msm_gem.h
361 msm_gem_lock_vm_and_obj(struct drm_exec *exec, in msm_gem_lock_vm_and_obj() argument
367 drm_exec_init(exec, 0, 2); in msm_gem_lock_vm_and_obj()
368 drm_exec_until_all_locked (exec) { in msm_gem_lock_vm_and_obj()
369 ret = drm_exec_lock_obj(exec, drm_gpuvm_resv_obj(vm)); in msm_gem_lock_vm_and_obj()
371 ret = drm_exec_lock_obj(exec, obj); in msm_gem_lock_vm_and_obj()
372 drm_exec_retry_on_contention(exec); in msm_gem_lock_vm_and_obj()
439 struct drm_exec exec; member
msm_gem_vma.c
439 msm_gem_vm_bo_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec) in msm_gem_vm_bo_validate() argument
896 struct drm_exec exec; in msm_gem_vm_close() local
913 drm_exec_init(&exec, 0, 2); in msm_gem_vm_close()
914 drm_exec_until_all_locked (&exec) { in msm_gem_vm_close()
915 drm_exec_lock_obj(&exec, drm_gpuvm_resv_obj(gpuvm)); in msm_gem_vm_close()
916 drm_exec_retry_on_contention(&exec); in msm_gem_vm_close()
929 drm_exec_lock_obj(&exec, obj); in msm_gem_vm_close()
930 drm_exec_retry_on_contention(&exec); in msm_gem_vm_close()
937 drm_exec_unlock_obj(&exec, obj); in msm_gem_vm_close()
941 drm_exec_fini(&exec); in msm_gem_vm_close()
[all …]
/linux/drivers/sbus/char/
oradax.c
861 ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT; in dax_ccb_exec()
868 ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL; in dax_ccb_exec()
878 ctx->result.exec.status = DAX_SUBMIT_ERR_CCB_ARR_MMU_MISS; in dax_ccb_exec()
886 ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL; in dax_ccb_exec()
892 ctx->result.exec.status = dax_preprocess_usr_ccbs(ctx, idx, nccbs); in dax_ccb_exec()
893 if (ctx->result.exec.status != DAX_SUBMIT_OK) in dax_ccb_exec()
896 ctx->result.exec.status = dax_lock_pages(ctx, idx, nccbs, in dax_ccb_exec()
897 &ctx->result.exec.status_data); in dax_ccb_exec()
898 if (ctx->result.exec.status != DAX_SUBMIT_OK) in dax_ccb_exec()
906 &accepted_len, &ctx->result.exec.status_data); in dax_ccb_exec()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/timer/
base.c
73 LIST_HEAD(exec); in nvkm_timer_alarm_trigger()
90 list_add(&alarm->exec, &exec); in nvkm_timer_alarm_trigger()
99 list_for_each_entry_safe(alarm, atemp, &exec, exec) { in nvkm_timer_alarm_trigger()
100 list_del(&alarm->exec); in nvkm_timer_alarm_trigger()
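
The nvkm timer code above uses a common list idiom: expired alarms are collected onto a local list head (both the head and the member are named exec), then consumed with list_for_each_entry_safe() so each node can be unlinked while iterating. A generic sketch of the idiom, with placeholder struct and function names, is below.

#include <linux/list.h>

struct item {
    struct list_head exec;
};

/* Hypothetical consumer: drain @pending and handle each entry once. */
static void run_pending(struct list_head *pending)
{
    struct item *it, *tmp;
    LIST_HEAD(exec);

    /* Move everything onto a private list first... */
    list_splice_init(pending, &exec);

    /* ...then walk it safely, unlinking each node as it is handled. */
    list_for_each_entry_safe(it, tmp, &exec, exec) {
        list_del(&it->exec);
        /* handle(it); */
    }
}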
/linux/drivers/gpu/drm/xe/tests/
xe_bo.c
26 struct kunit *test, struct drm_exec *exec) in ccs_test_migrate() argument
38 ret = xe_bo_validate(bo, NULL, false, exec); in ccs_test_migrate()
63 ret = xe_bo_evict(bo, exec); in ccs_test_migrate()
135 struct drm_exec *exec = XE_VALIDATION_OPT_OUT; in ccs_test_run_tile() local
143 bo_flags, exec); in ccs_test_run_tile()
153 test, exec); in ccs_test_run_tile()
159 0xdeadbeefdeadbeefULL, test, exec); in ccs_test_run_tile()
164 ret = ccs_test_migrate(tile, bo, true, 0ULL, 0ULL, test, exec); in ccs_test_run_tile()
214 struct drm_exec *exec = XE_VALIDATION_OPT_OUT; in evict_test_run_tile() local
225 bo_flags, exec); in evict_test_run_tile()
[all …]
/linux/drivers/gpu/drm/xe/
xe_pagefault.c
48 static int xe_pagefault_begin(struct drm_exec *exec, struct xe_vma *vma, in xe_pagefault_begin() argument
55 err = xe_vm_lock_vma(exec, vma); in xe_pagefault_begin()
62 return need_vram_move ? xe_bo_migrate(bo, vram->placement, NULL, exec) : in xe_pagefault_begin()
63 xe_bo_validate(bo, vm, true, exec); in xe_pagefault_begin()
72 struct drm_exec exec; in xe_pagefault_handle_vma() local
104 xe_validation_ctx_init(&ctx, &vm->xe->val, &exec, (struct xe_val_flags) {}); in xe_pagefault_handle_vma()
105 drm_exec_until_all_locked(&exec) { in xe_pagefault_handle_vma()
106 err = xe_pagefault_begin(&exec, vma, tile->mem.vram, in xe_pagefault_handle_vma()
108 drm_exec_retry_on_contention(&exec); in xe_pagefault_handle_vma()
115 xe_vm_set_validation_exec(vm, &exec); in xe_pagefault_handle_vma()
xe_vm.h
263 int xe_vm_lock_vma(struct drm_exec *exec, struct xe_vma *vma);
265 int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec,
293 int xe_vm_drm_exec_lock(struct xe_vm *vm, struct drm_exec *exec);
374 static inline void xe_vm_set_validation_exec(struct xe_vm *vm, struct drm_exec *exec) in xe_vm_set_validation_exec() argument
377 xe_assert(vm->xe, !!exec ^ !!vm->validation._exec); in xe_vm_set_validation_exec()
378 vm->validation._exec = exec; in xe_vm_set_validation_exec()
/linux/tools/perf/util/
comm.h
15 bool exec; member
23 struct comm *comm__new(const char *str, u64 timestamp, bool exec);
26 bool exec);
comm.c
192 struct comm *comm__new(const char *str, u64 timestamp, bool exec) in comm__new() argument
200 comm->exec = exec; in comm__new()
211 int comm__override(struct comm *comm, const char *str, u64 timestamp, bool exec) in comm__override() argument
222 if (exec) in comm__override()
223 comm->exec = true; in comm__override()
/linux/tools/testing/selftests/bpf/progs/
test_overhead.c
10 int BPF_KPROBE(prog1, struct task_struct *tsk, const char *buf, bool exec)
28 int BPF_PROG(prog4, struct task_struct *tsk, const char *buf, bool exec) in prog3()
34 int BPF_PROG(prog5, struct task_struct *tsk, const char *buf, bool exec) in BPF_PROG()
13 BPF_KPROBE(prog1,struct task_struct * tsk,const char * buf,bool exec) BPF_KPROBE() argument
31 BPF_PROG(prog4,struct task_struct * tsk,const char * buf,bool exec) BPF_PROG() argument
37 BPF_PROG(prog5,struct task_struct * tsk,const char * buf,bool exec) BPF_PROG() argument
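
The BPF_KPROBE()/BPF_PROG() wrappers matched above come from libbpf's bpf_tracing.h and let a program receive typed arguments. A minimal sketch of a program with the same signature is below; the fentry attach point (__set_task_comm) and the program name are assumptions made for illustration, not taken from the listed selftest.

#include "vmlinux.h"
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

/* Assumed attach point; the selftest's real targets may differ. */
SEC("fentry/__set_task_comm")
int BPF_PROG(on_set_comm, struct task_struct *tsk, const char *buf, bool exec)
{
    /* exec is true when the comm change comes from an execve() */
    return 0;
}

char LICENSE[] SEC("license") = "GPL";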
/linux/drivers/gpu/drm/panthor/
panthor_mmu.h
49 int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
90 int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
92 void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
94 void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
/linux/arch/arc/mm/
fault.c
79 unsigned int write = 0, exec = 0, mask; in do_page_fault() local
107 exec = 1; in do_page_fault()
127 if (exec) in do_page_fault()
/linux/Documentation/arch/powerpc/
dexcr.rst
93 - This aspect will be set after exec / set this aspect after exec
96 - This aspect will be clear after exec / clear this aspect after exec
106 set, it will be cleared when you run exec, and you can change this with the
170 For example, clearing NPHIE on exec is a privileged operation (a process
195 key (potentially all threads from the same parent that have not run ``exec()``).
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/
mxms.c
97 bool (*exec)(struct nvkm_mxm *, u8 *, void *), void *info) in mxms_foreach()
148 if (mxm->subdev.debug >= NV_DBG_DEBUG && (exec == NULL)) { in mxms_foreach()
170 if (!exec(mxm, desc, info)) in mxms_foreach()
/linux/drivers/spi/
spi-wpcm-fiu.c
137 int (*exec)(struct spi_mem *mem, const struct spi_mem_op *op); member
301 { .match = wpcm_fiu_normal_match, .exec = wpcm_fiu_normal_exec },
302 { .match = wpcm_fiu_fast_read_match, .exec = wpcm_fiu_fast_read_exec },
303 { .match = wpcm_fiu_4ba_match, .exec = wpcm_fiu_4ba_exec },
304 { .match = wpcm_fiu_rdid_match, .exec = wpcm_fiu_rdid_exec },
305 { .match = wpcm_fiu_dummy_match, .exec = wpcm_fiu_dummy_exec },
360 return shape->exec(mem, op); in wpcm_fiu_exec_op()
