
Searched refs:exec (Results 1–25 of 233), sorted by last modified time


/linux/drivers/gpu/drm/panthor/
panthor_sched.h:35 void panthor_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
panthor_sched.c:3400 void panthor_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *sched_job) in panthor_job_update_resvs() argument
3409 panthor_vm_update_resvs(job->group->vm, exec, &sched_job->s_fence->finished, in panthor_job_update_resvs()
panthor_mmu.c:2461 int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec, in panthor_vm_bind_job_prepare_resvs() argument
2468 ret = drm_gpuvm_prepare_vm(&job->vm->base, exec, 1); in panthor_vm_bind_job_prepare_resvs()
2474 ret = drm_exec_prepare_obj(exec, job->ctx.map.vm_bo->obj, 1); in panthor_vm_bind_job_prepare_resvs()
2487 void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, in panthor_vm_bind_job_update_resvs() argument
2493 drm_gpuvm_resv_add_fence(&job->vm->base, exec, in panthor_vm_bind_job_update_resvs()
2499 void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec, in panthor_vm_update_resvs() argument
2504 drm_gpuvm_resv_add_fence(&vm->base, exec, fence, private_usage, extobj_usage); in panthor_vm_update_resvs()
2609 int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec, struct panthor_vm *vm, in panthor_vm_prepare_mapped_bos_resvs() argument
2615 ret = drm_gpuvm_prepare_vm(&vm->base, exec, slot_count); in panthor_vm_prepare_mapped_bos_resvs()
2619 return drm_gpuvm_prepare_objects(&vm->base, exec, slot_count); in panthor_vm_prepare_mapped_bos_resvs()
panthor_mmu.h:45 int panthor_vm_prepare_mapped_bos_resvs(struct drm_exec *exec,
86 int panthor_vm_bind_job_prepare_resvs(struct drm_exec *exec,
88 void panthor_vm_bind_job_update_resvs(struct drm_exec *exec, struct drm_sched_job *job);
90 void panthor_vm_update_resvs(struct panthor_vm *vm, struct drm_exec *exec,
panthor_drv.c:300 struct drm_exec exec; member
694 upd_resvs(&ctx->exec, ctx->jobs[i].job); in panthor_submit_ctx_push_jobs()
723 drm_exec_init(&ctx->exec, in panthor_submit_ctx_init()
740 drm_exec_fini(&ctx->exec); in panthor_submit_ctx_cleanup()
951 drm_exec_until_all_locked(&ctx.exec) { in panthor_ioctl_group_submit()
952 ret = panthor_vm_prepare_mapped_bos_resvs(&ctx.exec, vm, in panthor_ioctl_group_submit()
1148 drm_exec_until_all_locked(&ctx.exec) { in panthor_ioctl_vm_bind_async()
1150 ret = panthor_vm_bind_job_prepare_resvs(&ctx.exec, ctx.jobs[i].job); in panthor_ioctl_vm_bind_async()
1151 drm_exec_retry_on_contention(&ctx.exec); in panthor_ioctl_vm_bind_async()
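Every panthor hit above funnels through the drm_exec helper: the context is initialized once, drm_exec_until_all_locked() re-runs its body whenever a ww-mutex acquisition hits contention, and drm_exec_fini() drops every lock taken. A minimal sketch of that loop, using only the drm_exec calls visible in these results (reserve_job_bos() and the objs array are hypothetical, not panthor code):

#include <drm/drm_exec.h>
#include <drm/drm_gem.h>

static int reserve_job_bos(struct drm_gem_object **objs, unsigned int count)
{
	struct drm_exec exec;
	unsigned int i;
	int ret = 0;

	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, count);
	drm_exec_until_all_locked(&exec) {
		for (i = 0; i < count; i++) {
			/* Lock the BO and reserve one dma-resv fence slot. */
			ret = drm_exec_prepare_obj(&exec, objs[i], 1);
			/* On contention, drop all locks and restart the block. */
			drm_exec_retry_on_contention(&exec);
			if (ret)
				goto out;
		}
	}

	/* ... push the job, then panthor_job_update_resvs()-style fence adds ... */
out:
	drm_exec_fini(&exec);
	return ret;
}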
/linux/drivers/gpu/drm/nouveau/
nouveau_bo.c:918 int (*exec)(struct nouveau_channel *, in nouveau_bo_move_init() member
973 drm->ttm.move = mthd->exec; in nouveau_bo_move_init()
978 } while ((++mthd)->exec); in nouveau_bo_move_init()
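nouveau's use is unrelated to drm_exec: here exec is a function pointer in a sentinel-terminated method table, and nouveau_bo_move_init() walks entries until the one whose ->exec is NULL ends the scan. A reduced sketch of the idiom (struct and fields are illustrative stand-ins, not nouveau's real types):

struct move_method {
	int chipset;		/* oldest chipset the method supports */
	int (*exec)(void);	/* simplified; the real hook takes a channel, BOs, ... */
};

/* Return the first applicable method; the terminating entry has .exec == NULL,
 * mirroring the "} while ((++mthd)->exec);" loop above. */
static int (*pick_mover(const struct move_method *mthd, int chipset))(void)
{
	do {
		if (chipset >= mthd->chipset)
			return mthd->exec;
	} while ((++mthd)->exec);

	return NULL;
}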
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_svm.c:1480 struct drm_exec exec; member
1490 drm_exec_init(&ctx->exec, intr ? DRM_EXEC_INTERRUPTIBLE_WAIT: 0, 0); in svm_range_reserve_bos()
1491 drm_exec_until_all_locked(&ctx->exec) { in svm_range_reserve_bos()
1501 r = amdgpu_vm_lock_pd(vm, &ctx->exec, 2); in svm_range_reserve_bos()
1502 drm_exec_retry_on_contention(&ctx->exec); in svm_range_reserve_bos()
1530 drm_exec_fini(&ctx->exec); in svm_range_reserve_bos()
1536 drm_exec_fini(&ctx->exec); in svm_range_unreserve_bos()
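In the amdkfd hits the drm_exec context is embedded in a caller-owned struct and the wait mode is chosen at run time: interruptible for user-triggered paths, uninterruptible otherwise. Reserve and unreserve are split across two functions, with svm_range_unreserve_bos() unwinding via a single drm_exec_fini(). A sketch of that shape (struct and function names here are illustrative, not the kfd ones):

#include <drm/drm_exec.h>

struct reserve_ctx {
	struct drm_exec exec;	/* mirrors the 'struct drm_exec exec; member' hit */
};

/* svm_range_reserve_bos()-style entry: pick the wait mode up front. */
static void reserve_ctx_begin(struct reserve_ctx *ctx, bool intr)
{
	drm_exec_init(&ctx->exec, intr ? DRM_EXEC_INTERRUPTIBLE_WAIT : 0, 0);
}

/* svm_range_unreserve_bos()-style exit: one call releases every held lock. */
static void reserve_ctx_end(struct reserve_ctx *ctx)
{
	drm_exec_fini(&ctx->exec);
}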
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_gpuvm.c:1123 struct drm_exec exec; member
1153 drm_exec_init(&ctx->exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0); in reserve_bo_and_vm()
1154 drm_exec_until_all_locked(&ctx->exec) { in reserve_bo_and_vm()
1155 ret = amdgpu_vm_lock_pd(vm, &ctx->exec, 2); in reserve_bo_and_vm()
1156 drm_exec_retry_on_contention(&ctx->exec); in reserve_bo_and_vm()
1160 ret = drm_exec_prepare_obj(&ctx->exec, &bo->tbo.base, 1); in reserve_bo_and_vm()
1161 drm_exec_retry_on_contention(&ctx->exec); in reserve_bo_and_vm()
1169 drm_exec_fini(&ctx->exec); in reserve_bo_and_vm()
1192 drm_exec_init(&ctx->exec, DRM_EXEC_INTERRUPTIBLE_WAIT | in reserve_bo_and_cond_vms()
1194 drm_exec_until_all_locked(&ctx->exec) { in reserve_bo_and_cond_vms()
[all …]
gfx_v9_0.c:5295 bool exec = flags & AMDGPU_FENCE_FLAG_EXEC; in gfx_v9_0_ring_emit_fence() local
5309 if (exec) in gfx_v9_0_ring_emit_fence()
amdgpu_vm.c:368 int amdgpu_vm_lock_pd(struct amdgpu_vm *vm, struct drm_exec *exec, in amdgpu_vm_lock_pd() argument
372 return drm_exec_prepare_obj(exec, &vm->root.bo->tbo.base, in amdgpu_vm_lock_pd()
amdgpu_vm.h:455 int amdgpu_vm_lock_pd(struct amdgpu_vm *vm, struct drm_exec *exec,
amdgpu_umsch_mm.c:84 struct drm_exec exec; in map_ring_data() local
89 drm_exec_init(&exec, 0, 0); in map_ring_data()
90 drm_exec_until_all_locked(&exec) { in map_ring_data()
91 r = drm_exec_lock_obj(&exec, &bo->tbo.base); in map_ring_data()
92 drm_exec_retry_on_contention(&exec); in map_ring_data()
96 r = amdgpu_vm_lock_pd(vm, &exec, 0); in map_ring_data()
97 drm_exec_retry_on_contention(&exec); in map_ring_data()
129 drm_exec_fini(&exec); in map_ring_data()
140 drm_exec_fini(&exec); in map_ring_data()
149 struct drm_exec exec; in unmap_ring_data() local
[all …]
amdgpu_mes.c:1215 struct drm_exec exec; in amdgpu_mes_ctx_map_meta_data() local
1220 drm_exec_init(&exec, 0, 0); in amdgpu_mes_ctx_map_meta_data()
1221 drm_exec_until_all_locked(&exec) { in amdgpu_mes_ctx_map_meta_data()
1222 r = drm_exec_lock_obj(&exec, in amdgpu_mes_ctx_map_meta_data()
1224 drm_exec_retry_on_contention(&exec); in amdgpu_mes_ctx_map_meta_data()
1228 r = amdgpu_vm_lock_pd(vm, &exec, 0); in amdgpu_mes_ctx_map_meta_data()
1229 drm_exec_retry_on_contention(&exec); in amdgpu_mes_ctx_map_meta_data()
1266 drm_exec_fini(&exec); in amdgpu_mes_ctx_map_meta_data()
1276 drm_exec_fini(&exec); in amdgpu_mes_ctx_map_meta_data()
1288 struct drm_exec exec; in amdgpu_mes_ctx_unmap_meta_data() local
[all …]
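map_ring_data() and amdgpu_mes_ctx_map_meta_data() use the lighter variant: drm_exec_lock_obj() only takes the reservation lock, and the page directory is locked with zero fence slots (amdgpu_vm_lock_pd(vm, &exec, 0)). A condensed sketch of that pattern; map_one_bo() is a made-up wrapper:

#include <drm/drm_exec.h>
#include <drm/drm_gem.h>

static int map_one_bo(struct drm_gem_object *obj)
{
	struct drm_exec exec;
	int r = 0;

	drm_exec_init(&exec, 0, 0);	/* uninterruptible waits, no size hint */
	drm_exec_until_all_locked(&exec) {
		/* Lock only: unlike drm_exec_prepare_obj(), this reserves
		 * no dma-resv fence slots. */
		r = drm_exec_lock_obj(&exec, obj);
		drm_exec_retry_on_contention(&exec);
		if (r)
			break;
	}

	if (!r) {
		/* ... update the VM mapping while the lock is held ... */
	}

	drm_exec_fini(&exec);
	return r;
}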
/linux/Documentation/userspace-api/
mseal.rst:134 exec system call is invoked. Applications can apply sealing to any virtual
/linux/scripts/
Kbuild.include:126 silent_log_print = exec >/dev/null;
/linux/Documentation/arch/powerpc/
dexcr.rst:93 - This aspect will be set after exec / set this aspect after exec
96 - This aspect will be clear after exec / clear this aspect after exec
106 set, it will be cleared when you run exec, and you can change this with the
170 For example, clearing NPHIE on exec is a privileged operation (a process
195 key (potentially all threads from the same parent that have not run ``exec()``).
/linux/tools/testing/selftests/bpf/progs/
local_storage.c:208 void BPF_PROG(exec, struct linux_binprm *bprm) in BPF_PROG() argument
/linux/net/openvswitch/
flow_netlink.c:2611 arg.exec = last || !actions_may_change_flow(actions); in validate_and_copy_sample()
2699 u32 exec; in validate_and_copy_clone() local
2708 exec = last || !actions_may_change_flow(attr); in validate_and_copy_clone()
2710 err = ovs_nla_add_action(sfa, OVS_CLONE_ATTR_EXEC, &exec, in validate_and_copy_clone()
2711 sizeof(exec), log); in validate_and_copy_clone()
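Here exec is a flag rather than a context: validate_and_copy_clone() records OVS_CLONE_ATTR_EXEC so the datapath knows whether the clone may run its nested actions in place instead of on a copied flow key; that is safe when the clone is the last action or when the nested actions cannot modify the flow. The decisive lines, paraphrased from the hits above with explanatory comments:

/* In-place execution is safe if nothing after the clone needs the
 * unmodified key (last action), or the nested actions can't change it. */
exec = last || !actions_may_change_flow(attr);

err = ovs_nla_add_action(sfa, OVS_CLONE_ATTR_EXEC, &exec,
			 sizeof(exec), log);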
/linux/drivers/gpu/drm/xe/
xe_vm.c:207 struct drm_exec *exec) in resume_and_reinstall_preempt_fences() argument
217 drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, q->compute.pfence, in resume_and_reinstall_preempt_fences()
229 struct drm_exec *exec = &vm_exec.exec; in xe_vm_add_compute_exec_queue() local
254 drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, pfence, in xe_vm_add_compute_exec_queue()
269 drm_exec_fini(exec); in xe_vm_add_compute_exec_queue()
352 bool xe_vm_validate_should_retry(struct drm_exec *exec, int err, ktime_t *end) in xe_vm_validate_should_retry() argument
368 static int xe_gpuvm_validate(struct drm_gpuvm_bo *vm_bo, struct drm_exec *exec) in xe_gpuvm_validate() argument
402 int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec, in xe_vm_validate_rebind() argument
410 ret = drm_gpuvm_validate(&vm->gpuvm, exec); in xe_vm_validate_rebind()
419 drm_exec_for_each_locked_object(exec, index, obj) { in xe_vm_validate_rebind()
[all …]
xe_sync.c:109 bool exec = flags & SYNC_PARSE_FLAG_EXEC; in xe_sync_entry_parse() local
181 if (exec) { in xe_sync_entry_parse()
xe_exec.c:106 return xe_vm_validate_rebind(vm, &vm_exec->exec, 1); in xe_exec_fn()
120 struct drm_exec *exec = &vm_exec.exec; in xe_exec_ioctl() local
231 drm_exec_init(exec, vm_exec.flags, 0); in xe_exec_ioctl()
235 if (xe_vm_validate_should_retry(exec, err, &end)) in xe_exec_ioctl()
294 drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, &job->drm.s_fence->finished, in xe_exec_ioctl()
322 drm_exec_fini(exec); in xe_exec_ioctl()
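xe wraps the locking loop in a bounded outer retry: when validation fails with a transient error, xe_vm_validate_should_retry() decides (against a ktime_t deadline) whether to re-run the whole drm_exec sequence. A hedged sketch of that outer loop; the xe_* helpers are the ones in the hits (prototypes transcribed from them, parameter names guessed), but the control flow is heavily simplified and is an assumption, not xe_exec_ioctl() verbatim:

#include <drm/drm_exec.h>
#include <linux/ktime.h>

struct xe_vm;
int xe_vm_validate_rebind(struct xe_vm *vm, struct drm_exec *exec,
			  unsigned int num_fences);
bool xe_vm_validate_should_retry(struct drm_exec *exec, int err, ktime_t *end);

static int submit_with_retry(struct xe_vm *vm, struct drm_exec *exec, u32 flags)
{
	ktime_t end = 0;
	int err = 0;
	bool again;

retry:
	drm_exec_init(exec, flags, 0);
	drm_exec_until_all_locked(exec) {
		err = xe_vm_validate_rebind(vm, exec, 1);
		drm_exec_retry_on_contention(exec);
		if (err)
			break;
	}

	if (err) {
		/* Ask before fini so the helper can inspect the context. */
		again = xe_vm_validate_should_retry(exec, err, &end);
		drm_exec_fini(exec);
		if (again)
			goto retry;
		return err;
	}

	/* ... create the job, drm_gpuvm_resv_add_fence(), submit ... */
	drm_exec_fini(exec);
	return 0;
}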
/linux/drivers/gpu/drm/vc4/
vc4_drv.h:977 void vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec);
1047 struct vc4_exec_info *exec);
1050 vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec);
1052 struct drm_gem_dma_object *vc4_use_bo(struct vc4_exec_info *exec,
1055 int vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec);
1057 bool vc4_check_tex_size(struct vc4_exec_info *exec,
/linux/drivers/gpu/drm/msm/
msm_gem.h:262 struct drm_exec exec; member
/linux/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c:43 struct drm_i915_gem_exec_object2 *exec; member
251 struct drm_i915_gem_exec_object2 *exec; /** ioctl execobj[] */ member
568 struct drm_i915_gem_exec_object2 *entry = &eb->exec[i]; in eb_add_vma()
572 ev->exec = entry; in eb_add_vma()
660 struct drm_i915_gem_exec_object2 *entry = ev->exec; in eb_reserve_vma()
952 vma = eb_lookup_vma(eb, eb->exec[i].handle); in eb_lookup_vmas()
958 err = eb_validate_vma(eb, &eb->exec[i], vma); in eb_lookup_vmas()
1025 struct drm_i915_gem_exec_object2 *entry = &eb->exec[i]; in eb_validate_vmas()
1055 eb_vma_misplaced(&eb->exec[i], vma, ev->flags)); in eb_validate_vmas()
1530 const struct drm_i915_gem_exec_object2 *entry = ev->exec; in eb_relocate_vma()
[all …]
/linux/drivers/accel/qaic/
qaic_data.c:1167 struct qaic_execute_entry *exec, unsigned int count, in send_bo_list_to_device() argument
1171 struct qaic_partial_execute_entry *pexec = (struct qaic_partial_execute_entry *)exec; in send_bo_list_to_device()
1185 is_partial ? pexec[i].handle : exec[i].handle); in send_bo_list_to_device()
1261 struct qaic_execute_entry *exec, unsigned int count, in update_profiling_data() argument
1264 struct qaic_partial_execute_entry *pexec = (struct qaic_partial_execute_entry *)exec; in update_profiling_data()
1278 is_partial ? pexec[i].handle : exec[i].handle); in update_profiling_data()
1294 struct qaic_execute_entry *exec; in __qaic_execute_bo_ioctl() local
1312 size = is_partial ? sizeof(struct qaic_partial_execute_entry) : sizeof(*exec); in __qaic_execute_bo_ioctl()
1319 exec = kcalloc(args->hdr.count, size, GFP_KERNEL); in __qaic_execute_bo_ioctl()
1320 if (!exec) in __qaic_execute_bo_ioctl()
[all …]
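Both send_bo_list_to_device() and update_profiling_data() overlay two user-copied layouts on a single allocation: __qaic_execute_bo_ioctl() sizes the buffer by the partial-entry stride only when needed, and a cast yields the alternate view. A sketch with the uapi entry types abbreviated (field sets are paraphrased from memory of include/uapi/drm/qaic_accel.h, so treat them as an assumption):

#include <linux/slab.h>
#include <linux/types.h>

/* Abbreviated stand-ins for the qaic uapi entry types (assumption). */
struct qaic_execute_entry { u32 handle; u32 dir; };
struct qaic_partial_execute_entry { u32 handle; u32 dir; u64 resize; };

static void *alloc_exec_entries(u32 count, bool is_partial)
{
	size_t size = is_partial ? sizeof(struct qaic_partial_execute_entry)
				 : sizeof(struct qaic_execute_entry);

	/* One allocation; consumers cast to the partial view when is_partial,
	 * then index with the matching stride:
	 *	is_partial ? pexec[i].handle : exec[i].handle
	 */
	return kcalloc(count, size, GFP_KERNEL);
}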
