Lines matching refs:queue (references to the identifier "queue" in the amdgpu user-mode queue code; the containing function is shown at the right of each line, and "local"/"argument" mark a local-variable or parameter declaration).

153 struct amdgpu_usermode_queue *queue = container_of(work, in amdgpu_userq_hang_detect_work() local
159 if (!queue || !queue->userq_mgr) in amdgpu_userq_hang_detect_work()
162 uq_mgr = queue->userq_mgr; in amdgpu_userq_hang_detect_work()
163 fence = READ_ONCE(queue->hang_detect_fence); in amdgpu_userq_hang_detect_work()
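Lines 153-163 show the hang-detect handler: a delayed work item that recovers its owning queue with container_of() and snapshots the armed fence with READ_ONCE(). A minimal sketch of that pattern, assuming a simplified demo_queue type (the struct layout and the warning policy here are illustrative, not the driver's):

#include <linux/workqueue.h>
#include <linux/dma-fence.h>
#include <linux/printk.h>

struct demo_queue {
        struct delayed_work hang_detect_work;
        struct dma_fence *hang_detect_fence;    /* armed at submission time */
};

static void demo_hang_detect_work(struct work_struct *work)
{
        struct demo_queue *queue = container_of(work, struct demo_queue,
                                                hang_detect_work.work);
        struct dma_fence *fence = READ_ONCE(queue->hang_detect_fence);

        /*
         * If the fence that was armed when the timer started has still
         * not signaled, the queue is considered hung.
         */
        if (fence && !dma_fence_is_signaled(fence))
                pr_warn("demo: queue %p appears hung\n", queue);
}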
177 void amdgpu_userq_start_hang_detect_work(struct amdgpu_usermode_queue *queue) in amdgpu_userq_start_hang_detect_work() argument
182 if (!queue || !queue->userq_mgr || !queue->userq_mgr->adev) in amdgpu_userq_start_hang_detect_work()
185 adev = queue->userq_mgr->adev; in amdgpu_userq_start_hang_detect_work()
187 switch (queue->queue_type) { in amdgpu_userq_start_hang_detect_work()
203 WRITE_ONCE(queue->hang_detect_fence, queue->last_fence); in amdgpu_userq_start_hang_detect_work()
204 schedule_delayed_work(&queue->hang_detect_work, in amdgpu_userq_start_hang_detect_work()
208 static void amdgpu_userq_init_hang_detect_work(struct amdgpu_usermode_queue *queue) in amdgpu_userq_init_hang_detect_work() argument
210 INIT_DELAYED_WORK(&queue->hang_detect_work, amdgpu_userq_hang_detect_work); in amdgpu_userq_init_hang_detect_work()
211 queue->hang_detect_fence = NULL; in amdgpu_userq_init_hang_detect_work()
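Lines 177-211 show the matching arm/init pair: arming publishes the queue's last fence with WRITE_ONCE() (pairing with the handler's READ_ONCE() so the pointer is not torn) and schedules the work; init sets up the work item and clears the fence. A sketch reusing demo_queue from above; the flat timeout constant is an assumption, standing in for the per-IP value the driver selects in its switch on queue->queue_type:

#include <linux/jiffies.h>

#define DEMO_HANG_TIMEOUT_MS    10000   /* assumed; the driver's value is per-IP */

static void demo_start_hang_detect(struct demo_queue *queue,
                                   struct dma_fence *last_fence)
{
        /* Publish the fence before the delayed work can observe it. */
        WRITE_ONCE(queue->hang_detect_fence, last_fence);
        schedule_delayed_work(&queue->hang_detect_work,
                              msecs_to_jiffies(DEMO_HANG_TIMEOUT_MS));
}

static void demo_init_hang_detect(struct demo_queue *queue)
{
        INIT_DELAYED_WORK(&queue->hang_detect_work, demo_hang_detect_work);
        queue->hang_detect_fence = NULL;
}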
214 static int amdgpu_userq_buffer_va_list_add(struct amdgpu_usermode_queue *queue, in amdgpu_userq_buffer_va_list_add() argument
227 list_add(&va_cursor->list, &queue->userq_va_list); in amdgpu_userq_buffer_va_list_add()
233 struct amdgpu_usermode_queue *queue, in amdgpu_userq_input_va_validate() argument
237 struct amdgpu_vm *vm = queue->vm; in amdgpu_userq_input_va_validate()
257 amdgpu_userq_buffer_va_list_add(queue, va_map, user_addr); in amdgpu_userq_input_va_validate()
286 static bool amdgpu_userq_buffer_vas_mapped(struct amdgpu_usermode_queue *queue) in amdgpu_userq_buffer_vas_mapped() argument
291 list_for_each_entry_safe(va_cursor, tmp, &queue->userq_va_list, list) { in amdgpu_userq_buffer_vas_mapped()
292 r += amdgpu_userq_buffer_va_mapped(queue->vm, va_cursor->gpu_addr); in amdgpu_userq_buffer_vas_mapped()
293 dev_dbg(queue->userq_mgr->adev->dev, in amdgpu_userq_buffer_vas_mapped()
295 queue, va_cursor->gpu_addr, r); in amdgpu_userq_buffer_vas_mapped()
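Lines 214-295 record every user VA the queue depends on (ring, rptr, wptr buffers) on queue->userq_va_list, so the driver can later re-check that they are all still mapped. A sketch of that bookkeeping with a hypothetical demo_va_cursor type; the mapped-check is simplified to an early out, where the listing accumulates a count and logs each probe:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>

struct demo_va_cursor {
        u64 gpu_addr;
        struct list_head list;
};

/* Stand-in for the driver's amdgpu_vm_bo_lookup_mapping()-based probe. */
static bool demo_va_mapped(u64 gpu_addr)
{
        return true;
}

static int demo_va_list_add(struct list_head *va_list, u64 gpu_addr)
{
        struct demo_va_cursor *cursor = kzalloc(sizeof(*cursor), GFP_KERNEL);

        if (!cursor)
                return -ENOMEM;
        cursor->gpu_addr = gpu_addr;
        list_add(&cursor->list, va_list);
        return 0;
}

/* True only if every recorded VA is still mapped in the VM. */
static bool demo_vas_mapped(struct list_head *va_list)
{
        struct demo_va_cursor *cursor, *tmp;

        list_for_each_entry_safe(cursor, tmp, va_list, list) {
                if (!demo_va_mapped(cursor->gpu_addr))
                        return false;
        }
        return true;
}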
313 struct amdgpu_usermode_queue *queue) in amdgpu_userq_buffer_vas_list_cleanup() argument
319 r = amdgpu_bo_reserve(queue->vm->root.bo, false); in amdgpu_userq_buffer_vas_list_cleanup()
323 list_for_each_entry_safe(va_cursor, tmp, &queue->userq_va_list, list) { in amdgpu_userq_buffer_vas_list_cleanup()
324 mapping = amdgpu_vm_bo_lookup_mapping(queue->vm, va_cursor->gpu_addr); in amdgpu_userq_buffer_vas_list_cleanup()
330 queue, va_cursor->gpu_addr); in amdgpu_userq_buffer_vas_list_cleanup()
334 amdgpu_bo_unreserve(queue->vm->root.bo); in amdgpu_userq_buffer_vas_list_cleanup()
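The cleanup at lines 313-334 walks the same list while holding a reservation on queue->vm->root.bo, apparently because the mapping lookup requires the VM to be locked, and frees each cursor. The list-teardown half, sketched with the locking elided:

static void demo_va_list_cleanup(struct list_head *va_list)
{
        struct demo_va_cursor *cursor, *tmp;

        list_for_each_entry_safe(cursor, tmp, va_list, list) {
                list_del(&cursor->list);
                kfree(cursor);
        }
}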
338 static int amdgpu_userq_preempt_helper(struct amdgpu_usermode_queue *queue) in amdgpu_userq_preempt_helper() argument
340 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_preempt_helper()
343 adev->userq_funcs[queue->queue_type]; in amdgpu_userq_preempt_helper()
347 if (queue->state == AMDGPU_USERQ_STATE_MAPPED) { in amdgpu_userq_preempt_helper()
348 r = userq_funcs->preempt(queue); in amdgpu_userq_preempt_helper()
350 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_preempt_helper()
353 queue->state = AMDGPU_USERQ_STATE_PREEMPTED; in amdgpu_userq_preempt_helper()
363 static int amdgpu_userq_restore_helper(struct amdgpu_usermode_queue *queue) in amdgpu_userq_restore_helper() argument
365 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_restore_helper()
368 adev->userq_funcs[queue->queue_type]; in amdgpu_userq_restore_helper()
371 if (queue->state == AMDGPU_USERQ_STATE_PREEMPTED) { in amdgpu_userq_restore_helper()
372 r = userq_funcs->restore(queue); in amdgpu_userq_restore_helper()
374 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_restore_helper()
376 queue->state = AMDGPU_USERQ_STATE_MAPPED; in amdgpu_userq_restore_helper()
383 static int amdgpu_userq_unmap_helper(struct amdgpu_usermode_queue *queue) in amdgpu_userq_unmap_helper() argument
385 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_unmap_helper()
388 adev->userq_funcs[queue->queue_type]; in amdgpu_userq_unmap_helper()
392 if ((queue->state == AMDGPU_USERQ_STATE_MAPPED) || in amdgpu_userq_unmap_helper()
393 (queue->state == AMDGPU_USERQ_STATE_PREEMPTED)) { in amdgpu_userq_unmap_helper()
394 r = userq_funcs->unmap(queue); in amdgpu_userq_unmap_helper()
396 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_unmap_helper()
399 queue->state = AMDGPU_USERQ_STATE_UNMAPPED; in amdgpu_userq_unmap_helper()
409 static int amdgpu_userq_map_helper(struct amdgpu_usermode_queue *queue) in amdgpu_userq_map_helper() argument
411 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_map_helper()
414 adev->userq_funcs[queue->queue_type]; in amdgpu_userq_map_helper()
417 if (queue->state == AMDGPU_USERQ_STATE_UNMAPPED) { in amdgpu_userq_map_helper()
418 r = userq_funcs->map(queue); in amdgpu_userq_map_helper()
420 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_map_helper()
423 queue->state = AMDGPU_USERQ_STATE_MAPPED; in amdgpu_userq_map_helper()
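The four helpers at lines 338-423 share one shape: if the queue is in the expected state, call the per-IP function and move to the target state, dropping to HUNG on failure; from any other state, do nothing. Condensed into a single hypothetical transition helper (note that unmap accepts two source states, MAPPED or PREEMPTED, which this single-source sketch does not capture):

enum demo_userq_state {
        DEMO_USERQ_STATE_UNMAPPED,
        DEMO_USERQ_STATE_MAPPED,
        DEMO_USERQ_STATE_PREEMPTED,
        DEMO_USERQ_STATE_HUNG,
};

struct demo_userq {
        enum demo_userq_state state;
};

static int demo_userq_transition(struct demo_userq *q,
                                 enum demo_userq_state from,
                                 enum demo_userq_state to,
                                 int (*op)(struct demo_userq *q))
{
        int r;

        if (q->state != from)
                return 0;       /* nothing to do from other states */

        r = op(q);
        q->state = r ? DEMO_USERQ_STATE_HUNG : to;
        return r;
}

In these terms, preempt is MAPPED to PREEMPTED, restore is PREEMPTED to MAPPED, and map is UNMAPPED to MAPPED; any failed hardware call parks the queue in HUNG.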
430 static int amdgpu_userq_wait_for_last_fence(struct amdgpu_usermode_queue *queue) in amdgpu_userq_wait_for_last_fence() argument
432 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_wait_for_last_fence()
433 struct dma_fence *f = queue->last_fence; in amdgpu_userq_wait_for_last_fence()
441 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_wait_for_last_fence()
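amdgpu_userq_wait_for_last_fence() (lines 430-441) blocks on the queue's last fence and demotes the queue to HUNG if the wait fails or times out. A sketch reusing demo_userq; the one-second timeout is an assumption:

#include <linux/dma-fence.h>
#include <linux/jiffies.h>

static void demo_userq_wait_last_fence(struct demo_userq *q,
                                       struct dma_fence *last_fence)
{
        long timeout = msecs_to_jiffies(1000);  /* assumed value */

        if (!last_fence)
                return;

        /* dma_fence_wait_timeout() returns <= 0 on timeout or error. */
        if (dma_fence_wait_timeout(last_fence, true, timeout) <= 0)
                q->state = DEMO_USERQ_STATE_HUNG;
}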
449 static void amdgpu_userq_cleanup(struct amdgpu_usermode_queue *queue) in amdgpu_userq_cleanup() argument
451 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_cleanup()
453 const struct amdgpu_userq_funcs *uq_funcs = adev->userq_funcs[queue->queue_type]; in amdgpu_userq_cleanup()
459 amdgpu_userq_buffer_vas_list_cleanup(adev, queue); in amdgpu_userq_cleanup()
460 uq_funcs->mqd_destroy(queue); in amdgpu_userq_cleanup()
461 amdgpu_userq_fence_driver_free(queue); in amdgpu_userq_cleanup()
463 xa_erase_irq(&adev->userq_doorbell_xa, queue->doorbell_index); in amdgpu_userq_cleanup()
464 queue->userq_mgr = NULL; in amdgpu_userq_cleanup()
465 list_del(&queue->userq_va_list); in amdgpu_userq_cleanup()
466 kfree(queue); in amdgpu_userq_cleanup()
620 amdgpu_userq_destroy(struct amdgpu_userq_mgr *uq_mgr, struct amdgpu_usermode_queue *queue) in amdgpu_userq_destroy() argument
627 amdgpu_userq_wait_for_last_fence(queue); in amdgpu_userq_destroy()
629 if (queue->hang_detect_fence) { in amdgpu_userq_destroy()
630 cancel_delayed_work_sync(&queue->hang_detect_work); in amdgpu_userq_destroy()
631 queue->hang_detect_fence = NULL; in amdgpu_userq_destroy()
633 r = amdgpu_bo_reserve(queue->db_obj.obj, true); in amdgpu_userq_destroy()
635 amdgpu_bo_unpin(queue->db_obj.obj); in amdgpu_userq_destroy()
636 amdgpu_bo_unreserve(queue->db_obj.obj); in amdgpu_userq_destroy()
638 amdgpu_bo_unref(&queue->db_obj.obj); in amdgpu_userq_destroy()
640 r = amdgpu_bo_reserve(queue->wptr_obj.obj, true); in amdgpu_userq_destroy()
642 amdgpu_bo_unpin(queue->wptr_obj.obj); in amdgpu_userq_destroy()
643 amdgpu_bo_unreserve(queue->wptr_obj.obj); in amdgpu_userq_destroy()
645 amdgpu_bo_unref(&queue->wptr_obj.obj); in amdgpu_userq_destroy()
647 atomic_dec(&uq_mgr->userq_count[queue->queue_type]); in amdgpu_userq_destroy()
649 debugfs_remove_recursive(queue->debugfs_queue); in amdgpu_userq_destroy()
652 r = amdgpu_userq_unmap_helper(queue); in amdgpu_userq_destroy()
656 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_destroy()
658 amdgpu_userq_cleanup(queue); in amdgpu_userq_destroy()
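The destroy path (lines 620-658) first waits out the last fence and cancels any pending hang-detect work, then releases the doorbell and wptr BOs with the usual amdgpu idiom: reserve, unpin, unreserve, drop the reference. That idiom, sketched as it would appear inside the driver (amdgpu_object.h in scope, error handling trimmed):

static void demo_unpin_and_free(struct amdgpu_bo **bo)
{
        if (!*bo)
                return;

        /* amdgpu_bo_reserve() returns 0 on success. */
        if (!amdgpu_bo_reserve(*bo, true)) {
                amdgpu_bo_unpin(*bo);
                amdgpu_bo_unreserve(*bo);
        }
        amdgpu_bo_unref(bo);
}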
669 struct amdgpu_usermode_queue *queue = in amdgpu_userq_kref_destroy() local
671 struct amdgpu_userq_mgr *uq_mgr = queue->userq_mgr; in amdgpu_userq_kref_destroy()
673 r = amdgpu_userq_destroy(uq_mgr, queue); in amdgpu_userq_kref_destroy()
680 struct amdgpu_usermode_queue *queue; in amdgpu_userq_get() local
683 queue = xa_load(&uq_mgr->userq_xa, qid); in amdgpu_userq_get()
684 if (queue) in amdgpu_userq_get()
685 kref_get(&queue->refcount); in amdgpu_userq_get()
688 return queue; in amdgpu_userq_get()
691 void amdgpu_userq_put(struct amdgpu_usermode_queue *queue) in amdgpu_userq_put() argument
693 if (queue) in amdgpu_userq_put()
694 kref_put(&queue->refcount, amdgpu_userq_kref_destroy); in amdgpu_userq_put()
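Lines 669-694 tie queue lifetime to a kref: lookups take a reference before returning the queue, and the last put funnels into the destroy path via the listing's amdgpu_userq_kref_destroy(). A minimal sketch of the pattern; the real lookup runs under the manager's lock, modeled here with the xarray's own lock so the reference is taken before the entry can disappear:

#include <linux/kref.h>
#include <linux/slab.h>
#include <linux/xarray.h>

struct demo_refq {
        struct kref refcount;
};

static void demo_refq_release(struct kref *kref)
{
        struct demo_refq *q = container_of(kref, struct demo_refq, refcount);

        kfree(q);       /* the driver runs its full destroy path here */
}

static struct demo_refq *demo_refq_get(struct xarray *xa, unsigned long qid)
{
        struct demo_refq *q;

        xa_lock(xa);
        q = xa_load(xa, qid);
        if (q)
                kref_get(&q->refcount);
        xa_unlock(xa);
        return q;
}

static void demo_refq_put(struct demo_refq *q)
{
        if (q)
                kref_put(&q->refcount, demo_refq_release);
}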
715 struct amdgpu_usermode_queue *queue = m->private; in amdgpu_mqd_info_read() local
719 if (!queue || !queue->mqd.obj) in amdgpu_mqd_info_read()
722 bo = amdgpu_bo_ref(queue->mqd.obj); in amdgpu_mqd_info_read()
729 seq_printf(m, "queue_type: %d\n", queue->queue_type); in amdgpu_mqd_info_read()
730 seq_printf(m, "mqd_gpu_address: 0x%llx\n", amdgpu_bo_gpu_offset(queue->mqd.obj)); in amdgpu_mqd_info_read()
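The mqd_info file (lines 715-730) is a standard single-show debugfs seq_file; it pins the MQD BO with amdgpu_bo_ref() before printing so the object cannot vanish mid-read. The boilerplate shape, sketched with a hypothetical show routine over the demo_refq type above:

#include <linux/debugfs.h>
#include <linux/seq_file.h>

static int demo_mqd_info_show(struct seq_file *m, void *unused)
{
        struct demo_refq *q = m->private;       /* handed over at creation */

        seq_printf(m, "queue: %p\n", q);
        return 0;
}
DEFINE_SHOW_ATTRIBUTE(demo_mqd_info);

DEFINE_SHOW_ATTRIBUTE() generates demo_mqd_info_fops, so wiring it up looks like line 904 of the listing: debugfs_create_file("mqd_info", 0444, dir, q, &demo_mqd_info_fops).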
759 struct amdgpu_usermode_queue *queue; in amdgpu_userq_create() local
798 queue = kzalloc_obj(struct amdgpu_usermode_queue); in amdgpu_userq_create()
799 if (!queue) { in amdgpu_userq_create()
805 INIT_LIST_HEAD(&queue->userq_va_list); in amdgpu_userq_create()
806 queue->doorbell_handle = args->in.doorbell_handle; in amdgpu_userq_create()
807 queue->queue_type = args->in.ip_type; in amdgpu_userq_create()
808 queue->vm = &fpriv->vm; in amdgpu_userq_create()
809 queue->priority = priority; in amdgpu_userq_create()
811 db_info.queue_type = queue->queue_type; in amdgpu_userq_create()
812 db_info.doorbell_handle = queue->doorbell_handle; in amdgpu_userq_create()
813 db_info.db_obj = &queue->db_obj; in amdgpu_userq_create()
816 queue->userq_mgr = uq_mgr; in amdgpu_userq_create()
818 if (amdgpu_userq_input_va_validate(adev, queue, args->in.queue_va, args->in.queue_size) || in amdgpu_userq_create()
819 amdgpu_userq_input_va_validate(adev, queue, args->in.rptr_va, AMDGPU_GPU_PAGE_SIZE) || in amdgpu_userq_create()
820 amdgpu_userq_input_va_validate(adev, queue, args->in.wptr_va, AMDGPU_GPU_PAGE_SIZE)) { in amdgpu_userq_create()
822 kfree(queue); in amdgpu_userq_create()
830 kfree(queue); in amdgpu_userq_create()
835 queue->doorbell_index = index; in amdgpu_userq_create()
836 xa_init_flags(&queue->fence_drv_xa, XA_FLAGS_ALLOC); in amdgpu_userq_create()
837 r = amdgpu_userq_fence_driver_alloc(adev, queue); in amdgpu_userq_create()
843 r = uq_funcs->mqd_create(queue, &args->in); in amdgpu_userq_create()
846 amdgpu_userq_fence_driver_free(queue); in amdgpu_userq_create()
847 kfree(queue); in amdgpu_userq_create()
852 kref_init(&queue->refcount); in amdgpu_userq_create()
856 r = xa_err(xa_store_irq(&adev->userq_doorbell_xa, index, queue, GFP_KERNEL)); in amdgpu_userq_create()
858 kfree(queue); in amdgpu_userq_create()
863 r = xa_alloc(&uq_mgr->userq_xa, &qid, queue, in amdgpu_userq_create()
867 amdgpu_userq_fence_driver_free(queue); in amdgpu_userq_create()
868 uq_funcs->mqd_destroy(queue); in amdgpu_userq_create()
869 kfree(queue); in amdgpu_userq_create()
878 ((queue->queue_type == AMDGPU_HW_IP_GFX) || in amdgpu_userq_create()
879 (queue->queue_type == AMDGPU_HW_IP_COMPUTE))) in amdgpu_userq_create()
884 r = amdgpu_userq_map_helper(queue); in amdgpu_userq_create()
888 amdgpu_userq_fence_driver_free(queue); in amdgpu_userq_create()
889 uq_funcs->mqd_destroy(queue); in amdgpu_userq_create()
890 kfree(queue); in amdgpu_userq_create()
903 queue->debugfs_queue = debugfs_create_dir(queue_name, filp->debugfs_client); in amdgpu_userq_create()
904 debugfs_create_file("mqd_info", 0444, queue->debugfs_queue, queue, &amdgpu_mqd_info_fops); in amdgpu_userq_create()
906 amdgpu_userq_init_hang_detect_work(queue); in amdgpu_userq_create()
910 atomic_inc(&uq_mgr->userq_count[queue->queue_type]); in amdgpu_userq_create()
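The create path (lines 759-910) allocates the queue, validates and records its VAs, then sets up the doorbell, fence driver, and MQD before publishing the queue in two xarrays; each failure exit in the listing undoes exactly the steps completed so far. The same unwind expressed as the conventional goto ladder, with hypothetical demo_* stand-ins for the fence-driver and MQD steps:

/* Hypothetical stand-ins for the driver's fence-driver and MQD setup. */
static int demo_fence_driver_alloc(struct demo_refq *q) { return 0; }
static void demo_fence_driver_free(struct demo_refq *q) { }
static int demo_mqd_create(struct demo_refq *q) { return 0; }

static int demo_userq_create(struct demo_refq **out)
{
        struct demo_refq *q;
        int r;

        q = kzalloc(sizeof(*q), GFP_KERNEL);
        if (!q)
                return -ENOMEM;

        r = demo_fence_driver_alloc(q);
        if (r)
                goto err_free;

        r = demo_mqd_create(q);
        if (r)
                goto err_fence;

        kref_init(&q->refcount);        /* initialize before publishing */
        *out = q;
        return 0;

err_fence:
        demo_fence_driver_free(q);
err_free:
        kfree(q);
        return r;
}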
1006 struct amdgpu_usermode_queue *queue; in amdgpu_userq_ioctl() local
1024 queue = __xa_erase(&fpriv->userq_mgr.userq_xa, args->in.queue_id); in amdgpu_userq_ioctl()
1026 if (!queue) in amdgpu_userq_ioctl()
1029 amdgpu_userq_put(queue); in amdgpu_userq_ioctl()
1044 struct amdgpu_usermode_queue *queue; in amdgpu_userq_restore_all() local
1049 xa_for_each(&uq_mgr->userq_xa, queue_id, queue) { in amdgpu_userq_restore_all()
1050 queue = amdgpu_userq_get(uq_mgr, queue_id); in amdgpu_userq_restore_all()
1051 if (!queue) in amdgpu_userq_restore_all()
1054 if (!amdgpu_userq_buffer_vas_mapped(queue)) { in amdgpu_userq_restore_all()
1057 queue->state = AMDGPU_USERQ_STATE_INVALID_VA; in amdgpu_userq_restore_all()
1058 amdgpu_userq_put(queue); in amdgpu_userq_restore_all()
1062 r = amdgpu_userq_restore_helper(queue); in amdgpu_userq_restore_all()
1066 amdgpu_userq_put(queue); in amdgpu_userq_restore_all()
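amdgpu_userq_restore_all() (lines 1044-1066) walks the manager's xarray, pinning each queue with a reference before checking its VAs and restoring it, and dropping the reference at the end of the iteration. The walk, sketched with the demo_refq helpers from the kref sketch above:

static void demo_restore_all(struct xarray *xa)
{
        struct demo_refq *q;
        unsigned long qid;

        xa_for_each(xa, qid, q) {
                q = demo_refq_get(xa, qid);     /* pin against destroy */
                if (!q)
                        continue;
                /* ... check VAs still mapped, then restore the queue ... */
                demo_refq_put(q);
        }
}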
1293 struct amdgpu_usermode_queue *queue; in amdgpu_userq_evict_all() local
1299 xa_for_each(&uq_mgr->userq_xa, queue_id, queue) { in amdgpu_userq_evict_all()
1300 queue = amdgpu_userq_get(uq_mgr, queue_id); in amdgpu_userq_evict_all()
1301 if (!queue) in amdgpu_userq_evict_all()
1303 r = amdgpu_userq_preempt_helper(queue); in amdgpu_userq_evict_all()
1306 amdgpu_userq_put(queue); in amdgpu_userq_evict_all()
1334 struct amdgpu_usermode_queue *queue; in amdgpu_userq_wait_for_signal() local
1338 xa_for_each(&uq_mgr->userq_xa, queue_id, queue) { in amdgpu_userq_wait_for_signal()
1339 queue = amdgpu_userq_get(uq_mgr, queue_id); in amdgpu_userq_wait_for_signal()
1340 if (!queue) in amdgpu_userq_wait_for_signal()
1343 struct dma_fence *f = queue->last_fence; in amdgpu_userq_wait_for_signal()
1346 amdgpu_userq_put(queue); in amdgpu_userq_wait_for_signal()
1353 amdgpu_userq_put(queue); in amdgpu_userq_wait_for_signal()
1356 amdgpu_userq_put(queue); in amdgpu_userq_wait_for_signal()
1406 struct amdgpu_usermode_queue *queue; in amdgpu_userq_mgr_fini() local
1411 queue = xa_find(&userq_mgr->userq_xa, &queue_id, ULONG_MAX, in amdgpu_userq_mgr_fini()
1413 if (queue) in amdgpu_userq_mgr_fini()
1417 if (!queue) in amdgpu_userq_mgr_fini()
1420 amdgpu_userq_put(queue); in amdgpu_userq_mgr_fini()
1430 struct amdgpu_usermode_queue *queue; in amdgpu_userq_suspend() local
1438 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_suspend()
1439 uqm = queue->userq_mgr; in amdgpu_userq_suspend()
1444 r = amdgpu_userq_preempt_helper(queue); in amdgpu_userq_suspend()
1446 r = amdgpu_userq_unmap_helper(queue); in amdgpu_userq_suspend()
1456 struct amdgpu_usermode_queue *queue; in amdgpu_userq_resume() local
1464 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_resume()
1465 uqm = queue->userq_mgr; in amdgpu_userq_resume()
1468 r = amdgpu_userq_restore_helper(queue); in amdgpu_userq_resume()
1470 r = amdgpu_userq_map_helper(queue); in amdgpu_userq_resume()
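Suspend and resume (lines 1430-1470) iterate the device-wide doorbell xarray rather than a single manager's, so queues belonging to every open client are quiesced and brought back; the listing shows each queue taking either the preempt or the unmap path on suspend (and restore or map on resume), presumably chosen by state or policy not visible in these excerpts. A sketch of the suspend-side walk with a hypothetical demo_quiesce() op:

/* Hypothetical per-queue quiesce step (preempt or unmap in the driver). */
static int demo_quiesce(struct demo_refq *q) { return 0; }

static int demo_userq_suspend(struct xarray *doorbell_xa)
{
        struct demo_refq *q;
        unsigned long qid;
        int r, ret = 0;

        xa_for_each(doorbell_xa, qid, q) {
                r = demo_quiesce(q);
                if (r)
                        ret = r;        /* remember a failure, keep walking */
        }
        return ret;
}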
1482 struct amdgpu_usermode_queue *queue; in amdgpu_userq_stop_sched_for_enforce_isolation() local
1494 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_stop_sched_for_enforce_isolation()
1495 uqm = queue->userq_mgr; in amdgpu_userq_stop_sched_for_enforce_isolation()
1498 if (((queue->queue_type == AMDGPU_HW_IP_GFX) || in amdgpu_userq_stop_sched_for_enforce_isolation()
1499 (queue->queue_type == AMDGPU_HW_IP_COMPUTE)) && in amdgpu_userq_stop_sched_for_enforce_isolation()
1500 (queue->xcp_id == idx)) { in amdgpu_userq_stop_sched_for_enforce_isolation()
1502 r = amdgpu_userq_preempt_helper(queue); in amdgpu_userq_stop_sched_for_enforce_isolation()
1516 struct amdgpu_usermode_queue *queue; in amdgpu_userq_start_sched_for_enforce_isolation() local
1528 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_start_sched_for_enforce_isolation()
1529 uqm = queue->userq_mgr; in amdgpu_userq_start_sched_for_enforce_isolation()
1531 if (((queue->queue_type == AMDGPU_HW_IP_GFX) || in amdgpu_userq_start_sched_for_enforce_isolation()
1532 (queue->queue_type == AMDGPU_HW_IP_COMPUTE)) && in amdgpu_userq_start_sched_for_enforce_isolation()
1533 (queue->xcp_id == idx)) { in amdgpu_userq_start_sched_for_enforce_isolation()
1534 r = amdgpu_userq_restore_helper(queue); in amdgpu_userq_start_sched_for_enforce_isolation()
1578 struct amdgpu_usermode_queue *queue; in amdgpu_userq_pre_reset() local
1582 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_pre_reset()
1583 uqm = queue->userq_mgr; in amdgpu_userq_pre_reset()
1585 if (queue->state == AMDGPU_USERQ_STATE_MAPPED) { in amdgpu_userq_pre_reset()
1586 amdgpu_userq_wait_for_last_fence(queue); in amdgpu_userq_pre_reset()
1587 userq_funcs = adev->userq_funcs[queue->queue_type]; in amdgpu_userq_pre_reset()
1588 userq_funcs->unmap(queue); in amdgpu_userq_pre_reset()
1593 queue->state = AMDGPU_USERQ_STATE_HUNG; in amdgpu_userq_pre_reset()
1594 amdgpu_userq_fence_driver_force_completion(queue); in amdgpu_userq_pre_reset()
1605 struct amdgpu_usermode_queue *queue; in amdgpu_userq_post_reset() local
1610 xa_for_each(&adev->userq_doorbell_xa, queue_id, queue) { in amdgpu_userq_post_reset()
1611 if (queue->state == AMDGPU_USERQ_STATE_HUNG && !vram_lost) { in amdgpu_userq_post_reset()
1612 userq_funcs = adev->userq_funcs[queue->queue_type]; in amdgpu_userq_post_reset()
1614 r = userq_funcs->map(queue); in amdgpu_userq_post_reset()
1619 queue->state = AMDGPU_USERQ_STATE_MAPPED; in amdgpu_userq_post_reset()
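The reset hooks (lines 1578-1619) drain and force-unmap every mapped queue before the GPU reset, force-complete its fences, and park it in HUNG; afterwards a hung queue is remapped only if VRAM survived, presumably because a queue whose MQD contents were lost cannot simply be re-enabled. The post-reset decision, sketched against the demo_userq state machine above with a hypothetical demo_map():

/* Hypothetical stand-in for the per-IP map hook. */
static int demo_map(struct demo_userq *q) { return 0; }

static void demo_userq_post_reset(struct demo_userq *q, bool vram_lost)
{
        if (q->state != DEMO_USERQ_STATE_HUNG)
                return;

        /* With VRAM lost the queue's saved state is gone; leave it hung. */
        if (vram_lost)
                return;

        if (!demo_map(q))
                q->state = DEMO_USERQ_STATE_MAPPED;
}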