Lines matching refs:jpeg in drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
(each entry: source line number, matching code, enclosing function; lines without a "jpeg" reference are omitted)

42 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
43 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
44 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
48 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
50 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
51 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_init()
54 if (adev->jpeg.indirect_sram) { in amdgpu_jpeg_sw_init()
58 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_init()
59 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_init()
60 &adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_init()
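
These sw_init lines set up the shared idle-handling state (a delayed work item, the power-gating mutex, an atomic submission counter) and, when indirect SRAM is enabled, allocate one DPG scratch BO per unharvested instance. A minimal sketch of that pattern, using a hypothetical my_jpeg type in place of the real struct amdgpu_jpeg (the real allocation at lines 58-60 goes through amdgpu_bo_create_kernel()):

    #include <linux/workqueue.h>
    #include <linux/mutex.h>
    #include <linux/atomic.h>
    #include <linux/types.h>

    struct my_ring {                    /* stand-in for struct amdgpu_ring */
            u32 emitted;                /* fences emitted but not yet signaled */
    };

    struct my_jpeg_inst {               /* per-instance state */
            struct my_ring ring[2];     /* stand-in for ring_dec[] */
            /* real code also keeps dpg_sram_{bo,gpu_addr,cpu_addr} here */
    };

    struct my_jpeg {
            struct delayed_work idle_work;
            struct mutex pg_lock;       /* jpeg_pg_lock in the real code */
            atomic_t total_submission_cnt;
            u32 harvest_config;         /* bit i set => instance i fused off */
            int num_inst, num_rings;
            struct my_jpeg_inst inst[4];
    };

    static void my_idle_work(struct work_struct *work); /* fleshed out below */

    static int my_jpeg_sw_init(struct my_jpeg *jpeg)
    {
            INIT_DELAYED_WORK(&jpeg->idle_work, my_idle_work);
            mutex_init(&jpeg->pg_lock);
            atomic_set(&jpeg->total_submission_cnt, 0);
            /* real code then loops over instances, skips harvested ones,
             * and amdgpu_bo_create_kernel()s one DPG scratch BO each */
            return 0;
    }

amdgpu_jpeg_sw_fini() (lines 76-92 below) unwinds this in reverse: per-instance BOs and rings first, then the shared reg_list, with mutex_destroy() last.
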
76 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_sw_fini()
77 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_fini()
81 &adev->jpeg.inst[i].dpg_sram_bo, in amdgpu_jpeg_sw_fini()
82 &adev->jpeg.inst[i].dpg_sram_gpu_addr, in amdgpu_jpeg_sw_fini()
83 (void **)&adev->jpeg.inst[i].dpg_sram_cpu_addr); in amdgpu_jpeg_sw_fini()
85 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_sw_fini()
86 amdgpu_ring_fini(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_sw_fini()
89 if (adev->jpeg.reg_list) in amdgpu_jpeg_sw_fini()
92 mutex_destroy(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_fini()
99 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_suspend()
112 container_of(work, struct amdgpu_device, jpeg.idle_work.work); in amdgpu_jpeg_idle_work_handler()
116 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_idle_work_handler()
117 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_idle_work_handler()
120 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) in amdgpu_jpeg_idle_work_handler()
121 fences += amdgpu_fence_count_emitted(&adev->jpeg.inst[i].ring_dec[j]); in amdgpu_jpeg_idle_work_handler()
124 if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) { in amdgpu_jpeg_idle_work_handler()
125 mutex_lock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_idle_work_handler()
128 mutex_unlock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_idle_work_handler()
130 schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_idle_work_handler()
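
The idle handler (lines 112-130) sums fences still outstanding on every unharvested decode ring and gates power only when that count and the in-flight submission counter are both zero; otherwise it re-arms itself. A hedged sketch continuing the my_jpeg types above (in the real code the gating call is amdgpu_device_ip_set_powergating_state() with AMD_PG_STATE_GATE):

    #include <linux/jiffies.h>

    #define MY_IDLE_TIMEOUT msecs_to_jiffies(1000)  /* illustrative stand-in
                                                     * for JPEG_IDLE_TIMEOUT */

    static void my_idle_work(struct work_struct *work)
    {
            struct my_jpeg *jpeg =
                    container_of(work, struct my_jpeg, idle_work.work);
            unsigned int fences = 0;
            int i, j;

            for (i = 0; i < jpeg->num_inst; i++) {
                    if (jpeg->harvest_config & (1U << i))
                            continue;       /* fused off: no rings to check */
                    for (j = 0; j < jpeg->num_rings; j++)
                            fences += jpeg->inst[i].ring[j].emitted;
                            /* real code: amdgpu_fence_count_emitted(ring) */
            }

            if (!fences && !atomic_read(&jpeg->total_submission_cnt)) {
                    mutex_lock(&jpeg->pg_lock);
                    /* real code gates power here (AMD_PG_STATE_GATE) */
                    mutex_unlock(&jpeg->pg_lock);
            } else {
                    schedule_delayed_work(&jpeg->idle_work, MY_IDLE_TIMEOUT);
            }
    }
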
137 atomic_inc(&adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_begin_use()
138 cancel_delayed_work_sync(&adev->jpeg.idle_work); in amdgpu_jpeg_ring_begin_use()
140 mutex_lock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
143 mutex_unlock(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_ring_begin_use()
148 atomic_dec(&ring->adev->jpeg.total_submission_cnt); in amdgpu_jpeg_ring_end_use()
149 schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT); in amdgpu_jpeg_ring_end_use()
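
Lines 137-149 are the bracket every submission takes around that handler: begin_use bumps the counter and synchronously cancels pending idle work so the handler cannot gate power mid-submission, then ungates under the lock; end_use drops the counter and re-arms the timer. Sketch, same assumed types:

    static void my_ring_begin_use(struct my_jpeg *jpeg)
    {
            atomic_inc(&jpeg->total_submission_cnt);

            /* Synchronous cancel: after this returns, the idle handler is
             * guaranteed not to be running concurrently. */
            cancel_delayed_work_sync(&jpeg->idle_work);

            mutex_lock(&jpeg->pg_lock);
            /* real code ungates power here (AMD_PG_STATE_UNGATE) */
            mutex_unlock(&jpeg->pg_lock);
    }

    static void my_ring_end_use(struct my_jpeg *jpeg)
    {
            atomic_dec(&jpeg->total_submission_cnt);
            schedule_delayed_work(&jpeg->idle_work, MY_IDLE_TIMEOUT);
    }

The counter matters because a job can sit between begin_use and its first fence emission; in that window the rings show zero emitted fences, and only total_submission_cnt keeps the idle handler from gating.
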
167 WREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe], 0xCAFEDEAD); in amdgpu_jpeg_dec_ring_test_ring()
169 RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
171 amdgpu_ring_write(ring, PACKET0(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0)); in amdgpu_jpeg_dec_ring_test_ring()
176 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ring()
206 ib->ptr[0] = PACKETJ(adev->jpeg.internal.jpeg_pitch[ring->pipe], 0, 0, PACKETJ_TYPE0); in amdgpu_jpeg_dec_set_reg()
253 tmp = RREG32(adev->jpeg.inst[ring->me].external.jpeg_pitch[ring->pipe]); in amdgpu_jpeg_dec_ring_test_ib()
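
The ring test (lines 167-176) and the IB test (line 253) both use the scratch-register pattern: the CPU seeds the external (MMIO) jpeg_pitch register with 0xCAFEDEAD, the ring is asked to overwrite it through a packet targeting the internal register index, and the CPU polls until the value changes. A generic sketch of the poll, with hypothetical my_rreg32()/my_wreg32() standing in for the RREG32()/WREG32() accessors:

    #include <linux/delay.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    u32 my_rreg32(u32 reg);
    void my_wreg32(u32 reg, u32 val);

    static int my_scratch_ring_test(u32 scratch, int usec_timeout)
    {
            int i;

            my_wreg32(scratch, 0xCAFEDEAD); /* seed, as at line 167 */
            my_rreg32(scratch);             /* posting read, as at line 169 */

            /* real code: amdgpu_ring_alloc(), amdgpu_ring_write() of the
             * PACKET0 from line 171, then amdgpu_ring_commit() */

            for (i = 0; i < usec_timeout; i++) {
                    if (my_rreg32(scratch) != 0xCAFEDEAD)
                            return 0;       /* ring overwrote the seed: alive */
                    udelay(1);
            }
            return -ETIMEDOUT;              /* engine never answered */
    }

The set_reg IB at line 206 performs the same register write from inside an indirect buffer (PACKETJ), which is why the IB test at line 253 can poll the identical external register.
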
274 struct ras_common_if *ras_if = adev->jpeg.ras_if; in amdgpu_jpeg_process_poison_irq()
297 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_jpeg_ras_late_init()
298 if (adev->jpeg.harvest_config & (1 << i) || in amdgpu_jpeg_ras_late_init()
299 !adev->jpeg.inst[i].ras_poison_irq.funcs) in amdgpu_jpeg_ras_late_init()
302 r = amdgpu_irq_get(adev, &adev->jpeg.inst[i].ras_poison_irq, 0); in amdgpu_jpeg_ras_late_init()
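
RAS late init (lines 297-302) enables the per-instance poison interrupt only where the instance exists and an IP version actually installed irq handlers. A sketch of that guard, with a hypothetical my_irq_src array passed alongside the my_jpeg type from above and my_irq_get() standing in for amdgpu_irq_get():

    struct my_irq_src {
            const void *funcs;      /* NULL until handlers are registered */
    };

    int my_irq_get(struct my_irq_src *src);

    static int my_ras_late_init(struct my_jpeg *jpeg,
                                struct my_irq_src *poison_irq /* per instance */)
    {
            int i, r;

            for (i = 0; i < jpeg->num_inst; i++) {
                    if (jpeg->harvest_config & (1U << i) ||
                        !poison_irq[i].funcs)   /* mirror of lines 298-299 */
                            continue;
                    r = my_irq_get(&poison_irq[i]);
                    if (r)
                            return r;
            }
            return 0;
    }
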
319 if (!adev->jpeg.ras) in amdgpu_jpeg_ras_sw_init()
322 ras = adev->jpeg.ras; in amdgpu_jpeg_ras_sw_init()
332 adev->jpeg.ras_if = &ras->ras_block.ras_comm; in amdgpu_jpeg_ras_sw_init()
345 .mc_addr = adev->jpeg.inst[inst_idx].dpg_sram_gpu_addr, in amdgpu_jpeg_psp_update_sram()
346 .ucode_size = ((uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_curr_addr - in amdgpu_jpeg_psp_update_sram()
347 (uintptr_t)adev->jpeg.inst[inst_idx].dpg_sram_cpu_addr), in amdgpu_jpeg_psp_update_sram()
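
The ucode_size at lines 346-347 is plain pointer arithmetic: dpg_sram_curr_addr advances as 32-bit indirect-register writes are appended to the CPU mapping, so its distance from dpg_sram_cpu_addr is exactly the byte count PSP must copy from mc_addr. In miniature:

    #include <linux/types.h>

    /* base: start of the CPU mapping; curr: one past the last appended word */
    static inline u32 dpg_bytes_used(const u32 *base, const u32 *curr)
    {
            return (uintptr_t)curr - (uintptr_t)base;
    }

    /* e.g. after 12 appended words: dpg_bytes_used(base, base + 12) == 48 */
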
367 mask = (1ULL << (adev->jpeg.num_jpeg_inst * adev->jpeg.num_jpeg_rings)) - 1; in amdgpu_debugfs_jpeg_sched_mask_set()
371 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_set()
372 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_set()
373 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_set()
374 if (val & (BIT_ULL((i * adev->jpeg.num_jpeg_rings) + j))) in amdgpu_debugfs_jpeg_sched_mask_set()
394 for (i = 0; i < adev->jpeg.num_jpeg_inst; ++i) { in amdgpu_debugfs_jpeg_sched_mask_get()
395 for (j = 0; j < adev->jpeg.num_jpeg_rings; ++j) { in amdgpu_debugfs_jpeg_sched_mask_get()
396 ring = &adev->jpeg.inst[i].ring_dec[j]; in amdgpu_debugfs_jpeg_sched_mask_get()
398 mask |= 1ULL << ((i * adev->jpeg.num_jpeg_rings) + j); in amdgpu_debugfs_jpeg_sched_mask_get()
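
The debugfs scheduler mask flattens (instance, ring) pairs into bit i * num_jpeg_rings + j (lines 374 and 398), and line 367 builds the all-rings validity mask the same way. A small sketch of the encode and the validity check (rejecting an all-zero mask is this sketch's assumption, not confirmed by the listing):

    #include <linux/bits.h>
    #include <linux/types.h>

    static inline u64 jpeg_ring_bit(int i, int j, int num_rings)
    {
            return BIT_ULL(i * num_rings + j);      /* ring j of instance i */
    }

    static inline bool jpeg_mask_valid(u64 val, int num_inst, int num_rings)
    {
            u64 all = BIT_ULL(num_inst * num_rings) - 1;    /* as at line 367 */

            return val && !(val & ~all);    /* no bits past the last ring */
    }

Line 418 then skips creating the debugfs file entirely when there is a single instance with a single ring, where a mask would be meaningless.
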
418 if (!(adev->jpeg.num_jpeg_inst > 1) && !(adev->jpeg.num_jpeg_rings > 1)) in amdgpu_debugfs_jpeg_sched_mask_init()
436 return amdgpu_show_reset_mask(buf, adev->jpeg.supported_reset); in amdgpu_get_jpeg_reset_mask()
446 if (adev->jpeg.num_jpeg_inst) { in amdgpu_jpeg_sysfs_reset_mask_init()
458 if (adev->jpeg.num_jpeg_inst) in amdgpu_jpeg_sysfs_reset_mask_fini()
466 adev->jpeg.ip_dump = kcalloc(adev->jpeg.num_jpeg_inst * count, in amdgpu_jpeg_reg_dump_init()
468 if (!adev->jpeg.ip_dump) { in amdgpu_jpeg_reg_dump_init()
473 adev->jpeg.reg_list = reg; in amdgpu_jpeg_reg_dump_init()
474 adev->jpeg.reg_count = count; in amdgpu_jpeg_reg_dump_init()
481 kfree(adev->jpeg.ip_dump); in amdgpu_jpeg_reg_dump_fini()
482 adev->jpeg.reg_list = NULL; in amdgpu_jpeg_reg_dump_fini()
483 adev->jpeg.reg_count = 0; in amdgpu_jpeg_reg_dump_fini()
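
The register-dump storage (lines 466-474) is one flat kcalloc of num_jpeg_inst * count 32-bit slots; instance i's block starts at i * reg_count, the inst_off computed at lines 500 and 531. Sketch of the allocation and indexing, with a hypothetical my_dump holder:

    #include <linux/slab.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    struct my_dump {
            u32 *ip_dump;   /* flat [num_inst * reg_count] value array */
            int reg_count;
    };

    static int my_dump_init(struct my_dump *d, int num_inst, int count)
    {
            d->ip_dump = kcalloc(num_inst * count, sizeof(u32), GFP_KERNEL);
            if (!d->ip_dump)
                    return -ENOMEM;
            d->reg_count = count;
            return 0;
    }

    /* slot for register j of instance i */
    static u32 *my_dump_slot(struct my_dump *d, int i, int j)
    {
            return &d->ip_dump[i * d->reg_count + j];
    }
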
492 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_dump_ip_state()
495 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_dump_ip_state()
496 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_dump_ip_state()
500 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_dump_ip_state()
502 adev->jpeg.ip_dump[inst_off] = in amdgpu_jpeg_dump_ip_state()
503 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[0], in amdgpu_jpeg_dump_ip_state()
505 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_dump_ip_state()
508 for (j = 1; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_dump_ip_state()
509 adev->jpeg.ip_dump[inst_off + j] = in amdgpu_jpeg_dump_ip_state()
510 RREG32(SOC15_REG_ENTRY_OFFSET_INST(adev->jpeg.reg_list[j], in amdgpu_jpeg_dump_ip_state()
521 if (!adev->jpeg.ip_dump) in amdgpu_jpeg_print_ip_state()
524 drm_printf(p, "num_instances:%d\n", adev->jpeg.num_jpeg_inst); in amdgpu_jpeg_print_ip_state()
525 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_print_ip_state()
526 if (adev->jpeg.harvest_config & (1 << i)) { in amdgpu_jpeg_print_ip_state()
531 inst_off = i * adev->jpeg.reg_count; in amdgpu_jpeg_print_ip_state()
532 is_powered = ((adev->jpeg.ip_dump[inst_off] & 0x1) != 1); in amdgpu_jpeg_print_ip_state()
536 for (j = 0; j < adev->jpeg.reg_count; j++) in amdgpu_jpeg_print_ip_state()
537 drm_printf(p, "%-50s \t 0x%08x\n", adev->jpeg.reg_list[j].reg_name, in amdgpu_jpeg_print_ip_state()
538 adev->jpeg.ip_dump[inst_off + j]); in amdgpu_jpeg_print_ip_state()
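
dump_ip_state captures the first register of each unharvested instance, derives is_powered from bit 0 (clear means powered, per lines 505 and 532), and evidently reads the remaining registers only for powered instances; the guard itself contains no "jpeg" reference, which is why it is absent from this listing. print_ip_state then walks the same flat array. A sketch of the print side, reusing the my_dump holder above (the real code prints reg_list[j].reg_name; the sketch only has an index):

    #include <drm/drm_print.h>

    static void my_print_ip_state(struct drm_printer *p, const struct my_dump *d,
                                  int num_inst, u32 harvest)
    {
            int i, j;

            drm_printf(p, "num_instances:%d\n", num_inst);
            for (i = 0; i < num_inst; i++) {
                    int off = i * d->reg_count;

                    if (harvest & (1U << i))
                            continue;       /* nothing captured for this one */

                    if ((d->ip_dump[off] & 0x1) == 1) {     /* bit 0 set: gated */
                            drm_printf(p, "inst %d powered off\n", i);
                            continue;       /* slots past [0] were never read */
                    }
                    for (j = 0; j < d->reg_count; j++)
                            drm_printf(p, "reg[%d]\t0x%08x\n",
                                       j, d->ip_dump[off + j]);
            }
    }
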