
Searched refs:gpu_read (Results 1 – 24 of 24) sorted by relevance

/linux/drivers/gpu/drm/panthor/
panthor_hw.c
116 ptdev->gpu_info.csf_id = gpu_read(ptdev, GPU_CSF_ID); in panthor_gpu_info_init()
117 ptdev->gpu_info.gpu_rev = gpu_read(ptdev, GPU_REVID); in panthor_gpu_info_init()
118 ptdev->gpu_info.core_features = gpu_read(ptdev, GPU_CORE_FEATURES); in panthor_gpu_info_init()
119 ptdev->gpu_info.l2_features = gpu_read(ptdev, GPU_L2_FEATURES); in panthor_gpu_info_init()
120 ptdev->gpu_info.tiler_features = gpu_read(ptdev, GPU_TILER_FEATURES); in panthor_gpu_info_init()
121 ptdev->gpu_info.mem_features = gpu_read(ptdev, GPU_MEM_FEATURES); in panthor_gpu_info_init()
122 ptdev->gpu_info.mmu_features = gpu_read(ptdev, GPU_MMU_FEATURES); in panthor_gpu_info_init()
123 ptdev->gpu_info.thread_features = gpu_read(ptdev, GPU_THREAD_FEATURES); in panthor_gpu_info_init()
124 ptdev->gpu_info.max_threads = gpu_read(ptdev, GPU_THREAD_MAX_THREADS); in panthor_gpu_info_init()
125 ptdev->gpu_info.thread_max_workgroup_size = gpu_read(ptdev, GPU_THREAD_MAX_WORKGROUP_SIZE); in panthor_gpu_info_init()
[all …]
panthor_device.h
415 if (!gpu_read(ptdev, __reg_prefix ## _INT_STAT)) \
429 u32 status = gpu_read(ptdev, __reg_prefix ## _INT_RAWSTAT) & pirq->mask; \
482 static inline u32 gpu_read(struct panthor_device *ptdev, u32 reg) in gpu_read() function
500 return (gpu_read(ptdev, reg) | ((u64)gpu_read(ptdev, reg + 4) << 32)); in gpu_read64()
513 hi1 = gpu_read(ptdev, reg + 4); in gpu_read64_counter()
514 lo = gpu_read(ptdev, reg); in gpu_read64_counter()
515 hi2 = gpu_read(ptdev, reg + 4); in gpu_read64_counter()
521 read_poll_timeout(gpu_read, val, cond, delay_us, timeout_us, false, \
526 read_poll_timeout_atomic(gpu_read, val, cond, delay_us, timeout_us, \
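
The panthor_device.h hits show gpu_read() as the driver's basic 32-bit MMIO accessor, gpu_read64() composing two 32-bit reads into one value (line 500), and gpu_read64_counter() re-reading the high word so a counter rollover between the two halves cannot produce a torn result (lines 513-515). A minimal sketch of that pattern, assuming a readl()-based accessor and an iomem base field; the real header may name fields differently or add barriers:

#include <linux/io.h>

/* Sketch only: mirrors the structure visible in the panthor_device.h hits. */
static inline u32 gpu_read(struct panthor_device *ptdev, u32 reg)
{
	return readl(ptdev->iomem + reg);	/* assumed MMIO base field */
}

static inline u64 gpu_read64(struct panthor_device *ptdev, u32 reg)
{
	/* Low word at reg, high word at reg + 4, combined into a u64. */
	return gpu_read(ptdev, reg) | ((u64)gpu_read(ptdev, reg + 4) << 32);
}

static inline u64 gpu_read64_counter(struct panthor_device *ptdev, u32 reg)
{
	u32 lo, hi1, hi2;

	/* Re-read the high word until it is stable, so a rollover between
	 * the two 32-bit reads cannot yield a torn 64-bit value. */
	do {
		hi1 = gpu_read(ptdev, reg + 4);
		lo = gpu_read(ptdev, reg);
		hi2 = gpu_read(ptdev, reg + 4);
	} while (hi1 != hi2);

	return lo | ((u64)hi2 << 32);
}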
panthor_gpu.c
74 l2_config = gpu_read(ptdev, GPU_L2_CONFIG); in panthor_gpu_l2_config_set()
84 u32 fault_status = gpu_read(ptdev, GPU_FAULT_STATUS); in panthor_gpu_irq_handler()
311 !(gpu_read(ptdev, GPU_INT_RAWSTAT) & GPU_IRQ_CLEAN_CACHES_COMPLETED)) in panthor_gpu_flush_caches()
351 !(gpu_read(ptdev, GPU_INT_RAWSTAT) & GPU_IRQ_RESET_COMPLETED)) in panthor_gpu_soft_reset()
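
The panthor_gpu.c hits poll GPU_INT_RAWSTAT for completion bits (cache clean, reset) rather than relying only on the interrupt handler. The panthor_device.h match at line 521 shows the driver wraps this in a helper built on read_poll_timeout() from linux/iopoll.h; a hedged sketch of the same kind of wait, using that primitive directly (the 10 us poll interval and 100 ms timeout are illustrative, not the driver's actual values):

#include <linux/iopoll.h>

/* Illustrative wait: spin until RESET_COMPLETED shows up in the raw IRQ
 * status, or give up after a made-up 100 ms. Returns 0 or -ETIMEDOUT. */
static int wait_reset_completed_sketch(struct panthor_device *ptdev)
{
	u32 val;

	return read_poll_timeout(gpu_read, val,
				 val & GPU_IRQ_RESET_COMPLETED,
				 10, 100000, false,
				 ptdev, GPU_INT_RAWSTAT);
}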
panthor_fw.c
1089 !(gpu_read(ptdev, JOB_INT_STAT) & JOB_INT_GLOBAL_IF)) in panthor_fw_start()
1100 u32 status = gpu_read(ptdev, MCU_STATUS); in panthor_fw_start()
1125 halted = gpu_read(ptdev, MCU_STATUS) == MCU_STATUS_HALT; in panthor_fw_mcu_halted()
panthor_pwr.c
84 return gpu_read(ptdev, PWR_INT_RAWSTAT) & PWR_IRQ_RESET_COMPLETED; in reset_irq_raised()
panthor_device.c
38 if ((gpu_read(ptdev, GPU_COHERENCY_FEATURES) & in panthor_gpu_coherency_init()
panthor_mmu.c
1649 fault_status = gpu_read(ptdev, AS_FAULTSTATUS(as)); in panthor_mmu_irq_handler()
/linux/drivers/gpu/drm/msm/adreno/
a8xx_gpu.c
79 val = gpu_read(gpu, offset); in a8xx_read_pipe_slice()
130 if (gpu_read(gpu, REG_A8XX_RBBM_STATUS) & in _a8xx_check_idle()
134 return !(gpu_read(gpu, REG_A8XX_RBBM_INT_0_STATUS) & in _a8xx_check_idle()
148 gpu_read(gpu, REG_A8XX_RBBM_STATUS), in a8xx_idle()
149 gpu_read(gpu, REG_A8XX_RBBM_INT_0_STATUS), in a8xx_idle()
150 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a8xx_idle()
151 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a8xx_idle()
510 gpu_read(gpu, REG_A6XX_GBIF_HALT); in hw_init()
513 gpu_read(gpu, REG_A8XX_RBBM_GBIF_HALT); in hw_init()
718 DRM_DEV_INFO(&gpu->pdev->dev, "status: %08x\n", gpu_read(gpu, REG_A8XX_RBBM_STATUS)); in a8xx_dump()
[all …]
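
Two of the a8xx hw_init() hits (lines 510 and 513) read the GBIF halt registers without using the returned value. A bare read-back like this is commonly used in MMIO code to make sure the preceding register write has actually reached the device before execution continues; a hedged sketch of that idiom, where the written value (0, i.e. "clear halt") is an assumption about the surrounding context:

/* Sketch of the read-back idiom suggested by the a8xx hw_init() hits. */
static void gbif_halt_flush_sketch(struct msm_gpu *gpu)
{
	gpu_write(gpu, REG_A6XX_GBIF_HALT, 0);	/* assumed: clear the halt */
	gpu_read(gpu, REG_A6XX_GBIF_HALT);	/* read back; value unused */
}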
a5xx_gpu.c
1023 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(i))); in a5xx_recover()
1030 gpu_read(gpu, REG_A5XX_RBBM_SW_RESET_CMD); in a5xx_recover()
1070 if (gpu_read(gpu, REG_A5XX_RBBM_STATUS) & ~A5XX_RBBM_STATUS_HI_BUSY) in _a5xx_check_idle()
1077 return !(gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS) & in _a5xx_check_idle()
1098 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_idle()
1099 gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS), in a5xx_idle()
1100 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_idle()
1101 gpu_read(gpu, REG_A5XX_CP_RB_WPTR)); in a5xx_idle()
1114 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(4)), in a5xx_fault_handler()
1115 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(5)), in a5xx_fault_handler()
[all …]
a2xx_gpu.c
274 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a2xx_recover()
282 gpu_read(gpu, REG_A2XX_RBBM_SOFT_RESET); in a2xx_recover()
306 if (spin_until(!(gpu_read(gpu, REG_A2XX_RBBM_STATUS) & in a2xx_idle()
321 mstatus = gpu_read(gpu, REG_A2XX_MASTER_INT_SIGNAL); in a2xx_irq()
324 status = gpu_read(gpu, REG_A2XX_MH_INTERRUPT_STATUS); in a2xx_irq()
328 gpu_read(gpu, REG_A2XX_MH_MMU_PAGE_FAULT)); in a2xx_irq()
334 status = gpu_read(gpu, REG_AXXX_CP_INT_STATUS); in a2xx_irq()
344 status = gpu_read(gpu, REG_A2XX_RBBM_INT_STATUS); in a2xx_irq()
451 gpu_read(gpu, REG_A2XX_RBBM_STATUS)); in a2xx_dump()
464 state->rbbm_status = gpu_read(gpu, REG_A2XX_RBBM_STATUS); in a2xx_gpu_state_get()
[all …]
a6xx_gpu.c
120 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & in _a6xx_check_idle()
124 return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) & in _a6xx_check_idle()
137 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_idle()
138 gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS), in a6xx_idle()
139 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_idle()
140 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a6xx_idle()
687 val = gpu_read(gpu, REG_A6XX_RBBM_CLOCK_CNTL); in a6xx_set_hwcg()
894 *dest++ = gpu_read(gpu, reglist->regs[i]); in a7xx_patch_pwrup_reglist()
903 *dest++ = gpu_read(gpu, reglist->regs[i]); in a7xx_patch_pwrup_reglist()
931 *dest++ = gpu_read(gpu, dyn_pwrup_reglist->regs[i].offset); in a7xx_patch_pwrup_reglist()
[all …]
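
The check-idle hits across a6xx, a5xx and a8xx share one shape: the GPU counts as idle only if no busy bit is set in RBBM_STATUS and no interrupt of interest is still pending in RBBM_INT_0_STATUS. A sketch of that shape; the two masks are placeholders, not the driver's real per-generation bitfield names:

#include <linux/bits.h>

/* Placeholder masks standing in for the real RBBM busy / hang-detect bits. */
#define SKETCH_BUSY_MASK	GENMASK(23, 0)
#define SKETCH_HANG_IRQ_MASK	BIT(1)

static bool check_idle_sketch(struct msm_gpu *gpu)
{
	/* Any busy bit still set in RBBM_STATUS means we are not idle. */
	if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & SKETCH_BUSY_MASK)
		return false;

	/* Idle only if no hang-detect style interrupt is left pending. */
	return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) & SKETCH_HANG_IRQ_MASK);
}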
a4xx_gpu.c
277 val = gpu_read(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ); in a4xx_hw_init()
358 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a4xx_recover()
366 gpu_read(gpu, REG_A4XX_RBBM_SW_RESET_CMD); in a4xx_recover()
392 if (spin_until(!(gpu_read(gpu, REG_A4XX_RBBM_STATUS) & in a4xx_idle()
406 status = gpu_read(gpu, REG_A4XX_RBBM_INT_0_STATUS); in a4xx_irq()
410 uint32_t reg = gpu_read(gpu, REG_A4XX_CP_PROTECT_STATUS); in a4xx_irq()
560 state->rbbm_status = gpu_read(gpu, REG_A4XX_RBBM_STATUS); in a4xx_gpu_state_get()
568 gpu_read(gpu, REG_A4XX_RBBM_STATUS)); in a4xx_dump()
586 reg = gpu_read(gpu, REG_A4XX_RBBM_POWER_STATUS); in a4xx_pm_resume()
626 ring->memptrs->rptr = gpu_read(gpu, REG_A4XX_CP_RB_RPTR); in a4xx_get_rptr()
a3xx_gpu.c
374 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a3xx_recover()
382 gpu_read(gpu, REG_A3XX_RBBM_SW_RESET_CMD); in a3xx_recover()
408 if (spin_until(!(gpu_read(gpu, REG_A3XX_RBBM_STATUS) & in a3xx_idle()
423 status = gpu_read(gpu, REG_A3XX_RBBM_INT_0_STATUS); in a3xx_irq()
477 gpu_read(gpu, REG_A3XX_RBBM_STATUS)); in a3xx_dump()
490 state->rbbm_status = gpu_read(gpu, REG_A3XX_RBBM_STATUS); in a3xx_gpu_state_get()
507 ring->memptrs->rptr = gpu_read(gpu, REG_AXXX_CP_RB_RPTR); in a3xx_get_rptr()
a6xx_gpu_state.c
195 data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2); in debugbus_read()
196 data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1); in debugbus_read()
245 data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT); in vbif_debugbus_read()
275 clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON); in a6xx_get_vbif_debugbus_block()
1149 obj->data[index++] = gpu_read(gpu, in a6xx_get_ahb_gpu_registers()
1174 obj->data[index++] = gpu_read(gpu, regs[i] + j); in a7xx_get_ahb_gpu_registers()
1446 return gpu_read(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2) >> 14; in a6xx_get_cp_roq_size()
1458 return 4 * (gpu_read(gpu, REG_A6XX_CP_SQE_UCODE_DBG_DATA) >> 20); in a7xx_get_cp_roq_size()
1484 obj->data[i] = gpu_read(gpu, indexed->data); in a6xx_get_indexed_regs()
1506 val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG); in a6xx_get_indexed_registers()
[all …]
a5xx_gpu.h
146 if ((gpu_read(gpu, reg) & mask) == value) in spin_usecs()
a5xx_preempt.c
194 status = gpu_read(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL); in a5xx_preempt_irq()
a6xx_preempt.c
174 status = gpu_read(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL); in a6xx_preempt_irq()
adreno_gpu.c
808 state->registers[pos++] = gpu_read(gpu, addr); in adreno_gpu_state_get()
1067 uint32_t val = gpu_read(gpu, addr); in adreno_dump()
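
The adreno_gpu.c hits capture register contents for crash dumps by walking register ranges and reading each offset through gpu_read(). A hedged sketch of such a dump loop; the flat (first, last) ranges array and the offset/value output layout are illustrative, not the driver's actual table format:

/* Sketch: dump every register in a list of (first, last) offset pairs,
 * recording offset/value pairs into 'out'. Returns the number of u32s
 * written. Layouts are illustrative only. */
static int dump_ranges_sketch(struct msm_gpu *gpu, const u32 *ranges,
			      unsigned int nr_pairs, u32 *out)
{
	unsigned int i;
	int pos = 0;

	for (i = 0; i < nr_pairs; i++) {
		u32 addr;

		for (addr = ranges[2 * i]; addr <= ranges[2 * i + 1]; addr++) {
			out[pos++] = addr;
			out[pos++] = gpu_read(gpu, addr);
		}
	}

	return pos;
}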
/linux/drivers/gpu/drm/etnaviv/
etnaviv_sched.c
54 dma_addr = gpu_read(gpu, VIVS_FE_DMA_ADDRESS); in etnaviv_sched_timedout_job()
62 primid = gpu_read(gpu, VIVS_MC_PROFILE_FE_READ); in etnaviv_sched_timedout_job()
etnaviv_iommu_v2.c
172 if (gpu_read(gpu, VIVS_MMUv2_CONTROL) & VIVS_MMUv2_CONTROL_ENABLE) in etnaviv_iommuv2_restore_nonsec()
196 if (gpu_read(gpu, VIVS_MMUv2_SEC_CONTROL) & VIVS_MMUv2_SEC_CONTROL_ENABLE) in etnaviv_iommuv2_restore_sec()
etnaviv_gpu.h
175 static inline u32 gpu_read(struct etnaviv_gpu *gpu, u32 reg) in gpu_read() function
etnaviv_dump.c
94 reg->value = cpu_to_le32(gpu_read(gpu, read_addr)); in etnaviv_core_dump_registers()
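
The etnaviv_gpu.h hit at line 175 is etnaviv's own definition of the same accessor name. A one-line sketch, assuming a plain readl() over the mapped register window; the mmio field name is an assumption:

static inline u32 gpu_read(struct etnaviv_gpu *gpu, u32 reg)
{
	return readl(gpu->mmio + reg);	/* assumed register base field */
}

The etnaviv_dump.c hit also shows each value passed through cpu_to_le32() before being stored, so core dumps keep a fixed byte order regardless of the host.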
/linux/drivers/gpu/drm/msm/
msm_gpu.h
601 static inline u32 gpu_read(struct msm_gpu *gpu, u32 reg) in gpu_read() function
msm_gpu.c
688 current_cntrs[i] = gpu_read(gpu, gpu->perfcntrs[i].sample_reg); in update_hw_cntrs()
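
msm_gpu.h hosts the shared gpu_read() accessor used by all the adreno files above, and msm_gpu.c samples each exposed performance counter through it. A rough sketch of that sampling loop; the delta bookkeeping and field names such as num_perfcntrs and last_cntrs are assumptions here, not a copy of the driver:

/* Illustrative: read each hardware perf counter via gpu_read() and report
 * the delta since the previous sample. Field names are assumed. */
static void sample_counters_sketch(struct msm_gpu *gpu, u32 *deltas)
{
	unsigned int i;

	for (i = 0; i < gpu->num_perfcntrs; i++) {
		u32 cur = gpu_read(gpu, gpu->perfcntrs[i].sample_reg);

		deltas[i] = cur - gpu->last_cntrs[i];	/* assumed field */
		gpu->last_cntrs[i] = cur;
	}
}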