Searched refs: a6xx_gpu (Results 1 – 8 of 8) sorted by relevance

/linux/drivers/gpu/drm/msm/adreno/
a6xx_preempt.c
16 static inline bool try_preempt_state(struct a6xx_gpu *a6xx_gpu, in try_preempt_state() argument
19 enum a6xx_preempt_state cur = atomic_cmpxchg(&a6xx_gpu->preempt_state, in try_preempt_state()
29 static inline void set_preempt_state(struct a6xx_gpu *gpu, in set_preempt_state()
44 static inline void update_wptr(struct a6xx_gpu *a6xx_gpu, struct msm_ringbuffer *ring) in update_wptr() argument
54 a6xx_fenced_write(a6xx_gpu, REG_A6XX_CP_RB_WPTR, wptr, BIT(0), false); in update_wptr()
66 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in get_next_ring() local
77 if (!empty && ring == a6xx_gpu->cur_ring) in get_next_ring()
78 empty = ring->memptrs->fence == a6xx_gpu->last_seqno[i]; in get_next_ring()
90 struct a6xx_gpu *a6xx_gpu = timer_container_of(a6xx_gpu, t, in a6xx_preempt_timer() local
92 struct msm_gpu *gpu = &a6xx_gpu->base.base; in a6xx_preempt_timer()
[all …]
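
The try_preempt_state() hits above show the preemption state machine advancing with an atomic compare-and-swap instead of a lock. Below is a minimal userspace sketch of that transition pattern, with C11 stdatomic standing in for the kernel's atomic_cmpxchg() and hypothetical state names:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

enum preempt_state { PREEMPT_NONE, PREEMPT_START, PREEMPT_TRIGGERED };

static _Atomic enum preempt_state preempt_state = PREEMPT_NONE;

static bool try_preempt_state(enum preempt_state old, enum preempt_state new)
{
        enum preempt_state expected = old;

        /* Succeeds only if the state is still 'old'; a racing CPU makes it fail. */
        return atomic_compare_exchange_strong(&preempt_state, &expected, new);
}

int main(void)
{
        printf("NONE -> START: %d\n", try_preempt_state(PREEMPT_NONE, PREEMPT_START));
        printf("NONE -> START again: %d\n", try_preempt_state(PREEMPT_NONE, PREEMPT_START));
        return 0;
}

A failed exchange means another CPU moved the state first, so the caller backs off instead of blocking.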

a6xx_gpu.c
19 static u64 read_gmu_ao_counter(struct a6xx_gpu *a6xx_gpu) in read_gmu_ao_counter() argument
24 count_hi = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_H); in read_gmu_ao_counter()
25 count_lo = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_L); in read_gmu_ao_counter()
26 temp = gmu_read(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_H); in read_gmu_ao_counter()
50 static int fenced_write(struct a6xx_gpu *a6xx_gpu, u32 offset, u32 value, u32 mask) in fenced_write() argument
52 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in fenced_write()
54 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in fenced_write()
94 int a6xx_fenced_write(struct a6xx_gpu *a6xx_gpu, u32 offset, u64 value, u32 mask, bool is_64b) in a6xx_fenced_write() argument
98 ret = fenced_write(a6xx_gpu, offset, lower_32_bits(value), mask); in a6xx_fenced_write()
105 ret = fenced_write(a6xx_gpu, offset + 1, upper_32_bits(value), mask); in a6xx_fenced_write()
[all …]
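
read_gmu_ao_counter() above reads the always-on counter's high word, then the low word, then the high word again: the 64-bit counter is exposed as two 32-bit registers, and the low word can carry into the high word between the two reads. A sketch of that high/low/high retry pattern, with reg_read() as a hypothetical stand-in for gmu_read():

#include <stdint.h>
#include <stdio.h>

static uint32_t fake_regs[2];                 /* stub backing store */

static uint32_t reg_read(int reg)             /* stand-in for gmu_read() */
{
        return fake_regs[reg];
}

#define COUNTER_H 0
#define COUNTER_L 1

static uint64_t read_counter64(void)
{
        uint32_t hi, lo, check;

        do {
                hi = reg_read(COUNTER_H);
                lo = reg_read(COUNTER_L);
                check = reg_read(COUNTER_H);  /* re-read: did the low word wrap? */
        } while (hi != check);               /* retry until the pair is stable */

        return ((uint64_t)hi << 32) | lo;
}

int main(void)
{
        fake_regs[COUNTER_H] = 1;
        fake_regs[COUNTER_L] = 0x42;
        printf("counter = 0x%llx\n", (unsigned long long)read_counter64());
        return 0;
}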

a6xx_gpu.h
59 struct a6xx_gpu { struct
117 #define to_a6xx_gpu(x) container_of(x, struct a6xx_gpu, base) argument
243 static inline void a6xx_llc_rmw(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 mask, u32 or) in a6xx_llc_rmw() argument
245 return msm_rmw(a6xx_gpu->llc_mmio + (reg << 2), mask, or); in a6xx_llc_rmw()
248 static inline u32 a6xx_llc_read(struct a6xx_gpu *a6xx_gpu, u32 reg) in a6xx_llc_read() argument
250 return readl(a6xx_gpu->llc_mmio + (reg << 2)); in a6xx_llc_read()
253 static inline void a6xx_llc_write(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 value) in a6xx_llc_write() argument
255 writel(value, a6xx_gpu->llc_mmio + (reg << 2)); in a6xx_llc_write()
261 int a6xx_gmu_resume(struct a6xx_gpu *gpu);
262 int a6xx_gmu_stop(struct a6xx_gpu *gpu);
[all …]
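
to_a6xx_gpu() above is a container_of() cast: given a pointer to the embedded adreno_gpu base member, it recovers the enclosing a6xx_gpu. A self-contained userspace sketch, with the struct layout reduced to a simplified stand-in for the real hierarchy:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct adreno_gpu { int chip_id; };
struct a6xx_gpu  { struct adreno_gpu base; int cur_ring; };

#define to_a6xx_gpu(x) container_of(x, struct a6xx_gpu, base)

int main(void)
{
        struct a6xx_gpu gpu = { .base = { .chip_id = 0x06030001 } };
        struct adreno_gpu *adreno = &gpu.base;

        /* Walk back from the embedded base to the outer a6xx_gpu. */
        printf("same object: %d\n", to_a6xx_gpu(adreno) == &gpu);
        return 0;
}

Because container_of() subtracts the member's offset, it also works when the embedded member is not first in the struct, which a plain pointer cast would not.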

a8xx_gpu.c
22 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a8xx_aperture_slice_set() local
27 if (a6xx_gpu->cached_aperture == val) in a8xx_aperture_slice_set()
32 a6xx_gpu->cached_aperture = val; in a8xx_aperture_slice_set()
38 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a8xx_aperture_acquire() local
40 spin_lock_irqsave(&a6xx_gpu->aperture_lock, *flags); in a8xx_aperture_acquire()
48 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a8xx_aperture_release() local
50 spin_unlock_irqrestore(&a6xx_gpu->aperture_lock, flags); in a8xx_aperture_release()
73 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a8xx_read_pipe_slice() local
77 spin_lock_irqsave(&a6xx_gpu->aperture_lock, flags); in a8xx_read_pipe_slice()
80 spin_unlock_irqrestore(&a6xx_gpu->aperture_lock, flags); in a8xx_read_pipe_slice()
[all …]
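
a8xx_aperture_slice_set() above skips the MMIO write when the requested aperture value matches cached_aperture, under aperture_lock. A sketch of that lock-protected write cache, with a pthread mutex standing in for the kernel's spin_lock_irqsave() and illustrative names throughout:

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static pthread_mutex_t aperture_lock = PTHREAD_MUTEX_INITIALIZER;
static uint32_t cached_aperture = UINT32_MAX;    /* "unknown" at boot */

static void aperture_reg_write(uint32_t val)     /* stand-in for the MMIO write */
{
        printf("MMIO write: 0x%x\n", (unsigned)val);
}

static void aperture_set(uint32_t val)
{
        pthread_mutex_lock(&aperture_lock);
        if (cached_aperture != val) {            /* skip redundant MMIO */
                aperture_reg_write(val);
                cached_aperture = val;
        }
        pthread_mutex_unlock(&aperture_lock);
}

int main(void)
{
        aperture_set(0x3);
        aperture_set(0x3);   /* second call writes nothing */
        return 0;
}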

a6xx_gmu.c
23 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_gmu_fault() local
24 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_gmu_fault()
96 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_gmu_gx_is_on() local
97 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_gmu_gx_is_on()
125 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gmu_set_freq() local
126 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_gmu_set_freq()
219 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gmu_get_freq() local
220 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_gmu_get_freq()
227 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_gmu_check_idle_level() local
228 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_gmu_check_idle_level()
[all …]

a6xx_hfi.c
109 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_hfi_wait_for_msg_interrupt() local
119 if (completion_done(&a6xx_gpu->base.fault_coredump_done)) in a6xx_hfi_wait_for_msg_interrupt()
126 wait_for_completion(&a6xx_gpu->base.fault_coredump_done); in a6xx_hfi_wait_for_msg_interrupt()
323 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_hfi_send_perf_table() local
324 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_hfi_send_perf_table()
794 struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu); in a6xx_hfi_send_bw_table() local
795 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_hfi_send_bw_table()
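
a6xx_hfi_wait_for_msg_interrupt() above checks completion_done() and later blocks in wait_for_completion() on fault_coredump_done. A sketch of that one-shot completion pattern, rebuilt on pthreads for userspace; the kernel's struct completion and the coredump interplay are only mirrored loosely here:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

struct completion {
        pthread_mutex_t lock;
        pthread_cond_t cond;
        bool done;
};

#define COMPLETION_INITIALIZER \
        { PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, false }

static bool completion_done(struct completion *c)
{
        pthread_mutex_lock(&c->lock);
        bool done = c->done;              /* non-blocking "has it fired?" check */
        pthread_mutex_unlock(&c->lock);
        return done;
}

static void complete(struct completion *c)
{
        pthread_mutex_lock(&c->lock);
        c->done = true;                   /* one-shot: mark the event finished */
        pthread_cond_broadcast(&c->cond);
        pthread_mutex_unlock(&c->lock);
}

static void wait_for_completion(struct completion *c)
{
        pthread_mutex_lock(&c->lock);
        while (!c->done)                  /* sleep until complete() runs */
                pthread_cond_wait(&c->cond, &c->lock);
        pthread_mutex_unlock(&c->lock);
}

static struct completion fault_done = COMPLETION_INITIALIZER;

static void *worker(void *arg)
{
        (void)arg;
        sleep(1);
        complete(&fault_done);
        return NULL;
}

int main(void)
{
        pthread_t t;

        pthread_create(&t, NULL, worker, NULL);
        if (!completion_done(&fault_done))
                wait_for_completion(&fault_done);   /* blocks until worker fires */
        pthread_join(t, NULL);
        puts("done");
        return 0;
}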

a6xx_gpu_state.c
148 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_crashdumper_run() local
155 if (!a6xx_gmu_sptprac_is_on(&a6xx_gpu->gmu)) in a6xx_crashdumper_run()
1197 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in _a6xx_get_gmu_registers() local
1198 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in _a6xx_get_gmu_registers()
1231 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_gmu_registers() local
1254 if (!a6xx_gmu_gx_is_on(&a6xx_gpu->gmu)) in a6xx_get_gmu_registers()
1258 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0); in a6xx_get_gmu_registers()
1291 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_snapshot_gmu_hfi_history() local
1292 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_snapshot_gmu_hfi_history()
1586 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_state_get() local
[all …]
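
The crashdumper and GMU register dumps above are guarded by a6xx_gmu_sptprac_is_on() and a6xx_gmu_gx_is_on() checks: registers in a collapsed power domain must not be touched. A sketch of that guard, with gx_is_on() and reg_read() as hypothetical stubs:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool gx_is_on(void)              /* stub for a6xx_gmu_gx_is_on() */
{
        return true;
}

static uint32_t reg_read(uint32_t reg)  /* stub MMIO accessor */
{
        (void)reg;
        return 0;
}

static int snapshot_gx_registers(uint32_t *out, const uint32_t *regs, int n)
{
        /* Reading registers in a powered-down domain can hang the bus,
         * so the snapshot is skipped unless the domain is confirmed up. */
        if (!gx_is_on())
                return -1;

        for (int i = 0; i < n; i++)
                out[i] = reg_read(regs[i]);
        return 0;
}

int main(void)
{
        uint32_t regs[] = { 0x100, 0x104 };
        uint32_t vals[2];

        printf("snapshot: %d\n", snapshot_gx_registers(vals, regs, 2));
        return 0;
}
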
/linux/drivers/gpu/drm/msm/
Makefile
23 adreno/a6xx_gpu.o \