
Searched refs:adev (Results 1 – 25 of 771) sorted by relevance


/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_device.c
147 static inline void amdgpu_device_stop_pending_resets(struct amdgpu_device *adev);
162 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_pcie_replay_count() local
163 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
177 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_sysfs_reg_state_get() local
183 adev, AMDGPU_REG_STATE_TYPE_XGMI, buf, count); in amdgpu_sysfs_reg_state_get()
187 adev, AMDGPU_REG_STATE_TYPE_WAFL, buf, count); in amdgpu_sysfs_reg_state_get()
191 adev, AMDGPU_REG_STATE_TYPE_PCIE, buf, count); in amdgpu_sysfs_reg_state_get()
195 adev, AMDGPU_REG_STATE_TYPE_USR, buf, count); in amdgpu_sysfs_reg_state_get()
199 adev, AMDGPU_REG_STATE_TYPE_USR_1, buf, count); in amdgpu_sysfs_reg_state_get()
211 int amdgpu_reg_state_sysfs_init(struct amdgpu_device *adev) in amdgpu_reg_state_sysfs_init() argument
[all …]
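
The amdgpu_device.c hits follow the driver's standard sysfs pattern: recover the drm_device from driver data, convert it with drm_to_adev(), then query the ASIC. A minimal sketch of that shape (the attribute name is hypothetical; drm_to_adev() and the replay-count helper come from amdgpu.h):

#include "amdgpu.h"     /* struct amdgpu_device, drm_to_adev() */

/* Hypothetical read-only attribute mirroring
 * amdgpu_device_get_pcie_replay_count() above. */
static ssize_t example_replay_count_show(struct device *dev,
                                         struct device_attribute *attr,
                                         char *buf)
{
        struct drm_device *ddev = dev_get_drvdata(dev);
        struct amdgpu_device *adev = drm_to_adev(ddev);
        uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev);

        return sysfs_emit(buf, "%llu\n", cnt);
}
static DEVICE_ATTR_RO(example_replay_count);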
gmc_v11_0.c
53 static int gmc_v11_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v11_0_ecc_interrupt_state() argument
62 gmc_v11_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v11_0_vm_fault_interrupt_state() argument
69 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false); in gmc_v11_0_vm_fault_interrupt_state()
76 if (!adev->in_s0ix && (adev->in_runpm || adev->in_suspend || in gmc_v11_0_vm_fault_interrupt_state()
77 amdgpu_in_reset(adev))) in gmc_v11_0_vm_fault_interrupt_state()
78 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false); in gmc_v11_0_vm_fault_interrupt_state()
82 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true); in gmc_v11_0_vm_fault_interrupt_state()
89 if (!adev->in_s0ix) in gmc_v11_0_vm_fault_interrupt_state()
90 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true); in gmc_v11_0_vm_fault_interrupt_state()
99 static int gmc_v11_0_process_interrupt(struct amdgpu_device *adev, in gmc_v11_0_process_interrupt() argument
[all …]
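
These hits show the set() hook of an amdgpu_irq_src: a switch on the requested interrupt state that toggles the VM-fault masks per hub, skipping the GFX hub while in S0ix (and, on v11, also during runpm/suspend/reset). A sketch of that callback shape, closely following the gmc_v12_0 variant below:

/* Sketch of the interrupt-state callback shape used by
 * gmc_v11_0_vm_fault_interrupt_state() above. */
static int example_vm_fault_interrupt_state(struct amdgpu_device *adev,
                                            struct amdgpu_irq_src *src,
                                            unsigned int type,
                                            enum amdgpu_interrupt_state state)
{
        switch (state) {
        case AMDGPU_IRQ_STATE_DISABLE:
                amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
                /* GFX hub untouched in S0ix, as in the hits above. */
                if (!adev->in_s0ix)
                        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
                break;
        case AMDGPU_IRQ_STATE_ENABLE:
                amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
                if (!adev->in_s0ix)
                        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
                break;
        default:
                break;
        }
        return 0;
}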
gmc_v12_0.c
45 static int gmc_v12_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v12_0_ecc_interrupt_state() argument
53 static int gmc_v12_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v12_0_vm_fault_interrupt_state() argument
60 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false); in gmc_v12_0_vm_fault_interrupt_state()
67 if (!adev->in_s0ix) in gmc_v12_0_vm_fault_interrupt_state()
68 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false); in gmc_v12_0_vm_fault_interrupt_state()
72 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true); in gmc_v12_0_vm_fault_interrupt_state()
79 if (!adev->in_s0ix) in gmc_v12_0_vm_fault_interrupt_state()
80 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true); in gmc_v12_0_vm_fault_interrupt_state()
89 static int gmc_v12_0_process_interrupt(struct amdgpu_device *adev, in gmc_v12_0_process_interrupt() argument
101 hub = &adev->vmhub[AMDGPU_MMHUB0(0)]; in gmc_v12_0_process_interrupt()
[all …]
soc24.c
74 static int soc24_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc24_query_video_codecs() argument
77 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc24_query_video_codecs()
80 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in soc24_query_video_codecs()
92 static u32 soc24_get_config_memsize(struct amdgpu_device *adev) in soc24_get_config_memsize() argument
94 return adev->nbio.funcs->get_memsize(adev); in soc24_get_config_memsize()
97 static u32 soc24_get_xclk(struct amdgpu_device *adev) in soc24_get_xclk() argument
99 return adev->clock.spll.reference_freq; in soc24_get_xclk()
102 void soc24_grbm_select(struct amdgpu_device *adev, in soc24_grbm_select() argument
136 static uint32_t soc24_read_indexed_register(struct amdgpu_device *adev, in soc24_read_indexed_register() argument
143 mutex_lock(&adev->grbm_idx_mutex); in soc24_read_indexed_register()
[all …]
soc15.c
174 static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc15_query_video_codecs() argument
177 if (amdgpu_ip_version(adev, VCE_HWIP, 0)) { in soc15_query_video_codecs()
178 switch (amdgpu_ip_version(adev, VCE_HWIP, 0)) { in soc15_query_video_codecs()
190 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in soc15_query_video_codecs()
218 static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg) in soc15_uvd_ctx_rreg() argument
226 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
229 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
233 static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_uvd_ctx_wreg() argument
240 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
243 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
[all …]
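
The soc15.c register hits (like the nv.c and soc21.c didt hits below) wrap indirect register access in a spinlock, because the write to the index register and the read of the data register must not interleave with another user of the same pair. A generic sketch, with EXAMPLE_INDEX and EXAMPLE_DATA as placeholders for the real offsets (e.g. mmUVD_CTX_INDEX/mmUVD_CTX_DATA):

/* Minimal index/data register-pair read, shaped like
 * soc15_uvd_ctx_rreg() above. */
static u32 example_indexed_rreg(struct amdgpu_device *adev, u32 reg)
{
        unsigned long flags;
        u32 r;

        spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
        WREG32(EXAMPLE_INDEX, reg);   /* select the indirect register */
        r = RREG32(EXAMPLE_DATA);     /* read it, atomically w.r.t. the pair */
        spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);

        return r;
}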
amdgpu_rlc.c
38 void amdgpu_gfx_rlc_enter_safe_mode(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_rlc_enter_safe_mode() argument
40 if (adev->gfx.rlc.in_safe_mode[xcc_id]) in amdgpu_gfx_rlc_enter_safe_mode()
44 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_enter_safe_mode()
47 if (adev->cg_flags & in amdgpu_gfx_rlc_enter_safe_mode()
50 adev->gfx.rlc.funcs->set_safe_mode(adev, xcc_id); in amdgpu_gfx_rlc_enter_safe_mode()
51 adev->gfx.rlc.in_safe_mode[xcc_id] = true; in amdgpu_gfx_rlc_enter_safe_mode()
63 void amdgpu_gfx_rlc_exit_safe_mode(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_rlc_exit_safe_mode() argument
65 if (!(adev->gfx.rlc.in_safe_mode[xcc_id])) in amdgpu_gfx_rlc_exit_safe_mode()
69 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_exit_safe_mode()
72 if (adev->cg_flags & in amdgpu_gfx_rlc_exit_safe_mode()
[all …]
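
As the hits show, enter is a no-op when the RLC is disabled, the XCC is already in safe mode, or the clockgating flags don't require it; exit undoes the bracket. Callers wrap clockgating-sensitive register programming like this (the register writes are elided, purely illustrative):

amdgpu_gfx_rlc_enter_safe_mode(adev, xcc_id);
/* ... program CG-sensitive GFX registers here ... */
amdgpu_gfx_rlc_exit_safe_mode(adev, xcc_id);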
amdgpu_virt.c
47 bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev) in amdgpu_virt_mmio_blocked() argument
55 void amdgpu_virt_init_setting(struct amdgpu_device *adev) in amdgpu_virt_init_setting() argument
57 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_virt_init_setting()
60 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
61 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
62 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
63 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
64 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
65 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
68 adev->cg_flags = 0; in amdgpu_virt_init_setting()
[all …]
soc21.c
150 static int soc21_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc21_query_video_codecs() argument
153 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc21_query_video_codecs()
156 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in soc21_query_video_codecs()
161 if (amdgpu_sriov_vf(adev)) { in soc21_query_video_codecs()
162 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) || in soc21_query_video_codecs()
163 !amdgpu_sriov_is_av1_support(adev)) { in soc21_query_video_codecs()
175 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0)) { in soc21_query_video_codecs()
199 static u32 soc21_didt_rreg(struct amdgpu_device *adev, u32 reg) in soc21_didt_rreg() argument
207 spin_lock_irqsave(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
210 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
[all …]
amdgpu_discovery.c
235 static int amdgpu_discovery_read_binary_from_sysmem(struct amdgpu_device *adev, uint8_t *binary) in amdgpu_discovery_read_binary_from_sysmem() argument
241 ret = amdgpu_acpi_get_tmr_info(adev, &tmr_offset, &tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
248 discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC); in amdgpu_discovery_read_binary_from_sysmem()
250 memcpy(binary, discv_regn, adev->mman.discovery_tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
261 static int amdgpu_discovery_read_binary_from_mem(struct amdgpu_device *adev, in amdgpu_discovery_read_binary_from_mem() argument
268 if (!amdgpu_sriov_vf(adev)) { in amdgpu_discovery_read_binary_from_mem()
289 amdgpu_device_vram_access(adev, pos, (uint32_t *)binary, in amdgpu_discovery_read_binary_from_mem()
290 adev->mman.discovery_tmr_size, false); in amdgpu_discovery_read_binary_from_mem()
292 ret = amdgpu_discovery_read_binary_from_sysmem(adev, binary); in amdgpu_discovery_read_binary_from_mem()
298 static int amdgpu_discovery_read_binary_from_file(struct amdgpu_device *adev, uint8_t *binary) in amdgpu_discovery_read_binary_from_file() argument
[all …]
nv.c
210 static int nv_query_video_codecs(struct amdgpu_device *adev, bool encode, in nv_query_video_codecs() argument
213 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in nv_query_video_codecs()
216 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in nv_query_video_codecs()
220 if (amdgpu_sriov_vf(adev)) { in nv_query_video_codecs()
221 if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) { in nv_query_video_codecs()
233 if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) { in nv_query_video_codecs()
278 static u32 nv_didt_rreg(struct amdgpu_device *adev, u32 reg) in nv_didt_rreg() argument
286 spin_lock_irqsave(&adev->didt_idx_lock, flags); in nv_didt_rreg()
289 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in nv_didt_rreg()
293 static void nv_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in nv_didt_wreg() argument
[all …]
amdgpu_gfx.c
46 int amdgpu_gfx_mec_queue_to_bit(struct amdgpu_device *adev, int mec, in amdgpu_gfx_mec_queue_to_bit() argument
51 bit += mec * adev->gfx.mec.num_pipe_per_mec in amdgpu_gfx_mec_queue_to_bit()
52 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
53 bit += pipe * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
59 void amdgpu_queue_mask_bit_to_mec_queue(struct amdgpu_device *adev, int bit, in amdgpu_queue_mask_bit_to_mec_queue() argument
62 *queue = bit % adev->gfx.mec.num_queue_per_pipe; in amdgpu_queue_mask_bit_to_mec_queue()
63 *pipe = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
64 % adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
65 *mec = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
66 / adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
[all …]
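
These two helpers are inverses: the queue bit is mec * num_pipe_per_mec * num_queue_per_pipe + pipe * num_queue_per_pipe + queue. Taking num_pipe_per_mec = 4 and num_queue_per_pipe = 8 purely for illustration, mec 1 / pipe 2 / queue 3 encodes to 1*32 + 2*8 + 3 = 51; decoding 51 gives queue = 51 % 8 = 3, pipe = (51 / 8) % 4 = 2, mec = (51 / 8) / 4 = 1.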
amdgpu_bios.c
92 static bool amdgpu_read_bios_from_vram(struct amdgpu_device *adev) in amdgpu_read_bios_from_vram() argument
98 if (!(adev->flags & AMD_IS_APU)) in amdgpu_read_bios_from_vram()
99 if (amdgpu_device_need_post(adev)) in amdgpu_read_bios_from_vram()
103 if (pci_resource_len(adev->pdev, 0) == 0) in amdgpu_read_bios_from_vram()
106 adev->bios = NULL; in amdgpu_read_bios_from_vram()
107 vram_base = pci_resource_start(adev->pdev, 0); in amdgpu_read_bios_from_vram()
112 adev->bios = kmalloc(size, GFP_KERNEL); in amdgpu_read_bios_from_vram()
113 if (!adev->bios) { in amdgpu_read_bios_from_vram()
117 adev->bios_size = size; in amdgpu_read_bios_from_vram()
118 memcpy_fromio(adev->bios, bios, size); in amdgpu_read_bios_from_vram()
[all …]
amdgpu_acp.c
103 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_init() local
105 adev->acp.parent = adev->dev; in acp_sw_init()
107 adev->acp.cgs_device = in acp_sw_init()
108 amdgpu_cgs_create_device(adev); in acp_sw_init()
109 if (!adev->acp.cgs_device) in acp_sw_init()
117 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_fini() local
119 if (adev->acp.cgs_device) in acp_sw_fini()
120 amdgpu_cgs_destroy_device(adev->acp.cgs_device); in acp_sw_fini()
126 void *adev; member
133 struct amdgpu_device *adev; in acp_poweroff() local
[all …]
mxgpu_nv.c
36 static void xgpu_nv_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_nv_mailbox_send_ack() argument
41 static void xgpu_nv_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_nv_mailbox_set_valid() argument
55 static enum idh_event xgpu_nv_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_nv_mailbox_peek_msg() argument
61 static int xgpu_nv_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_msg() argument
70 xgpu_nv_mailbox_send_ack(adev); in xgpu_nv_mailbox_rcv_msg()
75 static uint8_t xgpu_nv_peek_ack(struct amdgpu_device *adev) in xgpu_nv_peek_ack() argument
80 static int xgpu_nv_poll_ack(struct amdgpu_device *adev) in xgpu_nv_poll_ack() argument
94 dev_err(adev->dev, "Doesn't get TRN_MSG_ACK from pf in %d msec \n", NV_MAILBOX_POLL_ACK_TIMEDOUT); in xgpu_nv_poll_ack()
99 static int xgpu_nv_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_nv_poll_msg() argument
108 r = xgpu_nv_mailbox_rcv_msg(adev, event); in xgpu_nv_poll_msg()
[all …]
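
xgpu_nv_poll_ack() (and its mxgpu_ai.c twin below) busy-waits for the PF's ack bit with a millisecond-granularity timeout. A sketch of that shape, where example_peek_ack() and TRN_MSG_ACK_BIT stand in for the real RREG8-based peek and bit mask, and the 5 ms step is assumed:

#include <linux/delay.h>        /* mdelay() */

static int example_poll_ack(struct amdgpu_device *adev)
{
        int timeout = NV_MAILBOX_POLL_ACK_TIMEDOUT;     /* in ms */

        do {
                if (example_peek_ack(adev) & TRN_MSG_ACK_BIT)
                        return 0;
                mdelay(5);
                timeout -= 5;
        } while (timeout > 0);

        dev_err(adev->dev, "no TRN_MSG_ACK from PF within %d ms\n",
                NV_MAILBOX_POLL_ACK_TIMEDOUT);
        return -ETIME;
}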
vi.c
257 static int vi_query_video_codecs(struct amdgpu_device *adev, bool encode, in vi_query_video_codecs() argument
260 switch (adev->asic_type) { in vi_query_video_codecs()
298 static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg) in vi_pcie_rreg() argument
303 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
307 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
311 static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_pcie_wreg() argument
315 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
320 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
323 static u32 vi_smc_rreg(struct amdgpu_device *adev, u32 reg) in vi_smc_rreg() argument
328 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_smc_rreg()
[all …]
amdgpu_irq.c
124 void amdgpu_irq_disable_all(struct amdgpu_device *adev) in amdgpu_irq_disable_all() argument
130 spin_lock_irqsave(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
132 if (!adev->irq.client[i].sources) in amdgpu_irq_disable_all()
136 struct amdgpu_irq_src *src = adev->irq.client[i].sources[j]; in amdgpu_irq_disable_all()
142 r = src->funcs->set(adev, src, k, in amdgpu_irq_disable_all()
150 spin_unlock_irqrestore(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
167 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_irq_handler() local
170 ret = amdgpu_ih_process(adev, &adev->irq.ih); in amdgpu_irq_handler()
174 amdgpu_ras_interrupt_fatal_error_handler(adev); in amdgpu_irq_handler()
188 struct amdgpu_device *adev = container_of(work, struct amdgpu_device, in amdgpu_irq_handle_ih1() local
[all …]
amdgpu_ras.c
139 static bool amdgpu_ras_check_bad_page(struct amdgpu_device *adev,
142 static void amdgpu_register_bad_pages_mca_notifier(struct amdgpu_device *adev);
150 void amdgpu_ras_set_error_query_ready(struct amdgpu_device *adev, bool ready) in amdgpu_ras_set_error_query_ready() argument
152 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_set_error_query_ready()
153 amdgpu_ras_get_context(adev)->error_query_ready = ready; in amdgpu_ras_set_error_query_ready()
156 static bool amdgpu_ras_get_error_query_ready(struct amdgpu_device *adev) in amdgpu_ras_get_error_query_ready() argument
158 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_get_error_query_ready()
159 return amdgpu_ras_get_context(adev)->error_query_ready; in amdgpu_ras_get_error_query_ready()
164 static int amdgpu_reserve_page_direct(struct amdgpu_device *adev, uint64_t address) in amdgpu_reserve_page_direct() argument
170 if ((address >= adev->gmc.mc_vram_size) || in amdgpu_reserve_page_direct()
[all …]
amdgpu_jpeg.c
37 int amdgpu_jpeg_sw_init(struct amdgpu_device *adev) in amdgpu_jpeg_sw_init() argument
41 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
42 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
43 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
45 if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) && in amdgpu_jpeg_sw_init()
46 (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG)) in amdgpu_jpeg_sw_init()
47 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
49 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
50 if (adev->jpeg.harvest_config & (1 << i)) in amdgpu_jpeg_sw_init()
53 if (adev->jpeg.indirect_sram) { in amdgpu_jpeg_sw_init()
[all …]
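
The per-instance loop in amdgpu_jpeg_sw_init() shows the driver's harvest convention: harvest_config is a bitmask of fused-off instances, and initialization simply skips set bits (the VCN hits above use the same idea, with hweight8(adev->vcn.harvest_config) detecting fully harvested engines). A restatement of the idiom with the body elided:

/* Iterate only the JPEG instances that survived harvesting. */
for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) {
        if (adev->jpeg.harvest_config & (1 << i))
                continue;       /* instance i is fused off */
        /* ... per-instance setup ... */
}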
mxgpu_ai.c
37 static void xgpu_ai_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_ai_mailbox_send_ack() argument
42 static void xgpu_ai_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_ai_mailbox_set_valid() argument
56 static enum idh_event xgpu_ai_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_ai_mailbox_peek_msg() argument
63 static int xgpu_ai_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_ai_mailbox_rcv_msg() argument
73 xgpu_ai_mailbox_send_ack(adev); in xgpu_ai_mailbox_rcv_msg()
78 static uint8_t xgpu_ai_peek_ack(struct amdgpu_device *adev) { in xgpu_ai_peek_ack() argument
82 static int xgpu_ai_poll_ack(struct amdgpu_device *adev) in xgpu_ai_poll_ack() argument
96 dev_err(adev->dev, "Doesn't get TRN_MSG_ACK from pf in %d msec\n", AI_MAILBOX_POLL_ACK_TIMEDOUT); in xgpu_ai_poll_ack()
101 static int xgpu_ai_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_ai_poll_msg() argument
106 r = xgpu_ai_mailbox_rcv_msg(adev, event); in xgpu_ai_poll_msg()
[all …]
amdgpu_mes.c
35 int amdgpu_mes_doorbell_process_slice(struct amdgpu_device *adev) in amdgpu_mes_doorbell_process_slice() argument
42 static int amdgpu_mes_kernel_doorbell_get(struct amdgpu_device *adev, in amdgpu_mes_kernel_doorbell_get() argument
46 struct amdgpu_mes *mes = &adev->mes; in amdgpu_mes_kernel_doorbell_get()
49 offset = adev->doorbell_index.sdma_engine[0]; in amdgpu_mes_kernel_doorbell_get()
66 static void amdgpu_mes_kernel_doorbell_free(struct amdgpu_device *adev, in amdgpu_mes_kernel_doorbell_free() argument
70 struct amdgpu_mes *mes = &adev->mes; in amdgpu_mes_kernel_doorbell_free()
78 static int amdgpu_mes_doorbell_init(struct amdgpu_device *adev) in amdgpu_mes_doorbell_init() argument
81 struct amdgpu_mes *mes = &adev->mes; in amdgpu_mes_doorbell_init()
92 adev->mes.aggregated_doorbells[i] = mes->db_start_dw_offset + i * 2; in amdgpu_mes_doorbell_init()
99 static int amdgpu_mes_event_log_init(struct amdgpu_device *adev) in amdgpu_mes_event_log_init() argument
[all …]
gmc_v6_0.c
44 static void gmc_v6_0_set_gmc_funcs(struct amdgpu_device *adev);
45 static void gmc_v6_0_set_irq_funcs(struct amdgpu_device *adev);
64 static void gmc_v6_0_mc_stop(struct amdgpu_device *adev) in gmc_v6_0_mc_stop() argument
68 gmc_v6_0_wait_for_idle((void *)adev); in gmc_v6_0_mc_stop()
84 static void gmc_v6_0_mc_resume(struct amdgpu_device *adev) in gmc_v6_0_mc_resume() argument
98 static int gmc_v6_0_init_microcode(struct amdgpu_device *adev) in gmc_v6_0_init_microcode() argument
105 switch (adev->asic_type) { in gmc_v6_0_init_microcode()
129 err = amdgpu_ucode_request(adev, &adev->gmc.fw, "amdgpu/%s_mc.bin", chip_name); in gmc_v6_0_init_microcode()
131 dev_err(adev->dev, in gmc_v6_0_init_microcode()
134 amdgpu_ucode_release(&adev->gmc.fw); in gmc_v6_0_init_microcode()
[all …]
amdgpu.h
124 struct amdgpu_device *adev; member
364 void amdgpu_device_ip_get_clockgating_state(struct amdgpu_device *adev,
366 int amdgpu_device_ip_wait_for_idle(struct amdgpu_device *adev,
368 bool amdgpu_device_ip_is_idle(struct amdgpu_device *adev,
394 int amdgpu_device_ip_block_version_cmp(struct amdgpu_device *adev,
399 amdgpu_device_ip_get_ip_block(struct amdgpu_device *adev,
402 int amdgpu_device_ip_block_add(struct amdgpu_device *adev,
408 bool amdgpu_get_bios(struct amdgpu_device *adev);
409 bool amdgpu_read_bios(struct amdgpu_device *adev);
410 bool amdgpu_soc15_read_bios_from_rom(struct amdgpu_device *adev,
[all …]
sienna_cichlid.c
37 struct amdgpu_device *adev = (struct amdgpu_device *)reset_ctl->handle; in sienna_cichlid_is_mode2_default()
39 if (amdgpu_ip_version(adev, MP1_HWIP, 0) == IP_VERSION(11, 0, 7) && in sienna_cichlid_is_mode2_default()
40 adev->pm.fw_version >= 0x3a5500 && !amdgpu_sriov_vf(adev)) in sienna_cichlid_is_mode2_default()
70 static int sienna_cichlid_mode2_suspend_ip(struct amdgpu_device *adev) in sienna_cichlid_mode2_suspend_ip() argument
74 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in sienna_cichlid_mode2_suspend_ip()
75 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in sienna_cichlid_mode2_suspend_ip()
77 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in sienna_cichlid_mode2_suspend_ip()
78 if (!(adev->ip_blocks[i].version->type == in sienna_cichlid_mode2_suspend_ip()
80 adev->ip_blocks[i].version->type == in sienna_cichlid_mode2_suspend_ip()
84 r = adev->ip_blocks[i].version->funcs->suspend(adev); in sienna_cichlid_mode2_suspend_ip()
[all …]
/linux/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c
36 #define amdgpu_dpm_enable_bapm(adev, e) \ argument
37 ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
39 #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev)) argument
41 int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_sclk() argument
43 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_sclk()
49 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
50 ret = pp_funcs->get_sclk((adev)->powerplay.pp_handle, in amdgpu_dpm_get_sclk()
52 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
57 int amdgpu_dpm_get_mclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_mclk() argument
59 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_mclk()
[all …]
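
Every wrapper in amdgpu_dpm.c follows the same contract visible in the get_sclk hit: check that the optional powerplay hook exists, then call it through pp_handle under adev->pm.mutex so concurrent sysfs/ioctl users serialize. Reconstructed as a sketch (the function name is illustrative; the real wrapper is amdgpu_dpm_get_sclk()):

int example_dpm_get_sclk(struct amdgpu_device *adev, bool low)
{
        const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
        int ret;

        if (!pp_funcs->get_sclk)        /* hook is optional */
                return 0;

        mutex_lock(&adev->pm.mutex);    /* serialize powerplay accesses */
        ret = pp_funcs->get_sclk(adev->powerplay.pp_handle, low);
        mutex_unlock(&adev->pm.mutex);

        return ret;
}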
/linux/sound/soc/intel/avs/
loader.c
101 static int avs_fw_manifest_strip_verify(struct avs_dev *adev, struct firmware *fw, in avs_fw_manifest_strip_verify() argument
125 dev_warn(adev->dev, "bad FW version %d.%d.%d.%d, expected %d.%d.%d.%d or newer\n", in avs_fw_manifest_strip_verify()
137 int avs_cldma_load_basefw(struct avs_dev *adev, struct firmware *fw) in avs_cldma_load_basefw() argument
143 ret = avs_dsp_op(adev, power, AVS_MAIN_CORE_MASK, true); in avs_cldma_load_basefw()
147 ret = avs_dsp_op(adev, reset, AVS_MAIN_CORE_MASK, false); in avs_cldma_load_basefw()
153 dev_err(adev->dev, "cldma reset failed: %d\n", ret); in avs_cldma_load_basefw()
158 ret = avs_dsp_op(adev, stall, AVS_MAIN_CORE_MASK, false); in avs_cldma_load_basefw()
162 reinit_completion(&adev->fw_ready); in avs_cldma_load_basefw()
163 avs_dsp_op(adev, int_control, true); in avs_cldma_load_basefw()
166 ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg, in avs_cldma_load_basefw()
[all …]
