Search results for "adev" across the Linux source tree, grouped by directory.

/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_device.c (init-level selection, PCIe replay-count sysfs helpers):

    static inline bool amdgpu_ip_member_of_hwini(struct amdgpu_device *adev, ...)
    {
        return (adev->init_lvl->hwini_ip_block_mask & (1U << block)) != 0;
    }

    void amdgpu_set_init_level(struct amdgpu_device *adev, ...)
    {
        ...
        adev->init_lvl = &amdgpu_init_minimal_xgmi;
        ...
        adev->init_lvl = &amdgpu_init_recovery;
        ...
        adev->init_lvl = &amdgpu_init_default;
        ...
    }

    static inline void amdgpu_device_stop_pending_resets(struct amdgpu_device *adev);

    /* in amdgpu_device_get_pcie_replay_count() */
    struct amdgpu_device *adev = drm_to_adev(ddev);
    uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev);

    /* in amdgpu_sysfs_reg_state_get() */
    struct amdgpu_device *adev = drm_to_adev(ddev);

    [more matches elided]

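The hwini test above is a plain bitmask membership check: each IP block owns one bit of hwini_ip_block_mask, and the selected init level decides which bits are set. A minimal standalone sketch of the same pattern (the enum values and the "minimal" mask below are illustrative, not the driver's real ones):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical IP block IDs; the real driver uses enum amd_ip_block_type. */
    enum ip_block { IP_GMC = 0, IP_IH = 1, IP_PSP = 2, IP_SMC = 3 };

    struct init_level {
        uint32_t hwini_ip_block_mask;   /* one bit per IP block to hw-init */
    };

    /* Same shape as amdgpu_ip_member_of_hwini(): test one bit of the mask. */
    static inline bool member_of_hwini(const struct init_level *lvl, enum ip_block b)
    {
        return (lvl->hwini_ip_block_mask & (1U << b)) != 0;
    }

    int main(void)
    {
        /* A "minimal" level that only hw-inits GMC and IH. */
        struct init_level minimal = { (1U << IP_GMC) | (1U << IP_IH) };

        printf("PSP in minimal init? %d\n", member_of_hwini(&minimal, IP_PSP)); /* 0 */
        printf("GMC in minimal init? %d\n", member_of_hwini(&minimal, IP_GMC)); /* 1 */
        return 0;
    }
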
gmc_v10_0.c (GMC v10 interrupt state and VM fault handling):

    static int gmc_v10_0_ecc_interrupt_state(struct amdgpu_device *adev, ...)
    ...
    static int
    gmc_v10_0_vm_fault_interrupt_state(struct amdgpu_device *adev, ...)
    {
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
        ...
        if (!adev->in_s0ix)
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
        ...
        if (!adev->in_s0ix)
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
        ...
    }

    static int gmc_v10_0_process_interrupt(struct amdgpu_device *adev, ...)
    {
        ...
        struct amdgpu_vmhub *hub = &adev->vmhub[vmhub_index];
        ...

    [more matches elided]

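All four GMC variants in this listing share the same control flow: a switch on the requested interrupt state, with the GFXHUB mask left untouched in S0ix. A condensed, hypothetical model of that flow (the hub IDs and the mask setter are stand-ins for the amdgpu equivalents):

    #include <stdbool.h>
    #include <stdio.h>

    enum irq_state { IRQ_STATE_DISABLE, IRQ_STATE_ENABLE };
    enum hub_id { HUB_MMHUB0, HUB_GFXHUB };

    struct dev_state { bool in_s0ix; };

    /* Stand-in for amdgpu_gmc_set_vm_fault_masks(): just log the decision. */
    static void set_vm_fault_masks(enum hub_id hub, bool enable)
    {
        printf("hub %d: fault interrupts %s\n", hub, enable ? "on" : "off");
    }

    static int vm_fault_interrupt_state(struct dev_state *dev, enum irq_state st)
    {
        switch (st) {
        case IRQ_STATE_DISABLE:
            set_vm_fault_masks(HUB_MMHUB0, false);
            /* GFXHUB is skipped in S0ix, when GFX is platform-managed. */
            if (!dev->in_s0ix)
                set_vm_fault_masks(HUB_GFXHUB, false);
            break;
        case IRQ_STATE_ENABLE:
            set_vm_fault_masks(HUB_MMHUB0, true);
            if (!dev->in_s0ix)
                set_vm_fault_masks(HUB_GFXHUB, true);
            break;
        }
        return 0;
    }

    int main(void)
    {
        struct dev_state dev = { .in_s0ix = true };

        vm_fault_interrupt_state(&dev, IRQ_STATE_DISABLE); /* only MMHUB0 toggles */
        return 0;
    }
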
gmc_v9_0.c (GMC v9 walks every populated VM hub instead of hardcoding two):

    static int gmc_v9_0_ecc_interrupt_state(struct amdgpu_device *adev, ...)
    {
        ...
        if (adev->asic_type >= CHIP_VEGA20)
        ...
    }

    static int gmc_v9_0_vm_fault_interrupt_state(struct amdgpu_device *adev, ...)
    {
        ...
        for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) {
            hub = &adev->vmhub[j];
            ...
            if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0)))
            ...
        }
        ...
        for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) {
            hub = &adev->vmhub[j];
            ...
            if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0)))
            ...
        }
        ...
    }

    static int gmc_v9_0_process_interrupt(struct amdgpu_device *adev, ...)

    [more matches elided]

gmc_v11_0.c (GMC v11; disable additionally requires runpm, suspend, or reset):

    static int gmc_v11_0_ecc_interrupt_state(struct amdgpu_device *adev, ...)
    ...
    static int
    gmc_v11_0_vm_fault_interrupt_state(struct amdgpu_device *adev, ...)
    {
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
        ...
        if (!adev->in_s0ix && (adev->in_runpm || adev->in_suspend ||
                               amdgpu_in_reset(adev)))
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
        ...
        if (!adev->in_s0ix)
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
        ...
    }

    static int gmc_v11_0_process_interrupt(struct amdgpu_device *adev, ...)

    [more matches elided]

gmc_v12_0.c (GMC v12, same shape as v10/v11):

    static int gmc_v12_0_ecc_interrupt_state(struct amdgpu_device *adev, ...)
    ...
    static int gmc_v12_0_vm_fault_interrupt_state(struct amdgpu_device *adev, ...)
    {
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
        ...
        if (!adev->in_s0ix)
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
        ...
        amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
        ...
        if (!adev->in_s0ix)
            amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
        ...
    }

    static int gmc_v12_0_process_interrupt(struct amdgpu_device *adev, ...)
    {
        ...
        hub = &adev->vmhub[AMDGPU_MMHUB0(0)];
        ...

    [more matches elided]

soc24.c (SoC24 chip-level callbacks):

    static int soc24_query_video_codecs(struct amdgpu_device *adev, bool encode, ...)
    {
        if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))
            ...
        switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) {
        ...
    }

    static u32 soc24_get_config_memsize(struct amdgpu_device *adev)
    {
        return adev->nbio.funcs->get_memsize(adev);
    }

    static u32 soc24_get_xclk(struct amdgpu_device *adev)
    {
        return adev->clock.spll.reference_freq;
    }

    void soc24_grbm_select(struct amdgpu_device *adev, ...)
    ...
    static uint32_t soc24_read_indexed_register(struct amdgpu_device *adev, ...)
    {
        ...
        mutex_lock(&adev->grbm_idx_mutex);
        ...

    [more matches elided]

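soc24_read_indexed_register() is the classic select/read/restore idiom: take grbm_idx_mutex, steer the register window at one instance, read, restore the default steering, and unlock. A userspace model of that idiom, with a pthread mutex standing in for the kernel mutex and arrays standing in for MMIO (the steering details are simplified):

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    static pthread_mutex_t grbm_idx_mutex = PTHREAD_MUTEX_INITIALIZER;
    static uint32_t selected_se, selected_sh;    /* current GRBM steering */
    static uint32_t regs[4][4][16];              /* fake per-SE/SH registers */

    /* Stand-ins for soc24_grbm_select() and a raw register read. */
    static void grbm_select(uint32_t se, uint32_t sh)
    {
        selected_se = se;
        selected_sh = sh;
    }

    static uint32_t rreg(uint32_t reg)
    {
        return regs[selected_se][selected_sh][reg];
    }

    static uint32_t read_indexed_register(uint32_t se, uint32_t sh, uint32_t reg)
    {
        uint32_t val;

        pthread_mutex_lock(&grbm_idx_mutex);
        grbm_select(se, sh);    /* narrow the window to one instance */
        val = rreg(reg);
        grbm_select(0, 0);      /* restore default/broadcast steering */
        pthread_mutex_unlock(&grbm_idx_mutex);
        return val;
    }

    int main(void)
    {
        regs[1][2][5] = 0xdeadbeef;
        printf("0x%x\n", (unsigned)read_indexed_register(1, 2, 5));
        return 0;
    }
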
amdgpu_virt.c (SR-IOV virtual-function setup):

    bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev)
    ...
    void amdgpu_virt_init_setting(struct amdgpu_device *adev)
    {
        struct drm_device *ddev = adev_to_drm(adev);
        ...
        if (adev->asic_type != CHIP_ALDEBARAN &&
            adev->asic_type != CHIP_ARCTURUS &&
            ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) {
            if (adev->mode_info.num_crtc == 0)
                adev->mode_info.num_crtc = 1;
            adev->enable_virtual_display = true;
        }
        ...
        adev->cg_flags = 0;
        ...

    [more matches elided]

soc15.c (SoC15 codec query and UVD context register accessors):

    static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode, ...)
    {
        if (amdgpu_ip_version(adev, VCE_HWIP, 0)) {
            switch (amdgpu_ip_version(adev, VCE_HWIP, 0)) {
            ...
        } else {
            switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) {
            ...
    }

    static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
    {
        ...
        spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
        ...
    }

    static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
    {
        ...
        spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
        ...
    }

    [more matches elided]

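The soc15 UVD context accessors (like the didt/pcie/smc accessors in soc21.c, nv.c, and vi.c below) are index/data pair accesses: one register selects which internal register a second register aliases, so the two MMIO operations must be serialized, hence the irq-saving spinlock. A minimal model, with a pthread mutex in place of the spinlock; the register pair is simulated, and the 0x1ff index mask is an assumption:

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    static pthread_mutex_t uvd_ctx_idx_lock = PTHREAD_MUTEX_INITIALIZER;
    static uint32_t uvd_ctx[512];   /* simulated internal register file */
    static uint32_t ctx_index;      /* simulated index register */

    /* Simulated index/data MMIO pair. */
    static void wreg_index(uint32_t v) { ctx_index = v; }
    static uint32_t rreg_data(void)    { return uvd_ctx[ctx_index]; }
    static void wreg_data(uint32_t v)  { uvd_ctx[ctx_index] = v; }

    static uint32_t uvd_ctx_rreg(uint32_t reg)
    {
        uint32_t r;

        pthread_mutex_lock(&uvd_ctx_idx_lock);  /* spin_lock_irqsave() in the driver */
        wreg_index(reg & 0x1ff);                /* select the target register */
        r = rreg_data();                        /* read through the data window */
        pthread_mutex_unlock(&uvd_ctx_idx_lock);
        return r;
    }

    static void uvd_ctx_wreg(uint32_t reg, uint32_t v)
    {
        pthread_mutex_lock(&uvd_ctx_idx_lock);
        wreg_index(reg & 0x1ff);
        wreg_data(v);
        pthread_mutex_unlock(&uvd_ctx_idx_lock);
    }

    int main(void)
    {
        uvd_ctx_wreg(7, 42);
        printf("%u\n", (unsigned)uvd_ctx_rreg(7));
        return 0;
    }

Without the lock, two threads could interleave "write index A, write index B, read data, read data" and each would read the other's register; the pair is only atomic as a unit.
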
soc21.c (SoC21 codec query honors VCN harvesting and SR-IOV AV1 support):

    static int soc21_query_video_codecs(struct amdgpu_device *adev, bool encode, ...)
    {
        if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))
            ...
        switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) {
        ...
            if (amdgpu_sriov_vf(adev)) {
                if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) ||
                    !amdgpu_sriov_is_av1_support(adev)) {
                ...
            } else {
                if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0)) {
                ...
    }

    static u32 soc21_didt_rreg(struct amdgpu_device *adev, u32 reg)
    {
        ...
        spin_lock_irqsave(&adev->didt_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
        ...

    [more matches elided]

amdgpu_discovery.c (IP discovery binary readout from sysmem, VRAM, or file):

    static int amdgpu_discovery_read_binary_from_sysmem(struct amdgpu_device *adev,
                                                        uint8_t *binary)
    {
        ...
        ret = amdgpu_acpi_get_tmr_info(adev, &tmr_offset, &tmr_size);
        ...
        discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC);
        ...
        memcpy(binary, discv_regn, adev->mman.discovery_tmr_size);
        ...
    }

    static int amdgpu_discovery_read_binary_from_mem(struct amdgpu_device *adev, ...)
    {
        ...
        if (!amdgpu_sriov_vf(adev)) {
            ...
            amdgpu_device_vram_access(adev, pos, (uint32_t *)binary,
                                      adev->mman.discovery_tmr_size, false);
        ...
        ret = amdgpu_discovery_read_binary_from_sysmem(adev, binary);
        ...
    }

    static int amdgpu_discovery_read_binary_from_file(struct amdgpu_device *adev,
                                                      uint8_t *binary)

    [more matches elided]

amdgpu_rlc.c (RLC safe-mode entry/exit guards):

    void amdgpu_gfx_rlc_enter_safe_mode(struct amdgpu_device *adev, int xcc_id)
    {
        if (adev->gfx.rlc.in_safe_mode[xcc_id])
            return;
        ...
        if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))
            return;
        ...
        if (adev->cg_flags & ...) {
            adev->gfx.rlc.funcs->set_safe_mode(adev, xcc_id);
            adev->gfx.rlc.in_safe_mode[xcc_id] = true;
        }
    }

    void amdgpu_gfx_rlc_exit_safe_mode(struct amdgpu_device *adev, int xcc_id)
    {
        if (!(adev->gfx.rlc.in_safe_mode[xcc_id]))
            return;
        ...
        if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev))
            return;
        ...
        if (adev->cg_flags & ...)
        ...

    [more matches elided]

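Both RLC helpers are idempotent guards: a per-XCC in_safe_mode flag short-circuits re-entry, and the handshake is skipped entirely when the RLC is disabled or the relevant clock-gating flags are clear. A simplified model of the enter/exit pairing (cg_active collapses the cg_flags test into one bool):

    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_XCC 8

    struct rlc_state {
        bool rlc_enabled;
        bool cg_active;              /* stands in for the cg_flags test */
        bool in_safe_mode[MAX_XCC];
    };

    static void set_safe_mode(int xcc, bool on)
    {
        printf("xcc%d: RLC safe mode %s\n", xcc, on ? "entered" : "left");
    }

    static void rlc_enter_safe_mode(struct rlc_state *s, int xcc)
    {
        if (s->in_safe_mode[xcc])             /* already in: nothing to do */
            return;
        if (!s->rlc_enabled || !s->cg_active) /* handshake only matters under CG */
            return;
        set_safe_mode(xcc, true);
        s->in_safe_mode[xcc] = true;
    }

    static void rlc_exit_safe_mode(struct rlc_state *s, int xcc)
    {
        if (!s->in_safe_mode[xcc])
            return;
        if (!s->rlc_enabled)
            return;
        set_safe_mode(xcc, false);
        s->in_safe_mode[xcc] = false;
    }

    int main(void)
    {
        struct rlc_state s = { .rlc_enabled = true, .cg_active = true };

        rlc_enter_safe_mode(&s, 0);
        rlc_enter_safe_mode(&s, 0);  /* second call is a no-op */
        rlc_exit_safe_mode(&s, 0);
        return 0;
    }
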
nv.c (Navi chip-level callbacks):

    static int nv_query_video_codecs(struct amdgpu_device *adev, bool encode, ...)
    {
        if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config))
            ...
        switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) {
        ...
            if (amdgpu_sriov_vf(adev)) {
                if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) {
                ...
            } else {
                if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) {
                ...
    }

    static u32 nv_didt_rreg(struct amdgpu_device *adev, u32 reg)
    {
        ...
        spin_lock_irqsave(&adev->didt_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
        ...
    }

    static void nv_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v)

    [more matches elided]

amdgpu_bios.c (ATOM VBIOS validation and readout):

    static bool check_atom_bios(struct amdgpu_device *adev, size_t size)
    {
        ...
        uint8_t *bios = adev->bios;
        ...
        dev_dbg(adev->dev, "VBIOS mem is null or mem size is wrong\n");
        ...
        dev_dbg(adev->dev, "VBIOS signature incorrect %x %x\n", bios[0],
                bios[1]);
        ...
        dev_dbg(adev->dev, "Can't locate VBIOS header\n");
        ...
        dev_dbg(adev->dev, "VBIOS header is broken\n");
        ...
        dev_dbg(adev->dev, "ATOMBIOS detected\n");
        ...
    }

    static bool amdgpu_read_bios_from_vram(struct amdgpu_device *adev)
    {
        ...
        if (!(adev->flags & AMD_IS_APU))
            if (amdgpu_device_need_post(adev))
            ...

    [more matches elided]

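Judging by the debug messages above, check_atom_bios() validates three things in order: the 0x55AA option-ROM signature, a non-zero little-endian header pointer at offset 0x48, and an "ATOM" (or byte-swapped "MOTA") magic four bytes past that header. A standalone sketch of that validation, with the offsets taken from the driver and the dev_dbg plumbing dropped:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static bool atom_bios_valid(const uint8_t *bios, size_t size)
    {
        uint16_t hdr, magic_off;

        if (!bios || size < 0x4a)
            return false;               /* need at least the header pointer */
        if (bios[0] != 0x55 || bios[1] != 0xaa)
            return false;               /* PC option-ROM signature */

        hdr = bios[0x48] | (bios[0x49] << 8);   /* little-endian pointer */
        if (!hdr)
            return false;               /* can't locate VBIOS header */

        magic_off = hdr + 4;
        if (size < (size_t)magic_off + 4)
            return false;               /* header is broken/truncated */

        /* "MOTA" covers images stored byte-swapped. */
        return !memcmp(&bios[magic_off], "ATOM", 4) ||
               !memcmp(&bios[magic_off], "MOTA", 4);
    }

    int main(void)
    {
        uint8_t img[0x100] = { 0x55, 0xaa };

        img[0x48] = 0x60;                   /* header at 0x60 */
        memcpy(&img[0x64], "ATOM", 4);      /* magic at header + 4 */
        printf("valid: %d\n", atom_bios_valid(img, sizeof(img)));
        return 0;
    }
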
amdgpu_gfx.c (MEC queue <-> queue-mask bit conversions):

    int amdgpu_gfx_mec_queue_to_bit(struct amdgpu_device *adev, int mec,
                                    int pipe, int queue)
    {
        int bit = 0;

        bit += mec * adev->gfx.mec.num_pipe_per_mec
                   * adev->gfx.mec.num_queue_per_pipe;
        bit += pipe * adev->gfx.mec.num_queue_per_pipe;
        bit += queue;

        return bit;
    }

    void amdgpu_queue_mask_bit_to_mec_queue(struct amdgpu_device *adev, int bit,
                                            int *mec, int *pipe, int *queue)
    {
        *queue = bit % adev->gfx.mec.num_queue_per_pipe;
        *pipe = (bit / adev->gfx.mec.num_queue_per_pipe)
                % adev->gfx.mec.num_pipe_per_mec;
        *mec = (bit / adev->gfx.mec.num_queue_per_pipe)
               / adev->gfx.mec.num_pipe_per_mec;
    }

    [more matches elided]

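The two helpers are a mixed-radix encoding, exactly like row-major 3-D array indexing: (mec, pipe, queue) flattens to one bit index and divides back out. A standalone round-trip check (the per-MEC/per-pipe counts are example values; the driver reads them from adev->gfx.mec):

    #include <assert.h>
    #include <stdio.h>

    /* Example topology; real values come from adev->gfx.mec. */
    #define PIPES_PER_MEC   4
    #define QUEUES_PER_PIPE 8

    static int mec_queue_to_bit(int mec, int pipe, int queue)
    {
        int bit = 0;

        bit += mec * PIPES_PER_MEC * QUEUES_PER_PIPE;  /* most significant */
        bit += pipe * QUEUES_PER_PIPE;
        bit += queue;                                  /* least significant */
        return bit;
    }

    static void bit_to_mec_queue(int bit, int *mec, int *pipe, int *queue)
    {
        *queue = bit % QUEUES_PER_PIPE;
        *pipe  = (bit / QUEUES_PER_PIPE) % PIPES_PER_MEC;
        *mec   = (bit / QUEUES_PER_PIPE) / PIPES_PER_MEC;
    }

    int main(void)
    {
        int mec, pipe, queue;
        int bit = mec_queue_to_bit(1, 2, 5);   /* 1*32 + 2*8 + 5 = 53 */

        bit_to_mec_queue(bit, &mec, &pipe, &queue);
        assert(mec == 1 && pipe == 2 && queue == 5);
        printf("bit %d -> mec %d pipe %d queue %d\n", bit, mec, pipe, queue);
        return 0;
    }
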
amdgpu_acp.c (ACP audio IP glue):

    /* in acp_sw_init() */
    struct amdgpu_device *adev = ip_block->adev;

    adev->acp.parent = adev->dev;
    adev->acp.cgs_device = amdgpu_cgs_create_device(adev);
    if (!adev->acp.cgs_device)
        ...

    /* in acp_sw_fini() */
    struct amdgpu_device *adev = ip_block->adev;

    if (adev->acp.cgs_device)
        amdgpu_cgs_destroy_device(adev->acp.cgs_device);

    /* struct member */
    void *adev;

    /* in acp_poweroff() */
    struct amdgpu_device *adev;

    [more matches elided]

amdgpu_virt.h (virtualization ops table):

    int (*req_full_gpu)(struct amdgpu_device *adev, bool init);
    int (*rel_full_gpu)(struct amdgpu_device *adev, bool init);
    int (*req_init_data)(struct amdgpu_device *adev);
    int (*reset_gpu)(struct amdgpu_device *adev);
    void (*ready_to_reset)(struct amdgpu_device *adev);
    int (*wait_reset)(struct amdgpu_device *adev);
    void (*trans_msg)(struct amdgpu_device *adev, enum idh_request req, ...);
    void (*ras_poison_handler)(struct amdgpu_device *adev, ...);
    bool (*rcvd_ras_intr)(struct amdgpu_device *adev);
    int (*req_ras_err_count)(struct amdgpu_device *adev);

    [more matches elided]

vi.c (VI chip-level callbacks and PCIE/SMC indirect accessors):

    static int vi_query_video_codecs(struct amdgpu_device *adev, bool encode, ...)
    {
        switch (adev->asic_type) {
        ...
    }

    static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg)
    {
        ...
        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
        ...
    }

    static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
    {
        ...
        spin_lock_irqsave(&adev->pcie_idx_lock, flags);
        ...
        spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
    }

    static u32 vi_smc_rreg(struct amdgpu_device *adev, u32 reg)
    {
        ...
        spin_lock_irqsave(&adev->smc_idx_lock, flags);
        ...

    [more matches elided]

amdgpu_amdkfd.c (KFD probe and doorbell aperture handoff):

    void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev)
    {
        bool vf = amdgpu_sriov_vf(adev);
        ...
        adev->kfd.dev = kgd2kfd_probe(adev, vf);
    }

    static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, ...)
    {
        ...
        if (adev->enable_mes) {
            ...
            *aperture_base = adev->doorbell.base;
            ...
        } else if (adev->doorbell.size > adev->doorbell.num_kernel_doorbells *
                   sizeof(u32)) {
            *aperture_base = adev->doorbell.base;
            *aperture_size = adev->doorbell.size;
            *start_offset = adev->doorbell.num_kernel_doorbells * sizeof(u32);
        ...

    [more matches elided]

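amdgpu_doorbell_get_kfd_info() hands KFD the tail of the doorbell BAR: the first num_kernel_doorbells 32-bit slots stay with amdgpu, KFD gets what follows, and with MES enabled only the base is reported since amdgpu manages the whole space. A sketch of that carve-out arithmetic (field names mirror the excerpt; the values in main() are made up):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct doorbell_info {
        uint64_t base;                  /* BAR physical base */
        uint32_t size;                  /* BAR size in bytes */
        uint32_t num_kernel_doorbells;  /* slots amdgpu keeps for itself */
    };

    static void get_kfd_doorbell_info(const struct doorbell_info *db, int enable_mes,
                                      uint64_t *ap_base, size_t *ap_size, size_t *start)
    {
        size_t kernel_bytes = db->num_kernel_doorbells * sizeof(uint32_t);

        if (enable_mes) {
            /* MES owns doorbell assignment; KFD only needs the base
             * (behavior assumed from the excerpt's MES branch). */
            *ap_base = db->base;
            *ap_size = 0;
            *start = 0;
        } else if (db->size > kernel_bytes) {
            /* KFD maps the whole BAR but starts past the kernel slots. */
            *ap_base = db->base;
            *ap_size = db->size;
            *start = kernel_bytes;
        } else {
            /* Nothing left over for KFD. */
            *ap_base = 0;
            *ap_size = 0;
            *start = 0;
        }
    }

    int main(void)
    {
        struct doorbell_info db = { 0xf0000000u, 8192, 1024 };
        uint64_t base;
        size_t size, start;

        /* 1024 kernel doorbells * 4 bytes = 4096; KFD starts at 4096. */
        get_kfd_doorbell_info(&db, 0, &base, &size, &start);
        printf("base 0x%llx size %zu kfd-start %zu\n",
               (unsigned long long)base, size, start);
        return 0;
    }
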
amdgpu_irq.c (IRQ teardown and top-half dispatch):

    void amdgpu_irq_disable_all(struct amdgpu_device *adev)
    {
        ...
        spin_lock_irqsave(&adev->irq.lock, irqflags);
        ...
        if (!adev->irq.client[i].sources)
            continue;
        ...
        struct amdgpu_irq_src *src = adev->irq.client[i].sources[j];
        ...
        r = src->funcs->set(adev, src, k, ...);
        ...
        spin_unlock_irqrestore(&adev->irq.lock, irqflags);
    }

    /* in amdgpu_irq_handler() */
    struct amdgpu_device *adev = drm_to_adev(dev);
    ...
    ret = amdgpu_ih_process(adev, &adev->irq.ih);
    ...
    amdgpu_ras_interrupt_fatal_error_handler(adev);

    /* in amdgpu_irq_handle_ih1() */
    struct amdgpu_device *adev = container_of(work, struct amdgpu_device, ...);

    [more matches elided]

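amdgpu_irq_disable_all() is a three-level walk under the IRQ lock: every client, every registered source, every interrupt type, each forced to the disabled state, with set() failures logged rather than treated as fatal. A condensed model of the traversal, trimmed to the fields the loop touches (structure names are stand-ins):

    #include <stdio.h>

    #define MAX_CLIENTS 2
    #define MAX_SRC     4

    enum irq_state { IRQ_STATE_DISABLE, IRQ_STATE_ENABLE };

    struct irq_src {
        unsigned int num_types;
        int (*set)(struct irq_src *src, unsigned int type, enum irq_state st);
    };

    struct irq_client { struct irq_src *sources[MAX_SRC]; };

    static void irq_disable_all(struct irq_client *clients)
    {
        /* the driver holds adev->irq.lock (spin_lock_irqsave) around this */
        for (int i = 0; i < MAX_CLIENTS; i++) {
            for (int j = 0; j < MAX_SRC; j++) {
                struct irq_src *src = clients[i].sources[j];

                if (!src || !src->set)
                    continue;       /* unregistered slot */
                for (unsigned int k = 0; k < src->num_types; k++)
                    if (src->set(src, k, IRQ_STATE_DISABLE))
                        fprintf(stderr, "error disabling interrupt (%d,%d,%u)\n",
                                i, j, k);
            }
        }
    }

    static int dummy_set(struct irq_src *src, unsigned int type, enum irq_state st)
    {
        (void)src;
        printf("type %u -> state %d\n", type, st);
        return 0;
    }

    int main(void)
    {
        struct irq_src src = { .num_types = 2, .set = dummy_set };
        struct irq_client clients[MAX_CLIENTS] = { { .sources = { &src } } };

        irq_disable_all(clients);
        return 0;
    }
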
amdgpu_ras.c (RAS error-query readiness and bad-page reservation):

    static bool amdgpu_ras_check_bad_page(struct amdgpu_device *adev, ...);
    static void amdgpu_register_bad_pages_mca_notifier(struct amdgpu_device *adev);

    void amdgpu_ras_set_error_query_ready(struct amdgpu_device *adev, bool ready)
    {
        if (adev && amdgpu_ras_get_context(adev))
            amdgpu_ras_get_context(adev)->error_query_ready = ready;
    }

    static bool amdgpu_ras_get_error_query_ready(struct amdgpu_device *adev)
    {
        if (adev && amdgpu_ras_get_context(adev))
            return amdgpu_ras_get_context(adev)->error_query_ready;
        ...
    }

    static int amdgpu_reserve_page_direct(struct amdgpu_device *adev, uint64_t address)
    {
        ...
        if ((address >= adev->gmc.mc_vram_size) ||
        ...

    [more matches elided]

mxgpu_ai.c (PF<->VF mailbox for SR-IOV):

    static void xgpu_ai_mailbox_send_ack(struct amdgpu_device *adev)
    ...
    static void xgpu_ai_mailbox_set_valid(struct amdgpu_device *adev, bool val)
    ...
    static enum idh_event xgpu_ai_mailbox_peek_msg(struct amdgpu_device *adev)
    ...
    static int xgpu_ai_mailbox_rcv_msg(struct amdgpu_device *adev, ...)
    {
        ...
        xgpu_ai_mailbox_send_ack(adev);
        ...
    }

    static uint8_t xgpu_ai_peek_ack(struct amdgpu_device *adev)
    ...
    static int xgpu_ai_poll_ack(struct amdgpu_device *adev)
    {
        ...
        dev_err(adev->dev, "Doesn't get TRN_MSG_ACK from pf in %d msec\n",
                AI_MAILBOX_POLL_ACK_TIMEDOUT);
        ...
    }

    static int xgpu_ai_poll_msg(struct amdgpu_device *adev, enum idh_event event)
    {
        ...
        r = xgpu_ai_mailbox_rcv_msg(adev, event);
        ...

    [more matches elided]

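xgpu_ai_poll_ack() is a bounded busy-wait: re-read the ACK bit every few milliseconds until it latches, and give up with an error once the AI_MAILBOX_POLL_ACK_TIMEDOUT budget is spent. A host-side model of that loop; the 5 ms interval and 500 ms budget are assumptions, and the register read is simulated:

    #include <errno.h>
    #include <stdbool.h>
    #include <stdio.h>
    #include <unistd.h>

    #define POLL_ACK_TIMEOUT_MS 500   /* assumed budget */
    #define POLL_INTERVAL_MS    5     /* assumed re-read interval */

    /* Simulate the PF raising ACK on the third read. */
    static int polls_left = 3;
    static bool peek_ack(void) { return --polls_left <= 0; }

    static int poll_ack(void)
    {
        int waited = 0;

        do {
            if (peek_ack())
                return 0;                        /* PF acknowledged */
            usleep(POLL_INTERVAL_MS * 1000);     /* msleep() in the driver */
            waited += POLL_INTERVAL_MS;
        } while (waited < POLL_ACK_TIMEOUT_MS);

        fprintf(stderr, "no TRN_MSG_ACK from PF in %d msec\n",
                POLL_ACK_TIMEOUT_MS);
        return -ETIME;
    }

    int main(void)
    {
        printf("poll_ack() = %d\n", poll_ack());
        return 0;
    }
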
amdgpu_umc.c (UMC error-address conversion and page retirement):

    static int amdgpu_umc_convert_error_address(struct amdgpu_device *adev, ...)
    {
        switch (amdgpu_ip_version(adev, UMC_HWIP, 0)) {
        ...
            umc_v6_7_convert_error_address(adev, ...);
            break;
        default:
            dev_warn(adev->dev, ...);
        ...
    }

    int amdgpu_umc_page_retirement_mca(struct amdgpu_device *adev, ...)
    {
        ...
        err_data.err_addr = kcalloc(adev->umc.max_ras_err_cnt_per_query, ...);
        if (!err_data.err_addr) {
            dev_warn(adev->dev, ...);
            ...
        }
        err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query;
        ...
        ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr, ...);
        ...
        amdgpu_ras_add_bad_pages(adev, err_data.err_addr, ...);
        ...

    [more matches elided]

/linux/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c (powerplay dispatch wrappers):

    #define amdgpu_dpm_enable_bapm(adev, e) \
            ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))

    #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev))

    int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low)
    {
        const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
        ...
        mutex_lock(&adev->pm.mutex);
        ret = pp_funcs->get_sclk((adev)->powerplay.pp_handle, low);
        mutex_unlock(&adev->pm.mutex);
        ...
    }

    int amdgpu_dpm_get_mclk(struct amdgpu_device *adev, bool low)
    {
        const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
        ...

    [more matches elided]

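Every wrapper in amdgpu_dpm.c above has the same skeleton: return an error if the powerplay backend doesn't implement the hook, otherwise call it under adev->pm.mutex so PM entry points never race each other. A generic model of that guarded-dispatch idiom (the error value and the fake backend are stand-ins):

    #include <errno.h>
    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct pm_funcs {
        int (*get_sclk)(void *handle, bool low);  /* optional backend hook */
    };

    struct pm_dev {
        const struct pm_funcs *funcs;
        void *handle;
        pthread_mutex_t mutex;    /* adev->pm.mutex in the driver */
    };

    static int dpm_get_sclk(struct pm_dev *pm, bool low)
    {
        int ret;

        if (!pm->funcs || !pm->funcs->get_sclk)
            return -EOPNOTSUPP;          /* backend doesn't support it */

        pthread_mutex_lock(&pm->mutex);  /* serialize all PM entry points */
        ret = pm->funcs->get_sclk(pm->handle, low);
        pthread_mutex_unlock(&pm->mutex);
        return ret;
    }

    static int fake_get_sclk(void *handle, bool low)
    {
        (void)handle;
        return low ? 300 : 1800;         /* made-up clock values, MHz */
    }

    int main(void)
    {
        static const struct pm_funcs funcs = { .get_sclk = fake_get_sclk };
        struct pm_dev pm = { &funcs, NULL, PTHREAD_MUTEX_INITIALIZER };

        printf("sclk(high) = %d MHz\n", dpm_get_sclk(&pm, false));
        return 0;
    }
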
/linux/sound/soc/intel/avs/
loader.c (AVS DSP firmware loading):

    static int avs_fw_manifest_strip_verify(struct avs_dev *adev, struct firmware *fw, ...)
    {
        ...
        dev_warn(adev->dev, "bad FW version %d.%d.%d.%d, expected %d.%d.%d.%d or newer\n",
                 ...);
        ...
    }

    int avs_cldma_load_basefw(struct avs_dev *adev, struct firmware *fw)
    {
        ...
        ret = avs_dsp_op(adev, power, AVS_MAIN_CORE_MASK, true);
        ...
        ret = avs_dsp_op(adev, reset, AVS_MAIN_CORE_MASK, false);
        ...
        dev_err(adev->dev, "cldma reset failed: %d\n", ret);
        ...
        ret = avs_dsp_op(adev, stall, AVS_MAIN_CORE_MASK, false);
        ...
        reinit_completion(&adev->fw_ready);
        avs_dsp_op(adev, int_control, true);
        ...
        ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS, ...);
        ...
    }

    Further matches in the same file: avs_cldma_load_library(),
    avs_cldma_load_module(), avs_cldma_transfer_modules(), avs_hda_init_rom(),
    avs_imr_load_basefw(), avs_hda_load_basefw(), avs_hda_load_library(),
    avs_hda_transfer_modules(), avs_dsp_load_libraries(), avs_dsp_load_basefw(),
    avs_dsp_boot_firmware(), avs_dsp_first_boot_firmware().

    [more matches elided]

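The bad-FW-version warning in avs_fw_manifest_strip_verify() implies a lexicographic comparison across the four version fields. A small sketch of such a check; the struct layout here is illustrative, not the real manifest format:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical 4-part firmware version, most significant field first. */
    struct fw_version { unsigned major, minor, hotfix, build; };

    /* Lexicographic "a is at least b" across the four fields. */
    static bool fw_version_at_least(const struct fw_version *a,
                                    const struct fw_version *b)
    {
        if (a->major  != b->major)  return a->major  > b->major;
        if (a->minor  != b->minor)  return a->minor  > b->minor;
        if (a->hotfix != b->hotfix) return a->hotfix > b->hotfix;
        return a->build >= b->build;
    }

    int main(void)
    {
        struct fw_version got = { 9, 22, 1, 100 }, want = { 9, 22, 2, 0 };

        if (!fw_version_at_least(&got, &want))
            fprintf(stderr,
                    "bad FW version %u.%u.%u.%u, expected %u.%u.%u.%u or newer\n",
                    got.major, got.minor, got.hotfix, got.build,
                    want.major, want.minor, want.hotfix, want.build);
        return 0;
    }
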
/linux/drivers/gpu/drm/amd/pm/legacy-dpm/
kv_dpm.c (Kaveri legacy DPM forward declarations):

    static void kv_dpm_set_irq_funcs(struct amdgpu_device *adev);
    static int kv_enable_nb_dpm(struct amdgpu_device *adev, ...);
    static void kv_init_graphics_levels(struct amdgpu_device *adev);
    static int kv_calculate_ds_divider(struct amdgpu_device *adev);
    static int kv_calculate_nbps_level_settings(struct amdgpu_device *adev);
    static int kv_calculate_dpm_settings(struct amdgpu_device *adev);
    static void kv_enable_new_levels(struct amdgpu_device *adev);
    static void kv_program_nbps_index_settings(struct amdgpu_device *adev, ...);
    static int kv_set_enabled_level(struct amdgpu_device *adev, u32 level);
    static int kv_set_enabled_levels(struct amdgpu_device *adev);

    [more matches elided]