/linux/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c
     36  #define amdgpu_dpm_enable_bapm(adev, e) \
     37          ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
     39  #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev))
     41  int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low)
     43          const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
     49          mutex_lock(&adev->pm.mutex);
     50          ret = pp_funcs->get_sclk((adev)->powerplay.pp_handle,
     52          mutex_unlock(&adev->pm.mutex);
     57  int amdgpu_dpm_get_mclk(struct amdgpu_device *adev, bool low)
     59          const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs;
    [all …]
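Every wrapper in this file follows the same shape: check that the powerplay backend registered the callback, take adev->pm.mutex, dispatch through the function table, drop the lock. Below is a minimal userspace C sketch of that pattern, with pthreads standing in for the kernel mutex; all names are illustrative, not the driver's own.

    #include <errno.h>
    #include <pthread.h>
    #include <stddef.h>

    /* Illustrative stand-in for the powerplay function table. */
    struct pm_funcs {
            int (*get_sclk)(void *handle, int low); /* may be NULL */
    };

    struct device_ctx {
            pthread_mutex_t pm_mutex;
            const struct pm_funcs *pp_funcs;
            void *pp_handle;
    };

    /* Same shape as the amdgpu_dpm_get_sclk() wrapper above: guard the
     * optional callback, serialize on the PM mutex, call through the table. */
    int dpm_get_sclk(struct device_ctx *dev, int low)
    {
            const struct pm_funcs *funcs = dev->pp_funcs;
            int ret;

            if (!funcs || !funcs->get_sclk)
                    return -EOPNOTSUPP;

            pthread_mutex_lock(&dev->pm_mutex);
            ret = funcs->get_sclk(dev->pp_handle, low);
            pthread_mutex_unlock(&dev->pm_mutex);

            return ret;
    }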
|
/linux/drivers/gpu/drm/amd/amdgpu/
gmc_v9_0.c
    412  static int gmc_v9_0_ecc_interrupt_state(struct amdgpu_device *adev,
    422          if (adev->asic_type >= CHIP_VEGA20)
    463  static int gmc_v9_0_vm_fault_interrupt_state(struct amdgpu_device *adev,
    481          for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) {
    482                  hub = &adev->vmhub[j];
    491                  if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0)))
    509          for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) {
    510                  hub = &adev->vmhub[j];
    519                  if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0)))
    543  static int gmc_v9_0_process_interrupt(struct amdgpu_device *adev,
    [all …]
|
gmc_v10_0.c
     54  static int gmc_v10_0_ecc_interrupt_state(struct amdgpu_device *adev,
     63  gmc_v10_0_vm_fault_interrupt_state(struct amdgpu_device *adev,
     70          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
     77          if (!adev->in_s0ix)
     78                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
     82          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
     89          if (!adev->in_s0ix)
     90                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
     99  static int gmc_v10_0_process_interrupt(struct amdgpu_device *adev,
    105          struct amdgpu_vmhub *hub = &adev->vmhub[vmhub_index];
    [all …]
|
gmc_v12_0.c
     45  static int gmc_v12_0_ecc_interrupt_state(struct amdgpu_device *adev,
     53  static int gmc_v12_0_vm_fault_interrupt_state(struct amdgpu_device *adev,
     60          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
     67          if (!adev->in_s0ix)
     68                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
     72          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
     79          if (!adev->in_s0ix)
     80                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
     89  static int gmc_v12_0_process_interrupt(struct amdgpu_device *adev,
    105          hub = &adev->vmhub[AMDGPU_MMHUB0(0)];
    [all …]
|
gmc_v11_0.c
     53  static int gmc_v11_0_ecc_interrupt_state(struct amdgpu_device *adev,
     62  gmc_v11_0_vm_fault_interrupt_state(struct amdgpu_device *adev,
     69          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false);
     76          if (!adev->in_s0ix && (adev->in_runpm || adev->in_suspend ||
     77              amdgpu_in_reset(adev)))
     78                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false);
     82          amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true);
     89          if (!adev->in_s0ix)
     90                  amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true);
     99  static int gmc_v11_0_process_interrupt(struct amdgpu_device *adev,
    [all …]
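The gmc_v10/v11/v12 handlers above all share one control flow: the MMHUB fault masks are toggled unconditionally, while the GFXHUB masks are only touched when the GFX block is powered (never during s0ix, where its registers are inaccessible). A compilable sketch of that shared shape, with a stub in place of amdgpu_gmc_set_vm_fault_masks() and all names illustrative:

    #include <stdbool.h>
    #include <stdio.h>

    enum hub_id { HUB_MMHUB0, HUB_GFXHUB0 };
    enum irq_state { IRQ_STATE_DISABLE, IRQ_STATE_ENABLE };

    /* Stub standing in for amdgpu_gmc_set_vm_fault_masks(). */
    static void set_vm_fault_masks(enum hub_id hub, bool value)
    {
            printf("hub %d: vm fault masks <- %d\n", hub, value);
    }

    /* GFXHUB registers cannot be touched while GFX sits in s0ix, so the
     * handlers above skip that hub in the s0ix case. */
    static int vm_fault_interrupt_state(bool in_s0ix, enum irq_state state)
    {
            bool value = (state == IRQ_STATE_ENABLE);

            set_vm_fault_masks(HUB_MMHUB0, value);
            if (!in_s0ix)
                    set_vm_fault_masks(HUB_GFXHUB0, value);

            return 0;
    }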
|
amdgpu_mes.c
     35  int amdgpu_mes_doorbell_process_slice(struct amdgpu_device *adev)
     42  static int amdgpu_mes_doorbell_init(struct amdgpu_device *adev)
     45          struct amdgpu_mes *mes = &adev->mes;
     50                  dev_err(adev->dev, "Failed to allocate MES doorbell bitmap\n");
     56          adev->mes.aggregated_doorbells[i] = mes->db_start_dw_offset + i * 2;
     63  static int amdgpu_mes_event_log_init(struct amdgpu_device *adev)
     70          r = amdgpu_bo_create_kernel(adev, adev->mes.event_log_size, PAGE_SIZE,
     72                                      &adev->mes.event_log_gpu_obj,
     73                                      &adev->mes.event_log_gpu_addr,
     74                                      &adev->mes.event_log_cpu_addr);
    [all …]
|
soc15.c
    190  static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode,
    193          if (amdgpu_ip_version(adev, VCE_HWIP, 0)) {
    194                  switch (amdgpu_ip_version(adev, VCE_HWIP, 0)) {
    206                  switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) {
    240  static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
    248          spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
    251          spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
    255  static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
    262          spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
    265          spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
    [all …]
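soc15_uvd_ctx_rreg()/wreg() show the classic index/data register pair: one write selects the register, the following data access completes the transaction, so both steps must sit under a single lock. A userspace sketch of that pattern, with a mutex in place of spin_lock_irqsave() and plain variables in place of MMIO (all names illustrative):

    #include <pthread.h>
    #include <stdint.h>

    /* Stand-ins for the MMIO index/data pair; a driver would use
     * readl()/writel() on mapped registers instead. */
    static volatile uint32_t mmio_index, mmio_data;

    static pthread_mutex_t uvd_ctx_lock = PTHREAD_MUTEX_INITIALIZER;

    /* Shape of soc15_uvd_ctx_rreg(): select through the index port, then
     * read back through the data port, as one atomic unit. */
    static uint32_t uvd_ctx_rreg(uint32_t reg)
    {
            uint32_t v;

            pthread_mutex_lock(&uvd_ctx_lock);
            mmio_index = reg;
            v = mmio_data;
            pthread_mutex_unlock(&uvd_ctx_lock);

            return v;
    }

    /* Shape of soc15_uvd_ctx_wreg(): same selection, then a data write. */
    static void uvd_ctx_wreg(uint32_t reg, uint32_t v)
    {
            pthread_mutex_lock(&uvd_ctx_lock);
            mmio_index = reg;
            mmio_data = v;
            pthread_mutex_unlock(&uvd_ctx_lock);
    }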
|
amdgpu_virt.c
     59  bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev)
     67  void amdgpu_virt_init_setting(struct amdgpu_device *adev)
     69          struct drm_device *ddev = adev_to_drm(adev);
     72          if (adev->asic_type != CHIP_ALDEBARAN &&
     73              adev->asic_type != CHIP_ARCTURUS &&
     74              ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) {
     75                  if (adev->mode_info.num_crtc == 0)
     76                          adev->mode_info.num_crtc = 1;
     77                  adev->enable_virtual_display = true;
     80          adev->cg_flags = 0;
    [all …]
|
amdgpu_acp.c
    103          struct amdgpu_device *adev = ip_block->adev;
    105          adev->acp.parent = adev->dev;
    107          adev->acp.cgs_device =
    108                  amdgpu_cgs_create_device(adev);
    109          if (!adev->acp.cgs_device)
    117          struct amdgpu_device *adev = ip_block->adev;
    119          if (adev->acp.cgs_device)
    120                  amdgpu_cgs_destroy_device(adev->acp.cgs_device);
    126          void *adev;
    133          struct amdgpu_device *adev;
    [all …]
|
amdgpu_discovery.c
    245  static int amdgpu_discovery_read_binary_from_sysmem(struct amdgpu_device *adev, uint8_t *binary)
    251          ret = amdgpu_acpi_get_tmr_info(adev, &tmr_offset, &tmr_size);
    258          discv_regn = memremap(pos, adev->discovery.size, MEMREMAP_WC);
    260                  memcpy(binary, discv_regn, adev->discovery.size);
    271  static int amdgpu_discovery_read_binary_from_mem(struct amdgpu_device *adev,
    279          if (!amdgpu_sriov_vf(adev)) {
    307          if (amdgpu_sriov_vf(adev) && adev->virt.is_dynamic_crit_regn_enabled) {
    312                  if (amdgpu_virt_get_dynamic_data_info(adev,
    314                                                        &adev->discovery.size)) {
    315                          dev_err(adev->dev,
    [all …]
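amdgpu_discovery_read_binary_from_sysmem() is a map-copy-unmap read of the IP discovery image out of a reserved memory carveout. A userspace analogue of the same pattern, using mmap(2) on a file where the driver uses memremap() on the TMR region; the function and path handling are purely illustrative:

    #include <fcntl.h>
    #include <stdint.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <unistd.h>

    /* Analogue of the sysmem read above: map the region, copy the image
     * out, unmap. pos must be page-aligned for mmap, just as memremap
     * works on page-granular physical ranges. */
    static int read_binary(const char *path, uint8_t *binary, size_t size,
                           off_t pos)
    {
            int fd = open(path, O_RDONLY);
            if (fd < 0)
                    return -1;

            void *regn = mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, pos);
            close(fd);
            if (regn == MAP_FAILED)
                    return -1;

            memcpy(binary, regn, size);
            munmap(regn, size);
            return 0;
    }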
|
amdgpu_irq.c
    124  void amdgpu_irq_disable_all(struct amdgpu_device *adev)
    130          spin_lock_irqsave(&adev->irq.lock, irqflags);
    132                  if (!adev->irq.client[i].sources)
    136                          struct amdgpu_irq_src *src = adev->irq.client[i].sources[j];
    142                                  r = src->funcs->set(adev, src, k,
    145                                          dev_err(adev->dev,
    151          spin_unlock_irqrestore(&adev->irq.lock, irqflags);
    168          struct amdgpu_device *adev = drm_to_adev(dev);
    171          ret = amdgpu_ih_process(adev, &adev->irq.ih);
    175                  amdgpu_ras_interrupt_fatal_error_handler(adev);
    [all …]
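amdgpu_irq_disable_all() walks every client's source array under the IRQ lock and drives each registered source type to the disabled state. A self-contained sketch of that nested walk, with a pthread mutex in place of the spinlock and illustrative types:

    #include <pthread.h>
    #include <stddef.h>

    #define MAX_CLIENTS 8
    #define MAX_SRC     16

    struct irq_src {
            int num_types;
            /* per-source state setter; may be NULL if not maskable */
            int (*set)(struct irq_src *src, unsigned int type, int state);
    };

    struct irq_client {
            struct irq_src **sources;       /* NULL if client unused */
    };

    static pthread_mutex_t irq_lock = PTHREAD_MUTEX_INITIALIZER;

    /* Shape of amdgpu_irq_disable_all(): skip empty clients and NULL
     * sources, then disable every interrupt type each source exposes. */
    static void irq_disable_all(struct irq_client *clients)
    {
            pthread_mutex_lock(&irq_lock);
            for (int i = 0; i < MAX_CLIENTS; i++) {
                    if (!clients[i].sources)
                            continue;
                    for (int j = 0; j < MAX_SRC; j++) {
                            struct irq_src *src = clients[i].sources[j];

                            if (!src || !src->set)
                                    continue;
                            for (int k = 0; k < src->num_types; k++)
                                    src->set(src, k, 0 /* disable */);
                    }
            }
            pthread_mutex_unlock(&irq_lock);
    }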
|
amdgpu_jpeg.c
     36  static void amdgpu_jpeg_reg_dump_fini(struct amdgpu_device *adev);
     38  int amdgpu_jpeg_sw_init(struct amdgpu_device *adev)
     42          INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler);
     43          mutex_init(&adev->jpeg.jpeg_pg_lock);
     44          atomic_set(&adev->jpeg.total_submission_cnt, 0);
     46          if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) &&
     47              (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG))
     48                  adev->jpeg.indirect_sram = true;
     50          for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) {
     51                  if (adev->jpeg.harvest_config & (1U << i))
    [all …]
|
amdgpu_amdkfd.c
     70  void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev)
     72          bool vf = amdgpu_sriov_vf(adev);
     77          adev->kfd.dev = kgd2kfd_probe(adev, vf);
     93  static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev,
    102          if (adev->enable_mes) {
    109                  *aperture_base = adev->doorbell.base;
    112          } else if (adev->doorbell.size > adev->doorbell.num_kernel_doorbells *
    114                  *aperture_base = adev->doorbell.base;
    115                  *aperture_size = adev->doorbell.size;
    116                  *start_offset = adev->doorbell.num_kernel_doorbells * sizeof(u32);
    [all …]
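The non-MES branch of amdgpu_doorbell_get_kfd_info() splits the doorbell BAR in two: the first num_kernel_doorbells 32-bit slots stay with the kernel driver, and KFD's usable range starts past that offset. A hedged sketch of just that arithmetic (struct and names illustrative, not the driver's):

    #include <stdint.h>

    struct doorbell_info {
            uint64_t base;                  /* doorbell BAR physical base */
            uint32_t size;                  /* BAR size in bytes */
            uint32_t num_kernel_doorbells;  /* 32-bit slots kept by the kernel */
    };

    /* KFD's aperture covers the whole BAR, but its start offset skips the
     * kernel-owned slots; if nothing is left over, report an empty aperture. */
    static void doorbell_get_kfd_info(const struct doorbell_info *db,
                                      uint64_t *aperture_base,
                                      uint32_t *aperture_size,
                                      uint32_t *start_offset)
    {
            uint32_t kernel_bytes = db->num_kernel_doorbells * sizeof(uint32_t);

            if (db->size > kernel_bytes) {
                    *aperture_base = db->base;
                    *aperture_size = db->size;
                    *start_offset = kernel_bytes;
            } else {
                    *aperture_base = 0;
                    *aperture_size = 0;
                    *start_offset = 0;
            }
    }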
|
amdgpu_gmc.c
     43  bool amdgpu_gmc_is_pdb0_enabled(struct amdgpu_device *adev)
     45          return adev->gmc.xgmi.connected_to_cpu || amdgpu_virt_xgmi_migrate_enabled(adev);
     56  int amdgpu_gmc_pdb0_alloc(struct amdgpu_device *adev)
     60          u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes;
     61          uint32_t pde0_page_shift = adev->gmc.vmid0_page_table_block_size + 21;
     74          r = amdgpu_bo_create(adev, &bp, &adev->gmc.pdb0_bo);
     78          r = amdgpu_bo_reserve(adev->gmc.pdb0_bo, false);
     82          r = amdgpu_bo_pin(adev->gmc.pdb0_bo, AMDGPU_GEM_DOMAIN_VRAM);
     85          r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0);
     89          amdgpu_bo_unreserve(adev->gmc.pdb0_bo);
    [all …]
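amdgpu_gmc_pdb0_alloc() runs the usual buffer-object setup ladder: create, reserve, pin, kmap, unreserve. A sketch of that ladder with goto-based unwinding, so each failure tears down exactly the steps already taken; the bo_* helpers here are trivial stubs, not the amdgpu API, and the unwind labels are an assumption about the error path rather than a copy of it:

    #include <stdlib.h>

    /* Illustrative stubs for the buffer-object calls in the excerpt. */
    struct bo;
    static int bo_create(struct bo **bo)          { *bo = malloc(1); return *bo ? 0 : -1; }
    static int bo_reserve(struct bo *bo)          { (void)bo; return 0; }
    static int bo_pin(struct bo *bo)              { (void)bo; return 0; }
    static int bo_kmap(struct bo *bo, void **ptr) { *ptr = bo; return 0; }
    static void bo_unpin(struct bo *bo)           { (void)bo; }
    static void bo_unreserve(struct bo *bo)       { (void)bo; }
    static void bo_free(struct bo **bo)           { free(*bo); *bo = NULL; }

    /* Shape of the pdb0 allocation: create -> reserve -> pin -> kmap,
     * unwinding in reverse order on failure. */
    static int pdb0_alloc(struct bo **bo, void **cpu_ptr)
    {
            int r;

            r = bo_create(bo);
            if (r)
                    return r;
            r = bo_reserve(*bo);
            if (r)
                    goto err_create;
            r = bo_pin(*bo);
            if (r)
                    goto err_reserve;
            r = bo_kmap(*bo, cpu_ptr);
            if (r)
                    goto err_pin;
            bo_unreserve(*bo);
            return 0;

    err_pin:
            bo_unpin(*bo);
    err_reserve:
            bo_unreserve(*bo);
    err_create:
            bo_free(bo);
            return r;
    }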
|
amdgpu_ras.c
    145  static int amdgpu_ras_check_bad_page(struct amdgpu_device *adev,
    148  static void amdgpu_ras_critical_region_init(struct amdgpu_device *adev);
    149  static void amdgpu_ras_critical_region_fini(struct amdgpu_device *adev);
    152  static void amdgpu_register_bad_pages_mca_notifier(struct amdgpu_device *adev);
    154  amdgpu_unregister_bad_pages_mca_notifier(struct amdgpu_device *adev);
    162  void amdgpu_ras_set_error_query_ready(struct amdgpu_device *adev, bool ready)
    164          if (adev && amdgpu_ras_get_context(adev))
    165                  amdgpu_ras_get_context(adev)->error_query_ready = ready;
    168  static bool amdgpu_ras_get_error_query_ready(struct amdgpu_device *adev)
    170          if (adev && amdgpu_ras_get_context(adev))
    [all …]
|
gfx_v12_0.c
    268  static void gfx_v12_0_disable_gpa_mode(struct amdgpu_device *adev);
    269  static void gfx_v12_0_set_ring_funcs(struct amdgpu_device *adev);
    270  static void gfx_v12_0_set_irq_funcs(struct amdgpu_device *adev);
    271  static void gfx_v12_0_set_rlc_funcs(struct amdgpu_device *adev);
    272  static void gfx_v12_0_set_mqd_funcs(struct amdgpu_device *adev);
    273  static void gfx_v12_0_set_imu_funcs(struct amdgpu_device *adev);
    274  static int gfx_v12_0_get_cu_info(struct amdgpu_device *adev,
    276  static uint64_t gfx_v12_0_get_gpu_clock_counter(struct amdgpu_device *adev);
    277  static void gfx_v12_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
    279  static u32 gfx_v12_0_get_wgp_active_bitmap_per_sh(struct amdgpu_device *adev);
    [all …]
|
gfx_v11_0.c
    320  static void gfx_v11_0_disable_gpa_mode(struct amdgpu_device *adev);
    321  static void gfx_v11_0_set_ring_funcs(struct amdgpu_device *adev);
    322  static void gfx_v11_0_set_irq_funcs(struct amdgpu_device *adev);
    323  static void gfx_v11_0_set_gds_init(struct amdgpu_device *adev);
    324  static void gfx_v11_0_set_rlc_funcs(struct amdgpu_device *adev);
    325  static void gfx_v11_0_set_mqd_funcs(struct amdgpu_device *adev);
    326  static void gfx_v11_0_set_imu_funcs(struct amdgpu_device *adev);
    327  static int gfx_v11_0_get_cu_info(struct amdgpu_device *adev,
    329  static uint64_t gfx_v11_0_get_gpu_clock_counter(struct amdgpu_device *adev);
    330  static void gfx_v11_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
    [all …]
|
vega20_reg_init.c
     29  int vega20_reg_base_init(struct amdgpu_device *adev)
     34          adev->reg_offset[GC_HWIP][i] = (uint32_t *)(&(GC_BASE.instance[i]));
     35          adev->reg_offset[HDP_HWIP][i] = (uint32_t *)(&(HDP_BASE.instance[i]));
     36          adev->reg_offset[MMHUB_HWIP][i] = (uint32_t *)(&(MMHUB_BASE.instance[i]));
     37          adev->reg_offset[ATHUB_HWIP][i] = (uint32_t *)(&(ATHUB_BASE.instance[i]));
     38          adev->reg_offset[NBIO_HWIP][i] = (uint32_t *)(&(NBIO_BASE.instance[i]));
     39          adev->reg_offset[MP0_HWIP][i] = (uint32_t *)(&(MP0_BASE.instance[i]));
     40          adev->reg_offset[MP1_HWIP][i] = (uint32_t *)(&(MP1_BASE.instance[i]));
     41          adev->reg_offset[UVD_HWIP][i] = (uint32_t *)(&(UVD_BASE.instance[i]));
     42          adev->reg_offset[VCE_HWIP][i] = (uint32_t *)(&(VCE_BASE.instance[i]));
    [all …]
|
amdgpu_sdma.c
     42          struct amdgpu_device *adev = ring->adev;
     45          for (i = 0; i < adev->sdma.num_instances; i++)
     46                  if (ring == &adev->sdma.instance[i].ring ||
     47                      ring == &adev->sdma.instance[i].page)
     48                          return &adev->sdma.instance[i];
     55          struct amdgpu_device *adev = ring->adev;
     58          for (i = 0; i < adev->sdma.num_instances; i++) {
     59                  if (ring == &adev->sdma.instance[i].ring ||
     60                      ring == &adev->sdma.instance[i].page) {
     72          struct amdgpu_device *adev = ring->adev;
    [all …]
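Both lookups above resolve a ring pointer back to its owning SDMA instance by pointer identity against each instance's main and page-queue rings. A self-contained sketch of that lookup (types and limits illustrative):

    #include <stddef.h>

    #define MAX_SDMA_INSTANCES 8

    struct ring { int dummy; };

    struct sdma_instance {
            struct ring ring;       /* main queue */
            struct ring page;       /* page queue */
    };

    struct sdma {
            int num_instances;
            struct sdma_instance instance[MAX_SDMA_INSTANCES];
    };

    /* Pointer-identity lookup, as in amdgpu_sdma_get_instance_from_ring():
     * a ring belongs to instance i iff it is one of that instance's queues. */
    static struct sdma_instance *get_instance_from_ring(struct sdma *sdma,
                                                        struct ring *ring)
    {
            for (int i = 0; i < sdma->num_instances; i++)
                    if (ring == &sdma->instance[i].ring ||
                        ring == &sdma->instance[i].page)
                            return &sdma->instance[i];

            return NULL;
    }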
|
gmc_v6_0.c
     44  static void gmc_v6_0_set_gmc_funcs(struct amdgpu_device *adev);
     45  static void gmc_v6_0_set_irq_funcs(struct amdgpu_device *adev);
     64  static void gmc_v6_0_mc_stop(struct amdgpu_device *adev)
     69          ip_block = amdgpu_device_ip_get_ip_block(adev, AMD_IP_BLOCK_TYPE_GMC);
     89  static void gmc_v6_0_mc_resume(struct amdgpu_device *adev)
    103  static int gmc_v6_0_init_microcode(struct amdgpu_device *adev)
    110          switch (adev->asic_type) {
    134          err = amdgpu_ucode_request(adev, &adev->gmc.fw, AMDGPU_UCODE_REQUIRED,
    137                  dev_err(adev->dev,
    140                  amdgpu_ucode_release(&adev->gmc.fw);
    [all …]
|
amdgpu.h
    128          struct amdgpu_device *adev;
    371  void amdgpu_device_ip_get_clockgating_state(struct amdgpu_device *adev,
    373  int amdgpu_device_ip_wait_for_idle(struct amdgpu_device *adev,
    375  bool amdgpu_device_ip_is_hw(struct amdgpu_device *adev,
    377  bool amdgpu_device_ip_is_valid(struct amdgpu_device *adev,
    404          struct amdgpu_device *adev;
    407  int amdgpu_device_ip_block_version_cmp(struct amdgpu_device *adev,
    412  amdgpu_device_ip_get_ip_block(struct amdgpu_device *adev,
    415  int amdgpu_device_ip_block_add(struct amdgpu_device *adev,
    421  bool amdgpu_get_bios(struct amdgpu_device *adev);
    [all …]
|
vega10_reg_init.c
     29  int vega10_reg_base_init(struct amdgpu_device *adev)
     34          adev->reg_offset[GC_HWIP][i] = (uint32_t *)(&(GC_BASE.instance[i]));
     35          adev->reg_offset[HDP_HWIP][i] = (uint32_t *)(&(HDP_BASE.instance[i]));
     36          adev->reg_offset[MMHUB_HWIP][i] = (uint32_t *)(&(MMHUB_BASE.instance[i]));
     37          adev->reg_offset[ATHUB_HWIP][i] = (uint32_t *)(&(ATHUB_BASE.instance[i]));
     38          adev->reg_offset[NBIO_HWIP][i] = (uint32_t *)(&(NBIO_BASE.instance[i]));
     39          adev->reg_offset[MP0_HWIP][i] = (uint32_t *)(&(MP0_BASE.instance[i]));
     40          adev->reg_offset[MP1_HWIP][i] = (uint32_t *)(&(MP1_BASE.instance[i]));
     41          adev->reg_offset[UVD_HWIP][i] = (uint32_t *)(&(UVD_BASE.instance[i]));
     42          adev->reg_offset[VCE_HWIP][i] = (uint32_t *)(&(VCE_BASE.instance[i]));
    [all …]
|
amdgpu_kms.c
     50  void amdgpu_unregister_gpu_instance(struct amdgpu_device *adev)
     59          if (gpu_instance->adev == adev) {
     63                  if (adev->flags & AMD_IS_APU)
     84          struct amdgpu_device *adev = drm_to_adev(dev);
     86          if (adev == NULL)
     89          amdgpu_unregister_gpu_instance(adev);
     91          if (adev->rmmio == NULL)
     94          if (amdgpu_acpi_smart_shift_update(adev, AMDGPU_SS_DRV_UNLOAD))
     97          amdgpu_acpi_fini(adev);
     98          amdgpu_device_fini_hw(adev);
    [all …]
|
/linux/sound/soc/intel/avs/
loader.c
    102  static int avs_fw_manifest_strip_verify(struct avs_dev *adev, struct firmware *fw,
    126          dev_warn(adev->dev, "bad FW version %d.%d.%d.%d, expected %d.%d.%d.%d or newer\n",
    138  int avs_cldma_load_basefw(struct avs_dev *adev, struct firmware *fw)
    144          ret = avs_dsp_op(adev, power, AVS_MAIN_CORE_MASK, true);
    148          ret = avs_dsp_op(adev, reset, AVS_MAIN_CORE_MASK, false);
    154                  dev_err(adev->dev, "cldma reset failed: %d\n", ret);
    159          ret = avs_dsp_op(adev, stall, AVS_MAIN_CORE_MASK, false);
    163          reinit_completion(&adev->fw_ready);
    164          avs_dsp_op(adev, int_control, true);
    167          ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg,
    [all …]
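avs_cldma_load_basefw() drives the usual DSP bring-up order: power the core, take it out of reset, un-stall it, enable interrupts, then poll the firmware status register until it reports ready. A sketch of just the timeout-poll step, with a plain loop in place of the snd_hdac_adsp_readl_poll() helper; the register-read stub and timing are illustrative:

    #include <stdint.h>
    #include <time.h>

    /* Stub for a status-register read; a driver would do an MMIO readl(). */
    static uint32_t read_fw_status(void)
    {
            return 0;       /* stub: firmware status never changes here */
    }

    /* Poll until (status & mask) == target or the timeout expires, sleeping
     * ~1 ms between reads; this is the pattern the readl_poll helper wraps. */
    static int poll_fw_status(uint32_t mask, uint32_t target, long timeout_ms)
    {
            struct timespec delay = { .tv_sec = 0, .tv_nsec = 1000 * 1000 };

            for (long elapsed = 0; elapsed < timeout_ms; elapsed++) {
                    if ((read_fw_status() & mask) == target)
                            return 0;
                    nanosleep(&delay, NULL);
            }

            return -1;      /* the kernel helper returns -ETIMEDOUT */
    }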
|
utils.c
     16  static int avs_module_entry_index(struct avs_dev *adev, const guid_t *uuid)
     20          for (i = 0; i < adev->mods_info->count; i++) {
     23                  module = &adev->mods_info->entries[i];
     32  static int avs_module_id_entry_index(struct avs_dev *adev, u32 module_id)
     36          for (i = 0; i < adev->mods_info->count; i++) {
     39                  module = &adev->mods_info->entries[i];
     47  int avs_get_module_entry(struct avs_dev *adev, const guid_t *uuid, struct avs_module_entry *entry)
     51          mutex_lock(&adev->modres_mutex);
     53          idx = avs_module_entry_index(adev, uuid);
     55          memcpy(entry, &adev->mods_info->entries[idx], sizeof(*entry));
    [all …]
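avs_get_module_entry() copies the matched table entry out under modres_mutex rather than returning a pointer into it, so callers never hold references into mods_info once the lock drops. A sketch of that copy-out-under-lock pattern, with pthreads and a raw 16-byte uuid in place of the kernel mutex and guid_t (all names illustrative):

    #include <errno.h>
    #include <pthread.h>
    #include <string.h>

    struct module_entry {
            unsigned int module_id;
            unsigned char uuid[16];
    };

    struct mod_table {
            pthread_mutex_t lock;
            int count;
            struct module_entry *entries;
    };

    /* Linear scan for the uuid; returns an index or -ENOENT. */
    static int module_entry_index(struct mod_table *t, const unsigned char *uuid)
    {
            for (int i = 0; i < t->count; i++)
                    if (!memcmp(t->entries[i].uuid, uuid, sizeof(t->entries[i].uuid)))
                            return i;
            return -ENOENT;
    }

    /* Copy the entry out while holding the lock, so the caller owns a
     * stable snapshot even if the table is later reloaded. */
    static int get_module_entry(struct mod_table *t, const unsigned char *uuid,
                                struct module_entry *entry)
    {
            int idx, ret = 0;

            pthread_mutex_lock(&t->lock);
            idx = module_entry_index(t, uuid);
            if (idx >= 0)
                    memcpy(entry, &t->entries[idx], sizeof(*entry));
            else
                    ret = idx;
            pthread_mutex_unlock(&t->lock);

            return ret;
    }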
|