Searched refs:adev (Results 1 – 25 of 705) sorted by relevance

/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_device.c
184 static int amdgpu_device_ip_resume_phase1(struct amdgpu_device *adev);
185 static int amdgpu_device_ip_resume_phase2(struct amdgpu_device *adev);
186 static int amdgpu_device_ip_resume_phase3(struct amdgpu_device *adev);
188 static void amdgpu_device_load_switch_state(struct amdgpu_device *adev);
190 static inline bool amdgpu_ip_member_of_hwini(struct amdgpu_device *adev, in amdgpu_ip_member_of_hwini() argument
193 return (adev->init_lvl->hwini_ip_block_mask & (1U << block)) != 0; in amdgpu_ip_member_of_hwini()
196 void amdgpu_set_init_level(struct amdgpu_device *adev, in amdgpu_set_init_level() argument
201 adev->init_lvl = &amdgpu_init_minimal_xgmi; in amdgpu_set_init_level()
204 adev->init_lvl = &amdgpu_init_recovery; in amdgpu_set_init_level()
209 adev->init_lvl = &amdgpu_init_default; in amdgpu_set_init_level()
[all …]
gmc_v9_0.c
412 static int gmc_v9_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_ecc_interrupt_state() argument
422 if (adev->asic_type >= CHIP_VEGA20) in gmc_v9_0_ecc_interrupt_state()
463 static int gmc_v9_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_vm_fault_interrupt_state() argument
481 for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) { in gmc_v9_0_vm_fault_interrupt_state()
482 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
491 if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0))) in gmc_v9_0_vm_fault_interrupt_state()
509 for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) { in gmc_v9_0_vm_fault_interrupt_state()
510 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
519 if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0))) in gmc_v9_0_vm_fault_interrupt_state()
543 static int gmc_v9_0_process_interrupt(struct amdgpu_device *adev, in gmc_v9_0_process_interrupt() argument
[all …]
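
The gmc_v9_0_vm_fault_interrupt_state() hits above walk only the VM hubs that are actually present, using for_each_set_bit() over a hub bitmask, and skip the GFX hub while the device is in S0ix. A compact sketch of that iteration idiom follows; MAX_HUBS, struct hub, and the parameter names are illustrative stand-ins, not the driver's real definitions.

#include <linux/bitmap.h>
#include <linux/bitops.h>

#define MAX_HUBS 16 /* assumed bound, standing in for AMDGPU_MAX_VMHUBS */

struct hub {
	void __iomem *regs; /* per-hub register context (details elided) */
};

static void visit_present_hubs(const unsigned long *hub_mask, struct hub *hubs,
			       bool in_s0ix, unsigned int gfx_hub)
{
	unsigned int j;

	/* Visit only hubs whose mask bit is set; harvested hubs never match. */
	for_each_set_bit(j, hub_mask, MAX_HUBS) {
		/* The GFX hub is powered down in S0ix, so leave it untouched. */
		if (in_s0ix && j == gfx_hub)
			continue;
		/* ... program the per-hub fault-interrupt enables via hubs[j] ... */
	}
}
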
soc15.c
190 static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc15_query_video_codecs() argument
193 if (amdgpu_ip_version(adev, VCE_HWIP, 0)) { in soc15_query_video_codecs()
194 switch (amdgpu_ip_version(adev, VCE_HWIP, 0)) { in soc15_query_video_codecs()
206 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in soc15_query_video_codecs()
240 static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg) in soc15_uvd_ctx_rreg() argument
248 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
251 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
255 static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_uvd_ctx_wreg() argument
262 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
265 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
[all …]
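
soc15_uvd_ctx_rreg() and soc15_uvd_ctx_wreg() above show the indexed-register access pattern: a spinlock taken with IRQs disabled covers programming an index register and then touching the paired data register. A minimal sketch of the pattern, with hypothetical register offsets and raw MMIO callbacks standing in for the real SOC15 macros:

#include <linux/spinlock.h>
#include <linux/types.h>

/* Hypothetical index/data register pair; the real SOC15 offsets differ. */
#define mmCTX_INDEX 0x0000
#define mmCTX_DATA  0x0001

struct ctx_regs {
	spinlock_t idx_lock;          /* serializes index/data access pairs */
	u32 (*rreg)(u32 reg);         /* raw MMIO read (assumed callback)   */
	void (*wreg)(u32 reg, u32 v); /* raw MMIO write (assumed callback)  */
};

static u32 ctx_rreg(struct ctx_regs *ctx, u32 reg)
{
	unsigned long flags;
	u32 val;

	/* The lock must span both the index write and the data read, or a
	 * concurrent caller could retarget the index register in between. */
	spin_lock_irqsave(&ctx->idx_lock, flags);
	ctx->wreg(mmCTX_INDEX, reg);
	val = ctx->rreg(mmCTX_DATA);
	spin_unlock_irqrestore(&ctx->idx_lock, flags);
	return val;
}

static void ctx_wreg(struct ctx_regs *ctx, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->idx_lock, flags);
	ctx->wreg(mmCTX_INDEX, reg);
	ctx->wreg(mmCTX_DATA, v);
	spin_unlock_irqrestore(&ctx->idx_lock, flags);
}
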
soc21.c
169 static int soc21_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc21_query_video_codecs() argument
172 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc21_query_video_codecs()
175 switch (amdgpu_ip_version(adev, UVD_HWIP, 0)) { in soc21_query_video_codecs()
180 if (amdgpu_sriov_vf(adev)) { in soc21_query_video_codecs()
181 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) || in soc21_query_video_codecs()
182 !amdgpu_sriov_is_av1_support(adev)) { in soc21_query_video_codecs()
194 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0)) { in soc21_query_video_codecs()
224 static u32 soc21_didt_rreg(struct amdgpu_device *adev, u32 reg) in soc21_didt_rreg() argument
232 spin_lock_irqsave(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
235 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
[all …]
amdgpu_virt.c
60 bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev) in amdgpu_virt_mmio_blocked() argument
68 void amdgpu_virt_init_setting(struct amdgpu_device *adev) in amdgpu_virt_init_setting() argument
70 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_virt_init_setting()
73 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
74 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
75 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
76 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
77 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
78 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
81 adev->cg_flags = 0; in amdgpu_virt_init_setting()
[all …]
amdgpu_acp.c
103 struct amdgpu_device *adev = ip_block->adev; in acp_sw_init() local
105 adev->acp.parent = adev->dev; in acp_sw_init()
107 adev->acp.cgs_device = in acp_sw_init()
108 amdgpu_cgs_create_device(adev); in acp_sw_init()
109 if (!adev->acp.cgs_device) in acp_sw_init()
117 struct amdgpu_device *adev = ip_block->adev; in acp_sw_fini() local
119 if (adev->acp.cgs_device) in acp_sw_fini()
120 amdgpu_cgs_destroy_device(adev->acp.cgs_device); in acp_sw_fini()
126 void *adev; member
133 struct amdgpu_device *adev; in acp_poweroff() local
[all …]
amdgpu_discovery.c
256 static int amdgpu_discovery_read_binary_from_sysmem(struct amdgpu_device *adev, uint8_t *binary) in amdgpu_discovery_read_binary_from_sysmem() argument
262 ret = amdgpu_acpi_get_tmr_info(adev, &tmr_offset, &tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
269 discv_regn = memremap(pos, adev->discovery.size, MEMREMAP_WC); in amdgpu_discovery_read_binary_from_sysmem()
271 memcpy(binary, discv_regn, adev->discovery.size); in amdgpu_discovery_read_binary_from_sysmem()
282 static int amdgpu_discovery_read_binary_from_mem(struct amdgpu_device *adev, in amdgpu_discovery_read_binary_from_mem() argument
290 if (!amdgpu_sriov_vf(adev)) { in amdgpu_discovery_read_binary_from_mem()
318 if (amdgpu_sriov_vf(adev) && adev->virt.is_dynamic_crit_regn_enabled) { in amdgpu_discovery_read_binary_from_mem()
323 if (amdgpu_virt_get_dynamic_data_info(adev, in amdgpu_discovery_read_binary_from_mem()
325 &adev->discovery.size)) { in amdgpu_discovery_read_binary_from_mem()
326 dev_err(adev->dev, in amdgpu_discovery_read_binary_from_mem()
[all …]
amdgpu_irq.c
159 void amdgpu_irq_disable_all(struct amdgpu_device *adev) in amdgpu_irq_disable_all() argument
165 spin_lock_irqsave(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
167 if (!adev->irq.client[i].sources) in amdgpu_irq_disable_all()
171 struct amdgpu_irq_src *src = adev->irq.client[i].sources[j]; in amdgpu_irq_disable_all()
177 r = src->funcs->set(adev, src, k, in amdgpu_irq_disable_all()
180 dev_err(adev->dev, in amdgpu_irq_disable_all()
186 spin_unlock_irqrestore(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
203 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_irq_handler() local
206 ret = amdgpu_ih_process(adev, &adev->irq.ih); in amdgpu_irq_handler()
210 amdgpu_ras_interrupt_fatal_error_handler(adev); in amdgpu_irq_handler()
[all …]
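
amdgpu_irq_disable_all() above takes the IRQ lock and walks every client, source, and interrupt type, calling each source's ->set() hook with the disable state. The elided middle of the hit presumably iterates the source and type indices; a sketch of the overall shape, with assumed struct names, bounds, and state constant:

#include <linux/printk.h>
#include <linux/spinlock.h>

#define IRQ_STATE_DISABLE 0 /* assumed disable value */

struct irq_src;
struct irq_src_funcs {
	int (*set)(void *dev, struct irq_src *src, unsigned int type, int state);
};
struct irq_src {
	unsigned int num_types;
	const struct irq_src_funcs *funcs;
};
struct irq_client {
	struct irq_src **sources;
	unsigned int num_sources;
};
struct irq_table {
	spinlock_t lock;
	void *dev;
	struct irq_client *client;
	unsigned int num_clients;
};

static void irq_disable_all(struct irq_table *tbl)
{
	unsigned long flags;
	unsigned int i, j, k;
	int r;

	/* Hold the lock so no source is re-enabled mid-walk. */
	spin_lock_irqsave(&tbl->lock, flags);
	for (i = 0; i < tbl->num_clients; i++) {
		if (!tbl->client[i].sources)
			continue; /* client registered no sources */
		for (j = 0; j < tbl->client[i].num_sources; j++) {
			struct irq_src *src = tbl->client[i].sources[j];

			if (!src || !src->funcs || !src->funcs->set)
				continue;
			for (k = 0; k < src->num_types; k++) {
				r = src->funcs->set(tbl->dev, src, k,
						    IRQ_STATE_DISABLE);
				if (r)
					pr_err("error disabling interrupt (%d)\n", r);
			}
		}
	}
	spin_unlock_irqrestore(&tbl->lock, flags);
}
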
amdgpu_jpeg.c
36 static void amdgpu_jpeg_reg_dump_fini(struct amdgpu_device *adev);
38 int amdgpu_jpeg_sw_init(struct amdgpu_device *adev) in amdgpu_jpeg_sw_init() argument
42 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler); in amdgpu_jpeg_sw_init()
43 mutex_init(&adev->jpeg.jpeg_pg_lock); in amdgpu_jpeg_sw_init()
44 atomic_set(&adev->jpeg.total_submission_cnt, 0); in amdgpu_jpeg_sw_init()
46 if ((adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) && in amdgpu_jpeg_sw_init()
47 (adev->pg_flags & AMD_PG_SUPPORT_JPEG_DPG)) in amdgpu_jpeg_sw_init()
48 adev->jpeg.indirect_sram = true; in amdgpu_jpeg_sw_init()
50 for (i = 0; i < adev->jpeg.num_jpeg_inst; i++) { in amdgpu_jpeg_sw_init()
51 if (adev->jpeg.harvest_config & (1U << i)) in amdgpu_jpeg_sw_init()
[all …]
amdgpu_amdkfd.c
70 void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev) in amdgpu_amdkfd_device_probe() argument
72 bool vf = amdgpu_sriov_vf(adev); in amdgpu_amdkfd_device_probe()
77 adev->kfd.dev = kgd2kfd_probe(adev, vf); in amdgpu_amdkfd_device_probe()
93 static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, in amdgpu_doorbell_get_kfd_info() argument
102 if (adev->enable_mes) { in amdgpu_doorbell_get_kfd_info()
109 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
112 } else if (adev->doorbell.size > adev->doorbell.num_kernel_doorbells * in amdgpu_doorbell_get_kfd_info()
114 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
115 *aperture_size = adev->doorbell.size; in amdgpu_doorbell_get_kfd_info()
116 *start_offset = adev->doorbell.num_kernel_doorbells * sizeof(u32); in amdgpu_doorbell_get_kfd_info()
[all …]
amdgpu_gmc.c
43 bool amdgpu_gmc_is_pdb0_enabled(struct amdgpu_device *adev) in amdgpu_gmc_is_pdb0_enabled() argument
45 return adev->gmc.xgmi.connected_to_cpu || amdgpu_virt_xgmi_migrate_enabled(adev); in amdgpu_gmc_is_pdb0_enabled()
56 int amdgpu_gmc_pdb0_alloc(struct amdgpu_device *adev) in amdgpu_gmc_pdb0_alloc() argument
60 u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes; in amdgpu_gmc_pdb0_alloc()
61 uint32_t pde0_page_shift = adev->gmc.vmid0_page_table_block_size + 21; in amdgpu_gmc_pdb0_alloc()
74 r = amdgpu_bo_create(adev, &bp, &adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
78 r = amdgpu_bo_reserve(adev->gmc.pdb0_bo, false); in amdgpu_gmc_pdb0_alloc()
82 r = amdgpu_bo_pin(adev->gmc.pdb0_bo, AMDGPU_GEM_DOMAIN_VRAM); in amdgpu_gmc_pdb0_alloc()
85 r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0); in amdgpu_gmc_pdb0_alloc()
89 amdgpu_bo_unreserve(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
[all …]
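
amdgpu_gmc_pdb0_alloc() above follows the usual amdgpu buffer-object bring-up ladder visible in the hit: create, reserve, pin to VRAM, kmap, then unreserve. A sketch of that ladder with the customary unwind on failure; the pdb0-specific sizing is dropped, and the amdgpu_bo_unref() cleanup on the error path is an assumption modeled on common amdgpu usage:

/* Builds only inside the amdgpu driver tree; amdgpu.h provides the bo API. */
#include "amdgpu.h"

static int pdb0_alloc_sketch(struct amdgpu_device *adev,
			     struct amdgpu_bo_param *bp)
{
	struct amdgpu_bo *bo = NULL;
	void *cpu_ptr;
	int r;

	r = amdgpu_bo_create(adev, bp, &bo);           /* allocate the BO    */
	if (r)
		return r;
	r = amdgpu_bo_reserve(bo, false);              /* lock it for setup  */
	if (r)
		goto put_bo;
	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_VRAM); /* keep it resident   */
	if (r)
		goto unreserve;
	r = amdgpu_bo_kmap(bo, &cpu_ptr);              /* map it for the CPU */
	if (r)
		goto unpin;
	amdgpu_bo_unreserve(bo);                       /* setup done, unlock */
	return 0;

unpin:
	amdgpu_bo_unpin(bo);
unreserve:
	amdgpu_bo_unreserve(bo);
put_bo:
	amdgpu_bo_unref(&bo);
	return r;
}
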
amdgpu_ras.c
145 static int amdgpu_ras_check_bad_page(struct amdgpu_device *adev,
148 static void amdgpu_ras_critical_region_init(struct amdgpu_device *adev);
149 static void amdgpu_ras_critical_region_fini(struct amdgpu_device *adev);
152 static void amdgpu_register_bad_pages_mca_notifier(struct amdgpu_device *adev);
154 amdgpu_unregister_bad_pages_mca_notifier(struct amdgpu_device *adev);
162 void amdgpu_ras_set_error_query_ready(struct amdgpu_device *adev, bool ready) in amdgpu_ras_set_error_query_ready() argument
164 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_set_error_query_ready()
165 amdgpu_ras_get_context(adev)->error_query_ready = ready; in amdgpu_ras_set_error_query_ready()
168 static bool amdgpu_ras_get_error_query_ready(struct amdgpu_device *adev) in amdgpu_ras_get_error_query_ready() argument
170 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_get_error_query_ready()
[all …]
gfx_v12_0.c
268 static void gfx_v12_0_disable_gpa_mode(struct amdgpu_device *adev);
269 static void gfx_v12_0_set_ring_funcs(struct amdgpu_device *adev);
270 static void gfx_v12_0_set_irq_funcs(struct amdgpu_device *adev);
271 static void gfx_v12_0_set_rlc_funcs(struct amdgpu_device *adev);
272 static void gfx_v12_0_set_mqd_funcs(struct amdgpu_device *adev);
273 static void gfx_v12_0_set_imu_funcs(struct amdgpu_device *adev);
274 static int gfx_v12_0_get_cu_info(struct amdgpu_device *adev,
276 static uint64_t gfx_v12_0_get_gpu_clock_counter(struct amdgpu_device *adev);
277 static void gfx_v12_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
279 static u32 gfx_v12_0_get_wgp_active_bitmap_per_sh(struct amdgpu_device *adev);
[all …]
amdgpu_umc.c
32 static int amdgpu_umc_convert_error_address(struct amdgpu_device *adev, in amdgpu_umc_convert_error_address() argument
36 switch (amdgpu_ip_version(adev, UMC_HWIP, 0)) { in amdgpu_umc_convert_error_address()
38 umc_v6_7_convert_error_address(adev, in amdgpu_umc_convert_error_address()
42 dev_warn(adev->dev, in amdgpu_umc_convert_error_address()
50 int amdgpu_umc_page_retirement_mca(struct amdgpu_device *adev, in amdgpu_umc_page_retirement_mca() argument
62 adev->umc.max_ras_err_cnt_per_query); in amdgpu_umc_page_retirement_mca()
64 dev_warn(adev->dev, in amdgpu_umc_page_retirement_mca()
70 err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query; in amdgpu_umc_page_retirement_mca()
75 ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr, in amdgpu_umc_page_retirement_mca()
81 amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_umc_page_retirement_mca()
[all …]
amdgpu.h
123 struct amdgpu_device *adev; member
365 bool amdgpu_get_bios(struct amdgpu_device *adev);
366 bool amdgpu_read_bios(struct amdgpu_device *adev);
367 bool amdgpu_soc15_read_bios_from_rom(struct amdgpu_device *adev,
369 void amdgpu_bios_release(struct amdgpu_device *adev);
426 struct amdgpu_device *adev; member
525 int amdgpu_device_wb_get(struct amdgpu_device *adev, u32 *wb);
526 void amdgpu_device_wb_free(struct amdgpu_device *adev, u32 wb);
531 int amdgpu_benchmark(struct amdgpu_device *adev, int test_number);
603 bool (*read_disabled_bios)(struct amdgpu_device *adev);
[all …]
amdgpu_gart.c
75 static int amdgpu_gart_dummy_page_init(struct amdgpu_device *adev) in amdgpu_gart_dummy_page_init() argument
79 if (adev->dummy_page_addr) in amdgpu_gart_dummy_page_init()
81 adev->dummy_page_addr = dma_map_page_attrs(&adev->pdev->dev, dummy_page, 0, in amdgpu_gart_dummy_page_init()
84 if (dma_mapping_error(&adev->pdev->dev, adev->dummy_page_addr)) { in amdgpu_gart_dummy_page_init()
85 dev_err(&adev->pdev->dev, "Failed to DMA MAP the dummy page\n"); in amdgpu_gart_dummy_page_init()
86 adev->dummy_page_addr = 0; in amdgpu_gart_dummy_page_init()
99 void amdgpu_gart_dummy_page_fini(struct amdgpu_device *adev) in amdgpu_gart_dummy_page_fini() argument
101 if (!adev->dummy_page_addr) in amdgpu_gart_dummy_page_fini()
103 dma_unmap_page_attrs(&adev->pdev->dev, adev->dummy_page_addr, PAGE_SIZE, in amdgpu_gart_dummy_page_fini()
106 adev->dummy_page_addr = 0; in amdgpu_gart_dummy_page_fini()
[all …]
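
amdgpu_gart_dummy_page_init()/_fini() above pair dma_map_page_attrs() with dma_unmap_page_attrs() and guard against double init/fini via the saved DMA address. A self-contained sketch of that pairing; the zero attrs value and the page allocation are assumptions, since the hit elides them:

#include <linux/dma-mapping.h>
#include <linux/gfp.h>

static struct page *dummy_page;
static dma_addr_t dummy_addr;

static int dummy_page_map(struct device *dev)
{
	if (dummy_addr)
		return 0; /* already mapped; keep the call idempotent */
	dummy_page = alloc_page(GFP_KERNEL | __GFP_ZERO);
	if (!dummy_page)
		return -ENOMEM;
	/* attrs of 0 is an assumption; the hit elides the real flags */
	dummy_addr = dma_map_page_attrs(dev, dummy_page, 0, PAGE_SIZE,
					DMA_BIDIRECTIONAL, 0);
	if (dma_mapping_error(dev, dummy_addr)) {
		dev_err(dev, "Failed to DMA MAP the dummy page\n");
		__free_page(dummy_page);
		dummy_page = NULL;
		dummy_addr = 0;
		return -ENOMEM;
	}
	return 0;
}

static void dummy_page_unmap(struct device *dev)
{
	if (!dummy_addr)
		return; /* nothing mapped; mirrors the init-side guard */
	dma_unmap_page_attrs(dev, dummy_addr, PAGE_SIZE, DMA_BIDIRECTIONAL, 0);
	dummy_addr = 0;
}
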
vega20_reg_init.c
29 int vega20_reg_base_init(struct amdgpu_device *adev) in vega20_reg_base_init() argument
34 adev->reg_offset[GC_HWIP][i] = (uint32_t *)(&(GC_BASE.instance[i])); in vega20_reg_base_init()
35 adev->reg_offset[HDP_HWIP][i] = (uint32_t *)(&(HDP_BASE.instance[i])); in vega20_reg_base_init()
36 adev->reg_offset[MMHUB_HWIP][i] = (uint32_t *)(&(MMHUB_BASE.instance[i])); in vega20_reg_base_init()
37 adev->reg_offset[ATHUB_HWIP][i] = (uint32_t *)(&(ATHUB_BASE.instance[i])); in vega20_reg_base_init()
38 adev->reg_offset[NBIO_HWIP][i] = (uint32_t *)(&(NBIO_BASE.instance[i])); in vega20_reg_base_init()
39 adev->reg_offset[MP0_HWIP][i] = (uint32_t *)(&(MP0_BASE.instance[i])); in vega20_reg_base_init()
40 adev->reg_offset[MP1_HWIP][i] = (uint32_t *)(&(MP1_BASE.instance[i])); in vega20_reg_base_init()
41 adev->reg_offset[UVD_HWIP][i] = (uint32_t *)(&(UVD_BASE.instance[i])); in vega20_reg_base_init()
42 adev->reg_offset[VCE_HWIP][i] = (uint32_t *)(&(VCE_BASE.instance[i])); in vega20_reg_base_init()
[all …]
amdgpu_sdma.c
42 struct amdgpu_device *adev = ring->adev; in amdgpu_sdma_get_instance_from_ring() local
45 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
46 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_instance_from_ring()
47 ring == &adev->sdma.instance[i].page) in amdgpu_sdma_get_instance_from_ring()
48 return &adev->sdma.instance[i]; in amdgpu_sdma_get_instance_from_ring()
55 struct amdgpu_device *adev = ring->adev; in amdgpu_sdma_get_index_from_ring() local
58 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
59 if (ring == &adev->sdma.instance[i].ring || in amdgpu_sdma_get_index_from_ring()
60 ring == &adev->sdma.instance[i].page) { in amdgpu_sdma_get_index_from_ring()
72 struct amdgpu_device *adev = ring->adev; in amdgpu_sdma_get_csa_mc_addr() local
[all …]
amdgpu_kms.c
50 void amdgpu_unregister_gpu_instance(struct amdgpu_device *adev) in amdgpu_unregister_gpu_instance() argument
59 if (gpu_instance->adev == adev) { in amdgpu_unregister_gpu_instance()
63 if (adev->flags & AMD_IS_APU) in amdgpu_unregister_gpu_instance()
84 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_driver_unload_kms() local
86 if (adev == NULL) in amdgpu_driver_unload_kms()
89 amdgpu_unregister_gpu_instance(adev); in amdgpu_driver_unload_kms()
91 if (adev->rmmio == NULL) in amdgpu_driver_unload_kms()
94 if (amdgpu_acpi_smart_shift_update(adev, AMDGPU_SS_DRV_UNLOAD)) in amdgpu_driver_unload_kms()
97 amdgpu_acpi_fini(adev); in amdgpu_driver_unload_kms()
98 amdgpu_device_fini_hw(adev); in amdgpu_driver_unload_kms()
[all …]
vega10_reg_init.c
29 int vega10_reg_base_init(struct amdgpu_device *adev) in vega10_reg_base_init() argument
34 adev->reg_offset[GC_HWIP][i] = (uint32_t *)(&(GC_BASE.instance[i])); in vega10_reg_base_init()
35 adev->reg_offset[HDP_HWIP][i] = (uint32_t *)(&(HDP_BASE.instance[i])); in vega10_reg_base_init()
36 adev->reg_offset[MMHUB_HWIP][i] = (uint32_t *)(&(MMHUB_BASE.instance[i])); in vega10_reg_base_init()
37 adev->reg_offset[ATHUB_HWIP][i] = (uint32_t *)(&(ATHUB_BASE.instance[i])); in vega10_reg_base_init()
38 adev->reg_offset[NBIO_HWIP][i] = (uint32_t *)(&(NBIO_BASE.instance[i])); in vega10_reg_base_init()
39 adev->reg_offset[MP0_HWIP][i] = (uint32_t *)(&(MP0_BASE.instance[i])); in vega10_reg_base_init()
40 adev->reg_offset[MP1_HWIP][i] = (uint32_t *)(&(MP1_BASE.instance[i])); in vega10_reg_base_init()
41 adev->reg_offset[UVD_HWIP][i] = (uint32_t *)(&(UVD_BASE.instance[i])); in vega10_reg_base_init()
42 adev->reg_offset[VCE_HWIP][i] = (uint32_t *)(&(VCE_BASE.instance[i])); in vega10_reg_base_init()
[all …]
sdma_v6_0.c
121 static void sdma_v6_0_set_ring_funcs(struct amdgpu_device *adev);
122 static void sdma_v6_0_set_buffer_funcs(struct amdgpu_device *adev);
123 static void sdma_v6_0_set_vm_pte_funcs(struct amdgpu_device *adev);
124 static void sdma_v6_0_set_irq_funcs(struct amdgpu_device *adev);
125 static int sdma_v6_0_start(struct amdgpu_device *adev);
127 static u32 sdma_v6_0_get_reg_offset(struct amdgpu_device *adev, u32 instance, u32 internal_offset) in sdma_v6_0_get_reg_offset() argument
133 base = adev->reg_offset[GC_HWIP][0][1]; in sdma_v6_0_get_reg_offset()
137 base = adev->reg_offset[GC_HWIP][0][0]; in sdma_v6_0_get_reg_offset()
209 struct amdgpu_device *adev = ring->adev; in sdma_v6_0_ring_set_wptr() local
233 WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, in sdma_v6_0_ring_set_wptr()
[all …]
/linux/drivers/gpu/drm/amd/ras/ras_mgr/
amdgpu_ras_mgr.c
68 struct amdgpu_device *adev = (struct amdgpu_device *)ras_core->dev; in amdgpu_ras_mgr_init_event_mgr() local
69 struct amdgpu_ras_mgr *ras_mgr = amdgpu_ras_mgr_get_context(adev); in amdgpu_ras_mgr_init_event_mgr()
73 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_ras_mgr_init_event_mgr()
77 if (!amdgpu_reset_in_recovery(adev)) { in amdgpu_ras_mgr_init_event_mgr()
78 if (!hive || adev->gmc.xgmi.node_id == 0) in amdgpu_ras_mgr_init_event_mgr()
86 static int amdgpu_ras_mgr_init_aca_config(struct amdgpu_device *adev, in amdgpu_ras_mgr_init_aca_config() argument
98 static int amdgpu_ras_mgr_init_eeprom_config(struct amdgpu_device *adev, in amdgpu_ras_mgr_init_eeprom_config() argument
104 eeprom_cfg->eeprom_i2c_adapter = adev->pm.ras_eeprom_i2c_bus; in amdgpu_ras_mgr_init_eeprom_config()
133 div64_u64(adev->gmc.mc_vram_size, TYPICAL_ECC_BAD_PAGE_RATE); in amdgpu_ras_mgr_init_eeprom_config()
145 static int amdgpu_ras_mgr_init_mp1_config(struct amdgpu_device *adev, in amdgpu_ras_mgr_init_mp1_config() argument
[all …]
/linux/sound/soc/intel/avs/
loader.c
102 static int avs_fw_manifest_strip_verify(struct avs_dev *adev, struct firmware *fw, in avs_fw_manifest_strip_verify() argument
126 dev_warn(adev->dev, "bad FW version %d.%d.%d.%d, expected %d.%d.%d.%d or newer\n", in avs_fw_manifest_strip_verify()
138 int avs_cldma_load_basefw(struct avs_dev *adev, struct firmware *fw) in avs_cldma_load_basefw() argument
144 ret = avs_dsp_op(adev, power, AVS_MAIN_CORE_MASK, true); in avs_cldma_load_basefw()
148 ret = avs_dsp_op(adev, reset, AVS_MAIN_CORE_MASK, false); in avs_cldma_load_basefw()
154 dev_err(adev->dev, "cldma reset failed: %d\n", ret); in avs_cldma_load_basefw()
159 ret = avs_dsp_op(adev, stall, AVS_MAIN_CORE_MASK, false); in avs_cldma_load_basefw()
163 reinit_completion(&adev->fw_ready); in avs_cldma_load_basefw()
164 avs_dsp_op(adev, int_control, true); in avs_cldma_load_basefw()
167 ret = snd_hdac_adsp_readl_poll(adev, AVS_FW_REG_STATUS(adev), reg, in avs_cldma_load_basefw()
[all …]
utils.c
16 static int avs_module_entry_index(struct avs_dev *adev, const guid_t *uuid) in avs_module_entry_index() argument
20 for (i = 0; i < adev->mods_info->count; i++) { in avs_module_entry_index()
23 module = &adev->mods_info->entries[i]; in avs_module_entry_index()
32 static int avs_module_id_entry_index(struct avs_dev *adev, u32 module_id) in avs_module_id_entry_index() argument
36 for (i = 0; i < adev->mods_info->count; i++) { in avs_module_id_entry_index()
39 module = &adev->mods_info->entries[i]; in avs_module_id_entry_index()
47 int avs_get_module_entry(struct avs_dev *adev, const guid_t *uuid, struct avs_module_entry *entry) in avs_get_module_entry() argument
51 mutex_lock(&adev->modres_mutex); in avs_get_module_entry()
53 idx = avs_module_entry_index(adev, uuid); in avs_get_module_entry()
55 memcpy(entry, &adev->mods_info->entries[idx], sizeof(*entry)); in avs_get_module_entry()
[all …]
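
avs_module_entry_index() and avs_get_module_entry() above pair a linear UUID scan of the module table with a mutex-guarded copy-out of the matching entry. A trimmed sketch of that pattern; the struct layouts are stand-ins, and guid_equal() is an assumption about how the elided comparison is done:

#include <linux/errno.h>
#include <linux/mutex.h>
#include <linux/string.h>
#include <linux/uuid.h>

/* Trimmed stand-ins for the avs types; the real layouts carry more fields. */
struct mod_entry {
	guid_t uuid;
};

struct mods_info {
	u32 count;
	struct mod_entry entries[];
};

static int module_entry_index(const struct mods_info *info, const guid_t *uuid)
{
	u32 i;

	for (i = 0; i < info->count; i++)
		if (guid_equal(&info->entries[i].uuid, uuid)) /* assumed test */
			return i;
	return -ENOENT;
}

static int get_module_entry(struct mutex *modres_mutex,
			    const struct mods_info *info,
			    const guid_t *uuid, struct mod_entry *entry)
{
	int idx, ret = 0;

	/* Search and copy out under the lock so the table cannot change
	 * between finding the index and reading the entry. */
	mutex_lock(modres_mutex);
	idx = module_entry_index(info, uuid);
	if (idx < 0)
		ret = idx;
	else
		memcpy(entry, &info->entries[idx], sizeof(*entry));
	mutex_unlock(modres_mutex);
	return ret;
}
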
/linux/drivers/gpu/drm/amd/pm/legacy-dpm/
kv_dpm.c
47 static void kv_dpm_set_irq_funcs(struct amdgpu_device *adev);
48 static int kv_enable_nb_dpm(struct amdgpu_device *adev,
50 static void kv_init_graphics_levels(struct amdgpu_device *adev);
51 static int kv_calculate_ds_divider(struct amdgpu_device *adev);
52 static int kv_calculate_nbps_level_settings(struct amdgpu_device *adev);
53 static int kv_calculate_dpm_settings(struct amdgpu_device *adev);
54 static void kv_enable_new_levels(struct amdgpu_device *adev);
55 static void kv_program_nbps_index_settings(struct amdgpu_device *adev,
57 static int kv_set_enabled_level(struct amdgpu_device *adev, u32 level);
58 static int kv_set_enabled_levels(struct amdgpu_device *adev);
[all …]
