/linux/drivers/gpu/drm/amd/amdgpu/

gfxhub_v1_2.c
    45: uint32_t xcc_mask)    in gfxhub_v1_2_xcc_setup_vm_pt_regs() (argument)
    50: for_each_inst(i, xcc_mask) {    in gfxhub_v1_2_xcc_setup_vm_pt_regs()
    68: uint32_t xcc_mask;    in gfxhub_v1_2_setup_vm_pt_regs() (local)
    70: xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0);    in gfxhub_v1_2_setup_vm_pt_regs()
    71: gfxhub_v1_2_xcc_setup_vm_pt_regs(adev, vmid, page_table_base, xcc_mask);    in gfxhub_v1_2_setup_vm_pt_regs()
    75: uint32_t xcc_mask)    in gfxhub_v1_2_xcc_init_gart_aperture_regs() (argument)
    85: gfxhub_v1_2_xcc_setup_vm_pt_regs(adev, 0, pt_base, xcc_mask);    in gfxhub_v1_2_xcc_init_gart_aperture_regs()
    90: for_each_inst(i, xcc_mask) {    in gfxhub_v1_2_xcc_init_gart_aperture_regs()
    125: uint32_t xcc_mask)    in gfxhub_v1_2_xcc_init_system_aperture_regs() (argument)
    131: for_each_inst(i, xcc_mask) {    in gfxhub_v1_2_xcc_init_system_aperture_regs()
    [all …]

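The recurring pattern is visible even from these fragments: the driver derives an XCC count from the hardware mask with NUM_XCC(), rebuilds a dense per-call mask with GENMASK(), and then walks the set bits with for_each_inst(). The sketch below reproduces that idiom in plain user-space C with simplified stand-ins (MY_GENMASK, my_num_xcc, my_for_each_inst) whose semantics are inferred from the fragments, not copied from the kernel headers, and an assumed example mask.

    #include <stdint.h>
    #include <stdio.h>

    /* Contiguous mask covering bits l..h (simplified; not valid for h == 31). */
    #define MY_GENMASK(h, l)  (((1U << ((h) - (l) + 1)) - 1) << (l))

    /* Instance count = population count of the mask, like NUM_XCC(). */
    static unsigned int my_num_xcc(uint32_t mask)
    {
            return (unsigned int)__builtin_popcount(mask);
    }

    /* Visit every set bit of the mask, like for_each_inst(i, mask). */
    #define my_for_each_inst(i, mask) \
            for ((i) = 0; (i) < 32; (i)++) \
                    if ((mask) & (1U << (i)))

    int main(void)
    {
            uint32_t hw_xcc_mask = 0x0f;    /* assumed example: 4 XCCs present */
            unsigned int i;

            /* Rebuild a dense mask over all present instances, as line 70 does. */
            uint32_t xcc_mask = MY_GENMASK(my_num_xcc(hw_xcc_mask) - 1, 0);

            my_for_each_inst(i, xcc_mask)
                    printf("program VM page-table registers on XCC %u\n", i);

            return 0;
    }
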
gfx_v9_4_3.c
    337: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_set_kiq_pm4_funcs()
    346: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_init_golden_registers()
    630: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_mec_init()
    808: NUM_XCC(adev->gfx.xcc_mask) /    in gfx_v9_4_3_switch_compute_partition()
    813: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_switch_compute_partition()
    835: xcc = hweight8(adev->gfx.xcc_mask & GENMASK(ih_node / 2, 0));    in gfx_v9_4_3_ih_to_xcc_inst()
    1028: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_alloc_ip_dump()
    1080: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_sw_init()
    1189: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_sw_fini()
    1328: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gfx_v9_4_3_constants_init()
    [all …]

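Line 835 is the one non-obvious bit trick in this file: gfx_v9_4_3_ih_to_xcc_inst() appears to map an interrupt-handler (IH) node to an XCC by masking off everything above bit ih_node/2 and counting what is left with hweight8(), i.e. counting how many present XCCs occupy slots up to that position. The stand-alone illustration below shows only that counting step; the ih_node/2 slot mapping and the example mask are taken on faith from the fragment, and the real function presumably derives its final index from this count.

    #include <stdint.h>
    #include <stdio.h>

    /* Count the present XCCs at bit positions 0..slot, mirroring
     * hweight8(xcc_mask & GENMASK(ih_node / 2, 0)) from line 835. */
    static unsigned int present_up_to_slot(uint8_t xcc_mask, unsigned int ih_node)
    {
            unsigned int slot = ih_node / 2;
            uint8_t covered = (uint8_t)((1U << (slot + 1)) - 1);    /* bits 0..slot */

            return (unsigned int)__builtin_popcount(xcc_mask & covered);
    }

    int main(void)
    {
            uint8_t xcc_mask = 0x0d;        /* assumed: XCCs 0, 2 and 3 present */

            for (unsigned int ih_node = 0; ih_node < 8; ih_node += 2)
                    printf("ih_node %u -> %u present XCC(s) at or below slot %u\n",
                           ih_node, present_up_to_slot(xcc_mask, ih_node), ih_node / 2);
            return 0;
    }
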
amdgpu_gfx.c
    218: int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1;    in amdgpu_gfx_compute_queue_acquire()
    1002: int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1;    in amdgpu_gfx_ras_error_func()
    1003: uint32_t xcc_mask = GENMASK(num_xcc - 1, 0);    in amdgpu_gfx_ras_error_func() (local)
    1011: for_each_inst(i, xcc_mask)    in amdgpu_gfx_ras_error_func()
    1330: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in amdgpu_gfx_set_compute_partition()
    1375: switch (NUM_XCC(adev->gfx.xcc_mask)) {    in amdgpu_gfx_get_available_compute_partition()
    1446: int num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in amdgpu_gfx_run_cleaner_shader()

ta_ras_if.h
    140: uint16_t xcc_mask;    (member)

gmc_v9_0.c
    1861: uint32_t xcc_mask;    in gmc_v9_0_init_acpi_mem_ranges() (local)
    1863: num_xcc = NUM_XCC(adev->gfx.xcc_mask);    in gmc_v9_0_init_acpi_mem_ranges()
    1864: xcc_mask = (1U << num_xcc) - 1;    in gmc_v9_0_init_acpi_mem_ranges()
    1866: for_each_inst(xcc_id, xcc_mask) {    in gmc_v9_0_init_acpi_mem_ranges()
    2088: NUM_XCC(adev->gfx.xcc_mask));    in gmc_v9_0_sw_init()

amdgpu_virt.c
    1020: if (adev->gfx.xcc_mask && (((1 << xcc_id) & adev->gfx.xcc_mask) == 0)) {    in amdgpu_virt_rlcg_reg_rw()

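The single hit in amdgpu_virt.c shows the defensive form of the mask: before the RLC register path is used on behalf of a given XCC, the driver checks both that a mask was populated at all and that the requested instance's bit is actually set. A sketch of that validity check follows; the helper name and the treatment of a zero mask as "nothing to validate" are assumptions read off the fragment.

    #include <stdint.h>
    #include <stdio.h>
    #include <errno.h>

    /* Hypothetical helper mirroring the guard at amdgpu_virt.c:1020. */
    static int check_xcc_id(uint32_t xcc_mask, unsigned int xcc_id)
    {
            /* A zero mask means no per-XCC bookkeeping; skip the check. */
            if (xcc_mask && (((1U << xcc_id) & xcc_mask) == 0))
                    return -EINVAL;         /* requested instance not present */
            return 0;
    }

    int main(void)
    {
            uint32_t xcc_mask = 0x0b;       /* assumed: XCC 2 harvested */

            for (unsigned int id = 0; id < 4; id++)
                    printf("xcc_id %u: %s\n", id,
                           check_xcc_id(xcc_mask, id) ? "rejected" : "ok");
            return 0;
    }
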
amdgpu_vm.h
    504: uint32_t xcc_mask);

nbio_v7_9.c
    426: 0xff & ~(adev->gfx.xcc_mask));    in nbio_v7_9_init_registers()

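nbio_v7_9_init_registers() appears to program a register with the complement of the present mask within the eight possible XCC slots, i.e. a bitmap of slots with no XCC behind them. The complement itself is a one-liner; a sketch with an assumed example mask:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t xcc_mask = 0x0f;               /* assumed: 4 of 8 XCCs present */
            uint8_t absent = 0xff & ~xcc_mask;      /* slots with no XCC -> 0xf0 */

            printf("absent-XCC bitmap: 0x%02x\n", absent);
            return 0;
    }
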
amdgpu_gfx.h
    451: uint16_t xcc_mask;    (member)

amdgpu_discovery.c
    712: adev->gfx.xcc_mask &=    in amdgpu_discovery_read_from_harvest_table()
    1002: harvest = ((1 << inst) & adev->gfx.xcc_mask) == 0;    in amdgpu_discovery_get_harvest_info()
    1293: adev->gfx.xcc_mask = 0;    in amdgpu_discovery_reg_base_init()
    1388: adev->gfx.xcc_mask |=    in amdgpu_discovery_reg_base_init()

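Taken together, the amdgpu_discovery.c hits outline the lifecycle of gfx.xcc_mask: it starts at zero, gets a bit OR'ed in for each GC instance found during IP discovery, has harvested instances masked back out from the harvest table, and is later consulted with a simple bit test. A compressed illustration of that flow; the instance numbers and the harvested bit are invented for the example.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    int main(void)
    {
            uint32_t xcc_mask = 0;

            /* IP discovery: start empty, set one bit per GC instance found
             * (compare lines 1293 and 1388 above). */
            for (unsigned int inst = 0; inst < 4; inst++)
                    xcc_mask |= 1U << inst;

            /* Harvest table: clear bits for fused-off instances (line 712). */
            uint32_t harvested = 1U << 1;
            xcc_mask &= ~harvested;

            /* Later queries: an instance is harvested iff its bit is clear
             * (line 1002). */
            for (unsigned int inst = 0; inst < 4; inst++) {
                    bool harvest = ((1U << inst) & xcc_mask) == 0;
                    printf("XCC %u: %s\n", inst, harvest ? "harvested" : "present");
            }
            return 0;
    }
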
sdma_v4_4_2.c
    141: if (amdgpu_sriov_vf(adev) && (adev->gfx.xcc_mask == 0x1))    in sdma_v4_4_2_irq_id_to_seq()
    146: if (amdgpu_sriov_vf(adev) && (adev->gfx.xcc_mask == 0x1))    in sdma_v4_4_2_irq_id_to_seq()

amdgpu_vm.c
    1570: uint32_t xcc_mask)    in amdgpu_vm_flush_compute_tlb() (argument)
    1590: for_each_inst(xcc, xcc_mask) {    in amdgpu_vm_flush_compute_tlb()

amdgpu_psp.c
    1807: ras_cmd->ras_in_message.init_flags.xcc_mask =    in psp_ras_initialize()
    1808: adev->gfx.xcc_mask;    in psp_ras_initialize()

amdgpu_ras.c
    359: int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1;    in amdgpu_ras_instance_mask_check()

gfx_v6_0.c
    3030: adev->gfx.xcc_mask = 1;    in gfx_v6_0_early_init()

/linux/drivers/gpu/drm/amd/amdkfd/

kfd_mqd_manager_v9.c
    140: NUM_XCC(node->xcc_mask),    in allocate_mqd()
    544: for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) {    in init_mqd_hiq_v9_4_3()
    571: uint32_t xcc_mask = mm->dev->xcc_mask;    in hiq_load_mqd_kiq_v9_4_3() (local)
    576: for_each_inst(xcc_id, xcc_mask) {    in hiq_load_mqd_kiq_v9_4_3()
    595: uint32_t xcc_mask = mm->dev->xcc_mask;    in destroy_hiq_mqd_v9_4_3() (local)
    601: for_each_inst(xcc_id, xcc_mask) {    in destroy_hiq_mqd_v9_4_3()
    621: uint32_t xcc_mask = mm->dev->xcc_mask;    in check_preemption_failed_v9_4_3() (local)
    626: for_each_inst(xcc_id, xcc_mask) {    in check_preemption_failed_v9_4_3()
    661: for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) {    in init_mqd_v9_4_3()
    688: NUM_XCC(mm->dev->xcc_mask);    in init_mqd_v9_4_3()
    [all …]

kfd_mqd_manager.c
    80: NUM_XCC(dev->xcc_mask);    in allocate_sdma_mqd()
    109: int inc = cu_inc * NUM_XCC(mm->dev->xcc_mask);    in mqd_symmetrically_map_cu_mask()
    110: int xcc_inst = inst + ffs(mm->dev->xcc_mask) - 1;    in mqd_symmetrically_map_cu_mask()

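Line 110 is the KFD side of partitioning: a node-local instance index is translated to a device-wide one by adding the position of the node's first XCC, found with ffs(). The sketch below assumes a contiguous per-node mask (which the ffs()-based arithmetic implies) and an example node owning XCCs 4..7; both are assumptions, not taken from the driver.

    #include <stdint.h>
    #include <stdio.h>
    #include <strings.h>            /* ffs() */

    int main(void)
    {
            uint32_t node_xcc_mask = 0xf0;  /* assumed: node owns XCCs 4..7 */
            unsigned int num_xcc = (unsigned int)__builtin_popcount(node_xcc_mask);

            /* Node-local index -> device-wide instance, as in line 110. */
            for (unsigned int inst = 0; inst < num_xcc; inst++) {
                    int xcc_inst = (int)inst + ffs((int)node_xcc_mask) - 1;
                    printf("local XCC %u -> device XCC %d\n", inst, xcc_inst);
            }
            return 0;
    }
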
kfd_device_queue_manager.c
    144: uint32_t xcc_mask = dqm->dev->xcc_mask;    in program_sh_mem_settings() (local)
    147: for_each_inst(xcc_id, xcc_mask)    in program_sh_mem_settings()
    485: uint32_t xcc_mask = dqm->dev->xcc_mask;    in program_trap_handler_settings() (local)
    489: for_each_inst(xcc_id, xcc_mask)    in program_trap_handler_settings()
    758: uint32_t xcc_mask = dev->xcc_mask;    in dbgdev_wave_reset_wavefronts() (local)
    804: for_each_inst(xcc_id, xcc_mask)    in dbgdev_wave_reset_wavefronts()
    1429: uint32_t xcc_mask = dqm->dev->xcc_mask;    in set_pasid_vmid_mapping() (local)
    1432: for_each_inst(xcc_id, xcc_mask) {    in set_pasid_vmid_mapping()
    1444: uint32_t xcc_mask = dqm->dev->xcc_mask;    in init_interrupts() (local)
    1447: for_each_inst(xcc_id, xcc_mask) {    in init_interrupts()
    [all …]

kfd_queue.c
    299: * NUM_XCC(pdd->dev->xcc_mask);    in kfd_queue_acquire_buffers()
    346: * NUM_XCC(pdd->dev->xcc_mask);    in kfd_queue_release_buffers()
    429: cu_num = props->simd_count / props->simd_per_cu / NUM_XCC(dev->gpu->xcc_mask);    in kfd_queue_ctx_save_restore_size()

kfd_device.c
    653: uint32_t xcc_mask = node->xcc_mask;    in kfd_setup_interrupt_bitmap() (local)
    676: for_each_inst(xcc, xcc_mask) {    in kfd_setup_interrupt_bitmap()
    836: &node->xcc_mask);    in kgd2kfd_device_init()
    839: node->xcc_mask =    in kgd2kfd_device_init()
    840: (1U << NUM_XCC(kfd->adev->gfx.xcc_mask)) - 1;    in kgd2kfd_device_init()

kfd_topology.c
    481: NUM_XCC(dev->gpu->xcc_mask)) : 0);    in node_show()
    547: NUM_XCC(dev->gpu->xcc_mask));    in node_show()
    1118: buf[7] = (ffs(gpu->xcc_mask) - 1) | (NUM_XCC(gpu->xcc_mask) << 16);    in kfd_generate_gpu_id()
    1689: int num_xcc = NUM_XCC(knode->xcc_mask);    in fill_in_l2_l3_pcache()
    1694: start = ffs(knode->xcc_mask) - 1;    in fill_in_l2_l3_pcache()
    1804: start = ffs(kdev->xcc_mask) - 1;    in kfd_fill_cache_non_crat_info()
    1805: end = start + NUM_XCC(kdev->xcc_mask);    in kfd_fill_cache_non_crat_info()

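kfd_generate_gpu_id() (line 1118) folds the XCC layout into one word of its hash input: the index of the first present XCC in the low bits and the XCC count shifted into the upper half-word. A small sketch of that packing with an assumed mask:

    #include <stdint.h>
    #include <stdio.h>
    #include <strings.h>            /* ffs() */

    int main(void)
    {
            uint32_t xcc_mask = 0x30;       /* assumed: XCCs 4 and 5 present */

            unsigned int first = (unsigned int)(ffs((int)xcc_mask) - 1);        /* 4 */
            unsigned int count = (unsigned int)__builtin_popcount(xcc_mask);    /* 2 */

            /* Pack as in buf[7] = (ffs(mask) - 1) | (NUM_XCC(mask) << 16). */
            uint32_t packed = first | (count << 16);

            printf("packed = 0x%08x (first = %u, count = %u)\n", packed, first, count);
            return 0;
    }
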
kfd_debug.c
    450: uint32_t xcc_mask = pdd->dev->xcc_mask;    in kfd_dbg_trap_set_dev_address_watch() (local)
    464: for_each_inst(xcc_id, xcc_mask)    in kfd_dbg_trap_set_dev_address_watch()
    1081: device_info.num_xcc = NUM_XCC(pdd->dev->xcc_mask);    in kfd_dbg_trap_device_snapshot()

kfd_priv.h
    271: uint32_t xcc_mask; /* Instance mask of XCCs present */    (member)
    1504: amdgpu_vm_flush_compute_tlb(adev, vm, type, pdd->dev->xcc_mask);    in kfd_flush_tlb()

kfd_process.c
    304: &max_waves_per_cu, ffs(dev->xcc_mask) - 1);    in kfd_get_cu_occupancy()
    314: wave_cnt += (NUM_XCC(dev->xcc_mask) *    in kfd_get_cu_occupancy()

kfd_process_queue_manager.c
    1105: num_xccs = NUM_XCC(q->device->xcc_mask);    in pqm_debugfs_mqds()