
Searched refs:num_instances (Results 1 – 25 of 32) sorted by relevance


/linux/drivers/accel/habanalabs/common/
security.c
308 u32 dcore_offset, u32 num_instances, u32 instance_offset, in hl_init_pb_with_mask() argument
327 for (j = 0 ; j < num_instances ; j++) { in hl_init_pb_with_mask()
328 int seq = i * num_instances + j; in hl_init_pb_with_mask()
360 u32 num_instances, u32 instance_offset, in hl_init_pb() argument
365 num_instances, instance_offset, pb_blocks, in hl_init_pb()
388 u32 dcore_offset, u32 num_instances, u32 instance_offset, in hl_init_pb_ranges_with_mask() argument
411 for (j = 0 ; j < num_instances ; j++) { in hl_init_pb_ranges_with_mask()
412 int seq = i * num_instances + j; in hl_init_pb_ranges_with_mask()
446 u32 dcore_offset, u32 num_instances, u32 instance_offset, in hl_init_pb_ranges() argument
452 num_instances, instance_offset, pb_blocks, in hl_init_pb_ranges()
[all …]
habanalabs.h
4203 u32 dcore_offset, u32 num_instances, u32 instance_offset,
4207 u32 num_instances, u32 instance_offset,
4211 u32 dcore_offset, u32 num_instances, u32 instance_offset,
4216 u32 dcore_offset, u32 num_instances, u32 instance_offset,
4221 u32 num_instances, u32 instance_offset,
4225 u32 num_instances, u32 instance_offset,
4230 u32 num_instances, u32 instance_offset,
4233 u32 dcore_offset, u32 num_instances, u32 instance_offset,
4236 u32 num_instances, u32 instance_offset,
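Note: the habanalabs matches above all revolve around one idiom: walk every block instance of every dcore and derive a flat sequence number from the per-dcore instance count, skipping instances that are masked off. A minimal standalone sketch of that indexing, with hypothetical names (init_blocks, instance_mask) rather than the kernel's hl_init_pb helpers:

#include <stdio.h>
#include <stdint.h>

/* Sketch only: flatten (dcore i, instance j) into one sequence number
 * and skip instances whose bit is cleared in a mask, as the
 * hl_init_pb*_with_mask() matches above suggest. */
static void init_blocks(uint32_t num_dcores, uint32_t num_instances,
			uint64_t instance_mask)
{
	for (uint32_t i = 0; i < num_dcores; i++) {
		for (uint32_t j = 0; j < num_instances; j++) {
			int seq = i * num_instances + j;   /* flat instance index */

			if (!(instance_mask & (1ULL << seq)))
				continue;                  /* instance masked off */

			printf("init dcore %u instance %u (seq %d)\n", i, j, seq);
		}
	}
}

int main(void)
{
	init_blocks(2, 4, 0xABULL);	/* example: 2 dcores x 4 instances */
	return 0;
}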
/linux/drivers/iommu/arm/arm-smmu/
arm-smmu-nvidia.c
37 unsigned int num_instances; member
69 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_write_reg()
90 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_write_reg64()
112 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_tlb_sync()
137 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_reset()
182 for (inst = 0; inst < nvidia->num_instances; inst++) { in nvidia_smmu_global_fault()
230 for (inst = 0; inst < nvidia->num_instances; inst++) { in nvidia_smmu_context_fault()
323 nvidia_smmu->num_instances++; in nvidia_smmu_impl_init()
334 nvidia_smmu->num_instances++; in nvidia_smmu_impl_init()
337 if (nvidia_smmu->num_instances == 1) in nvidia_smmu_impl_init()
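Note: in the arm-smmu-nvidia.c matches, num_instances counts mirrored SMMU instances and every register write or TLB sync loops over all of them so the instances stay in lockstep. A hypothetical standalone sketch of that broadcast-write pattern (struct and array names invented here, not the driver's):

#include <stdint.h>
#include <stdio.h>

#define MAX_INSTANCES 2

/* Sketch only: one register base per instance, and each write is
 * replayed on every instance, as nvidia_smmu_write_reg() does. */
struct nvidia_smmu_sketch {
	unsigned int num_instances;
	uint32_t regs[MAX_INSTANCES][16];	/* stand-in for ioremapped bases */
};

static void smmu_write_reg(struct nvidia_smmu_sketch *s,
			   unsigned int offset, uint32_t val)
{
	for (unsigned int i = 0; i < s->num_instances; i++)
		s->regs[i][offset] = val;	/* writel() on real hardware */
}

int main(void)
{
	struct nvidia_smmu_sketch s = { .num_instances = 2 };

	smmu_write_reg(&s, 3, 0xdeadbeef);
	printf("inst0=%#x inst1=%#x\n", s.regs[0][3], s.regs[1][3]);
	return 0;
}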
/linux/drivers/gpu/drm/amd/amdgpu/
sdma_v4_0.c
602 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
627 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
927 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_enable()
961 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
1010 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
1056 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_enable()
1349 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_load_microcode()
1405 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_start()
1435 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_start()
1804 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_sw_init()
[all …]
sdma_v4_4_2.c
161 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_inst_init_golden_registers()
190 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_init_microcode()
1389 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_sw_init()
1443 ptr = kcalloc(adev->sdma.num_instances * reg_count, sizeof(uint32_t), GFP_KERNEL); in sdma_v4_4_2_sw_init()
1461 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_sw_fini()
1485 inst_mask = GENMASK(adev->sdma.num_instances - 1, 0); in sdma_v4_4_2_hw_init()
1503 inst_mask = GENMASK(adev->sdma.num_instances - 1, 0); in sdma_v4_4_2_hw_fini()
1505 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_hw_fini()
1540 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_2_is_idle()
1557 for (j = 0; j < adev->sdma.num_instances; j++) { in sdma_v4_4_2_wait_for_idle()
[all …]
sdma_v3_0.c
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
305 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
332 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_init_microcode()
517 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
576 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
618 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
645 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
743 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
1090 adev->sdma.num_instances = 1; in sdma_v3_0_early_init()
1093 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in sdma_v3_0_early_init()
[all …]
sdma_v6_0.c
398 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_stop()
434 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_ctxempty_int_enable()
464 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_enable()
566 adev->doorbell_index.sdma_doorbell_range * adev->sdma.num_instances); in sdma_v6_0_gfx_resume_instance()
634 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_gfx_resume()
716 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_load_microcode()
766 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_soft_reset()
801 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_check_soft_reset()
1331 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v6_0_sw_init()
1372 ptr = kcalloc(adev->sdma.num_instances * reg_count, sizeof(uint32_t), GFP_KERNEL); in sdma_v6_0_sw_init()
[all …]
sdma_v7_0.c
430 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_gfx_stop()
485 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_enable()
511 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_gfx_resume()
584 adev->doorbell_index.sdma_doorbell_range * adev->sdma.num_instances); in sdma_v7_0_gfx_resume()
668 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v12_0_free_ucode_buffer()
705 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_load_microcode()
764 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_soft_reset()
799 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_check_soft_reset()
1297 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v7_0_sw_init()
1320 ptr = kcalloc(adev->sdma.num_instances * reg_count, sizeof(uint32_t), GFP_KERNEL); in sdma_v7_0_sw_init()
[all …]
sdma_v5_2.c
418 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
477 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
516 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
698 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
738 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
771 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_soft_reset()
1331 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1339 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1379 ptr = kcalloc(adev->sdma.num_instances * reg_count, sizeof(uint32_t), GFP_KERNEL); in sdma_v5_2_sw_init()
1397 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v5_2_sw_fini()
[all …]
vpe_v6_1.c
78 for (i = 0; i < vpe->num_instances; i++) { in vpe_v6_1_halt()
108 for (i = 0; i < vpe->num_instances; i++) { in vpe_v6_1_set_collaborate_mode()
133 for (j = 0; j < vpe->num_instances; j++) { in vpe_v6_1_load_microcode()
183 for (j = 0; j < vpe->num_instances; j++) { in vpe_v6_1_load_microcode()
215 for (i = 0; i < vpe->num_instances; i++) { in vpe_v6_1_ring_start()
282 for (i = 0; i < vpe->num_instances; i++) { in vpe_v_6_1_ring_stop()
sdma_v5_0.c
292 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
599 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_stop()
658 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_ctx_switch_enable()
700 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_enable()
882 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_resume()
922 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_load_microcode()
1433 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_sw_init()
1469 ptr = kcalloc(adev->sdma.num_instances * reg_count, sizeof(uint32_t), GFP_KERNEL); in sdma_v5_0_sw_init()
1487 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v5_0_sw_fini()
1538 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_is_idle()
[all …]
sdma_v4_4.c
243 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_reset_ras_error_count()
256 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_query_ras_error_count()
aqua_vanjaram.c
51 for (i = 0; i < adev->sdma.num_instances; i++) in aqua_vanjaram_doorbell_index_init()
400 num_sdma = adev->sdma.num_instances; in __aqua_vanjaram_get_xcp_ip_info()
464 max_res[AMDGPU_XCP_RES_DMA] = adev->sdma.num_instances; in aqua_vanjaram_get_xcp_res_info()
810 adev->sdma.num_instances = NUM_SDMA(adev->sdma.sdma_mask); in aqua_vanjaram_init_soc_config()
943 pcie_reg_state->common_header.num_instances = 1; in aqua_vanjaram_read_pcie_state()
1027 xgmi_reg_state->common_header.num_instances = max_xgmi_instances; in aqua_vanjaram_read_xgmi_state()
1100 wafl_reg_state->common_header.num_instances = max_wafl_instances; in aqua_vanjaram_read_wafl_state()
1219 usr_reg_state->common_header.num_instances = max_usr_instances; in aqua_vanjaram_read_usr_state()
amdgpu_discovery.c
1361 if (adev->sdma.num_instances < in amdgpu_discovery_reg_base_init()
1363 adev->sdma.num_instances++; in amdgpu_discovery_reg_base_init()
1368 adev->sdma.num_instances + 1, in amdgpu_discovery_reg_base_init()
1374 if (adev->vpe.num_instances < AMDGPU_MAX_VPE_INSTANCES) in amdgpu_discovery_reg_base_init()
1375 adev->vpe.num_instances++; in amdgpu_discovery_reg_base_init()
1378 adev->vpe.num_instances + 1, in amdgpu_discovery_reg_base_init()
2472 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2494 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
2516 adev->sdma.num_instances = 1; in amdgpu_discovery_set_ip_blocks()
2557 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
[all …]
amdgpu_sdma.h
111 int num_instances; member
amdgpu_kms.c
320 if (query_fw->index >= adev->sdma.num_instances) in amdgpu_firmware_info()
405 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_hw_ip_info()
662 count = adev->sdma.num_instances; in amdgpu_info_ioctl()
1762 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_debugfs_firmware_info_show()
amdgpu_dev_coredump.c
159 for (int i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_devcoredump_fw_info()
gfx_v9_4_2.c
1600 uint32_t num_instances; in gfx_v9_4_2_query_utc_edc_count() local
1610 num_instances = in gfx_v9_4_2_query_utc_edc_count()
1612 for (j = 0; j < num_instances; j++) { in gfx_v9_4_2_query_utc_edc_count()
amdgpu_ras.h
945 void amdgpu_ras_query_boot_status(struct amdgpu_device *adev, u32 num_instances);
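Note: across the amdgpu SDMA files above, adev->sdma.num_instances drives both the per-engine loops and the size of the register-dump buffer allocated in sw_init (kcalloc(num_instances * reg_count, ...)). A userspace sketch of that pattern, with hypothetical names (sdma_sketch, ip_dump, REG_COUNT) standing in for the driver's fields:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define REG_COUNT 8

/* Sketch only: allocate one reg_count-sized slot per instance, then
 * index the flat buffer by instance when dumping, as the sw_init
 * matches above suggest. */
struct sdma_sketch {
	int num_instances;
	uint32_t *ip_dump;	/* REG_COUNT registers per instance */
};

static int sdma_sw_init(struct sdma_sketch *sdma, int num_instances)
{
	sdma->num_instances = num_instances;
	sdma->ip_dump = calloc((size_t)num_instances * REG_COUNT,
			       sizeof(uint32_t));
	return sdma->ip_dump ? 0 : -1;
}

static void sdma_dump_regs(struct sdma_sketch *sdma)
{
	for (int i = 0; i < sdma->num_instances; i++)
		for (int r = 0; r < REG_COUNT; r++)
			sdma->ip_dump[i * REG_COUNT + r] = 0;	/* RREG32() in the driver */
}

int main(void)
{
	struct sdma_sketch sdma;

	if (sdma_sw_init(&sdma, 2))
		return 1;
	sdma_dump_regs(&sdma);
	printf("dumped %d instances\n", sdma.num_instances);
	free(sdma.ip_dump);
	return 0;
}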
/linux/drivers/gpu/drm/amd/include/
amdgpu_reg_state.h
51 uint8_t num_instances; member
/linux/drivers/media/platform/samsung/exynos4-is/
fimc-lite.h
69 unsigned short num_instances; member
/linux/include/sound/
timer.h
79 int num_instances; /* current number of timer instances */ member
/linux/drivers/hwmon/
ibmaem.c
191 u8 num_instances; member
203 u8 num_instances; member
519 return ff_resp.num_instances; in aem_find_aem1_count()
657 fi_resp->num_instances <= instance_num) in aem_find_aem2()
/linux/sound/core/
timer.c
235 if (master->timer->num_instances >= master->timer->max_instances) in check_matching_master_slave()
238 master->timer->num_instances++; in check_matching_master_slave()
353 if (timer->num_instances >= timer->max_instances) { in snd_timer_open()
380 timer->num_instances++; in snd_timer_open()
406 timer->num_instances--; in remove_slave_links()
438 timer->num_instances--; in snd_timer_close_locked()
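Note: in sound/core/timer.c, num_instances is a running count of open timer instances, checked against max_instances before an open succeeds, incremented on open and decremented on close. A hypothetical standalone sketch of that accounting (struct and function names invented here):

#include <errno.h>
#include <stdio.h>

/* Sketch only: bound the number of concurrently open instances,
 * mirroring the open/close matches above. */
struct timer_sketch {
	int num_instances;	/* current number of timer instances */
	int max_instances;	/* upper bound enforced at open time */
};

static int timer_open(struct timer_sketch *t)
{
	if (t->num_instances >= t->max_instances)
		return -EBUSY;		/* too many concurrent instances */
	t->num_instances++;
	return 0;
}

static void timer_close(struct timer_sketch *t)
{
	t->num_instances--;
}

int main(void)
{
	struct timer_sketch t = { .num_instances = 0, .max_instances = 1 };

	printf("first open: %d\n", timer_open(&t));	/* 0 */
	printf("second open: %d\n", timer_open(&t));	/* -EBUSY */
	timer_close(&t);
	return 0;
}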
/linux/drivers/gpu/drm/i915/gt/
intel_engine_cs.c
906 u8 class, const u8 *map, u8 num_instances) in populate_logical_ids() argument
911 for (j = 0; j < num_instances; ++j) { in populate_logical_ids()
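Note: the intel_engine_cs.c match walks num_instances entries of a per-class instance map when assigning logical engine ids. A hypothetical standalone sketch of that idea, with invented names (assign_logical_ids, present_mask) rather than the driver's structures:

#include <stdio.h>
#include <stdint.h>

#define NUM_ENGINES 4

/* Sketch only: give consecutive logical ids to the physical instances
 * in the map that are actually present, skipping absent ones. */
static int assign_logical_ids(const uint8_t *map, uint8_t num_instances,
			      int present_mask, int *logical_ids)
{
	int count = 0;

	for (uint8_t j = 0; j < num_instances; ++j) {
		uint8_t inst = map[j];

		if (!(present_mask & (1 << inst)))
			continue;		/* fused-off / absent instance */
		logical_ids[inst] = count++;	/* next free logical id */
	}
	return count;
}

int main(void)
{
	const uint8_t map[] = { 0, 1, 2, 3 };
	int logical_ids[NUM_ENGINES] = { -1, -1, -1, -1 };
	int n = assign_logical_ids(map, 4, 0xB, logical_ids);

	printf("%d logical engines\n", n);
	return 0;
}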
