/linux/arch/x86/kernel/apic/
  ipi.c
    204  for_each_cpu(cpu, mask) {  in default_send_IPI_mask_sequence_phys()
    218  for_each_cpu(cpu, mask) {  in default_send_IPI_mask_allbutself_phys()
    257  for_each_cpu(cpu, mask)  in default_send_IPI_mask_sequence_logical()
    269  for_each_cpu(cpu, mask) {  in default_send_IPI_mask_allbutself_logical()
  apic_numachip.c
    97   for_each_cpu(cpu, mask)  in numachip_send_IPI_mask()
    107  for_each_cpu(cpu, mask) {  in numachip_send_IPI_mask_allbutself()
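All of the x86 senders listed above share one fan-out shape: walk the destination cpumask with for_each_cpu() and raise one IPI per set bit, with the allbutself variants additionally skipping the sending CPU. A minimal sketch of that shape, assuming a hypothetical __send_ipi_one() helper in place of the real APIC/NumaChip register write:

	#include <linux/cpumask.h>
	#include <linux/smp.h>
	#include <linux/printk.h>

	/* Hypothetical per-CPU send helper; the real drivers program the
	 * interrupt controller (APIC ICR, NumaChip CSR) here. */
	static void __send_ipi_one(unsigned int cpu, int vector)
	{
		pr_debug("IPI vector %d -> CPU %u\n", vector, cpu);
	}

	static void send_ipi_mask_sequence(const struct cpumask *mask, int vector)
	{
		unsigned int cpu;

		/* Deliver the IPI to every CPU set in @mask, one at a time. */
		for_each_cpu(cpu, mask)
			__send_ipi_one(cpu, vector);
	}

	static void send_ipi_mask_allbutself(const struct cpumask *mask, int vector)
	{
		unsigned int this_cpu = smp_processor_id();
		unsigned int cpu;

		/* Same walk, but never signal the CPU doing the sending. */
		for_each_cpu(cpu, mask) {
			if (cpu == this_cpu)
				continue;
			__send_ipi_one(cpu, vector);
		}
	}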
/linux/drivers/cpuidle/
  driver.c
    53   for_each_cpu(cpu, drv->cpumask) {  in __cpuidle_unset_driver()
    73   for_each_cpu(cpu, drv->cpumask) {  in __cpuidle_set_driver()
    81   for_each_cpu(cpu, drv->cpumask)  in __cpuidle_set_driver()
    376  for_each_cpu(cpu, drv->cpumask) {  in cpuidle_driver_state_disabled()
  coupled.c
    306  for_each_cpu(i, &coupled->coupled_cpus)  in cpuidle_coupled_get_state()
    352  for_each_cpu(cpu, &coupled->coupled_cpus)  in cpuidle_coupled_poke_others()
    645  for_each_cpu(cpu, &dev->coupled_cpus) {  in cpuidle_coupled_register_device()
  cpuidle-tegra.c
    55   for_each_cpu(lcpu, cpu_possible_mask) {  in tegra_cpuidle_report_cpus_state()
    101  for_each_cpu(lcpu, cpu_online_mask) {  in tegra_cpuidle_unpark_secondary_cpus()
/linux/drivers/powercap/
  idle_inject.c
    279  for_each_cpu(cpu, to_cpumask(ii_dev->cpumask)) {  in idle_inject_stop()
    347  for_each_cpu(cpu, to_cpumask(ii_dev->cpumask)) {  in idle_inject_register_full()
    360  for_each_cpu(cpu_rb, to_cpumask(ii_dev->cpumask)) {  in idle_inject_register_full()
    403  for_each_cpu(cpu, to_cpumask(ii_dev->cpumask))  in idle_inject_unregister()
  dtpm_cpu.c
    152  for_each_cpu(dtpm_cpu->cpu, policy->related_cpus)  in pd_release()
    222  for_each_cpu(cpu, policy->related_cpus)  in __dtpm_cpu_setup()
    248  for_each_cpu(cpu, policy->related_cpus)  in __dtpm_cpu_setup()
/linux/drivers/cpufreq/
  cpufreq_governor.c
    102  for_each_cpu(j, policy_dbs->policy->cpus) {  in gov_update_cpu_data()
    137  for_each_cpu(j, policy->cpus) {  in dbs_update()
    331  for_each_cpu(cpu, policy->cpus) {  in gov_set_update_util()
    343  for_each_cpu(i, policy->cpus)  in gov_clear_update_util()
    367  for_each_cpu(j, policy->related_cpus) {  in alloc_policy_dbs_info()
    382  for_each_cpu(j, policy_dbs->policy->related_cpus) {  in free_policy_dbs_info()
    531  for_each_cpu(j, policy->cpus) {  in cpufreq_dbs_governor_start()
/linux/arch/mips/kernel/
  smp.c
    107  for_each_cpu(i, &cpu_sibling_setup_map) {  in set_cpu_sibling_map()
    123  for_each_cpu(i, &cpu_core_setup_map) {  in set_cpu_core_map()
    144  for_each_cpu(k, &temp_foreign_map)  in calculate_cpu_foreign_map()
    195  for_each_cpu(cpu, mask) {  in mips_smp_send_ipi_mask()
    287  for_each_cpu(cpu, mask) {  in mips_smp_ipi_allocate()
    323  for_each_cpu(cpu, mask) {  in mips_smp_ipi_free()
    735  for_each_cpu(cpu, mask) {  in tick_broadcast()
/linux/drivers/md/
  dm-ps-io-affinity.c
    79   for_each_cpu(cpu, pi->cpumask) {  in ioa_add_path()
    143  for_each_cpu(cpu, s->path_mask)  in ioa_destroy()
    220  for_each_cpu(i, cpumask) {  in ioa_select_path()
    226  for_each_cpu(i, s->path_mask) {  in ioa_select_path()
/linux/tools/testing/selftests/cpufreq/
  cpu.sh
    15   for_each_cpu()  (function definition)
    70   for_each_cpu cpu_should_have_cpufreq_directory
/linux/drivers/virt/nitro_enclaves/
  ne_misc_dev.c
    214  for_each_cpu(cpu, cpu_pool)  in ne_setup_cpu_pool()
    227  for_each_cpu(cpu, cpu_pool)  in ne_setup_cpu_pool()
    261  for_each_cpu(cpu_sibling, topology_sibling_cpumask(0)) {  in ne_setup_cpu_pool()
    277  for_each_cpu(cpu, cpu_pool) {  in ne_setup_cpu_pool()
    278  for_each_cpu(cpu_sibling, topology_sibling_cpumask(cpu)) {  in ne_setup_cpu_pool()
    292  for_each_cpu(cpu_sibling, topology_sibling_cpumask(cpu))  in ne_setup_cpu_pool()
    317  for_each_cpu(cpu, cpu_pool) {  in ne_setup_cpu_pool()
    341  for_each_cpu(cpu, cpu_pool) {  in ne_setup_cpu_pool()
    360  for_each_cpu(cpu, cpu_pool)  in ne_setup_cpu_pool()
    401  for_each_cpu(cpu, ne_cpu_pool.avail_threads_per_core[i]) {  in ne_teardown_cpu_pool()
    [all …]
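The ne_setup_cpu_pool() hits above nest the walk: each CPU offered to the enclave pool is also checked against topology_sibling_cpumask(), so hyperthread siblings are only handed out as whole cores. A rough sketch of that kind of sibling validation, reduced to a hypothetical pool_has_full_cores() helper (not the driver's actual function):

	#include <linux/cpumask.h>
	#include <linux/topology.h>

	/*
	 * Hypothetical check: succeed only if every hyperthread sibling of
	 * every CPU in @pool is itself in @pool, i.e. the pool consists of
	 * complete physical cores.
	 */
	static bool pool_has_full_cores(const struct cpumask *pool)
	{
		unsigned int cpu, sibling;

		for_each_cpu(cpu, pool) {
			for_each_cpu(sibling, topology_sibling_cpumask(cpu)) {
				if (!cpumask_test_cpu(sibling, pool))
					return false;
			}
		}
		return true;
	}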
/linux/arch/powerpc/kernel/
  smp.c
    377   for_each_cpu(cpu, mask)  in arch_send_call_function_ipi_mask()
    574   for_each_cpu(cpu, mask)  in tick_broadcast()
    742   for_each_cpu(k, srcmask(i))  in or_cpumasks_related()
    749   for_each_cpu(k, srcmask(j))  in or_cpumasks_related()
    1426  for_each_cpu(i, per_cpu(thread_group_l2_cache_map, cpu)) {  in update_mask_by_l2()
    1444  for_each_cpu(i, cpu_sibling_mask(cpu))  in update_mask_by_l2()
    1458  for_each_cpu(i, *mask) {  in update_mask_by_l2()
    1491  for_each_cpu(i, mask_fn(cpu)) {  in remove_cpu_from_masks()
    1498  for_each_cpu(i, cpu_core_mask(cpu))  in remove_cpu_from_masks()
    1502  for_each_cpu(i, cpu_coregroup_mask(cpu))  in remove_cpu_from_masks()
    [all …]
/linux/kernel/irq/
  matrix.c
    143  for_each_cpu(cpu, msk) {  in matrix_find_best_cpu()
    164  for_each_cpu(cpu, msk) {  in matrix_find_best_cpu_managed()
    220  for_each_cpu(cpu, msk) {  in irq_matrix_reserve_managed()
    237  for_each_cpu(cpu, msk) {  in irq_matrix_reserve_managed()
    261  for_each_cpu(cpu, msk) {  in irq_matrix_remove_managed()
/linux/drivers/perf/
  arm_pmu_platform.c
    54   for_each_cpu(cpu, &pmu->supported_cpus)  in pmu_parse_percpu_irq()
    161  for_each_cpu(cpu, &armpmu->supported_cpus) {  in armpmu_request_irqs()
    179  for_each_cpu(cpu, &armpmu->supported_cpus) {  in armpmu_free_irqs()
/linux/arch/x86/kernel/
  smpboot.c
    573   for_each_cpu(i, cpu_sibling_setup_mask) {  in set_cpu_sibling_map()
    596   for_each_cpu(i, topology_sibling_cpumask(cpu))  in set_cpu_sibling_map()
    603   for_each_cpu(i, cpu_sibling_setup_mask) {  in set_cpu_sibling_map()
    1179  for_each_cpu(sibling, topology_core_cpumask(cpu)) {  in remove_siblinginfo()
    1188  for_each_cpu(sibling, topology_die_cpumask(cpu))  in remove_siblinginfo()
    1191  for_each_cpu(sibling, topology_sibling_cpumask(cpu)) {  in remove_siblinginfo()
    1197  for_each_cpu(sibling, cpu_llc_shared_mask(cpu))  in remove_siblinginfo()
    1199  for_each_cpu(sibling, cpu_l2c_shared_mask(cpu))  in remove_siblinginfo()
/linux/arch/riscv/kernel/
  sys_hwprobe.c
    28   for_each_cpu(cpu, cpus) {  in hwprobe_arch_id()
    81   for_each_cpu(cpu, cpus) {  in hwprobe_isa_ext0()
    174  for_each_cpu(cpu, cpus) {  in hwprobe_misaligned()
    365  for_each_cpu(cpu, &cpus) {  in hwprobe_get_cpus()
  unaligned_access_speed.c
    232  for_each_cpu(cpu, cpu_online_mask) {  in check_unaligned_access_speed_all_cpus()
    254  for_each_cpu(cpu, cpu_online_mask) {  in check_unaligned_access_speed_all_cpus()
/linux/lib/
  cpu_rmap.c
    99   for_each_cpu(neigh, mask) {  in cpu_rmap_copy_neigh()
    191  for_each_cpu(cpu, affinity) {  in cpu_rmap_update()
    201  for_each_cpu(cpu, update_mask) {  in cpu_rmap_update()
/linux/arch/powerpc/platforms/cell/
  cpufreq_spudemand.c
    94   for_each_cpu(i, policy->cpus) {  in spu_gov_start()
    117  for_each_cpu (i, policy->cpus) {  in spu_gov_stop()
/linux/arch/loongarch/kernel/
  smp.c
    96   for_each_cpu(i, &cpu_core_setup_map) {  in set_cpu_core_map()
    110  for_each_cpu(i, &cpu_sibling_setup_map) {  in set_cpu_sibling_map()
    122  for_each_cpu(i, &cpu_sibling_setup_map) {  in clear_cpu_sibling_map()
    145  for_each_cpu(k, &temp_foreign_map)  in calculate_cpu_foreign_map()
    208  for_each_cpu(i, mask)  in loongson_send_ipi_mask()
/linux/drivers/opp/
  cpu.c
    116  for_each_cpu(cpu, cpumask) {  in _dev_pm_opp_cpumask_remove_table()
    167  for_each_cpu(cpu, cpumask) {  in dev_pm_opp_set_sharing_cpus()
/linux/tools/workqueue/
  wq_dump.py
    52   from drgn.helpers.linux.cpumask import for_each_cpu,for_each_possible_cpu
    69   for cpu in for_each_cpu(cpumask[0]):
/linux/block/
  blk-mq-pci.c
    36   for_each_cpu(cpu, mask)  in blk_mq_pci_map_queues()
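blk_mq_pci_map_queues() uses the same walk to turn per-vector IRQ affinity into a CPU-to-queue map: every CPU in a vector's affinity mask is pointed at that vector's hardware queue. A stripped-down sketch of the idea, using a hypothetical flat cpu_to_queue[] array rather than the real struct blk_mq_queue_map:

	#include <linux/cpumask.h>
	#include <linux/threads.h>	/* NR_CPUS */

	/* Hypothetical map: which hardware queue each possible CPU submits to. */
	static unsigned int cpu_to_queue[NR_CPUS];

	/* Point every CPU in @mask at hardware queue @queue. */
	static void map_queue_cpus(const struct cpumask *mask, unsigned int queue)
	{
		unsigned int cpu;

		for_each_cpu(cpu, mask)
			cpu_to_queue[cpu] = queue;
	}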
/linux/kernel/sched/
  topology.c
    223   for_each_cpu(i, cpu_mask) {  in sched_is_eas_possible()
    255   for_each_cpu(i, cpu_mask) {  in sched_is_eas_possible()
    438   for_each_cpu(i, cpu_map) {  in build_perf_domains()
    936   for_each_cpu(i, sg_span) {  in build_balance_mask()
    1310  for_each_cpu(cpu, mask) {  in init_sched_groups_capacity()
    1321  for_each_cpu(cpu, sched_group_span(sg)) {  in init_sched_groups_capacity()
    2246  for_each_cpu(j, cpu_map) {  in __sdt_alloc()
    2299  for_each_cpu(j, cpu_map) {  in __sdt_free()
    2414  for_each_cpu(i, cpu_map) {  in build_sched_domains()
    2437  for_each_cpu(i, cpu_map) {  in build_sched_domains()
    [all …]