Searched for refs:cpu_smt_mask (Results 1 – 11 of 11), sorted by relevance

/linux/drivers/platform/x86/intel/ifs/
runtest.c
88 cpumask_pr_args(cpu_smt_mask(cpu)), ifsd->cur_batch, ifsd->loaded_version); in message_not_tested()
94 cpumask_pr_args(cpu_smt_mask(cpu)), in message_not_tested()
98 cpumask_pr_args(cpu_smt_mask(cpu))); in message_not_tested()
101 cpumask_pr_args(cpu_smt_mask(cpu)), in message_not_tested()
105 cpumask_pr_args(cpu_smt_mask(cpu)), status.data); in message_not_tested()
122 cpumask_pr_args(cpu_smt_mask(cpu)), ifsd->cur_batch, ifsd->loaded_version); in message_fail()
164 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in wait_for_sibling_cpu()
200 first = cpumask_first(cpu_smt_mask(cpu)); in doscan()
321 first = cpumask_first(cpu_smt_mask(cpu)); in do_array_test()
374 first = cpumask_first(cpu_smt_mask(cpu)); in do_array_test_gen1()
[all …]
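
The IFS driver hits above show the two recurring uses of cpu_smt_mask() in this file: printing which SMT siblings a test result applies to (cpumask_pr_args() with the %*pbl format) and electing the first sibling of a core to carry out per-core work (cpumask_first()). A minimal sketch combining both patterns; the helper name report_core() is hypothetical and not part of the driver:

#include <linux/cpumask.h>
#include <linux/printk.h>
#include <linux/topology.h>

/* Hypothetical helper: print the SMT sibling mask of @cpu and let only
 * the lowest-numbered sibling of the core perform per-core work. */
static void report_core(int cpu)
{
	const struct cpumask *smt_mask = cpu_smt_mask(cpu);

	/* %*pbl prints the mask as a CPU list, e.g. "0,64" */
	pr_info("CPUs %*pbl share a core\n", cpumask_pr_args(smt_mask));

	if (cpu == cpumask_first(smt_mask))
		pr_info("CPU %d is the designated first thread\n", cpu);
}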
/linux/arch/powerpc/include/asm/
smp.h
140 #define cpu_smt_mask cpu_smt_mask macro
142 static inline const struct cpumask *cpu_smt_mask(int cpu) in cpu_smt_mask() function
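
powerpc pre-defines the cpu_smt_mask macro here so that the generic fallback in include/linux/topology.h (next result) is compiled out. A rough sketch of that override pattern, assuming a hypothetical per-CPU sibling map named arch_smt_map; the real powerpc implementation uses its own SMP topology data:

#include <linux/cpumask.h>
#include <linux/percpu.h>

/* Hypothetical per-CPU sibling map standing in for the arch's own data. */
DECLARE_PER_CPU(struct cpumask, arch_smt_map);

/* Defining the macro to its own name tells <linux/topology.h> not to
 * install its generic fallback. */
#define cpu_smt_mask cpu_smt_mask
static inline const struct cpumask *cpu_smt_mask(int cpu)
{
	return &per_cpu(arch_smt_map, cpu);
}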
/linux/include/linux/
topology.h
236 #if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
237 static inline const struct cpumask *cpu_smt_mask(int cpu) in cpu_smt_mask() function
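
The guard at line 236 only installs this generic definition when the architecture has not provided its own (as powerpc does above). In current mainline the fallback simply wraps the architecture's sibling topology mask:

#if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
/* Generic fallback inside <linux/topology.h>: an SMT core's mask is its
 * sibling topology mask. */
static inline const struct cpumask *cpu_smt_mask(int cpu)
{
	return topology_sibling_cpumask(cpu);
}
#endif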
/linux/kernel/sched/
core_sched.c
242 const struct cpumask *smt_mask = cpu_smt_mask(cpu_of(rq)); in __sched_core_account_forceidle()
core.c
369 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_lock()
379 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_unlock()
399 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in __sched_core_flip()
6079 smt_mask = cpu_smt_mask(cpu); in pick_next_task()
6372 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_cpu_starting()
6411 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_cpu_deactivate()
8075 if (cpumask_weight(cpu_smt_mask(cpu)) == 2) in sched_smt_present_inc()
8083 if (cpumask_weight(cpu_smt_mask(cpu)) == 2) in sched_smt_present_dec()
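
The last two core.c hits (8075, 8083) gate the scheduler's SMT bookkeeping on the moment a core's online sibling mask reaches exactly two CPUs, i.e. the transition between a single-threaded and an SMT core during hotplug. A sketch of that counting pattern, with a hypothetical atomic counter standing in for the scheduler's static key:

#include <linux/atomic.h>
#include <linux/cpumask.h>
#include <linux/topology.h>

/* Hypothetical counter of cores that currently have two online threads. */
static atomic_t smt_cores_online = ATOMIC_INIT(0);

/* Called while @cpu is coming online: the core becomes SMT exactly when
 * its online sibling mask grows to two CPUs. */
static void smt_present_inc(int cpu)
{
	if (cpumask_weight(cpu_smt_mask(cpu)) == 2)
		atomic_inc(&smt_cores_online);
}

/* Mirror check while @cpu is going offline. */
static void smt_present_dec(int cpu)
{
	if (cpumask_weight(cpu_smt_mask(cpu)) == 2)
		atomic_dec(&smt_cores_online);
}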
topology.c
1313 cpumask_andnot(mask, mask, cpu_smt_mask(cpu)); in init_sched_groups_capacity()
1704 { cpu_smt_mask, cpu_smt_flags, SD_INIT_NAME(SMT) },
fair.c
1394 for_each_cpu(sibling, cpu_smt_mask(cpu)) {
7601 for_each_cpu(cpu, cpu_smt_mask(core)) { in select_idle_sibling()
7624 for_each_cpu(cpu, cpu_smt_mask(core)) { in select_idle_sibling()
7643 cpumask_andnot(cpus, cpus, cpu_smt_mask(core)); in select_idle_sibling()
7654 for_each_cpu_and(cpu, cpu_smt_mask(target), p->cpus_ptr) { in select_idle_sibling()
11635 cpumask_andnot(swb_cpus, swb_cpus, cpu_smt_mask(cpu)); in active_load_balance_cpu_stop()
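
In fair.c, select_idle_sibling() walks the SMT siblings of each candidate core and drops whole cores from the search set with cpumask_andnot() once they have been scanned. A hedged sketch of the per-core scan; the wrapper name scan_core_for_idle() is hypothetical:

#include <linux/cpumask.h>
#include <linux/sched.h>
#include <linux/topology.h>

/* Hypothetical scan: return an idle sibling of @core that @p may run on,
 * or -1 if every allowed thread of the core is busy. */
static int scan_core_for_idle(struct task_struct *p, int core)
{
	int cpu;

	for_each_cpu_and(cpu, cpu_smt_mask(core), p->cpus_ptr) {
		if (available_idle_cpu(cpu))
			return cpu;
	}
	return -1;
}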
ext.c
2726 const struct cpumask *smt_mask = cpu_smt_mask(cpu_of(rq)); in balance_scx()
3013 const struct cpumask *smt = cpu_smt_mask(cpu); in test_and_clear_cpu_idle()
3209 const struct cpumask *smt = cpu_smt_mask(cpu); in __scx_update_idle()
sched.h
1411 for_each_cpu(cpu, cpu_smt_mask(cpu_of(rq))) {
/linux/kernel/
stop_machine.c
637 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in stop_core_cpuslocked()
workqueue.c
7964 return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1)); in cpus_share_smt()
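
The workqueue hit reduces "do two CPUs share a physical core?" to a single bit test against one CPU's SMT mask, as shown in the snippet at line 7964. Restated as a self-contained helper (the name cpus_on_same_core() is mine, not the kernel's):

#include <linux/cpumask.h>
#include <linux/topology.h>
#include <linux/types.h>

/* True if @cpu0 and @cpu1 are SMT siblings of the same core. */
static bool cpus_on_same_core(int cpu0, int cpu1)
{
	return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1));
}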