
Searched refs:nr_cpus_allowed (Results 1 – 16 of 16) sorted by relevance

/linux/kernel/sched/
deadline.c
2156 if (!task_current(rq, p) && !p->dl.dl_throttled && p->nr_cpus_allowed > 1) in enqueue_task_dl()
2242 (curr->nr_cpus_allowed < 2 || in select_task_rq_dl()
2244 p->nr_cpus_allowed > 1; in select_task_rq_dl()
2305 if (rq->curr->nr_cpus_allowed == 1 || in check_preempt_equal_dl()
2313 if (p->nr_cpus_allowed != 1 && in check_preempt_equal_dl()
2456 if (on_dl_rq(&p->dl) && p->nr_cpus_allowed > 1) in put_prev_task_dl()
2537 if (task->nr_cpus_allowed == 1) in find_later_rq()
2679 WARN_ON_ONCE(p->nr_cpus_allowed <= 1); in pick_next_pushable_dl_task()
2710 rq->curr->nr_cpus_allowed > 1) { in push_dl_task()
2870 p->nr_cpus_allowed > 1 && in task_woken_dl()
[all …]
rt.c
1482 if (!task_current(rq, p) && p->nr_cpus_allowed > 1) in enqueue_task_rt()
1580 (curr->nr_cpus_allowed < 2 || donor->prio <= p->prio); in select_task_rq_rt()
1610 if (rq->curr->nr_cpus_allowed == 1 || in check_preempt_equal_prio()
1618 if (p->nr_cpus_allowed != 1 && in check_preempt_equal_prio()
1766 if (on_rt_rq(&p->rt) && p->nr_cpus_allowed > 1) in put_prev_task_rt()
1809 if (task->nr_cpus_allowed == 1) in find_lowest_rq()
1962 BUG_ON(p->nr_cpus_allowed <= 1); in pick_next_pushable_task()
2388 p->nr_cpus_allowed > 1 && in task_woken_rt()
2390 (rq->curr->nr_cpus_allowed < 2 || in task_woken_rt()
2472 if (p->nr_cpus_allowed > 1 && rq->rt.overloaded) in switched_to_rt()
idle.c
389 WARN_ON_ONCE(current->nr_cpus_allowed != 1); in play_idle_precise()
sched.h
2629 if (p->nr_cpus_allowed == 1) in get_push_task()
3573 if (p->nr_cpus_allowed != 1) in is_per_cpu_kthread()
3681 while (cid < READ_ONCE(mm->nr_cpus_allowed) && cid < atomic_read(&mm->mm_users)) { in __mm_cid_try_get()
3695 if (cid < READ_ONCE(mm->nr_cpus_allowed)) in __mm_cid_try_get()
core.c
2693 p->nr_cpus_allowed = cpumask_weight(ctx->new_mask); in set_cpus_allowed_common()
3570 if (p->nr_cpus_allowed > 1 && !is_migration_disabled(p)) { in select_task_rq()
10401 if (dst_cid_is_set && atomic_read(&mm->mm_users) >= READ_ONCE(mm->nr_cpus_allowed)) in sched_mm_cid_migrate_to()
fair.c
5217 if (!p || (p->nr_cpus_allowed == 1) || in update_misfit_status()
10914 if (p->nr_cpus_allowed != NR_CPUS) { in sched_balance_find_dst_group()
ext.c
3380 if (p->nr_cpus_allowed >= num_possible_cpus()) { in scx_select_cpu_dfl()
/linux/init/
init_task.c
83 .nr_cpus_allowed= NR_CPUS,
/linux/tools/sched_ext/
scx_central.bpf.c
119 if ((p->flags & PF_KTHREAD) && p->nr_cpus_allowed == 1) { in BPF_STRUCT_OPS()
scx_flatcg.bpf.c
367 if (p->nr_cpus_allowed != nr_cpus) { in BPF_STRUCT_OPS()
378 if (p->nr_cpus_allowed == 1 && (p->flags & PF_KTHREAD)) { in BPF_STRUCT_OPS()
scx_qmap.bpf.c
138 if (p->nr_cpus_allowed == 1 || in pick_direct_dispatch_cpu()
/linux/drivers/infiniband/hw/hfi1/
affinity.c
1008 if (current->nr_cpus_allowed == 1) { in hfi1_get_proc_affinity()
1019 } else if (current->nr_cpus_allowed < cpumask_weight(&set->mask)) { in hfi1_get_proc_affinity()
sdma.c
838 if (current->nr_cpus_allowed != 1) in sdma_select_user_engine()
/linux/include/linux/
sched.h
884 int nr_cpus_allowed; member
1738 (current->nr_cpus_allowed == 1); in is_percpu_thread()
/linux/drivers/infiniband/hw/qib/
qib_file_ops.c
1144 const unsigned int weight = current->nr_cpus_allowed; in assign_ctxt_affinity()
1626 const unsigned int weight = current->nr_cpus_allowed; in qib_assign_ctxt()
/linux/kernel/trace/
trace_osnoise.c
2438 if (current->nr_cpus_allowed > 1 || cpu != smp_processor_id()) { in timerlat_fd_open()