Home
last modified time | relevance | path

Searched refs: task_on_rq_queued (Results 1 – 5 of 5) sorted by relevance

/linux/kernel/sched/
deadline.c:340 if (task_on_rq_queued(p)) in dl_change_utilization()
1263 if (!task_on_rq_queued(p)) { in dl_task_timer()
2830 WARN_ON_ONCE(!task_on_rq_queued(p)); in pick_next_pushable_dl_task()
2890 !task_on_rq_queued(task))) || in find_lock_later_rq()
3054 WARN_ON(!task_on_rq_queued(p)); in pull_dl_task()
3301 if (task_on_rq_queued(p) && p->dl.dl_runtime) in switched_from_dl()
3310 if (!task_on_rq_queued(p)) { in switched_from_dl()
3335 if (!task_on_rq_queued(p) || rq->dl.dl_nr_running)
3356 if (!task_on_rq_queued(p)) { in switched_to_dl()
3391 if (!task_on_rq_queued( in prio_changed_dl()
[all...]
core_sched.c:81 if (cookie && task_on_rq_queued(p)) in sched_core_update_cookie()
core.c:1401 if (!task_on_rq_queued(p)) in __need_bw_check()
2131 return task_on_rq_queued(p); in sched_task_on_rq()
2260 if (task_on_rq_queued(donor) && test_tsk_need_resched(rq->curr)) in wakeup_preempt()
2350 queued = task_on_rq_queued(p); in wait_task_inactive()
2624 if (task_on_rq_queued(p)) { in migration_cpu_stop()
3038 if (task_on_rq_queued(p)) in affine_move_task()
3357 if (task_on_rq_queued(p)) { in __migrate_swap_task()
3773 if (task_on_rq_queued(p)) { in ttwu_runnable()
5566 if (!p->on_cpu || !task_on_rq_queued(p)) in task_sched_runtime()
5576 if (task_current_donor(rq, p) && task_on_rq_queued(p)) { in task_sched_runtime()
[all …]
sched.h:2412 static inline int task_on_rq_queued(struct task_struct *p) in task_on_rq_queued() function
2786 return rq->stop && task_on_rq_queued(rq->stop); in sched_stop_runnable()
fair.c:8316 else if (p && unlikely(task_on_rq_queued(p) || current == p)) in cpu_util()
10980 if (task_on_rq_queued(p)) in task_running_on_cpu()
13726 if (!task_on_rq_queued(p)) in prio_changed_fair()
13848 if (task_on_rq_queued(p)) { in switched_to_fair()
13865 if (task_on_rq_queued(p)) { in __set_next_task_fair()