Home
last modified time | relevance | path

Searched refs:__rq_lockp (Results 1 – 5 of 5) sorted by relevance

/linux/kernel/sched/
H A D  sched.h  1466 static inline raw_spinlock_t *__rq_lockp(struct rq *rq) in __rq_lockp() function
1566 static inline raw_spinlock_t *__rq_lockp(struct rq *rq) in __rq_lockp() function
1610 __assumes_ctx_lock(__rq_lockp(rq)) in lockdep_assert_rq_held()
1612 lockdep_assert_held(__rq_lockp(rq)); in lockdep_assert_rq_held()
1616 __acquires(__rq_lockp(rq));
1619 __cond_acquires(true, __rq_lockp(rq));
1622 __acquires(__rq_lockp(rq)) in raw_spin_rq_lock()
1628 __releases(__rq_lockp(rq)) in raw_spin_rq_unlock()
1634 __acquires(__rq_lockp(rq)) in raw_spin_rq_lock_irq()
1641 __releases(__rq_lockp(rq)) in raw_spin_rq_unlock_irq()
[all …]
H A D  core.c  659 lock = __rq_lockp(rq); in raw_spin_rq_lock_nested()
661 if (likely(lock == __rq_lockp(rq))) { in raw_spin_rq_lock_nested()
685 lock = __rq_lockp(rq); in raw_spin_rq_trylock()
687 if (!ret || (likely(lock == __rq_lockp(rq)))) { in raw_spin_rq_trylock()
706 if (__rq_lockp(rq1) != __rq_lockp(rq2)) in double_rq_lock()
709 __acquire_ctx_lock(__rq_lockp(rq2)); /* fake acquire */ in double_rq_lock()
2508 __must_hold(__rq_lockp(rq)) in move_queued_task()
2555 __must_hold(__rq_lockp(rq)) in __migrate_task()
2921 __releases(__rq_lockp(rq), &p->pi_lock) in affine_move_task()
3076 __releases(__rq_lockp(rq), &p->pi_lock) in __set_cpus_allowed_ptr_locked()
[all …]
H A D  ext_internal.h  1398 lockdep_is_held(__rq_lockp(task_rq(p)))); in scx_task_sched()
1465 lockdep_is_held(__rq_lockp(task_rq(p)))); in scx_task_sched()
H A D  deadline.c  1273 lockdep_unpin_lock(__rq_lockp(rq), rf.cookie); in dl_task_timer()
1275 rf.cookie = lockdep_pin_lock(__rq_lockp(rq)); in dl_task_timer()
H A D  fair.c  1675 (lockdep_is_held(__rq_lockp(task_rq(p))) && !READ_ONCE(p->on_cpu))); in deref_task_numa_group()
5062 __must_hold(__rq_lockp(this_rq));
9235 __must_hold(__rq_lockp(rq)) in pick_next_task_fair()
13182 __must_hold(__rq_lockp(this_rq)) in sched_balance_newidle()