Searched refs:raw_lock (Results 1 – 7 of 7) sorted by relevance
15 arch_spinlock_t raw_lock; member
65 .raw_lock = __ARCH_SPIN_LOCK_UNLOCKED, \
116 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
119 #define raw_spin_is_contended(lock) arch_spin_is_contended(&(lock)->raw_lock)
187 arch_spin_lock(&lock->raw_lock); in do_raw_spin_lock()
193 int ret = arch_spin_trylock(&(lock)->raw_lock); in do_raw_spin_trylock()
204 arch_spin_unlock(&lock->raw_lock); in do_raw_spin_unlock()
34 .raw_lock = __ARCH_SPIN_LOCK_UNLOCKED, \
248 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
263 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
277 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter()
938 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero()
950 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero()
966 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy()
981 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy()
58 mtctl(__pa(__ldcw_align(&pgd_lock->rlock.raw_lock)), 28); in switch_mm_irqs_off()
16 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
82 rqspinlock_t raw_lock; member
144 raw_res_spin_lock_init(&htab->buckets[i].raw_lock); in htab_init_buckets()
154 ret = raw_res_spin_lock_irqsave(&b->raw_lock, flags); in htab_lock_bucket()
163 raw_res_spin_unlock_irqrestore(&b->raw_lock, flags); in htab_unlock_bucket()