Lines Matching refs:hlock

227 static inline struct lock_class *hlock_class(struct held_lock *hlock) in hlock_class() argument
229 unsigned int class_idx = hlock->class_idx; in hlock_class()
345 static void lock_release_holdtime(struct held_lock *hlock) in lock_release_holdtime() argument
353 holdtime = lockstat_clock() - hlock->holdtime_stamp; in lock_release_holdtime()
355 stats = get_lock_stats(hlock_class(hlock)); in lock_release_holdtime()
356 if (hlock->read) in lock_release_holdtime()
362 static inline void lock_release_holdtime(struct held_lock *hlock) in lock_release_holdtime() argument
429 static inline u16 hlock_id(struct held_lock *hlock) in hlock_id() argument
433 return (hlock->class_idx | (hlock->read << MAX_LOCKDEP_KEYS_BITS)); in hlock_id()
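
hlock_id() folds a held lock's class index and its read state into one u16, which is the unit stored into lock chains further down this listing (chain_hlocks[chain->base + j] = hlock_id(hlock) in add_chain_cache()). Below is a standalone, userspace sketch of that packing and the matching unpack step; struct held_lock, MAX_LOCKDEP_KEYS_BITS (13 is an assumed, typical value) and the unpack helper are simplified stand-ins for the kernel's definitions, shown only to illustrate the bit layout.

        /* Simplified sketch of the hlock_id() packing shown above.
         * Types and the key-bits value are stand-ins, not the kernel's. */
        #include <assert.h>
        #include <stdint.h>

        #define MAX_LOCKDEP_KEYS_BITS 13   /* assumed, typical configuration */

        struct held_lock {
                unsigned int class_idx : MAX_LOCKDEP_KEYS_BITS;
                unsigned int read : 2;     /* 0 = write, 1 = read, 2 = recursive read */
        };

        static inline uint16_t hlock_id(const struct held_lock *hlock)
        {
                /* Low bits carry the class index, the bits above carry ->read. */
                return (uint16_t)(hlock->class_idx | (hlock->read << MAX_LOCKDEP_KEYS_BITS));
        }

        static inline unsigned int chain_hlock_class_idx(uint16_t id)
        {
                /* Recover just the class index from a packed id. */
                return id & ((1U << MAX_LOCKDEP_KEYS_BITS) - 1);
        }

        int main(void)
        {
                struct held_lock h = { .class_idx = 42, .read = 2 };

                assert(chain_hlock_class_idx(hlock_id(&h)) == 42);
                return 0;
        }
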
717 static void __print_lock_name(struct held_lock *hlock, struct lock_class *class) in __print_lock_name() argument
732 if (hlock && class->print_fn) in __print_lock_name()
733 class->print_fn(hlock->instance); in __print_lock_name()
737 static void print_lock_name(struct held_lock *hlock, struct lock_class *class) in print_lock_name() argument
744 __print_lock_name(hlock, class); in print_lock_name()
762 static void print_lock(struct held_lock *hlock) in print_lock() argument
774 struct lock_class *lock = hlock_class(hlock); in print_lock()
781 printk(KERN_CONT "%px", hlock->instance); in print_lock()
782 print_lock_name(hlock, lock); in print_lock()
783 printk(KERN_CONT ", at: %pS\n", (void *)hlock->acquire_ip); in print_lock()
1677 struct held_lock *hlock) in bfs_init_root() argument
1679 __bfs_init_root(lock, hlock_class(hlock)); in bfs_init_root()
1680 lock->only_xr = (hlock->read == 2); in bfs_init_root()
1691 struct held_lock *hlock) in bfs_init_rootb() argument
1693 __bfs_init_root(lock, hlock_class(hlock)); in bfs_init_rootb()
1694 lock->only_xr = (hlock->read != 0); in bfs_init_rootb()
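
bfs_init_root() and bfs_init_rootb() differ only in how the held lock's read state becomes the root's only_xr flag: the forward root is restricted only for a recursive read (read == 2), the backward root for any read acquisition (read != 0). A tiny illustrative mapping, assuming the usual encoding of ->read (0 write, 1 non-recursive read, 2 recursive read); the enum names are local stand-ins for those numeric values.

        /* Illustrative mapping of ->read to ->only_xr for the two BFS roots. */
        #include <stdbool.h>
        #include <stdio.h>

        enum read_state { LOCK_WRITE = 0, LOCK_READ = 1, LOCK_RECURSIVE_READ = 2 };

        /* Forward root (bfs_init_root): only a recursive read restricts it. */
        static bool only_xr_forward(enum read_state read)  { return read == LOCK_RECURSIVE_READ; }

        /* Backward root (bfs_init_rootb): any read acquisition restricts it. */
        static bool only_xr_backward(enum read_state read) { return read != LOCK_WRITE; }

        int main(void)
        {
                for (enum read_state r = LOCK_WRITE; r <= LOCK_RECURSIVE_READ; r++)
                        printf("read=%d forward only_xr=%d backward only_xr=%d\n",
                               r, only_xr_forward(r), only_xr_backward(r));
                return 0;
        }
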
1998 struct held_lock *hlock = (struct held_lock *)data; in hlock_conflict() local
2000 return hlock_class(hlock) == entry->class && /* Found A -> .. -> B */ in hlock_conflict()
2001 (hlock->read == 0 || /* B -> A is -(E*)-> */ in hlock_conflict()
2910 struct held_lock *hlock = (struct held_lock *)data; in hlock_equal() local
2912 return hlock_class(hlock) == entry->class && /* Found A -> .. -> B */ in hlock_equal()
2913 (hlock->read == 2 || /* A -> B is -(*R)-> */ in hlock_equal()
3262 struct held_lock *hlock; in check_prevs_add() local
3281 hlock = curr->held_locks + depth - 1; in check_prevs_add()
3283 if (hlock->check) { in check_prevs_add()
3284 int ret = check_prev_add(curr, hlock, next, distance, &trace); in check_prevs_add()
3294 if (!hlock->trylock) in check_prevs_add()
3571 struct held_lock *hlock) in get_first_held_lock() argument
3578 if (hlock_curr->irq_context != hlock->irq_context) in get_first_held_lock()
3603 struct held_lock *hlock; in print_chain_keys_held_locks() local
3611 hlock = curr->held_locks + i; in print_chain_keys_held_locks()
3612 chain_key = print_chain_key_iteration(hlock_id(hlock), chain_key); in print_chain_keys_held_locks()
3614 print_lock(hlock); in print_chain_keys_held_locks()
3671 struct held_lock *hlock, in check_no_collision() argument
3677 i = get_first_held_lock(curr, hlock); in check_no_collision()
3680 print_collision(curr, hlock, chain); in check_no_collision()
3688 print_collision(curr, hlock, chain); in check_no_collision()
3731 struct held_lock *hlock, in add_chain_cache() argument
3758 chain->irq_context = hlock->irq_context; in add_chain_cache()
3759 i = get_first_held_lock(curr, hlock); in add_chain_cache()
3784 chain_hlocks[chain->base + j] = hlock_id(hlock); in add_chain_cache()
3817 struct held_lock *hlock, in lookup_chain_cache_add() argument
3820 struct lock_class *class = hlock_class(hlock); in lookup_chain_cache_add()
3825 if (!check_no_collision(curr, hlock, chain)) in lookup_chain_cache_add()
3855 if (!add_chain_cache(curr, hlock, chain_key)) in lookup_chain_cache_add()
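
lookup_chain_cache_add() falls through to add_chain_cache(), which records the new chain by copying packed hlock ids into a shared chain_hlocks[] array, starting from the first held lock in the same irq context (get_first_held_lock()). A minimal userspace sketch of that recording step follows; array size, struct layout and the *_sketch types are illustrative assumptions, and exhaustion handling is left out.

        /* Sketch of recording a lock chain from the held-lock stack. */
        #include <stdint.h>
        #include <stdio.h>

        #define MAX_CHAIN_HLOCKS 64     /* stand-in size */

        struct held_lock_sketch {
                uint16_t hlock_id;      /* packed class_idx + read, as above */
                unsigned int irq_context;
        };

        struct lock_chain_sketch {
                unsigned int irq_context;
                unsigned int base;
                unsigned int depth;
        };

        static uint16_t chain_hlocks[MAX_CHAIN_HLOCKS];
        static unsigned int nr_chain_hlocks;

        /* Index of the first held lock in the same irq context as the newest one. */
        static int get_first_held_lock(const struct held_lock_sketch *held, int depth)
        {
                int i;

                for (i = depth - 1; i >= 1; i--)
                        if (held[i - 1].irq_context != held[depth - 1].irq_context)
                                break;
                return i;
        }

        static void add_chain_cache(const struct held_lock_sketch *held, int depth,
                                    struct lock_chain_sketch *chain)
        {
                int i = get_first_held_lock(held, depth);
                int j;

                chain->irq_context = held[depth - 1].irq_context;
                chain->depth = depth - i;
                chain->base = nr_chain_hlocks;

                /* Copy only the same-context tail of the stack into the shared array. */
                for (j = 0; j < (int)chain->depth; j++)
                        chain_hlocks[chain->base + j] = held[i + j].hlock_id;
                nr_chain_hlocks += chain->depth;
        }

        int main(void)
        {
                struct held_lock_sketch held[3] = {
                        { .hlock_id = 1, .irq_context = 0 },
                        { .hlock_id = 2, .irq_context = 1 },   /* taken from irq context */
                        { .hlock_id = 3, .irq_context = 1 },
                };
                struct lock_chain_sketch chain;

                add_chain_cache(held, 3, &chain);
                printf("depth=%u base=%u first=%u\n",
                       chain.depth, chain.base, chain_hlocks[chain.base]);
                return 0;
        }
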
3862 struct held_lock *hlock, in validate_chain() argument
3875 if (!hlock->trylock && hlock->check && in validate_chain()
3876 lookup_chain_cache_add(curr, hlock, chain_key)) { in validate_chain()
3895 int ret = check_deadlock(curr, hlock); in validate_chain()
3908 if (!check_prevs_add(curr, hlock)) in validate_chain()
3923 struct held_lock *hlock, in validate_chain() argument
3939 struct held_lock *hlock, *prev_hlock = NULL; in check_chain_key() local
3944 hlock = curr->held_locks + i; in check_chain_key()
3945 if (chain_key != hlock->prev_chain_key) { in check_chain_key()
3954 (unsigned long long)hlock->prev_chain_key); in check_chain_key()
3962 if (DEBUG_LOCKS_WARN_ON(!test_bit(hlock->class_idx, lock_classes_in_use))) in check_chain_key()
3966 hlock->irq_context)) in check_chain_key()
3968 chain_key = iterate_chain_key(chain_key, hlock_id(hlock)); in check_chain_key()
3969 prev_hlock = hlock; in check_chain_key()
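
check_chain_key() re-walks the held-lock stack and verifies that each entry's prev_chain_key matches the key recomputed so far, i.e. that folding hlock ids with iterate_chain_key() reproduces curr_chain_key, while __lock_release() and friends unwind by restoring curr_chain_key from prev_chain_key. A minimal sketch of that fold-and-unwind bookkeeping; mix_into_key() is a placeholder hash (not the kernel's iterate_chain_key()) and INITIAL_CHAIN_KEY is an assumed sentinel, so only the structure is meant to carry over.

        /* Sketch of building up and unwinding a chain key. */
        #include <stdint.h>
        #include <stdio.h>

        #define INITIAL_CHAIN_KEY ((uint64_t)-1)   /* assumed sentinel */

        struct held_lock_sketch {
                uint16_t hlock_id;                 /* packed class_idx + read */
                uint64_t prev_chain_key;           /* key *before* this lock was folded in */
        };

        static uint64_t mix_into_key(uint64_t key, uint16_t id)
        {
                /* Placeholder mix, NOT the kernel's hash. */
                key ^= id;
                key *= 0x100000001b3ULL;
                return key;
        }

        int main(void)
        {
                struct held_lock_sketch stack[3] = {
                        { .hlock_id = 1 }, { .hlock_id = 7 }, { .hlock_id = 7 | (2u << 13) },
                };
                uint64_t chain_key = INITIAL_CHAIN_KEY;

                /* Acquire: remember the key each lock was taken under, then fold it in. */
                for (int i = 0; i < 3; i++) {
                        stack[i].prev_chain_key = chain_key;
                        chain_key = mix_into_key(chain_key, stack[i].hlock_id);
                }

                /* Release the top lock: restoring prev_chain_key unwinds the fold. */
                chain_key = stack[2].prev_chain_key;
                printf("chain key after unwind: %llx\n", (unsigned long long)chain_key);
                return 0;
        }
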
4310 struct held_lock *hlock; in mark_held_locks() local
4315 hlock = curr->held_locks + i; in mark_held_locks()
4317 if (hlock->read) in mark_held_locks()
4322 if (!hlock->check) in mark_held_locks()
4325 if (!mark_lock(curr, hlock, hlock_bit)) in mark_held_locks()
4618 mark_usage(struct task_struct *curr, struct held_lock *hlock, int check) in mark_usage() argument
4627 if (!hlock->trylock) { in mark_usage()
4628 if (hlock->read) { in mark_usage()
4630 if (!mark_lock(curr, hlock, in mark_usage()
4634 if (!mark_lock(curr, hlock, in mark_usage()
4639 if (!mark_lock(curr, hlock, LOCK_USED_IN_HARDIRQ)) in mark_usage()
4642 if (!mark_lock(curr, hlock, LOCK_USED_IN_SOFTIRQ)) in mark_usage()
4652 if (!hlock->hardirqs_off && !hlock->sync) { in mark_usage()
4653 if (hlock->read) { in mark_usage()
4654 if (!mark_lock(curr, hlock, in mark_usage()
4658 if (!mark_lock(curr, hlock, in mark_usage()
4662 if (!mark_lock(curr, hlock, in mark_usage()
4666 if (!mark_lock(curr, hlock, in mark_usage()
4674 if (!mark_lock(curr, hlock, LOCK_USED)) in mark_usage()
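
mark_usage() is a small decision tree: non-trylock acquisitions taken in hardirq/softirq context get a LOCK_USED_IN_* bit (with a _READ variant for readers), acquisitions made with hardirqs enabled and not from lock_sync() (!hlock->sync) get an ENABLED_* bit, and every lock finally gets LOCK_USED. The sketch below condenses that tree into a mask; the enum values, the context struct and the softirq handling are simplified assumptions rather than the kernel's exact bit layout.

        /* Simplified sketch of the mark_usage() decisions visible above. */
        #include <stdio.h>

        enum usage_bit {
                LOCK_USED_IN_HARDIRQ,
                LOCK_USED_IN_HARDIRQ_READ,
                LOCK_USED_IN_SOFTIRQ,
                LOCK_USED_IN_SOFTIRQ_READ,
                LOCK_ENABLED_HARDIRQ,
                LOCK_ENABLED_HARDIRQ_READ,
                LOCK_USED,
        };

        struct held_lock_sketch {
                unsigned int trylock : 1, read : 2, hardirqs_off : 1, sync : 1;
        };

        struct context_sketch {
                unsigned int in_hardirq : 1, in_softirq : 1;
        };

        static void mark_usage(const struct context_sketch *ctx,
                               const struct held_lock_sketch *h,
                               unsigned long *usage_mask)
        {
                /* Non-trylock use inside an interrupt context. */
                if (!h->trylock) {
                        if (ctx->in_hardirq)
                                *usage_mask |= 1UL << (h->read ? LOCK_USED_IN_HARDIRQ_READ
                                                               : LOCK_USED_IN_HARDIRQ);
                        if (ctx->in_softirq)
                                *usage_mask |= 1UL << (h->read ? LOCK_USED_IN_SOFTIRQ_READ
                                                               : LOCK_USED_IN_SOFTIRQ);
                }

                /* Taken with hardirqs enabled; lock_sync() style acquisitions skip this. */
                if (!h->hardirqs_off && !h->sync)
                        *usage_mask |= 1UL << (h->read ? LOCK_ENABLED_HARDIRQ_READ
                                                       : LOCK_ENABLED_HARDIRQ);

                /* Every acquisition marks the class as used at least once. */
                *usage_mask |= 1UL << LOCK_USED;
        }

        int main(void)
        {
                struct context_sketch ctx = { .in_hardirq = 1 };
                struct held_lock_sketch h = { .read = 1, .hardirqs_off = 1 };
                unsigned long mask = 0;

                mark_usage(&ctx, &h, &mask);
                printf("usage mask: %#lx\n", mask);   /* USED_IN_HARDIRQ_READ | USED */
                return 0;
        }
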
4687 struct held_lock *hlock) in separate_irq_context() argument
4703 if (prev_hlock->irq_context != hlock->irq_context) in separate_irq_context()
4802 struct held_lock *hlock) in print_lock_invalid_wait_context() argument
4820 print_lock(hlock); in print_lock_invalid_wait_context()
4910 mark_usage(struct task_struct *curr, struct held_lock *hlock, int check) in mark_usage() argument
4921 struct held_lock *hlock) in separate_irq_context() argument
5034 struct held_lock *hlock) in print_lock_nested_lock_not_held() argument
5050 print_lock(hlock); in print_lock_nested_lock_not_held()
5053 pr_warn("%s\n", hlock->nest_lock->name); in print_lock_nested_lock_not_held()
5084 struct held_lock *hlock; in __lock_acquire() local
5145 hlock = curr->held_locks + depth - 1; in __lock_acquire()
5146 if (hlock->class_idx == class_idx && nest_lock) { in __lock_acquire()
5150 if (!hlock->references) in __lock_acquire()
5151 hlock->references++; in __lock_acquire()
5153 hlock->references += references; in __lock_acquire()
5156 if (DEBUG_LOCKS_WARN_ON(hlock->references < references)) in __lock_acquire()
5163 hlock = curr->held_locks + depth; in __lock_acquire()
5170 hlock->class_idx = class_idx; in __lock_acquire()
5171 hlock->acquire_ip = ip; in __lock_acquire()
5172 hlock->instance = lock; in __lock_acquire()
5173 hlock->nest_lock = nest_lock; in __lock_acquire()
5174 hlock->irq_context = task_irq_context(curr); in __lock_acquire()
5175 hlock->trylock = trylock; in __lock_acquire()
5176 hlock->read = read; in __lock_acquire()
5177 hlock->check = check; in __lock_acquire()
5178 hlock->sync = !!sync; in __lock_acquire()
5179 hlock->hardirqs_off = !!hardirqs_off; in __lock_acquire()
5180 hlock->references = references; in __lock_acquire()
5182 hlock->waittime_stamp = 0; in __lock_acquire()
5183 hlock->holdtime_stamp = lockstat_clock(); in __lock_acquire()
5185 hlock->pin_count = pin_count; in __lock_acquire()
5187 if (check_wait_context(curr, hlock)) in __lock_acquire()
5191 if (!mark_usage(curr, hlock, check)) in __lock_acquire()
5220 hlock->prev_chain_key = chain_key; in __lock_acquire()
5221 if (separate_irq_context(curr, hlock)) { in __lock_acquire()
5225 chain_key = iterate_chain_key(chain_key, hlock_id(hlock)); in __lock_acquire()
5228 print_lock_nested_lock_not_held(curr, hlock); in __lock_acquire()
5233 WARN_ON_ONCE(depth && !hlock_class(hlock - 1)->key); in __lock_acquire()
5234 WARN_ON_ONCE(!hlock_class(hlock)->key); in __lock_acquire()
5237 if (!validate_chain(curr, hlock, chain_head, chain_key)) in __lock_acquire()
5241 if (hlock->sync) in __lock_acquire()
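
The early part of __lock_acquire() shows the nested-reacquisition fast path: if the lock class on top of the held-lock stack matches and a nest_lock was supplied, no new stack slot is consumed and only hlock->references grows, with the !references special cases making a plain entry and a zero-count request each count as one. A minimal sketch of that merging with simplified stand-in types; the return values follow the convention that 1 means a fresh slot is needed and 2 means the existing entry was reused.

        /* Sketch of the reference-merging fast path in __lock_acquire(). */
        #include <stdio.h>

        struct held_lock_sketch {
                unsigned int class_idx;
                unsigned int references;   /* 0 means "plain entry, counts as one" */
        };

        /* Returns 1 if a new stack slot is needed, 2 if the top entry was reused. */
        static int acquire(struct held_lock_sketch *top, unsigned int class_idx,
                           int have_nest_lock, unsigned int references)
        {
                if (top && top->class_idx == class_idx && have_nest_lock) {
                        if (!references)
                                references++;        /* the new acquisition itself */
                        if (!top->references)
                                top->references++;   /* convert the plain entry to a counted one */
                        top->references += references;
                        return 2;
                }
                return 1;
        }

        int main(void)
        {
                struct held_lock_sketch top = { .class_idx = 5, .references = 0 };

                acquire(&top, 5, 1, 0);    /* same class, nested: merged */
                acquire(&top, 5, 1, 0);    /* merged again */
                printf("references = %u\n", top.references);   /* 3: original + two merges */
                return 0;
        }
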
5303 static noinstr int match_held_lock(const struct held_lock *hlock, in match_held_lock() argument
5306 if (hlock->instance == lock) in match_held_lock()
5309 if (hlock->references) { in match_held_lock()
5329 if (DEBUG_LOCKS_WARN_ON(!hlock->nest_lock)) in match_held_lock()
5332 if (hlock->class_idx == class - lock_classes) in match_held_lock()
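
match_held_lock() accepts a lock either by instance pointer or, for the reference-counted entries created by the fast path above, by lock class: several instances of one class can sit behind a single counted held_lock, so only the class index can be compared. A simplified sketch with stand-in types (the real function additionally looks the class up from the lockdep_map's class cache):

        /* Sketch of the two match criteria in match_held_lock(). */
        #include <stdio.h>

        struct lockdep_map_sketch { int dummy; };

        struct held_lock_sketch {
                const struct lockdep_map_sketch *instance;
                unsigned int class_idx;
                unsigned int references;
        };

        static int match_held_lock(const struct held_lock_sketch *hlock,
                                   const struct lockdep_map_sketch *lock,
                                   unsigned int lock_class_idx)
        {
                /* Fast path: exactly the map recorded at acquire time. */
                if (hlock->instance == lock)
                        return 1;

                /* Merged entries may cover several instances of one class. */
                if (hlock->references && hlock->class_idx == lock_class_idx)
                        return 1;

                return 0;
        }

        int main(void)
        {
                struct lockdep_map_sketch a, b;
                struct held_lock_sketch h = { .instance = &a, .class_idx = 3, .references = 2 };

                printf("%d %d\n", match_held_lock(&h, &a, 3), match_held_lock(&h, &b, 3));
                return 0;
        }
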
5344 struct held_lock *ret, *hlock, *prev_hlock; in find_held_lock() local
5348 hlock = curr->held_locks + i; in find_held_lock()
5349 ret = hlock; in find_held_lock()
5350 if (match_held_lock(hlock, lock)) in find_held_lock()
5354 for (i--, prev_hlock = hlock--; in find_held_lock()
5356 i--, prev_hlock = hlock--) { in find_held_lock()
5360 if (prev_hlock->irq_context != hlock->irq_context) { in find_held_lock()
5364 if (match_held_lock(hlock, lock)) { in find_held_lock()
5365 ret = hlock; in find_held_lock()
5378 struct held_lock *hlock; in reacquire_held_locks() local
5384 for (hlock = curr->held_locks + idx; idx < depth; idx++, hlock++) { in reacquire_held_locks()
5385 switch (__lock_acquire(hlock->instance, in reacquire_held_locks()
5386 hlock_class(hlock)->subclass, in reacquire_held_locks()
5387 hlock->trylock, in reacquire_held_locks()
5388 hlock->read, hlock->check, in reacquire_held_locks()
5389 hlock->hardirqs_off, in reacquire_held_locks()
5390 hlock->nest_lock, hlock->acquire_ip, in reacquire_held_locks()
5391 hlock->references, hlock->pin_count, 0)) { in reacquire_held_locks()
5414 struct held_lock *hlock; in __lock_set_class() local
5429 hlock = find_held_lock(curr, lock, depth, &i); in __lock_set_class()
5430 if (!hlock) { in __lock_set_class()
5440 hlock->class_idx = class - lock_classes; in __lock_set_class()
5443 curr->curr_chain_key = hlock->prev_chain_key; in __lock_set_class()
5461 struct held_lock *hlock; in __lock_downgrade() local
5475 hlock = find_held_lock(curr, lock, depth, &i); in __lock_downgrade()
5476 if (!hlock) { in __lock_downgrade()
5482 curr->curr_chain_key = hlock->prev_chain_key; in __lock_downgrade()
5484 WARN(hlock->read, "downgrading a read lock"); in __lock_downgrade()
5485 hlock->read = 1; in __lock_downgrade()
5486 hlock->acquire_ip = ip; in __lock_downgrade()
5515 struct held_lock *hlock; in __lock_release() local
5535 hlock = find_held_lock(curr, lock, depth, &i); in __lock_release()
5536 if (!hlock) { in __lock_release()
5541 if (hlock->instance == lock) in __lock_release()
5542 lock_release_holdtime(hlock); in __lock_release()
5544 WARN(hlock->pin_count, "releasing a pinned lock\n"); in __lock_release()
5546 if (hlock->references) { in __lock_release()
5547 hlock->references--; in __lock_release()
5548 if (hlock->references) { in __lock_release()
5565 curr->curr_chain_key = hlock->prev_chain_key; in __lock_release()
5599 struct held_lock *hlock = curr->held_locks + i; in __lock_is_held() local
5601 if (match_held_lock(hlock, lock)) { in __lock_is_held()
5602 if (read == -1 || !!hlock->read == read) in __lock_is_held()
5622 struct held_lock *hlock = curr->held_locks + i; in __lock_pin_lock() local
5624 if (match_held_lock(hlock, lock)) { in __lock_pin_lock()
5631 hlock->pin_count += cookie.val; in __lock_pin_lock()
5649 struct held_lock *hlock = curr->held_locks + i; in __lock_repin_lock() local
5651 if (match_held_lock(hlock, lock)) { in __lock_repin_lock()
5652 hlock->pin_count += cookie.val; in __lock_repin_lock()
5669 struct held_lock *hlock = curr->held_locks + i; in __lock_unpin_lock() local
5671 if (match_held_lock(hlock, lock)) { in __lock_unpin_lock()
5672 if (WARN(!hlock->pin_count, "unpinning an unpinned lock\n")) in __lock_unpin_lock()
5675 hlock->pin_count -= cookie.val; in __lock_unpin_lock()
5677 if (WARN((int)hlock->pin_count < 0, "pin count corrupted\n")) in __lock_unpin_lock()
5678 hlock->pin_count = 0; in __lock_unpin_lock()
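
The pin/unpin helpers implement a cookie scheme: pinning adds an opaque value to hlock->pin_count and hands it back to the caller, repinning and unpinning add and subtract that same value, so an unpin without the matching cookie leaves a nonzero or negative count and triggers the warnings above. A sketch of that arithmetic; the cookie source here (rand()) is only a stand-in chosen to make the value non-guessable in the example.

        /* Sketch of the pin-cookie scheme in __lock_pin_lock()/__lock_unpin_lock(). */
        #include <stdio.h>
        #include <stdlib.h>

        struct pin_cookie { unsigned int val; };

        struct held_lock_sketch { unsigned int pin_count; };

        static struct pin_cookie lock_pin(struct held_lock_sketch *hlock)
        {
                struct pin_cookie cookie = { .val = (unsigned int)rand() | 1 };

                hlock->pin_count += cookie.val;
                return cookie;
        }

        static void lock_unpin(struct held_lock_sketch *hlock, struct pin_cookie cookie)
        {
                if (!hlock->pin_count) {
                        fprintf(stderr, "unpinning an unpinned lock\n");
                        return;
                }

                hlock->pin_count -= cookie.val;

                if ((int)hlock->pin_count < 0) {
                        fprintf(stderr, "pin count corrupted\n");
                        hlock->pin_count = 0;
                }
        }

        int main(void)
        {
                struct held_lock_sketch h = { 0 };
                struct pin_cookie c = lock_pin(&h);

                lock_unpin(&h, c);            /* balanced: pin_count back to 0 */
                printf("pin_count = %u\n", h.pin_count);
                return 0;
        }
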
5770 static void verify_lock_unused(struct lockdep_map *lock, struct held_lock *hlock, int subclass) in verify_lock_unused() argument
5784 if (!hlock->read) in verify_lock_unused()
5790 hlock->class_idx = class - lock_classes; in verify_lock_unused()
5792 print_usage_bug(current, hlock, LOCK_USED, LOCK_USAGE_STATES); in verify_lock_unused()
5847 struct held_lock hlock; in lock_acquire() local
5849 hlock.acquire_ip = ip; in lock_acquire()
5850 hlock.instance = lock; in lock_acquire()
5851 hlock.nest_lock = nest_lock; in lock_acquire()
5852 hlock.irq_context = 2; // XXX in lock_acquire()
5853 hlock.trylock = trylock; in lock_acquire()
5854 hlock.read = read; in lock_acquire()
5855 hlock.check = check; in lock_acquire()
5856 hlock.hardirqs_off = true; in lock_acquire()
5857 hlock.references = 0; in lock_acquire()
5859 verify_lock_unused(lock, &hlock, subclass); in lock_acquire()
6039 struct held_lock *hlock; in __lock_contended() local
6055 hlock = find_held_lock(curr, lock, depth, &i); in __lock_contended()
6056 if (!hlock) { in __lock_contended()
6061 if (hlock->instance != lock) in __lock_contended()
6064 hlock->waittime_stamp = lockstat_clock(); in __lock_contended()
6066 contention_point = lock_point(hlock_class(hlock)->contention_point, ip); in __lock_contended()
6067 contending_point = lock_point(hlock_class(hlock)->contending_point, in __lock_contended()
6070 stats = get_lock_stats(hlock_class(hlock)); in __lock_contended()
6076 stats->bounces[bounce_contended + !!hlock->read]++; in __lock_contended()
6083 struct held_lock *hlock; in __lock_acquired() local
6100 hlock = find_held_lock(curr, lock, depth, &i); in __lock_acquired()
6101 if (!hlock) { in __lock_acquired()
6106 if (hlock->instance != lock) in __lock_acquired()
6110 if (hlock->waittime_stamp) { in __lock_acquired()
6112 waittime = now - hlock->waittime_stamp; in __lock_acquired()
6113 hlock->holdtime_stamp = now; in __lock_acquired()
6116 stats = get_lock_stats(hlock_class(hlock)); in __lock_acquired()
6118 if (hlock->read) in __lock_acquired()
6124 stats->bounces[bounce_acquired + !!hlock->read]++; in __lock_acquired()
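
The lockstat paths stamp three points in time: __lock_contended() records waittime_stamp when contention is first seen, __lock_acquired() turns the delta into a wait time and restarts holdtime_stamp, and lock_release_holdtime() at the top of this listing computes the hold time on release. A compact sketch of those three hooks with a stand-in lockstat_clock() based on CLOCK_MONOTONIC; the per-class stats and bounce counters are left out.

        /* Sketch of the lockstat timing points visible in this listing. */
        #include <stdint.h>
        #include <stdio.h>
        #include <time.h>

        struct held_lock_sketch {
                uint64_t waittime_stamp;
                uint64_t holdtime_stamp;
        };

        static uint64_t lockstat_clock(void)   /* stand-in clock */
        {
                struct timespec ts;

                clock_gettime(CLOCK_MONOTONIC, &ts);
                return (uint64_t)ts.tv_sec * 1000000000ULL + ts.tv_nsec;
        }

        static void lock_contended(struct held_lock_sketch *h)
        {
                h->waittime_stamp = lockstat_clock();          /* wait starts here */
        }

        static void lock_acquired(struct held_lock_sketch *h, uint64_t *waittime)
        {
                uint64_t now = lockstat_clock();

                if (h->waittime_stamp)
                        *waittime = now - h->waittime_stamp;   /* how long we waited */
                h->holdtime_stamp = now;                       /* hold starts here */
        }

        static uint64_t lock_release_holdtime(const struct held_lock_sketch *h)
        {
                return lockstat_clock() - h->holdtime_stamp;   /* how long we held it */
        }

        int main(void)
        {
                struct held_lock_sketch h = { 0 };
                uint64_t wait = 0;

                lock_contended(&h);
                lock_acquired(&h, &wait);
                printf("waited %llu ns, held %llu ns\n",
                       (unsigned long long)wait,
                       (unsigned long long)lock_release_holdtime(&h));
                return 0;
        }
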
6673 const void *mem_to, struct held_lock *hlock) in print_freed_lock_bug() argument
6689 print_lock(hlock); in print_freed_lock_bug()
6713 struct held_lock *hlock; in debug_check_no_locks_freed() local
6722 hlock = curr->held_locks + i; in debug_check_no_locks_freed()
6724 if (not_in_range(mem_from, mem_len, hlock->instance, in debug_check_no_locks_freed()
6725 sizeof(*hlock->instance))) in debug_check_no_locks_freed()
6728 print_freed_lock_bug(curr, mem_from, mem_from + mem_len, hlock); in debug_check_no_locks_freed()
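
debug_check_no_locks_freed() walks the held-lock stack and warns via print_freed_lock_bug() when the freed range overlaps the memory holding a held lock's lockdep_map. The overlap test itself (not_in_range() in the lines above) reduces to a standard interval-disjointness check, sketched here with plain pointers standing in for the kernel types:

        /* Sketch of the range check behind debug_check_no_locks_freed(). */
        #include <stdio.h>

        static int not_in_range(const char *mem_from, unsigned long mem_len,
                                const char *lock_from, unsigned long lock_len)
        {
                /* Disjoint iff one range ends before the other begins. */
                return lock_from + lock_len <= mem_from ||
                       mem_from + mem_len <= lock_from;
        }

        int main(void)
        {
                char buf[64];
                char *lock_in_buf = buf + 16;   /* imagine a lock embedded in freed memory */

                printf("overlaps: %d\n", !not_in_range(buf, sizeof(buf), lock_in_buf, 8));
                printf("overlaps: %d\n", !not_in_range(buf, 8, lock_in_buf, 8));
                return 0;
        }
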