
Searched refs:lock (Results 1 – 25 of 1653) sorted by relevance


/freebsd/sys/dev/drm2/ttm/
ttm_lock.c
48 void ttm_lock_init(struct ttm_lock *lock) in ttm_lock_init() argument
50 mtx_init(&lock->lock, "ttmlk", NULL, MTX_DEF); in ttm_lock_init()
51 lock->rw = 0; in ttm_lock_init()
52 lock->flags = 0; in ttm_lock_init()
53 lock->kill_takers = false; in ttm_lock_init()
54 lock->signal = SIGKILL; in ttm_lock_init()
68 void ttm_read_unlock(struct ttm_lock *lock) in ttm_read_unlock() argument
70 mtx_lock(&lock->lock); in ttm_read_unlock()
71 if (--lock->rw == 0) in ttm_read_unlock()
72 wakeup(lock); in ttm_read_unlock()
[all …]
ttm_lock.h
71 struct mtx lock; member
86 extern void ttm_lock_init(struct ttm_lock *lock);
95 extern void ttm_read_unlock(struct ttm_lock *lock);
107 extern int ttm_read_lock(struct ttm_lock *lock, bool interruptible);
124 extern int ttm_read_trylock(struct ttm_lock *lock, bool interruptible);
133 extern void ttm_lock_downgrade(struct ttm_lock *lock);
142 extern void ttm_suspend_lock(struct ttm_lock *lock);
151 extern void ttm_suspend_unlock(struct ttm_lock *lock);
165 extern int ttm_vt_lock(struct ttm_lock *lock, bool interruptible,
177 extern int ttm_vt_unlock(struct ttm_lock *lock);
[all …]
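
A note on the API above: ttm_lock_init(), ttm_read_lock() and ttm_read_unlock() form the drm2/ttm read-write lock built on a FreeBSD mtx, as the ttm_lock.c hits show. A minimal kernel-context sketch of the read side, using only the declarations listed above (the include path and the surrounding driver glue are assumptions):

    /* Hedged sketch; only meaningful inside the drm2/ttm kernel code. */
    #include <dev/drm2/ttm/ttm_lock.h>   /* assumed include path */

    static struct ttm_lock glob_lock;

    static int
    read_side_example(void)
    {
            int ret;

            ttm_lock_init(&glob_lock);              /* mtx_init() + zeroed state */
            ret = ttm_read_lock(&glob_lock, true);  /* interruptible read lock */
            if (ret != 0)
                    return (ret);
            /* ... read-side work ... */
            ttm_read_unlock(&glob_lock);            /* wakeup() when rw drops to 0 */
            return (0);
    }
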
/freebsd/contrib/unbound/util/
locks.h
86 #define lock_protect(lock, area, size) /* nop */ argument
87 #define lock_unprotect(lock, area) /* nop */ argument
88 #define lock_get_mem(lock) (0) /* nothing */ argument
101 #define lock_basic_init(lock) LOCKRET(pthread_mutex_init(lock, NULL)) argument
102 #define lock_basic_destroy(lock) LOCKRET(pthread_mutex_destroy(lock)) argument
103 #define lock_basic_lock(lock) LOCKRET(pthread_mutex_lock(lock)) argument
104 #define lock_basic_unlock(lock) LOCKRET(pthread_mutex_unlock(lock)) argument
109 #define lock_rw_init(lock) LOCKRET(pthread_mutex_init(lock, NULL)) argument
110 #define lock_rw_destroy(lock) LOCKRET(pthread_mutex_destroy(lock)) argument
111 #define lock_rw_rdlock(lock) LOCKRET(pthread_mutex_lock(lock)) argument
[all …]
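
These locks.h hits are unbound's locking wrappers: in the fallback branch shown, lock_protect()/lock_get_mem() are no-ops and both the "basic" and "rw" flavors expand to pthread mutex calls via LOCKRET(). A small usage sketch, assuming a pthread build where lock_basic_type maps to pthread_mutex_t (as in unbound's own sources):

    /* Hedged sketch of unbound-style locking; names follow util/locks.h. */
    #include "util/locks.h"

    static lock_basic_type counter_lock;
    static long counter;

    void counter_setup(void)    { lock_basic_init(&counter_lock); }
    void counter_teardown(void) { lock_basic_destroy(&counter_lock); }

    void counter_bump(void)
    {
            lock_basic_lock(&counter_lock);     /* pthread_mutex_lock() underneath */
            counter++;
            lock_basic_unlock(&counter_lock);
    }
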
/freebsd/crypto/openssl/crypto/
threads_win.c
41 SRWLOCK lock; member
59 CRYPTO_RCU_LOCK *lock; member
107 /* lock protecting write side operations */
110 /* lock protecting updates to writers_alloced/current_alloc_idx */
116 /* lock to enforce in-order retirement */
122 /* lock used with NO_INTERLOCKEDOR64: VS2010 x86 */
126 static struct rcu_qp *allocate_new_qp_group(struct rcu_lock_st *lock, in allocate_new_qp_group() argument
131 lock->group_count = count; in allocate_new_qp_group()
183 void ossl_rcu_lock_free(CRYPTO_RCU_LOCK *lock) in ossl_rcu_lock_free()
185 CRYPTO_THREAD_lock_free(lock argument
198 get_hold_current_qp(CRYPTO_RCU_LOCK * lock) get_hold_current_qp() argument
230 ossl_rcu_read_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_lock() argument
268 ossl_rcu_write_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_lock() argument
273 ossl_rcu_write_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_unlock() argument
278 ossl_rcu_read_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_unlock() argument
307 update_qp(CRYPTO_RCU_LOCK * lock,uint32_t * curr_id) update_qp() argument
351 retire_qp(CRYPTO_RCU_LOCK * lock,struct rcu_qp * qp) retire_qp() argument
361 ossl_synchronize_rcu(CRYPTO_RCU_LOCK * lock) ossl_synchronize_rcu() argument
408 ossl_rcu_call(CRYPTO_RCU_LOCK * lock,rcu_cb_fn cb,void * data) ossl_rcu_call() argument
437 CRYPTO_RWLOCK *lock; CRYPTO_THREAD_lock_new() local
466 CRYPTO_THREAD_read_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_read_lock() argument
478 CRYPTO_THREAD_write_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_write_lock() argument
491 CRYPTO_THREAD_unlock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_unlock() argument
508 CRYPTO_THREAD_lock_free(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_lock_free() argument
531 LONG volatile *lock = (LONG *)once; CRYPTO_THREAD_run_once() local
608 CRYPTO_atomic_add(int * val,int amount,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add() argument
628 CRYPTO_atomic_add64(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add64() argument
647 CRYPTO_atomic_and(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_and() argument
666 CRYPTO_atomic_or(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_or() argument
684 CRYPTO_atomic_load(uint64_t * val,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load() argument
700 CRYPTO_atomic_store(uint64_t * dst,uint64_t val,CRYPTO_RWLOCK * lock) CRYPTO_atomic_store() argument
716 CRYPTO_atomic_load_int(int * val,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load_int() argument
[all...]
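
threads_win.c implements OpenSSL's portable locking primitives on SRWLOCKs; callers use the same public CRYPTO_THREAD_*/CRYPTO_atomic_* entry points on every platform. A minimal sketch with that public libcrypto API (error handling trimmed):

    /* Sketch of the public API these backends implement. */
    #include <openssl/crypto.h>

    int bump_counter(int *counter)
    {
            CRYPTO_RWLOCK *lock = CRYPTO_THREAD_lock_new();
            int after = 0;

            if (lock == NULL)
                    return 0;
            if (CRYPTO_THREAD_write_lock(lock)) {   /* exclusive section */
                    (*counter)++;
                    CRYPTO_THREAD_unlock(lock);
            }
            /* Or let libcrypto pick real atomics vs. the fallback lock: */
            CRYPTO_atomic_add(counter, 1, &after, lock);
            CRYPTO_THREAD_lock_free(lock);
            return 1;
    }
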
threads_pthread.c
185 * This is the core of an rcu lock. It tracks the readers and writers for the
186 * current quiescence point for a given lock. Users is the 64 bit value that
197 CRYPTO_RCU_LOCK *lock;
245 /* lock protecting write side operations */
248 /* lock protecting updates to writers_alloced/current_alloc_idx */
254 /* lock to enforce in-order retirement */
262 static struct rcu_qp *get_hold_current_qp(struct rcu_lock_st *lock) in get_hold_current_qp()
268 qp_idx = ATOMIC_LOAD_N(uint32_t, &lock->reader_idx, __ATOMIC_RELAXED); in get_hold_current_qp()
277 * of the lock is flushed from a local cpu cache so that we see any in get_hold_current_qp()
281 ATOMIC_ADD_FETCH(&lock in get_hold_current_qp()
195 CRYPTO_RCU_LOCK *lock; global() member
260 get_hold_current_qp(struct rcu_lock_st * lock) get_hold_current_qp() argument
304 ossl_rcu_read_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_lock() argument
343 ossl_rcu_read_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_unlock() argument
381 update_qp(CRYPTO_RCU_LOCK * lock,uint32_t * curr_id) update_qp() argument
428 retire_qp(CRYPTO_RCU_LOCK * lock,struct rcu_qp * qp) retire_qp() argument
436 allocate_new_qp_group(CRYPTO_RCU_LOCK * lock,uint32_t count) allocate_new_qp_group() argument
446 ossl_rcu_write_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_lock() argument
452 ossl_rcu_write_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_unlock() argument
458 ossl_synchronize_rcu(CRYPTO_RCU_LOCK * lock) ossl_synchronize_rcu() argument
509 ossl_rcu_call(CRYPTO_RCU_LOCK * lock,rcu_cb_fn cb,void * data) ossl_rcu_call() argument
570 ossl_rcu_lock_free(CRYPTO_RCU_LOCK * lock) ossl_rcu_lock_free() argument
588 CRYPTO_RWLOCK *lock; CRYPTO_THREAD_lock_new() local
630 CRYPTO_THREAD_read_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_read_lock() argument
645 CRYPTO_THREAD_write_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_write_lock() argument
660 CRYPTO_THREAD_unlock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_unlock() argument
675 CRYPTO_THREAD_lock_free(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_lock_free() argument
737 CRYPTO_atomic_add(int * val,int amount,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add() argument
764 CRYPTO_atomic_add64(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add64() argument
790 CRYPTO_atomic_and(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_and() argument
816 CRYPTO_atomic_or(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_or() argument
841 CRYPTO_atomic_load(uint64_t * val,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load() argument
864 CRYPTO_atomic_store(uint64_t * dst,uint64_t val,CRYPTO_RWLOCK * lock) CRYPTO_atomic_store() argument
887 CRYPTO_atomic_load_int(int * val,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load_int() argument
[all...]
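
The threads_pthread.c hits are the same backend plus OpenSSL's internal RCU-style lock (the ossl_rcu_* symbols are libcrypto-internal, not public API). Readers enter a cheap read-side section; a writer publishes a new pointer, then ossl_synchronize_rcu() waits until no reader can still observe the old one. A hedged sketch of that pattern, using only the signatures visible above and assuming an already-constructed CRYPTO_RCU_LOCK (the constructor is not shown in these hits):

    /* Internal-API sketch; "cfg" stands in for whatever pointer the lock guards. */
    static void *cfg;

    const void *reader(CRYPTO_RCU_LOCK *lock)
    {
            const void *p;

            ossl_rcu_read_lock(lock);      /* read-side critical section */
            p = cfg;                       /* real code wraps this access in RCU helpers */
            ossl_rcu_read_unlock(lock);
            return p;
    }

    void publish(CRYPTO_RCU_LOCK *lock, void *newcfg, void *oldcfg)
    {
            ossl_rcu_write_lock(lock);     /* serialize writers */
            cfg = newcfg;
            ossl_rcu_write_unlock(lock);
            ossl_synchronize_rcu(lock);    /* wait out readers of oldcfg */
            /* oldcfg may now be freed */
            (void)oldcfg;
    }
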
threads_none.c
30 struct rcu_lock_st *lock; in ossl_rcu_lock_new()
32 lock = OPENSSL_zalloc(sizeof(*lock)); in ossl_rcu_lock_new()
33 return lock; in ossl_rcu_lock_new()
36 void ossl_rcu_lock_free(CRYPTO_RCU_LOCK *lock) in ossl_rcu_lock_free()
38 OPENSSL_free(lock); in ossl_rcu_lock_free()
41 void ossl_rcu_read_lock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_read_lock()
46 void ossl_rcu_write_lock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_write_lock()
51 void ossl_rcu_write_unlock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_write_unlock()
56 void ossl_rcu_read_unlock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_read_unlock()
29 struct rcu_lock_st *lock; ossl_rcu_lock_new() local
35 ossl_rcu_lock_free(CRYPTO_RCU_LOCK * lock) ossl_rcu_lock_free() argument
40 ossl_rcu_read_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_lock() argument
45 ossl_rcu_write_lock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_lock() argument
50 ossl_rcu_write_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_write_unlock() argument
55 ossl_rcu_read_unlock(CRYPTO_RCU_LOCK * lock) ossl_rcu_read_unlock() argument
60 ossl_synchronize_rcu(CRYPTO_RCU_LOCK * lock) ossl_synchronize_rcu() argument
75 ossl_rcu_call(CRYPTO_RCU_LOCK * lock,rcu_cb_fn cb,void * data) ossl_rcu_call() argument
101 CRYPTO_RWLOCK *lock; CRYPTO_THREAD_lock_new() local
112 CRYPTO_THREAD_read_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_read_lock() argument
119 CRYPTO_THREAD_write_lock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_write_lock() argument
126 CRYPTO_THREAD_unlock(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_unlock() argument
133 CRYPTO_THREAD_lock_free(CRYPTO_RWLOCK * lock) CRYPTO_THREAD_lock_free() argument
221 CRYPTO_atomic_add(int * val,int amount,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add() argument
230 CRYPTO_atomic_add64(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_add64() argument
239 CRYPTO_atomic_and(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_and() argument
248 CRYPTO_atomic_or(uint64_t * val,uint64_t op,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_or() argument
256 CRYPTO_atomic_load(uint64_t * val,uint64_t * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load() argument
263 CRYPTO_atomic_store(uint64_t * dst,uint64_t val,CRYPTO_RWLOCK * lock) CRYPTO_atomic_store() argument
270 CRYPTO_atomic_load_int(int * val,int * ret,CRYPTO_RWLOCK * lock) CRYPTO_atomic_load_int() argument
[all...]
/freebsd/contrib/ntp/sntp/libevent/
evthread.c
118 target->lock == cbs->lock && in evthread_set_lock_callbacks()
127 if (cbs->alloc && cbs->free && cbs->lock && cbs->unlock) { in evthread_set_lock_callbacks()
191 void *lock; member
201 if (!(result->lock = original_lock_fns_.alloc( in debug_lock_alloc()
207 result->lock = NULL; in debug_lock_alloc()
219 struct debug_lock *lock = lock_; in debug_lock_free() local
220 EVUTIL_ASSERT(lock->count == 0); in debug_lock_free()
221 EVUTIL_ASSERT(locktype == lock->locktype); in debug_lock_free()
222 EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature); in debug_lock_free()
224 original_lock_fns_.free(lock->lock, in debug_lock_free()
[all …]
evthread-internal.h
96 evthread_lock_fns_.lock(mode, lockvar); \
129 #define EVLOCK_ASSERT_LOCKED(lock) \ argument
131 if ((lock) && evthread_lock_debugging_enabled_) { \
132 EVUTIL_ASSERT(evthread_is_debug_lock_held_(lock)); \
138 static inline int EVLOCK_TRY_LOCK_(void *lock);
140 EVLOCK_TRY_LOCK_(void *lock) in EVLOCK_TRY_LOCK_() argument
142 if (lock && evthread_lock_fns_.lock) { in EVLOCK_TRY_LOCK_()
143 int r = evthread_lock_fns_.lock(EVTHREAD_TRY, lock); in EVLOCK_TRY_LOCK_()
174 #define EVTHREAD_COND_WAIT(cond, lock) \ argument
175 ( (cond) ? evthread_cond_fns_.wait_condition((cond), (lock), NULL) : 0 )
[all …]
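
Both evthread files concern libevent's pluggable lock callbacks and the debug_lock wrapper that asserts on lock misuse. Applications rarely call evthread_set_lock_callbacks() themselves; they enable a prebuilt backend and, optionally, the debugging layer. A sketch with the public API (assumes libevent built with pthreads and linked against event_pthreads):

    /* Enable pthread-based locking plus libevent's lock-debugging checks. */
    #include <event2/thread.h>
    #include <event2/event.h>

    struct event_base *make_threaded_base(void)
    {
            /* Installs pthread lock/condition callbacks. */
            if (evthread_use_pthreads() != 0)
                    return NULL;

            /* Wraps each lock in the debug_lock checks seen in evthread.c. */
            evthread_enable_lock_debugging();

            return event_base_new();
    }
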
/freebsd/contrib/libevent/
evthread.c
118 target->lock == cbs->lock && in evthread_set_lock_callbacks()
127 if (cbs->alloc && cbs->free && cbs->lock && cbs->unlock) { in evthread_set_lock_callbacks()
191 void *lock; member
201 if (!(result->lock = original_lock_fns_.alloc( in debug_lock_alloc()
207 result->lock = NULL; in debug_lock_alloc()
219 struct debug_lock *lock = lock_; in debug_lock_free() local
220 EVUTIL_ASSERT(lock->count == 0); in debug_lock_free()
221 EVUTIL_ASSERT(locktype == lock->locktype); in debug_lock_free()
222 EVUTIL_ASSERT(DEBUG_LOCK_SIG == lock->signature); in debug_lock_free()
224 original_lock_fns_.free(lock->lock, in debug_lock_free()
[all …]
evthread-internal.h
96 evthread_lock_fns_.lock(mode, lockvar); \
129 #define EVLOCK_ASSERT_LOCKED(lock) \ argument
131 if ((lock) && evthread_lock_debugging_enabled_) { \
132 EVUTIL_ASSERT(evthread_is_debug_lock_held_(lock)); \
138 static inline int EVLOCK_TRY_LOCK_(void *lock);
140 EVLOCK_TRY_LOCK_(void *lock) in EVLOCK_TRY_LOCK_() argument
142 if (lock && evthread_lock_fns_.lock) { in EVLOCK_TRY_LOCK_()
143 int r = evthread_lock_fns_.lock(EVTHREAD_TRY, lock); in EVLOCK_TRY_LOCK_()
174 #define EVTHREAD_COND_WAIT(cond, lock) \ argument
175 ( (cond) ? evthread_cond_fns_.wait_condition((cond), (lock), NULL) : 0 )
[all …]
/freebsd/crypto/heimdal/lib/hx509/
lock.c
60 hx509_lock_init(hx509_context context, hx509_lock *lock) in hx509_lock_init() argument
65 *lock = NULL; in hx509_lock_init()
81 *lock = l; in hx509_lock_init()
87 hx509_lock_add_password(hx509_lock lock, const char *password) in hx509_lock_add_password() argument
96 d = realloc(lock->password.val, in hx509_lock_add_password()
97 (lock->password.len + 1) * sizeof(lock->password.val[0])); in hx509_lock_add_password()
102 lock->password.val = d; in hx509_lock_add_password()
103 lock->password.val[lock->password.len] = s; in hx509_lock_add_password()
104 lock->password.len++; in hx509_lock_add_password()
110 _hx509_lock_get_passwords(hx509_lock lock) in _hx509_lock_get_passwords() argument
[all …]
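
Despite the name, hx509_lock is not a mutex: it is Heimdal's container of credentials (passwords, certificates, prompters) used to unlock private keys, and the hits show its init and password-append paths. A hedged sketch based on the signatures above (Heimdal headers and the cleanup call are assumptions):

    /* Build an hx509 "lock" carrying one password for later key decryption. */
    #include <hx509.h>

    int make_lock(hx509_context ctx, const char *pw, hx509_lock *out)
    {
            int ret;

            ret = hx509_lock_init(ctx, out);           /* *out starts empty */
            if (ret)
                    return ret;
            ret = hx509_lock_add_password(*out, pw);   /* appended to lock->password */
            if (ret)
                    hx509_lock_free(*out);             /* assumed cleanup call */
            return ret;
    }
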
/freebsd/sys/cddl/compat/opensolaris/sys/
rwlock.h
59 #define rw_init(lock, desc, type, arg) do { \ argument
62 KASSERT(((lock)->lock_object.lo_flags & LO_ALLMASK) != \
63 LO_EXPECTED, ("lock %s already initialized", #lock)); \
64 bzero((lock), sizeof(struct sx)); \
65 for (_name = #lock; *_name != '\0'; _name++) { \
70 _name = #lock; \
71 sx_init_flags((lock), _name, RW_FLAGS); \
73 #define rw_destroy(lock) sx_destroy(lock) argument
74 #define rw_enter(lock, how) do { \ argument
76 sx_slock(lock); \
[all …]
/freebsd/sys/dev/drm2/
drm_lock.c
58 struct drm_lock *lock = data; in drm_lock() local
64 if (lock->context == DRM_KERNEL_CONTEXT) { in drm_lock()
66 DRM_CURRENTPID, lock->context); in drm_lock()
71 lock->context, DRM_CURRENTPID, in drm_lock()
72 master->lock.hw_lock->lock, lock->flags); in drm_lock()
74 mtx_lock(&master->lock.spinlock); in drm_lock()
75 master->lock.user_waiters++; in drm_lock()
76 mtx_unlock(&master->lock.spinlock); in drm_lock()
80 if (!master->lock.hw_lock) { in drm_lock()
87 if (drm_lock_take(&master->lock, lock->context)) { in drm_lock()
[all …]
/freebsd/sys/contrib/openzfs/include/os/freebsd/spl/sys/
rwlock.h
61 #define rw_init(lock, desc, type, arg) do { \ argument
64 for (_name = #lock; *_name != '\0'; _name++) { \
69 _name = #lock; \
70 sx_init_flags((lock), _name, RW_FLAGS); \
72 #define rw_destroy(lock) sx_destroy(lock) argument
73 #define rw_enter(lock, how) do { \ argument
75 sx_slock(lock); \
77 sx_xlock(lock); \
80 #define rw_tryenter(lock, how) \ argument
81 ((how) == RW_READER ? sx_try_slock(lock) : sx_try_xlock(lock))
[all …]
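
This header maps the Solaris-style rw_* API used by OpenZFS onto FreeBSD sx(9) locks: rw_enter(lock, RW_READER) becomes sx_slock() and RW_WRITER becomes sx_xlock(). A kernel-context sketch of the consumer idiom; krwlock_t and rw_exit() come from the same compat layer but are not visible in the hits above, so treat them as assumptions:

    /* Kernel-context sketch; requires the SPL sys/rwlock.h shown above. */
    static krwlock_t stats_lock;       /* an sx lock underneath */
    static uint64_t stats_value;

    void stats_init(void)
    {
            rw_init(&stats_lock, "stats", RW_DEFAULT, NULL);
    }

    uint64_t stats_get(void)
    {
            uint64_t v;

            rw_enter(&stats_lock, RW_READER);   /* -> sx_slock() */
            v = stats_value;
            rw_exit(&stats_lock);               /* assumed release macro */
            return (v);
    }
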
mutex.h
55 #define mutex_init(lock, desc, type, arg) do { \ argument
58 for (_name = #lock; *_name != '\0'; _name++) { \
63 _name = #lock; \
64 sx_init_flags((lock), _name, MUTEX_FLAGS); \
66 #define mutex_destroy(lock) sx_destroy(lock) argument
67 #define mutex_enter(lock) sx_xlock(lock) argument
68 #define mutex_enter_interruptible(lock) sx_xlock_sig(lock) argument
69 #define mutex_enter_nested(lock, type) sx_xlock(lock) argument
70 #define mutex_tryenter(lock) sx_try_xlock(lock) argument
71 #define mutex_exit(lock) sx_xunlock(lock) argument
[all …]
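
The companion mutex.h maps the Solaris kmutex API onto exclusive sx locks: mutex_enter() is sx_xlock(), mutex_exit() is sx_xunlock(), and mutex_tryenter() is sx_try_xlock(). Consumer-side sketch under the same assumptions (kmutex_t is the compat typedef, not shown in the hits):

    /* Kernel-context sketch; requires the SPL sys/mutex.h shown above. */
    static kmutex_t q_lock;

    void q_update(void)
    {
            mutex_enter(&q_lock);          /* sx_xlock() */
            /* ... mutate shared queue state ... */
            mutex_exit(&q_lock);           /* sx_xunlock() */
    }

    int q_try_update(void)
    {
            if (!mutex_tryenter(&q_lock))  /* sx_try_xlock(), 0 if already held */
                    return (0);
            /* ... */
            mutex_exit(&q_lock);
            return (1);
    }
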
/freebsd/sys/kern/
kern_rangelock.c
86 rangelock_cheat_drain(struct rangelock *lock) in rangelock_cheat_drain() argument
92 v = atomic_load_ptr(&lock->head); in rangelock_cheat_drain()
95 sleepq_add(&lock->head, NULL, "ranged1", 0, 0); in rangelock_cheat_drain()
96 sleepq_wait(&lock->head, PRI_USER); in rangelock_cheat_drain()
97 sleepq_lock(&lock->head); in rangelock_cheat_drain()
99 sleepq_release(&lock->head); in rangelock_cheat_drain()
104 rangelock_cheat_lock(struct rangelock *lock, int locktype, bool trylock, in rangelock_cheat_lock() argument
109 v = atomic_load_ptr(&lock->head); in rangelock_cheat_lock()
118 sleepq_lock(&lock->head); in rangelock_cheat_lock()
120 rangelock_cheat_drain(lock); in rangelock_cheat_lock()
[all …]
kern_condvar.c
35 #include <sys/lock.h>
65 #define CV_ASSERT(cvp, lock, td) do { \ argument
69 KASSERT((lock) != NULL, ("%s: lock NULL", __func__)); \
108 _cv_wait(struct cv *cvp, struct lock_object *lock) in _cv_wait() argument
119 CV_ASSERT(cvp, lock, td); in _cv_wait()
120 WITNESS_WARN(WARN_GIANTOK | WARN_SLEEPOK, lock, in _cv_wait()
135 class = LOCK_CLASS(lock); in _cv_wait()
140 if (lock == &Giant.lock_object) in _cv_wait()
144 sleepq_add(cvp, lock, cv in _cv_wait()
171 _cv_wait_unlock(struct cv * cvp,struct lock_object * lock) _cv_wait_unlock() argument
228 _cv_wait_sig(struct cv * cvp,struct lock_object * lock) _cv_wait_sig() argument
296 _cv_timedwait_sbt(struct cv * cvp,struct lock_object * lock,sbintime_t sbt,sbintime_t pr,int flags) _cv_timedwait_sbt() argument
366 _cv_timedwait_sig_sbt(struct cv * cvp,struct lock_object * lock,sbintime_t sbt,sbintime_t pr,int flags) _cv_timedwait_sig_sbt() argument
[all...]
kern_lockf.c
205 struct sx lock; member
286 sx_init(&lf_lock_owners[i].lock, "lock owners lock"); in lf_init()
343 sx_xlock(&lf_lock_owners[lo->lo_hash].lock); in lf_alloc_lock()
345 sx_xunlock(&lf_lock_owners[lo->lo_hash].lock); in lf_alloc_lock()
353 lf_free_lock(struct lockf_entry *lock) in lf_free_lock() argument
357 KASSERT(lock->lf_refs > 0, ("lockf_entry negative ref count %p", lock)); in lf_free_lock()
358 if (--lock->lf_refs > 0) in lf_free_lock()
365 struct lock_owner *lo = lock->lf_owner; in lf_free_lock()
367 KASSERT(LIST_EMPTY(&lock->lf_outedges), in lf_free_lock()
369 KASSERT(LIST_EMPTY(&lock->lf_inedges), in lf_free_lock()
[all …]
/freebsd/sys/sys/
lock.h
63 void (*lc_assert)(const struct lock_object *lock, int what);
64 void (*lc_ddb_show)(const struct lock_object *lock);
65 void (*lc_lock)(struct lock_object *lock, uintptr_t how);
66 int (*lc_owner)(const struct lock_object *lock,
68 uintptr_t (*lc_unlock)(struct lock_object *lock);
69 int (*lc_trylock)(struct lock_object *lock, uintptr_t how);
97 #define LO_CLASSINDEX(lock) ((((lock)->lo_flags) & LO_CLASSMASK) >> LO_CLASSSHIFT) argument
98 #define LOCK_CLASS(lock) (lock_classes[LO_CLASSINDEX((lock))]) argument
240 int witness_is_owned(const struct lock_object *lock);
258 #define WITNESS_INIT(lock, type) \ argument
[all …]
condvar.h
51 void _cv_wait(struct cv *cvp, struct lock_object *lock);
52 void _cv_wait_unlock(struct cv *cvp, struct lock_object *lock);
53 int _cv_wait_sig(struct cv *cvp, struct lock_object *lock);
54 int _cv_timedwait_sbt(struct cv *cvp, struct lock_object *lock,
56 int _cv_timedwait_sig_sbt(struct cv *cvp, struct lock_object *lock,
62 #define cv_wait(cvp, lock) \ argument
63 _cv_wait((cvp), &(lock)->lock_object)
64 #define cv_wait_unlock(cvp, lock) \ argument
65 _cv_wait_unlock((cvp), &(lock)->lock_object)
66 #define cv_wait_sig(cvp, lock) \ argument
[all …]
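
kern_condvar.c and condvar.h are the kernel condition-variable API: the cv_wait(cvp, lock) macro hands the lock's lock_object to _cv_wait(), and the lock must be held on entry (it is dropped while sleeping and reacquired on wakeup). A kernel-context sketch of the usual mtx + cv pairing:

    /* Sketch of the cv(9) pattern behind the hits above (kernel code). */
    #include <sys/param.h>
    #include <sys/lock.h>
    #include <sys/mutex.h>
    #include <sys/condvar.h>

    static struct mtx q_mtx;
    static struct cv q_cv;
    static int q_ready;

    void q_setup(void)
    {
            mtx_init(&q_mtx, "q", NULL, MTX_DEF);
            cv_init(&q_cv, "qcv");
    }

    void q_wait_for_work(void)
    {
            mtx_lock(&q_mtx);
            while (q_ready == 0)
                    cv_wait(&q_cv, &q_mtx);  /* drops q_mtx while asleep */
            q_ready = 0;
            mtx_unlock(&q_mtx);
    }

    void q_post_work(void)
    {
            mtx_lock(&q_mtx);
            q_ready = 1;
            cv_signal(&q_cv);
            mtx_unlock(&q_mtx);
    }
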
/freebsd/lib/libthr/thread/
thr_pspinlock.c
53 _pthread_spin_init(pthread_spinlock_t *lock, int pshared) in _pthread_spin_init() argument
57 if (lock == NULL) in _pthread_spin_init()
64 *lock = lck; in _pthread_spin_init()
66 lck = __thr_pshared_offpage(lock, 1); in _pthread_spin_init()
69 *lock = THR_PSHARED_PTR; in _pthread_spin_init()
78 _pthread_spin_destroy(pthread_spinlock_t *lock) in _pthread_spin_destroy() argument
83 if (lock == NULL || *lock == NULL) { in _pthread_spin_destroy()
85 } else if (*lock == THR_PSHARED_PTR) { in _pthread_spin_destroy()
86 l = __thr_pshared_offpage(lock, 0); in _pthread_spin_destroy()
91 free(*lock); in _pthread_spin_destroy()
[all …]
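
thr_pspinlock.c is libthr's implementation of the POSIX spinlock calls, including the process-shared (THR_PSHARED_PTR) off-page case. From an application the interface is just the standard one:

    /* Standard POSIX spinlock usage; libthr provides the implementation above. */
    #include <pthread.h>

    static pthread_spinlock_t slock;
    static long hits;

    int spin_setup(void)
    {
            return pthread_spin_init(&slock, PTHREAD_PROCESS_PRIVATE);
    }

    void spin_hit(void)
    {
            pthread_spin_lock(&slock);   /* busy-waits: keep the section short */
            hits++;
            pthread_spin_unlock(&slock);
    }

    void spin_teardown(void)
    {
            pthread_spin_destroy(&slock);
    }
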
/freebsd/sys/contrib/ck/include/spinlock/
anderson.h
58 ck_spinlock_anderson_init(struct ck_spinlock_anderson *lock, in ck_spinlock_anderson_init() argument
71 lock->slots = slots; in ck_spinlock_anderson_init()
72 lock->count = count; in ck_spinlock_anderson_init()
73 lock->mask = count - 1; in ck_spinlock_anderson_init()
74 lock->next = 0; in ck_spinlock_anderson_init()
82 lock->wrap = (UINT_MAX % count) + 1; in ck_spinlock_anderson_init()
84 lock->wrap = 0; in ck_spinlock_anderson_init()
91 ck_spinlock_anderson_locked(struct ck_spinlock_anderson *lock) in ck_spinlock_anderson_locked() argument
96 position = ck_pr_load_uint(&lock->next) & lock->mask; in ck_spinlock_anderson_locked()
97 r = ck_pr_load_uint(&lock->slots[position].locked); in ck_spinlock_anderson_locked()
[all …]
/freebsd/sys/compat/linuxkpi/common/include/linux/
rwlock.h
33 #include <sys/lock.h>
43 #define read_lock_irq(lock) read_lock((lock))
44 #define read_unlock_irq(lock) read_unlock((lock))
45 #define write_lock_irq(lock) write_lock((lock)) argument
46 #define write_unlock_irq(lock) write_unlock((lock)) argument
47 #define read_lock_irqsave(lock, flag argument
48 write_unlock_irq(lock) global() argument
49 read_lock_irqsave(lock,flags) global() argument
51 write_lock_irqsave(lock,flags) global() argument
53 read_unlock_irqrestore(lock,flags) global() argument
55 write_unlock_irqrestore(lock,flags) global() argument
59 rwlock_init(rwlock_t * lock) rwlock_init() argument
[all...]
/freebsd/contrib/openbsm/bin/auditdistd/
synch.h
49 mtx_init(pthread_mutex_t *lock) in mtx_init() argument
53 error = pthread_mutex_init(lock, NULL); in mtx_init()
57 mtx_destroy(pthread_mutex_t *lock) in mtx_destroy() argument
61 error = pthread_mutex_destroy(lock); in mtx_destroy()
65 mtx_lock(pthread_mutex_t *lock) in mtx_lock() argument
69 error = pthread_mutex_lock(lock); in mtx_lock()
73 mtx_trylock(pthread_mutex_t *lock) in mtx_trylock() argument
77 error = pthread_mutex_trylock(lock); in mtx_trylock()
82 mtx_unlock(pthread_mutex_t *lock) in mtx_unlock() argument
86 error = pthread_mutex_unlock(lock); in mtx_unlock()
[all …]
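
auditdistd's synch.h gives the daemon kernel-flavored names (mtx_init, mtx_lock, ...) as thin checked wrappers over pthread mutexes, as the hits show. A usage sketch, assuming the header as found in that source tree:

    /* Sketch using the wrappers shown above. */
    #include <pthread.h>
    #include "synch.h"

    static pthread_mutex_t conn_lock;

    void conn_setup(void)    { mtx_init(&conn_lock); }
    void conn_teardown(void) { mtx_destroy(&conn_lock); }

    void conn_touch(void)
    {
            mtx_lock(&conn_lock);
            /* ... update shared connection state ... */
            mtx_unlock(&conn_lock);
    }
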
/freebsd/sys/contrib/openzfs/cmd/zed/
zed_file.c
35 struct flock lock; in zed_file_lock() local
41 lock.l_type = F_WRLCK; in zed_file_lock()
42 lock.l_whence = SEEK_SET; in zed_file_lock()
43 lock.l_start = 0; in zed_file_lock()
44 lock.l_len = 0; in zed_file_lock()
46 if (fcntl(fd, F_SETLK, &lock) < 0) { in zed_file_lock()
62 struct flock lock; in zed_file_unlock() local
68 lock.l_type = F_UNLCK; in zed_file_unlock()
69 lock.l_whence = SEEK_SET; in zed_file_unlock()
70 lock.l_start = 0; in zed_file_unlock()
[all …]
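
zed_file_lock()/zed_file_unlock() are plain fcntl() advisory locks over the whole file (l_start = 0, l_len = 0). A standalone equivalent of the pattern in the hits:

    /* Standalone sketch of the fcntl() advisory-lock pattern used by zed_file.c. */
    #include <fcntl.h>

    int lock_whole_file(int fd)
    {
            struct flock lk = {
                    .l_type = F_WRLCK,      /* exclusive write lock */
                    .l_whence = SEEK_SET,
                    .l_start = 0,
                    .l_len = 0,             /* zero length = to end of file */
            };
            /* Non-blocking: returns -1 if another process holds the lock. */
            return fcntl(fd, F_SETLK, &lk);
    }

    int unlock_whole_file(int fd)
    {
            struct flock lk = { .l_type = F_UNLCK, .l_whence = SEEK_SET };
            return fcntl(fd, F_SETLK, &lk);
    }
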
