Lines Matching defs:lcp
161 lwpchan_data_t *lcp;
169 lcp = p->p_lcp;
170 hashbucket = lcp->lwpchan_cache;
171 endbucket = hashbucket + lcp->lwpchan_size;
198 atomic_dec_32(&lcp->lwpchan_entries);
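
The matches at 161-198 walk a per-process cache reached through p->p_lcp. Below is a minimal userland model of the header those lines dereference, inferred only from the fields named in this listing; the authoritative definition lives in the kernel's lwpchan headers and may differ in types and extra members.

    #include <stdint.h>

    typedef struct lwpchan_hashbucket lwpchan_hashbucket_t;  /* opaque in this model */

    typedef struct lwpchan_data {
        unsigned int          lwpchan_bits;       /* log2 of bucket count */
        unsigned int          lwpchan_size;       /* 1 << lwpchan_bits */
        unsigned int          lwpchan_mask;       /* lwpchan_size - 1 */
        volatile uint32_t     lwpchan_entries;    /* kept with atomic_inc/dec_32 */
        lwpchan_hashbucket_t  *lwpchan_cache;     /* bucket array */
        struct lwpchan_data   *lwpchan_next_data; /* older, retired tables */
    } lwpchan_data_t;
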
213 lwpchan_bucket(lwpchan_data_t *lcp, uintptr_t addr)
223 i = (addr ^ (addr >> lcp->lwpchan_bits)) & lcp->lwpchan_mask;
224 return (lcp->lwpchan_cache + i);
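
Line 223 folds the upper address bits into the lower ones before masking, so synchronization objects that share low-order address bits still spread across buckets. A stand-alone sketch of that index computation; the function name and sample address are illustrative, not from the kernel.

    #include <stdint.h>
    #include <stdio.h>

    static size_t
    bucket_index(uintptr_t addr, unsigned int bits)
    {
        uintptr_t mask = ((uintptr_t)1 << bits) - 1;

        /* fold high bits into low bits, then mask to the table size */
        return ((addr ^ (addr >> bits)) & mask);
    }

    int
    main(void)
    {
        uintptr_t addr = 0xfec12340;  /* hypothetical user-level lock address */

        printf("bucket %zu of %u\n", bucket_index(addr, 10), 1U << 10);
        return (0);
    }
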
233 lwpchan_data_t *lcp;
244 lcp = kmem_alloc(sizeof (lwpchan_data_t), KM_SLEEP);
245 lcp->lwpchan_bits = bits;
246 lcp->lwpchan_size = 1 << lcp->lwpchan_bits;
247 lcp->lwpchan_mask = lcp->lwpchan_size - 1;
248 lcp->lwpchan_entries = 0;
249 lcp->lwpchan_cache = kmem_zalloc(lcp->lwpchan_size *
251 lcp->lwpchan_next_data = NULL;
258 kmem_free(lcp->lwpchan_cache, lcp->lwpchan_size *
260 kmem_free(lcp, sizeof (lwpchan_data_t));
281 newbucket = lwpchan_bucket(lcp,
290 lcp->lwpchan_entries = count;
302 lcp->lwpchan_next_data = old_lcp;
305 * As soon as we store the new lcp, future locking operations will
307 * established reaches global visibility before the new lcp does.
310 p->p_lcp = lcp;
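
Lines 244-310 build a new table, rehash the existing entries into it, chain the old table through lwpchan_next_data, and only then publish it; the comment fragments at 305-307 concern making the fully initialized table globally visible before the pointer swap at 310. A userland analogue of that publish step, using C11 release semantics in place of the kernel's membar_producer() followed by a plain store; the struct is the model from the earlier sketch, not the kernel type.

    #include <stdatomic.h>
    #include <stddef.h>

    struct table {
        size_t size;          /* bucket count */
        void **buckets;       /* fully initialized before publish */
        struct table *next;   /* previous generation, freed only at teardown */
    };

    static _Atomic(struct table *) current;

    static void
    publish(struct table *newtab)
    {
        /* keep the old generation reachable, as lwpchan_next_data does */
        newtab->next = atomic_load_explicit(&current, memory_order_relaxed);
        /* release ordering: stores into *newtab are visible before the pointer is */
        atomic_store_explicit(&current, newtab, memory_order_release);
    }
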
334 lwpchan_data_t *lcp;
339 lcp = p->p_lcp;
343 hashbucket = lcp->lwpchan_cache;
344 endbucket = hashbucket + lcp->lwpchan_size;
359 while (lcp != NULL) {
360 lwpchan_data_t *next_lcp = lcp->lwpchan_next_data;
361 kmem_free(lcp->lwpchan_cache, lcp->lwpchan_size *
363 kmem_free(lcp, sizeof (lwpchan_data_t));
364 lcp = next_lcp;
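
Lines 359-364 tear the cache down by following lwpchan_next_data so every retired generation is freed along with the current one. The same walk against the userland model above, with free() standing in for kmem_free().

    #include <stdlib.h>

    static void
    destroy_all(struct table *tab)
    {
        while (tab != NULL) {
            struct table *next = tab->next;

            free(tab->buckets);   /* the bucket array */
            free(tab);            /* the header itself */
            tab = next;
        }
    }
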
411 lwpchan_data_t *lcp;
420 if ((lcp = p->p_lcp) == NULL) {
424 hashbucket = lwpchan_bucket(lcp, (uintptr_t)addr);
426 if (lcp != p->p_lcp) {
443 if (lcp != p->p_lcp) {
456 if (count > lcp->lwpchan_bits + 2 && /* larger table, longer chains */
457 (bits = lcp->lwpchan_bits) < LWPCHAN_MAX_BITS) {
471 atomic_inc_32(&lcp->lwpchan_entries);
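
Lines 456-457 decide when to grow the table: a hash chain longer than lwpchan_bits + 2 is taken as a sign of overload, and growth is capped at LWPCHAN_MAX_BITS; line 471 then counts the new entry atomically. A small illustration of that test; the cap value here is a placeholder for the example, not the kernel's constant.

    #include <stdbool.h>

    #define LWPCHAN_MAX_BITS_MODEL  16  /* placeholder cap for illustration */

    static bool
    should_grow(unsigned int chain_len, unsigned int bits)
    {
        /* a larger table tolerates proportionally longer chains before doubling */
        return (chain_len > bits + 2 && bits < LWPCHAN_MAX_BITS_MODEL);
    }
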
532 upi_get(upib_t *upibp, lwpchan_t *lcp)
538 if (upip->upi_lwpchan.lc_wchan0 == lcp->lc_wchan0 &&
539 upip->upi_lwpchan.lc_wchan == lcp->lc_wchan)
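
At line 532 the name lcp is reused for an lwpchan_t, the two-word key that identifies a wait channel, rather than the cache header above; lines 538-539 compare both words while scanning a priority-inheritance chain. A sketch of that lookup with assumed struct shapes and an assumed chain-link field name, for illustration only.

    #include <stddef.h>

    typedef struct lwpchan {
        void *lc_wchan0;   /* first word of the channel key */
        void *lc_wchan;    /* second word of the channel key */
    } lwpchan_t;

    typedef struct upimutex {
        lwpchan_t upi_lwpchan;          /* channel this upimutex represents */
        struct upimutex *upi_nextchain; /* next entry on the hash chain */
    } upimutex_t;

    static upimutex_t *
    find_upimutex(upimutex_t *chain, const lwpchan_t *key)
    {
        for (; chain != NULL; chain = chain->upi_nextchain) {
            if (chain->upi_lwpchan.lc_wchan0 == key->lc_wchan0 &&
                chain->upi_lwpchan.lc_wchan == key->lc_wchan)
                return (chain);   /* exact match on both words */
        }
        return (NULL);
    }
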