Lines Matching refs:rgnp
113 #define SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid) \ argument
123 ASSERT((rgnp)->rgn_id == rid); \
124 ASSERT(!((rgnp)->rgn_flags & SFMMU_REGION_FREE)); \
125 ASSERT(((rgnp)->rgn_flags & SFMMU_REGION_TYPE_MASK) == \
127 ASSERT(_ttesz <= (rgnp)->rgn_pgszc); \
131 (uintptr_t)(rgnp)->rgn_saddr, HBLK_MIN_BYTES); \
133 (uintptr_t)((rgnp)->rgn_saddr + (rgnp)->rgn_size), \
141 ASSERT(rgnp->rgn_hmeflags & (0x1 << _flagtte)); \
146 #define SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid) argument
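The hits above are the DEBUG and stub definitions of SFMMU_VALIDATE_SHAREDHBLK(); most of the later hits apply it right after fetching the region from the SRD's hme-region array. A minimal sketch of that recurring pattern, assuming srdp, hmeblkp and rid come from the caller and sf_region_t/sf_srd_t are the kernel's own types:

	/* Look the shared region up by id, then sanity-check the hmeblk against it. */
	sf_region_t *rgnp = srdp->srd_hmergnp[rid];

	ASSERT(rgnp != NULL && rgnp->rgn_id == rid);
	ASSERT(rgnp->rgn_refcnt != 0);
	SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid);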
3268 sf_region_t *rgnp = srdp->srd_hmergnp[rid]; in sfmmu_tteload_addentry() local
3274 atomic_inc_ulong(&rgnp->rgn_ttecnt[size]); in sfmmu_tteload_addentry()
3893 sfmmu_unload_hmeregion(sf_srd_t *srdp, sf_region_t *rgnp) in sfmmu_unload_hmeregion() argument
3895 int ttesz = rgnp->rgn_pgszc; in sfmmu_unload_hmeregion()
3896 size_t rsz = rgnp->rgn_size; in sfmmu_unload_hmeregion()
3897 caddr_t rsaddr = rgnp->rgn_saddr; in sfmmu_unload_hmeregion()
3901 uint_t rid = rgnp->rgn_id; in sfmmu_unload_hmeregion()
3919 if ((rcbfunc = rgnp->rgn_cb_function) == NULL) { in sfmmu_unload_hmeregion()
3926 if (!(rgnp->rgn_hmeflags & (1 << ttesz))) { in sfmmu_unload_hmeregion()
3938 rsaddr, rsz, rgnp->rgn_obj, in sfmmu_unload_hmeregion()
3939 rgnp->rgn_objoff); in sfmmu_unload_hmeregion()
3952 rsz, rgnp->rgn_obj, in sfmmu_unload_hmeregion()
3953 rgnp->rgn_objoff); in sfmmu_unload_hmeregion()
4038 sf_region_t *rgnp; in hat_unlock_region() local
4065 rgnp = srdp->srd_hmergnp[rid]; in hat_unlock_region()
4068 ASSERT(IS_P2ALIGNED(addr, TTEBYTES(rgnp->rgn_pgszc))); in hat_unlock_region()
4069 ASSERT(IS_P2ALIGNED(len, TTEBYTES(rgnp->rgn_pgszc))); in hat_unlock_region()
4070 if (rgnp->rgn_pgszc < HBLK_MIN_TTESZ) { in hat_unlock_region()
4073 ttesz = rgnp->rgn_pgszc; in hat_unlock_region()
4076 while (ttesz < rgnp->rgn_pgszc && in hat_unlock_region()
4081 if (!(rgnp->rgn_hmeflags & (1 << ttesz))) { in hat_unlock_region()
7337 sf_region_t *rgnp; in sfmmu_pageunload() local
7341 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_pageunload()
7342 SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid); in sfmmu_pageunload()
7343 cpuset = sfmmu_rgntlb_demap(addr, rgnp, hmeblkp, 1); in sfmmu_pageunload()
7345 ASSERT(rgnp->rgn_ttecnt[ttesz] > 0); in sfmmu_pageunload()
7346 atomic_dec_ulong(&rgnp->rgn_ttecnt[ttesz]); in sfmmu_pageunload()
7561 sf_region_t *rgnp; in hat_pagesync() local
7565 rgnp = srdp->srd_hmergnp[rid]; in hat_pagesync()
7567 rgnp, rid); in hat_pagesync()
7568 shcnt += rgnp->rgn_refcnt; in hat_pagesync()
7662 sf_region_t *rgnp; in sfmmu_pagesync() local
7666 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_pagesync()
7668 srdp, rgnp, rid); in sfmmu_pagesync()
7670 rgnp, hmeblkp, 1); in sfmmu_pagesync()
7738 sf_region_t *rgnp; in sfmmu_pageclrwrt() local
7742 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_pageclrwrt()
7744 srdp, rgnp, rid); in sfmmu_pageclrwrt()
7746 rgnp, hmeblkp, 1); in sfmmu_pageclrwrt()
8108 sf_region_t *rgnp; in sfmmu_uvatopfn() local
8120 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_uvatopfn()
8121 SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid); in sfmmu_uvatopfn()
8124 rsaddr = rgnp->rgn_saddr; in sfmmu_uvatopfn()
8125 readdr = rsaddr + rgnp->rgn_size; in sfmmu_uvatopfn()
8275 sf_region_t *rgnp; in hat_page_checkshare() local
8279 rgnp = srdp->srd_hmergnp[rid]; in hat_page_checkshare()
8281 rgnp, rid); in hat_page_checkshare()
8282 cnt += rgnp->rgn_refcnt; in hat_page_checkshare()
9694 sf_region_t *rgnp; in sfmmu_page_cache() local
9698 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_page_cache()
9700 srdp, rgnp, rid); in sfmmu_page_cache()
9701 (void) sfmmu_rgntlb_demap(vaddr, rgnp, in sfmmu_page_cache()
9729 sf_region_t *rgnp; in sfmmu_page_cache() local
9733 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_page_cache()
9735 srdp, rgnp, rid); in sfmmu_page_cache()
9736 (void) sfmmu_rgntlb_demap(vaddr, rgnp, in sfmmu_page_cache()
10208 sf_region_t *rgnp; in sfmmu_set_scd_rttecnt() local
10230 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_set_scd_rttecnt()
10231 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_set_scd_rttecnt()
10232 ASSERT(rgnp->rgn_id == rid); in sfmmu_set_scd_rttecnt()
10234 scdp->scd_rttecnt[rgnp->rgn_pgszc] += in sfmmu_set_scd_rttecnt()
10235 rgnp->rgn_size >> TTE_PAGE_SHIFT(rgnp->rgn_pgszc); in sfmmu_set_scd_rttecnt()
10241 if (rgnp->rgn_pgszc >= TTE4M) { in sfmmu_set_scd_rttecnt()
10243 rgnp->rgn_size >> in sfmmu_set_scd_rttecnt()
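The sfmmu_set_scd_rttecnt() hits above accumulate per-page-size TTE counts for the SCD: a region of rgn_size bytes mapped at page size rgn_pgszc contributes rgn_size >> TTE_PAGE_SHIFT(rgn_pgszc) TTEs. The statement below repeats the hit above with the arithmetic spelled out as a worked example:

	/*
	 * Worked example: a 4 MB region contributes
	 *   4 MB >> TTE_PAGE_SHIFT(TTE4M)  ==   1 TTE  when mapped with 4 MB pages,
	 *   4 MB >> TTE_PAGE_SHIFT(TTE8K)  == 512 TTEs when mapped with 8 KB pages.
	 */
	scdp->scd_rttecnt[rgnp->rgn_pgszc] +=
	    rgnp->rgn_size >> TTE_PAGE_SHIFT(rgnp->rgn_pgszc);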
11124 sf_region_t *rgnp; in sfmmu_hblk_alloc() local
11204 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_hblk_alloc()
11205 ASSERT(rgnp != NULL && rgnp->rgn_id == rid); in sfmmu_hblk_alloc()
11206 ASSERT(rgnp->rgn_refcnt != 0); in sfmmu_hblk_alloc()
11207 ASSERT(size <= rgnp->rgn_pgszc); in sfmmu_hblk_alloc()
11211 if (!(rgnp->rgn_hmeflags & (0x1 << ttesz))) { in sfmmu_hblk_alloc()
11224 } while (++ttesz <= rgnp->rgn_pgszc); in sfmmu_hblk_alloc()
11403 if (!(rgnp->rgn_hmeflags & tteflag)) { in sfmmu_hblk_alloc()
11404 atomic_or_16(&rgnp->rgn_hmeflags, tteflag); in sfmmu_hblk_alloc()
11690 sf_region_t *rgnp; in sfmmu_steal_this_hblk() local
11698 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_steal_this_hblk()
11699 ASSERT(rgnp != NULL); in sfmmu_steal_this_hblk()
11700 SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid); in sfmmu_steal_this_hblk()
11825 sf_region_t *rgnp; in sfmmu_is_rgnva() local
11838 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_is_rgnva()
11839 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_is_rgnva()
11840 ASSERT(rgnp->rgn_id == rid); in sfmmu_is_rgnva()
11841 if (addr < rgnp->rgn_saddr || in sfmmu_is_rgnva()
11842 addr >= (rgnp->rgn_saddr + rgnp->rgn_size)) { in sfmmu_is_rgnva()
12125 sfmmu_rgntlb_demap(caddr_t addr, sf_region_t *rgnp, in sfmmu_rgntlb_demap() argument
12132 uint_t rid = rgnp->rgn_id; in sfmmu_rgntlb_demap()
12142 mutex_enter(&rgnp->rgn_mutex); in sfmmu_rgntlb_demap()
12144 sfmmup = rgnp->rgn_sfmmu_head; in sfmmu_rgntlb_demap()
12192 mutex_exit(&rgnp->rgn_mutex); in sfmmu_rgntlb_demap()
13853 sf_region_t *rgnp; in sfmmu_leave_srd() local
13900 for (rgnp = srdp->srd_hmergnfree; rgnp != NULL; rgnp = nrgnp) { in sfmmu_leave_srd()
13901 nrgnp = rgnp->rgn_next; in sfmmu_leave_srd()
13902 ASSERT(rgnp->rgn_id < srdp->srd_next_hmerid); in sfmmu_leave_srd()
13903 ASSERT(rgnp->rgn_refcnt == 0); in sfmmu_leave_srd()
13904 ASSERT(rgnp->rgn_sfmmu_head == NULL); in sfmmu_leave_srd()
13905 ASSERT(rgnp->rgn_flags & SFMMU_REGION_FREE); in sfmmu_leave_srd()
13906 ASSERT(rgnp->rgn_hmeflags == 0); in sfmmu_leave_srd()
13907 ASSERT(srdp->srd_hmergnp[rgnp->rgn_id] == rgnp); in sfmmu_leave_srd()
13910 ASSERT(rgnp->rgn_ttecnt[i] == 0); in sfmmu_leave_srd()
13914 kmem_cache_free(region_cache, rgnp); in sfmmu_leave_srd()
13922 for (rgnp = srdp->srd_ismrgnfree; rgnp != NULL; rgnp = nrgnp) { in sfmmu_leave_srd()
13923 nrgnp = rgnp->rgn_next; in sfmmu_leave_srd()
13924 ASSERT(rgnp->rgn_id < srdp->srd_next_ismrid); in sfmmu_leave_srd()
13925 ASSERT(rgnp->rgn_refcnt == 0); in sfmmu_leave_srd()
13926 ASSERT(rgnp->rgn_sfmmu_head == NULL); in sfmmu_leave_srd()
13927 ASSERT(rgnp->rgn_flags & SFMMU_REGION_FREE); in sfmmu_leave_srd()
13928 ASSERT(srdp->srd_ismrgnp[rgnp->rgn_id] == rgnp); in sfmmu_leave_srd()
13931 ASSERT(rgnp->rgn_ttecnt[i] == 0); in sfmmu_leave_srd()
13935 kmem_cache_free(region_cache, rgnp); in sfmmu_leave_srd()
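The sfmmu_leave_srd() hits walk the SRD's hme and ISM region free lists and return every idle sf_region_t to the region kmem cache. A condensed sketch of one of those walks, with the per-page-size ttecnt assertion loop collapsed into a comment:

	sf_region_t *rgnp, *nrgnp;

	for (rgnp = srdp->srd_hmergnfree; rgnp != NULL; rgnp = nrgnp) {
		nrgnp = rgnp->rgn_next;			/* grab next before freeing */
		ASSERT(rgnp->rgn_refcnt == 0);		/* nobody is joined to it */
		ASSERT(rgnp->rgn_flags & SFMMU_REGION_FREE);
		/* every rgn_ttecnt[] slot is asserted to be zero here */
		kmem_cache_free(region_cache, rgnp);
	}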
14009 sf_region_t *rgnp; in hat_join_region() local
14068 for (rgnp = srdp->srd_rgnhash[rhash]; rgnp != NULL; in hat_join_region()
14069 rgnp = rgnp->rgn_hash) { in hat_join_region()
14070 if (rgnp->rgn_saddr == r_saddr && rgnp->rgn_size == r_size && in hat_join_region()
14071 rgnp->rgn_obj == r_obj && rgnp->rgn_objoff == r_objoff && in hat_join_region()
14072 rgnp->rgn_perm == r_perm && rgnp->rgn_pgszc == r_pgszc) { in hat_join_region()
14078 if (rgnp != NULL) { in hat_join_region()
14079 ASSERT((rgnp->rgn_flags & SFMMU_REGION_TYPE_MASK) == r_type); in hat_join_region()
14080 ASSERT(rgnp->rgn_cb_function == r_cb_function); in hat_join_region()
14081 ASSERT(rgnp->rgn_refcnt >= 0); in hat_join_region()
14082 rid = rgnp->rgn_id; in hat_join_region()
14084 ASSERT(rarrp[rid] == rgnp); in hat_join_region()
14086 atomic_inc_32((volatile uint_t *)&rgnp->rgn_refcnt); in hat_join_region()
14095 sfmmu_link_to_hmeregion(sfmmup, rgnp); in hat_join_region()
14206 rgnp = *freelistp; in hat_join_region()
14207 *freelistp = rgnp->rgn_next; in hat_join_region()
14208 ASSERT(rgnp->rgn_id < *nextidp); in hat_join_region()
14209 ASSERT(rgnp->rgn_id < maxids); in hat_join_region()
14210 ASSERT(rgnp->rgn_flags & SFMMU_REGION_FREE); in hat_join_region()
14211 ASSERT((rgnp->rgn_flags & SFMMU_REGION_TYPE_MASK) in hat_join_region()
14213 ASSERT(rarrp[rgnp->rgn_id] == rgnp); in hat_join_region()
14214 ASSERT(rgnp->rgn_hmeflags == 0); in hat_join_region()
14224 for (rgnp = srdp->srd_rgnhash[rhash]; rgnp != NULL; in hat_join_region()
14225 rgnp = rgnp->rgn_hash) { in hat_join_region()
14226 if (rgnp->rgn_saddr == r_saddr && in hat_join_region()
14227 rgnp->rgn_size == r_size && in hat_join_region()
14228 rgnp->rgn_obj == r_obj && in hat_join_region()
14229 rgnp->rgn_objoff == r_objoff && in hat_join_region()
14230 rgnp->rgn_perm == r_perm && in hat_join_region()
14231 rgnp->rgn_pgszc == r_pgszc) { in hat_join_region()
14235 if (rgnp != NULL) { in hat_join_region()
14243 rgnp = new_rgnp; in hat_join_region()
14245 rgnp->rgn_id = (*nextidp)++; in hat_join_region()
14246 ASSERT(rgnp->rgn_id < maxids); in hat_join_region()
14247 ASSERT(rarrp[rgnp->rgn_id] == NULL); in hat_join_region()
14248 rarrp[rgnp->rgn_id] = rgnp; in hat_join_region()
14251 ASSERT(rgnp->rgn_sfmmu_head == NULL); in hat_join_region()
14252 ASSERT(rgnp->rgn_hmeflags == 0); in hat_join_region()
14255 ASSERT(rgnp->rgn_ttecnt[i] == 0); in hat_join_region()
14258 rgnp->rgn_saddr = r_saddr; in hat_join_region()
14259 rgnp->rgn_size = r_size; in hat_join_region()
14260 rgnp->rgn_obj = r_obj; in hat_join_region()
14261 rgnp->rgn_objoff = r_objoff; in hat_join_region()
14262 rgnp->rgn_perm = r_perm; in hat_join_region()
14263 rgnp->rgn_pgszc = r_pgszc; in hat_join_region()
14264 rgnp->rgn_flags = r_type; in hat_join_region()
14265 rgnp->rgn_refcnt = 0; in hat_join_region()
14266 rgnp->rgn_cb_function = r_cb_function; in hat_join_region()
14267 rgnp->rgn_hash = srdp->srd_rgnhash[rhash]; in hat_join_region()
14268 srdp->srd_rgnhash[rhash] = rgnp; in hat_join_region()
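Taken together, the hat_join_region() hits trace a lookup-or-create flow: search the SRD's region hash bucket for a region whose start address, size, backing object, offset, permissions and page size all match; on a hit, take another reference and reuse its id; on a miss, pull an entry off the free list (or use a newly allocated one), initialize it, and push it onto the hash bucket. A condensed sketch of that flow, with locking, the retry after dropping the lock, and error handling left to the real code:

	for (rgnp = srdp->srd_rgnhash[rhash]; rgnp != NULL; rgnp = rgnp->rgn_hash) {
		if (rgnp->rgn_saddr == r_saddr && rgnp->rgn_size == r_size &&
		    rgnp->rgn_obj == r_obj && rgnp->rgn_objoff == r_objoff &&
		    rgnp->rgn_perm == r_perm && rgnp->rgn_pgszc == r_pgszc)
			break;				/* existing region matches */
	}

	if (rgnp != NULL) {
		/* Reuse: just take another reference on the shared region. */
		atomic_inc_32((volatile uint_t *)&rgnp->rgn_refcnt);
	} else {
		/* Create: fill in a fresh region (id comes from the free list
		 * or the SRD's next-id counter) and hash it in. */
		rgnp = new_rgnp;
		rgnp->rgn_saddr = r_saddr;
		rgnp->rgn_size = r_size;
		rgnp->rgn_obj = r_obj;
		rgnp->rgn_objoff = r_objoff;
		rgnp->rgn_perm = r_perm;
		rgnp->rgn_pgszc = r_pgszc;
		rgnp->rgn_flags = r_type;
		rgnp->rgn_refcnt = 0;
		rgnp->rgn_cb_function = r_cb_function;
		rgnp->rgn_hash = srdp->srd_rgnhash[rhash];
		srdp->srd_rgnhash[rhash] = rgnp;
	}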
14297 sf_region_t *rgnp; in hat_leave_region() local
14321 rgnp = srdp->srd_ismrgnp[rid]; in hat_leave_region()
14325 rgnp = srdp->srd_hmergnp[rid]; in hat_leave_region()
14327 ASSERT(rgnp != NULL); in hat_leave_region()
14328 ASSERT(rgnp->rgn_id == rid); in hat_leave_region()
14329 ASSERT((rgnp->rgn_flags & SFMMU_REGION_TYPE_MASK) == r_type); in hat_leave_region()
14330 ASSERT(!(rgnp->rgn_flags & SFMMU_REGION_FREE)); in hat_leave_region()
14335 xhat_unload_callback_all(sfmmup->sfmmu_as, rgnp->rgn_saddr, in hat_leave_region()
14336 rgnp->rgn_size, 0, NULL); in hat_leave_region()
14341 r_pgszc = rgnp->rgn_pgszc; in hat_leave_region()
14342 r_size = rgnp->rgn_size; in hat_leave_region()
14370 r_pgszc = rgnp->rgn_pgszc; in hat_leave_region()
14371 r_saddr = rgnp->rgn_saddr; in hat_leave_region()
14372 r_size = rgnp->rgn_size; in hat_leave_region()
14397 if (rgnp->rgn_ttecnt[i] != 0) { in hat_leave_region()
14432 sfmmu_unlink_from_hmeregion(sfmmup, rgnp); in hat_leave_region()
14435 r_obj = rgnp->rgn_obj; in hat_leave_region()
14436 if (atomic_dec_32_nv((volatile uint_t *)&rgnp->rgn_refcnt)) { in hat_leave_region()
14448 if (cur_rgnp == rgnp && cur_rgnp->rgn_refcnt == 0) { in hat_leave_region()
14458 ASSERT((rgnp->rgn_flags & SFMMU_REGION_TYPE_MASK) == r_type); in hat_leave_region()
14459 *prev_rgnpp = rgnp->rgn_hash; in hat_leave_region()
14461 rgnp->rgn_flags |= SFMMU_REGION_FREE; in hat_leave_region()
14463 rgnp->rgn_next = srdp->srd_ismrgnfree; in hat_leave_region()
14464 srdp->srd_ismrgnfree = rgnp; in hat_leave_region()
14475 sfmmu_unload_hmeregion(srdp, rgnp); in hat_leave_region()
14477 rgnp->rgn_hmeflags = 0; in hat_leave_region()
14479 ASSERT(rgnp->rgn_sfmmu_head == NULL); in hat_leave_region()
14480 ASSERT(rgnp->rgn_id == rid); in hat_leave_region()
14482 rgnp->rgn_ttecnt[i] = 0; in hat_leave_region()
14484 rgnp->rgn_flags |= SFMMU_REGION_FREE; in hat_leave_region()
14487 rgnp->rgn_next = srdp->srd_hmergnfree; in hat_leave_region()
14488 srdp->srd_hmergnfree = rgnp; in hat_leave_region()
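hat_leave_region() is the inverse path: unlink the HAT, drop the region reference, and only when rgn_refcnt reaches zero unhash the region, mark it SFMMU_REGION_FREE and push it onto the matching free list. A minimal sketch of that final step for an hme region, assuming prev_rgnpp points at the hash-bucket link located by the re-search shown above and that the hmeblk teardown (sfmmu_unload_hmeregion()) has already run:

	sfmmu_unlink_from_hmeregion(sfmmup, rgnp);

	if (atomic_dec_32_nv((volatile uint_t *)&rgnp->rgn_refcnt) == 0) {
		*prev_rgnpp = rgnp->rgn_hash;		/* unhash the region */
		rgnp->rgn_flags |= SFMMU_REGION_FREE;
		rgnp->rgn_next = srdp->srd_hmergnfree;	/* onto the hme free list */
		srdp->srd_hmergnfree = rgnp;
	}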
14502 sf_region_t *rgnp; in hat_dup_region() local
14515 rgnp = srdp->srd_hmergnp[rid]; in hat_dup_region()
14516 ASSERT(rgnp->rgn_refcnt > 0); in hat_dup_region()
14517 ASSERT(rgnp->rgn_id == rid); in hat_dup_region()
14518 ASSERT((rgnp->rgn_flags & SFMMU_REGION_TYPE_MASK) == SFMMU_REGION_HME); in hat_dup_region()
14519 ASSERT(!(rgnp->rgn_flags & SFMMU_REGION_FREE)); in hat_dup_region()
14521 atomic_inc_32((volatile uint_t *)&rgnp->rgn_refcnt); in hat_dup_region()
14526 mutex_enter(&rgnp->rgn_mutex); in hat_dup_region()
14527 ASSERT(rgnp->rgn_sfmmu_head != NULL); in hat_dup_region()
14529 SFMMU_HMERID2RLINKP(rgnp->rgn_sfmmu_head, rid, hrlink, 0, 0); in hat_dup_region()
14532 rlink->next = rgnp->rgn_sfmmu_head; in hat_dup_region()
14540 rgnp->rgn_sfmmu_head = sfmmup; in hat_dup_region()
14541 mutex_exit(&rgnp->rgn_mutex); in hat_dup_region()
14544 rttecnt = rgnp->rgn_size >> TTE_PAGE_SHIFT(rgnp->rgn_pgszc); in hat_dup_region()
14545 atomic_add_long(&sfmmup->sfmmu_ttecnt[rgnp->rgn_pgszc], rttecnt); in hat_dup_region()
14547 if (rgnp->rgn_pgszc >= TTE4M) { in hat_dup_region()
14549 rgnp->rgn_size >> (TTE_PAGE_SHIFT(TTE8K) + 2); in hat_dup_region()
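The hat_dup_region() hits show what joining an already-set-up region costs a child HAT: take a reference, splice the new sfmmu onto the region's rgn_sfmmu_head list under rgn_mutex (via the per-rid links reached through SFMMU_HMERID2RLINKP(), as shown above), and credit the region's mappings to the child's counters. A minimal sketch of the reference and accounting part:

	atomic_inc_32((volatile uint_t *)&rgnp->rgn_refcnt);

	/* Credit the region's TTEs to this HAT's per-page-size count. */
	rttecnt = rgnp->rgn_size >> TTE_PAGE_SHIFT(rgnp->rgn_pgszc);
	atomic_add_long(&sfmmup->sfmmu_ttecnt[rgnp->rgn_pgszc], rttecnt);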
14562 sf_region_t *rgnp = (sf_region_t *)buf; in sfmmu_rgncache_constructor() local
14563 bzero(buf, sizeof (*rgnp)); in sfmmu_rgncache_constructor()
14565 mutex_init(&rgnp->rgn_mutex, NULL, MUTEX_DEFAULT, NULL); in sfmmu_rgncache_constructor()
14574 sf_region_t *rgnp = (sf_region_t *)buf; in sfmmu_rgncache_destructor() local
14575 mutex_destroy(&rgnp->rgn_mutex); in sfmmu_rgncache_destructor()
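The region cache constructor/destructor hits follow the usual kmem cache object pattern: zero the buffer and initialize the embedded mutex when an object is constructed, destroy the mutex when it is torn down. Reconstructed from the hits above, with the standard kmem cache constructor/destructor prototypes and parameter names assumed:

	/*ARGSUSED*/
	static int
	sfmmu_rgncache_constructor(void *buf, void *cdrarg, int kmflags)
	{
		sf_region_t *rgnp = (sf_region_t *)buf;

		bzero(buf, sizeof (*rgnp));
		mutex_init(&rgnp->rgn_mutex, NULL, MUTEX_DEFAULT, NULL);
		return (0);
	}

	/*ARGSUSED*/
	static void
	sfmmu_rgncache_destructor(void *buf, void *cdrarg)
	{
		sf_region_t *rgnp = (sf_region_t *)buf;

		mutex_destroy(&rgnp->rgn_mutex);
	}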
14711 sf_region_t *rgnp; in sfmmu_alloc_scd_tsbs() local
14738 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_alloc_scd_tsbs()
14739 ASSERT(rgnp->rgn_id == rid); in sfmmu_alloc_scd_tsbs()
14740 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_alloc_scd_tsbs()
14742 if (rgnp->rgn_pgszc < TTE4M) { in sfmmu_alloc_scd_tsbs()
14743 tte8k_cnt += rgnp->rgn_size >> in sfmmu_alloc_scd_tsbs()
14746 ASSERT(rgnp->rgn_pgszc >= TTE4M); in sfmmu_alloc_scd_tsbs()
14747 tte4m_cnt += rgnp->rgn_size >> in sfmmu_alloc_scd_tsbs()
14754 tte8k_cnt += rgnp->rgn_size >> in sfmmu_alloc_scd_tsbs()
14759 rgnp = srdp->srd_ismrgnp[rid]; in sfmmu_alloc_scd_tsbs()
14760 ASSERT(rgnp->rgn_id == rid); in sfmmu_alloc_scd_tsbs()
14761 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_alloc_scd_tsbs()
14763 ism_hatid = (sfmmu_t *)rgnp->rgn_obj; in sfmmu_alloc_scd_tsbs()
14772 ASSERT(rgnp->rgn_pgszc >= TTE4M); in sfmmu_alloc_scd_tsbs()
14773 if (rgnp->rgn_pgszc >= TTE4M) { in sfmmu_alloc_scd_tsbs()
14774 tte4m_cnt += rgnp->rgn_size >> in sfmmu_alloc_scd_tsbs()
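The sfmmu_alloc_scd_tsbs() hits size the SCD's shared TSBs by splitting region mappings into an 8K-indexed count and a 4M-indexed count, depending on each region's page size. A small sketch of that split; the shift operands are truncated in the hits above, so TTE_PAGE_SHIFT() here is an assumption consistent with the other region counts in this listing:

	if (rgnp->rgn_pgszc < TTE4M) {
		/* small-page region: accounted in the 8K-indexed TSB */
		tte8k_cnt += rgnp->rgn_size >> TTE_PAGE_SHIFT(TTE8K);
	} else {
		/* 4M-or-larger region: accounted in the 4M-indexed TSB */
		tte4m_cnt += rgnp->rgn_size >> TTE_PAGE_SHIFT(TTE4M);
	}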
14837 sfmmu_link_to_hmeregion(sfmmu_t *sfmmup, sf_region_t *rgnp) in sfmmu_link_to_hmeregion() argument
14844 rid = rgnp->rgn_id; in sfmmu_link_to_hmeregion()
14850 mutex_enter(&rgnp->rgn_mutex); in sfmmu_link_to_hmeregion()
14851 if ((head = rgnp->rgn_sfmmu_head) == NULL) { in sfmmu_link_to_hmeregion()
14859 rgnp->rgn_sfmmu_head = sfmmup; in sfmmu_link_to_hmeregion()
14873 rgnp->rgn_sfmmu_head = sfmmup; in sfmmu_link_to_hmeregion()
14875 mutex_exit(&rgnp->rgn_mutex); in sfmmu_link_to_hmeregion()
14882 sfmmu_unlink_from_hmeregion(sfmmu_t *sfmmup, sf_region_t *rgnp) in sfmmu_unlink_from_hmeregion() argument
14887 rid = rgnp->rgn_id; in sfmmu_unlink_from_hmeregion()
14893 mutex_enter(&rgnp->rgn_mutex); in sfmmu_unlink_from_hmeregion()
14894 if (rgnp->rgn_sfmmu_head == sfmmup) { in sfmmu_unlink_from_hmeregion()
14896 rgnp->rgn_sfmmu_head = next; in sfmmu_unlink_from_hmeregion()
14943 mutex_exit(&rgnp->rgn_mutex); in sfmmu_unlink_from_hmeregion()
14957 sf_region_t *rgnp; in sfmmu_link_scd_to_regions() local
14978 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_link_scd_to_regions()
14979 ASSERT(rgnp->rgn_id == rid); in sfmmu_link_scd_to_regions()
14980 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_link_scd_to_regions()
14981 sfmmu_link_to_hmeregion(scsfmmup, rgnp); in sfmmu_link_scd_to_regions()
14986 rgnp = srdp->srd_ismrgnp[rid]; in sfmmu_link_scd_to_regions()
14987 ASSERT(rgnp->rgn_id == rid); in sfmmu_link_scd_to_regions()
14988 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_link_scd_to_regions()
14990 ism_hatid = (sfmmu_t *)rgnp->rgn_obj; in sfmmu_link_scd_to_regions()
14994 ism_ment->iment_base_va = rgnp->rgn_saddr; in sfmmu_link_scd_to_regions()
15014 sf_region_t *rgnp; in sfmmu_unlink_scd_from_regions() local
15034 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_unlink_scd_from_regions()
15035 ASSERT(rgnp->rgn_id == rid); in sfmmu_unlink_scd_from_regions()
15036 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_unlink_scd_from_regions()
15038 rgnp); in sfmmu_unlink_scd_from_regions()
15044 rgnp = srdp->srd_ismrgnp[rid]; in sfmmu_unlink_scd_from_regions()
15045 ASSERT(rgnp->rgn_id == rid); in sfmmu_unlink_scd_from_regions()
15046 ASSERT(rgnp->rgn_refcnt > 0); in sfmmu_unlink_scd_from_regions()
15048 ism_hatid = (sfmmu_t *)rgnp->rgn_obj; in sfmmu_unlink_scd_from_regions()
15053 rgnp->rgn_saddr); in sfmmu_unlink_scd_from_regions()
15762 sf_region_t *rgnp; in sfmmu_hblk_hash_rm() local
15770 rgnp = srdp->srd_hmergnp[rid]; in sfmmu_hblk_hash_rm()
15771 ASSERT(rgnp != NULL); in sfmmu_hblk_hash_rm()
15772 SFMMU_VALIDATE_SHAREDHBLK(hmeblkp, srdp, rgnp, rid); in sfmmu_hblk_hash_rm()