Searched refs:ESID_MASK (Results 1 – 10 of 10) sorted by relevance
32 #define ESID_MASK 0xf0000000 macro
29 #define ESID_MASK 0xfffffffff0000000UL macro
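The two definitions above are the 32-bit and 64-bit variants of the same constant: a 256M segment spans 2^28 bytes, so the ESID is everything above bit 27 of the effective address. A minimal standalone sketch of that split (the mask is copied from the 64-bit hit above; the example address and printf formatting are illustrative only, not kernel code):

/*
 * Sketch: split an effective address into its ESID (segment part)
 * and the offset within the 256M segment, using the 64-bit mask
 * from the definition above.
 */
#include <stdio.h>
#include <stdint.h>

#define ESID_MASK 0xfffffffff0000000UL   /* bits 63..28: 256M segments */

int main(void)
{
    uint64_t eaddr  = 0xc000000012345678UL;  /* example effective address */
    uint64_t esid   = eaddr & ESID_MASK;     /* segment identifier */
    uint64_t offset = eaddr & ~ESID_MASK;    /* offset within the segment */

    printf("esid=0x%016llx offset=0x%07llx\n",
           (unsigned long long)esid, (unsigned long long)offset);
    return 0;
}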
283 else if ((svcpu->slb[i].esid & ESID_MASK) == esid) { in kvmppc_mmu_next_segment()
316 u64 slb_esid = (eaddr & ESID_MASK) | SLB_ESID_V; in kvmppc_mmu_map_segment()
323 slb_index = kvmppc_mmu_next_segment(vcpu, eaddr & ESID_MASK); in kvmppc_mmu_map_segment()
112 page = (eaddr & ~ESID_MASK) >> 12; in kvmppc_mmu_get_pteg()
168 ((eaddr & ~ESID_MASK) >> VPN_SHIFT); in kvmppc_mmu_map_page()
405 slbe->orige = rb & (ESID_MASK | SLB_ESID_V); in kvmppc_mmu_book3s_64_slbmte()
324 mask = ESID_MASK; in kvmppc_mmu_book3s_hv_find_slbe()
132 slb->esid = (ea & (ssize == MMU_SEGSIZE_1T ? ESID_MASK_1T : ESID_MASK)) | SLB_ESID_V; in copro_calculate_slb()
220 slb->esid = (ea & ESID_MASK) | SLB_ESID_V; in __spu_kernel_slb()
234 if (!((slbs[i].esid ^ ea) & ESID_MASK)) in __slb_present()
148 (u64)ctx->sstp, (u64)ctx->sstp & ESID_MASK, mmu_kernel_ssize, vsid, sstp0, sstp1); in cxl_alloc_sst()
863 (((ssize) == MMU_SEGSIZE_256M) ? ESID_MASK : ESID_MASK_1T)
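Most of the references above follow one pattern: mask the effective address down to its ESID, choosing the mask by segment size as in the last hit, then OR in the valid bit to form an SLB ESID word. A minimal sketch of that pattern, assuming the usual kernel values for ESID_MASK_1T (0xffffff0000000000UL) and SLB_ESID_V (bit 27); the enum values and the make_slb_esid() helper are hypothetical stand-ins for illustration:

/*
 * Sketch: pick the ESID mask by segment size (same shape as the
 * ternary macro in the last hit above), then OR in the valid bit,
 * as in __spu_kernel_slb() and kvmppc_mmu_map_segment().
 */
#include <stdio.h>
#include <stdint.h>

#define ESID_MASK    0xfffffffff0000000UL  /* 256M segments */
#define ESID_MASK_1T 0xffffff0000000000UL  /* 1T segments (assumed value) */
#define SLB_ESID_V   0x0000000008000000UL  /* entry-valid bit (assumed value) */

enum { MMU_SEGSIZE_256M, MMU_SEGSIZE_1T };

#define ESID_MASK_FOR(ssize) \
    (((ssize) == MMU_SEGSIZE_256M) ? ESID_MASK : ESID_MASK_1T)

/* Hypothetical helper combining the two steps. */
static uint64_t make_slb_esid(uint64_t ea, int ssize)
{
    return (ea & ESID_MASK_FOR(ssize)) | SLB_ESID_V;
}

int main(void)
{
    uint64_t ea = 0xd000000076543210UL;  /* example effective address */

    printf("256M: 0x%016llx\n",
           (unsigned long long)make_slb_esid(ea, MMU_SEGSIZE_256M));
    printf("1T:   0x%016llx\n",
           (unsigned long long)make_slb_esid(ea, MMU_SEGSIZE_1T));
    return 0;
}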