Searched refs:slb (Results 1 – 12 of 12) sorted by relevance
/linux/arch/powerpc/kvm/

book3s_64_mmu.c    [hits in kvmppc_mmu_book3s_64_find_slbe()]
    38    if (!vcpu->arch.slb[i].valid)
    41    if (vcpu->arch.slb[i].tb)
    44    if (vcpu->arch.slb[i].esid == cmp_esid)
    45        return &vcpu->arch.slb[i];
    51    if (vcpu->arch.slb[i].vsid)
    53        vcpu->arch.slb[i].valid ? 'v' : ' ',
    54        vcpu->arch.slb[i].large ? 'l' : ' ',
    55        vcpu->arch.slb[i].tb ? 't' : ' ',
    56        vcpu->arch.slb[i].esid,
    57        vcpu->arch.slb[i].vsid);
    [all …]

book3s_hv_ras.c    [hits in reload_slb()]
    39    struct slb_shadow *slb;    [local]
    46    slb = vcpu->arch.slb_shadow.pinned_addr;
    47    if (!slb)
    51    n = min_t(u32, be32_to_cpu(slb->persistent), SLB_MIN_SIZE);
    52    if ((void *) &slb->save_area[n] > vcpu->arch.slb_shadow.pinned_end)
    57    unsigned long rb = be64_to_cpu(slb->save_area[i].esid);
    58    unsigned long rs = be64_to_cpu(slb->save_area[i].vsid);

book3s_hv_p9_entry.c
    347   mtslb(vcpu->arch.slb[i].orige, vcpu->arch.slb[i].origv);    [in switch_mmu_to_guest_hpt()]
    409   vcpu->arch.slb[nr].orige = slbee | i;                       [in save_clear_guest_mmu()]
    410   vcpu->arch.slb[nr].origv = slbev;                           [in save_clear_guest_mmu()]
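The book3s_64_mmu.c hits are the guest SLB lookup: each valid entry's ESID is compared against the faulting effective address, and the entry's tb bit selects whether the comparison happens at 1TB or 256MB segment granularity. Below is a minimal standalone sketch of that lookup pattern; the struct, field names, and segment shifts are illustrative stand-ins, not the kernel's kvmppc_slb definition.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stddef.h>

    /* Illustrative SLB entry: esid holds the segment-aligned effective address. */
    struct slb_entry {
    	bool     valid;
    	bool     tb;        /* set: 1TB segment, clear: 256MB segment */
    	uint64_t esid;
    	uint64_t vsid;
    };

    #define SEGMENT_SHIFT_256M  28
    #define SEGMENT_SHIFT_1T    40

    /* Return the entry whose segment covers 'ea', or NULL if there is none. */
    static struct slb_entry *find_slbe(struct slb_entry *slb, size_t n, uint64_t ea)
    {
    	for (size_t i = 0; i < n; i++) {
    		unsigned int shift;

    		if (!slb[i].valid)
    			continue;
    		/* Compare at the segment size the entry was installed with. */
    		shift = slb[i].tb ? SEGMENT_SHIFT_1T : SEGMENT_SHIFT_256M;
    		if ((slb[i].esid >> shift) == (ea >> shift))
    			return &slb[i];
    	}
    	return NULL;
    }

    int main(void)
    {
    	struct slb_entry slb[2] = {
    		{ .valid = true, .tb = false, .esid = 0x10000000,     .vsid = 0x1111 },
    		{ .valid = true, .tb = true,  .esid = 0x10000000000,  .vsid = 0x2222 },
    	};
    	struct slb_entry *e = find_slbe(slb, 2, 0x10000001234ULL);

    	/* Matches the 1TB entry, so this prints "vsid = 0x2222". */
    	printf("vsid = 0x%llx\n", e ? (unsigned long long)e->vsid : 0ULL);
    	return 0;
    }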
/linux/arch/powerpc/platforms/cell/

spu_base.c
    133   static inline void spu_load_slb(struct spu *spu, int slbe, struct copro_slb *slb)    [in spu_load_slb(), argument]
    138           __func__, slbe, slb->vsid, slb->esid);
    144   out_be64(&priv2->slb_vsid_RW, slb->vsid);
    146   out_be64(&priv2->slb_esid_RW, slb->esid);
    151   struct copro_slb slb;                                         [in __spu_trap_data_seg(), local]
    154   ret = copro_calculate_slb(spu->mm, ea, &slb);
    158   spu_load_slb(spu, spu->slb_replace, &slb);
    207   static void __spu_kernel_slb(void *addr, struct copro_slb *slb)    [in __spu_kernel_slb(), argument]
    217   slb->vsid = (get_kernel_vsid(ea, MMU_SEGSIZE_256M) << SLB_VSID_SHIFT) |
    219   slb->esid = (ea & ESID_MASK) | SLB_ESID_V;
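The __spu_kernel_slb() hits (lines 217 and 219) show how an SLB entry for a kernel address is put together: the esid doubleword is the segment-aligned effective address plus a valid bit, and the vsid doubleword is the kernel VSID shifted into position plus protection flags. The sketch below recomputes those two doublewords standalone; the constants are assumptions modelled on the hash-MMU headers, and fake_kernel_vsid() is a made-up stand-in for get_kernel_vsid().

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed values, modelled on arch/powerpc/include/asm/book3s/64/mmu-hash.h. */
    #define ESID_MASK        0xfffffffff0000000ULL  /* 256MB segment base of an EA */
    #define SLB_ESID_V       0x0000000008000000ULL  /* entry-valid bit */
    #define SLB_VSID_SHIFT   12
    #define SLB_VSID_KERNEL  0x0000000000000400ULL  /* illustrative "kernel" flag bits */

    /* Fake replacement for get_kernel_vsid(): derive a VSID from the segment number. */
    static uint64_t fake_kernel_vsid(uint64_t ea)
    {
    	return (ea >> 28) * 0x111ULL;
    }

    int main(void)
    {
    	uint64_t ea   = 0xc000000001234567ULL;   /* some kernel effective address */
    	uint64_t vsid = (fake_kernel_vsid(ea) << SLB_VSID_SHIFT) | SLB_VSID_KERNEL;
    	uint64_t esid = (ea & ESID_MASK) | SLB_ESID_V;

    	printf("esid dword: 0x%016llx\n", (unsigned long long)esid);
    	printf("vsid dword: 0x%016llx\n", (unsigned long long)vsid);
    	return 0;
    }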
/linux/arch/powerpc/mm/book3s64/

Makefile
    6     obj-y += hash_pgtable.o hash_utils.o hash_tlb.o slb.o slice.o
/linux/arch/powerpc/include/asm/

kvm_book3s_asm.h
    148   } slb[64];   /* guest SLB */    [member]
/linux/arch/x86/kvm/

trace.h
    860   TP_PROTO(__u64 rip, __u32 slb),
    861   TP_ARGS(rip, slb),
    865   __field( __u32, slb )
    870   __entry->slb = slb;
    874   __entry->rip, __entry->slb)
/linux/arch/powerpc/kernel/

asm-offsets.c    [hits in main()]
    461   OFFSET(VCPU_SLB, kvm_vcpu, arch.slb);
    554   SVCPU_FIELD(SVCPU_SLB, slb);
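The asm-offsets.c hits use the kernel's OFFSET() macro, whose purpose is to let assembly code refer to C struct members such as arch.slb by name: the file is compiled to assembly only, and the emitted .ascii markers are scraped into a generated header of #define constants. A toy sketch of the technique follows, with a made-up struct; the macros are modelled on include/linux/kbuild.h. Compile it with gcc -S and inspect the output rather than running it.

    #include <stddef.h>

    /* Toy stand-in for the real vcpu structure. */
    struct toy_vcpu {
    	unsigned long pad[7];
    	unsigned long slb[64];
    };

    /* Modelled on the kernel's DEFINE/OFFSET in include/linux/kbuild.h. */
    #define DEFINE(sym, val) \
    	asm volatile("\n.ascii \"->" #sym " %0 " #val "\"" : : "i" (val))
    #define OFFSET(sym, str, mem) DEFINE(sym, offsetof(struct str, mem))

    int main(void)
    {
    	/* In the generated .s this becomes an .ascii line such as
    	 * "->TOY_VCPU_SLB 56 offsetof(struct toy_vcpu, slb)" (56 on a 64-bit
    	 * build), which a build script turns into "#define TOY_VCPU_SLB 56". */
    	OFFSET(TOY_VCPU_SLB, toy_vcpu, slb);
    	return 0;
    }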
/linux/arch/powerpc/include/uapi/asm/

kvm.h
    171   } slb[64];    [member]
/linux/tools/arch/powerpc/include/uapi/asm/

kvm.h
    171   } slb[64];    [member]
/linux/arch/s390/tools/

opcodes.txt
    844   e399 slb RXY_RRRD
/linux/Documentation/admin-guide/

kernel-parameters.txt
    7318  Disables slb preloading for userspace.