Searched refs:eaddr (Results 1 – 25 of 26) sorted by relevance


/linux/arch/powerpc/kvm/
book3s_32_mmu.c
69 static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr,
75 static u32 find_sr(struct kvm_vcpu *vcpu, gva_t eaddr) in find_sr() argument
77 return kvmppc_get_sr(vcpu, (eaddr >> 28) & 0xf); in find_sr()
80 static u64 kvmppc_mmu_book3s_32_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_32_ea_to_vp() argument
86 if (!kvmppc_mmu_book3s_32_xlate_bat(vcpu, eaddr, &pte, data, false)) in kvmppc_mmu_book3s_32_ea_to_vp()
89 kvmppc_mmu_book3s_32_esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_book3s_32_ea_to_vp()
90 return (((u64)eaddr >> 12) & 0xffff) | (vsid << 16); in kvmppc_mmu_book3s_32_ea_to_vp()
94 u32 sre, gva_t eaddr, in kvmppc_mmu_book3s_32_get_pteg() argument
101 page = (eaddr & 0x0FFFFFFF) >> 12; in kvmppc_mmu_book3s_32_get_pteg()
112 kvmppc_get_pc(vcpu), eaddr, vcpu_book3s->sdr1, pteg, in kvmppc_mmu_book3s_32_get_pteg()
[all …]
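
The 32-bit Book3S translation above selects a segment register from the top four bits of the effective address, takes the VSID from that register, and forms the virtual page from the 16-bit page index within the 256 MiB segment. A minimal userspace sketch of that arithmetic, with a made-up VSID standing in for the real segment-register read:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t eaddr = 0xc0123456;               /* example effective address */
        uint64_t vsid  = 0xabcd;                   /* placeholder: really read from SR[eaddr >> 28] */

        unsigned sr_index = (eaddr >> 28) & 0xf;               /* which of the 16 segment registers */
        uint64_t page = ((uint64_t)eaddr >> 12) & 0xffff;      /* page index within the segment */
        uint64_t vp   = page | (vsid << 16);                   /* as in kvmppc_mmu_book3s_32_ea_to_vp() */

        printf("SR %u, page 0x%llx, vp 0x%llx\n",
               sr_index, (unsigned long long)page, (unsigned long long)vp);
        return 0;
    }
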
book3s_64_mmu.c
29 gva_t eaddr) in kvmppc_mmu_book3s_64_find_slbe() argument
32 u64 esid = GET_ESID(eaddr); in kvmppc_mmu_book3s_64_find_slbe()
33 u64 esid_1t = GET_ESID_1T(eaddr); in kvmppc_mmu_book3s_64_find_slbe()
49 eaddr, esid, esid_1t); in kvmppc_mmu_book3s_64_find_slbe()
73 static u64 kvmppc_slb_calc_vpn(struct kvmppc_slb *slb, gva_t eaddr) in kvmppc_slb_calc_vpn() argument
75 eaddr &= kvmppc_slb_offset_mask(slb); in kvmppc_slb_calc_vpn()
77 return (eaddr >> VPN_SHIFT) | in kvmppc_slb_calc_vpn()
81 static u64 kvmppc_mmu_book3s_64_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, in kvmppc_mmu_book3s_64_ea_to_vp() argument
86 slb = kvmppc_mmu_book3s_64_find_slbe(vcpu, eaddr); in kvmppc_mmu_book3s_64_ea_to_vp()
90 return kvmppc_slb_calc_vpn(slb, eaddr); in kvmppc_mmu_book3s_64_ea_to_vp()
[all …]
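
The 64-bit SLB search above matches the effective segment ID against each valid SLB entry, in both supported segment sizes. A sketch of the ESID extraction, assuming the conventional shifts of 28 bits for 256 MiB segments and 40 bits for 1 TiB segments:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed segment-size shifts: 256 MiB and 1 TiB segments. */
    #define SID_SHIFT    28
    #define SID_SHIFT_1T 40

    int main(void)
    {
        uint64_t eaddr   = 0x0000123456789abcULL;   /* example effective address */
        uint64_t esid    = eaddr >> SID_SHIFT;      /* as GET_ESID() in kvmppc_mmu_book3s_64_find_slbe() */
        uint64_t esid_1t = eaddr >> SID_SHIFT_1T;   /* as GET_ESID_1T() */

        printf("esid 0x%llx, esid_1t 0x%llx\n",
               (unsigned long long)esid, (unsigned long long)esid_1t);
        return 0;
    }
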
trace_pr.h
39 __field( unsigned long, eaddr )
49 __entry->eaddr = orig_pte->eaddr;
57 __entry->flag_w, __entry->flag_x, __entry->eaddr,
70 __field( ulong, eaddr )
79 __entry->eaddr = pte->pte.eaddr;
88 __entry->host_vpn, __entry->pfn, __entry->eaddr,
99 __field( ulong, eaddr )
108 __entry->eaddr = pte->pte.eaddr;
117 __entry->host_vpn, __entry->pfn, __entry->eaddr,
e500_mmu.c
81 gva_t eaddr, int tlbsel, unsigned int pid, int as) in kvmppc_e500_tlb_index() argument
88 set_base = gtlb0_set_base(vcpu_e500, eaddr); in kvmppc_e500_tlb_index()
91 if (eaddr < vcpu_e500->tlb1_min_eaddr || in kvmppc_e500_tlb_index()
92 eaddr > vcpu_e500->tlb1_max_eaddr) in kvmppc_e500_tlb_index()
104 if (eaddr < get_tlb_eaddr(tlbe)) in kvmppc_e500_tlb_index()
107 if (eaddr > get_tlb_end(tlbe)) in kvmppc_e500_tlb_index()
127 gva_t eaddr, int as) in kvmppc_e500_deliver_tlb_miss() argument
143 vcpu->arch.shared->mas2 = (eaddr & MAS2_EPN) in kvmppc_e500_deliver_tlb_miss()
155 gva_t eaddr; in kvmppc_recalc_tlb1map_range() local
169 eaddr = get_tlb_eaddr(tlbe); in kvmppc_recalc_tlb1map_range()
[all …]
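
The TLB1 lookup above first rejects addresses outside the cached [tlb1_min_eaddr, tlb1_max_eaddr] window and then tests each entry's start and end against the faulting address. A sketch of that per-entry containment test, with a hypothetical two-field entry standing in for the real guest TLB entry and its get_tlb_eaddr()/get_tlb_end() accessors:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for a guest TLB entry: first and last effective address covered. */
    struct tlb_entry {
        uint64_t eaddr;
        uint64_t end;
    };

    static bool entry_covers(const struct tlb_entry *e, uint64_t eaddr)
    {
        if (eaddr < e->eaddr)      /* mirrors: if (eaddr < get_tlb_eaddr(tlbe)) */
            return false;
        if (eaddr > e->end)        /* mirrors: if (eaddr > get_tlb_end(tlbe)) */
            return false;
        return true;
    }

    int main(void)
    {
        struct tlb_entry e = { .eaddr = 0x10000000, .end = 0x100fffff };   /* 1 MiB mapping */

        printf("%d %d\n", entry_covers(&e, 0x10001234), entry_covers(&e, 0x20000000));
        return 0;
    }
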
e500_mmu_host.c
108 static u32 get_host_mas0(unsigned long eaddr) in get_host_mas0() argument
118 asm volatile("tlbsx 0, %0" : : "b" (eaddr & ~CONFIG_PAGE_OFFSET)); in get_host_mas0()
548 void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 eaddr, gpa_t gpaddr, in kvmppc_mmu_map() argument
568 &priv->ref, eaddr, &stlbe); in kvmppc_mmu_map()
575 kvmppc_e500_tlb1_map(vcpu_e500, eaddr, gfn, gtlbe, &stlbe, in kvmppc_mmu_map()
593 hva_t eaddr; in kvmppc_load_last_inst() local
670 eaddr = (unsigned long)kmap_atomic(page); in kvmppc_load_last_inst()
671 *instr = *(u32 *)(eaddr | (unsigned long)(addr & ~PAGE_MASK)); in kvmppc_load_last_inst()
672 kunmap_atomic((u32 *)eaddr); in kvmppc_load_last_inst()
booke.c
1284 unsigned long eaddr = vcpu->arch.fault_dear; in kvmppc_handle_exit() local
1291 (eaddr & PAGE_MASK) == vcpu->arch.magic_page_ea) { in kvmppc_handle_exit()
1301 gtlb_index = kvmppc_mmu_dtlb_index(vcpu, eaddr); in kvmppc_handle_exit()
1315 gpaddr = kvmppc_mmu_xlate(vcpu, gtlb_index, eaddr); in kvmppc_handle_exit()
1325 kvmppc_mmu_map(vcpu, eaddr, gpaddr, gtlb_index); in kvmppc_handle_exit()
1332 vcpu->arch.vaddr_accessed = eaddr; in kvmppc_handle_exit()
1342 unsigned long eaddr = vcpu->arch.regs.nip; in kvmppc_handle_exit() local
1350 gtlb_index = kvmppc_mmu_itlb_index(vcpu, eaddr); in kvmppc_handle_exit()
1363 gpaddr = kvmppc_mmu_xlate(vcpu, gtlb_index, eaddr); in kvmppc_handle_exit()
1373 kvmppc_mmu_map(vcpu, eaddr, gpaddr, gtlb_index); in kvmppc_handle_exit()
[all …]
e500mc.c
60 gva_t eaddr; in kvmppc_e500_tlbil_one() local
69 eaddr = get_tlb_eaddr(gtlbe); in kvmppc_e500_tlbil_one()
76 asm volatile("tlbsx 0, %[eaddr]\n" : : [eaddr] "r" (eaddr)); in kvmppc_e500_tlbil_one()
e500.c
238 u32 val, eaddr; in kvmppc_e500_tlbil_one() local
270 eaddr = get_tlb_eaddr(gtlbe); in kvmppc_e500_tlbil_one()
275 asm volatile("tlbsx 0, %[eaddr]" : : [eaddr] "r" (eaddr)); in kvmppc_e500_tlbil_one()
book3s_hv_rm_mmu.c
1063 unsigned long eaddr, unsigned long slb_v, long mmio_update) in mmio_cache_search() argument
1073 if ((entry->eaddr >> pshift) == (eaddr >> pshift) && in mmio_cache_search()
1097 long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr, unsigned long slb_v, in kvmppc_hv_find_lock_hpte() argument
1126 hash = (vsid ^ ((eaddr & somask) >> pshift)) & kvmppc_hpt_mask(&kvm->arch.hpt); in kvmppc_hv_find_lock_hpte()
1128 avpn |= (eaddr & somask) >> 16; in kvmppc_hv_find_lock_hpte()
1285 cache_entry->eaddr = addr; in kvmppc_hpte_hv_fault()
book3s_paired_singles.c
155 static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store) in kvmppc_inject_pf() argument
163 kvmppc_set_dar(vcpu, eaddr); in kvmppc_inject_pf()
/linux/drivers/edac/
igen6_edac.c
141 u64 (*err_addr_to_sys_addr)(u64 eaddr, int mc);
143 u64 (*err_addr_to_imc_addr)(u64 eaddr, int mc);
311 static u64 ehl_err_addr_to_sys_addr(u64 eaddr, int mc) in ehl_err_addr_to_sys_addr() argument
313 return eaddr; in ehl_err_addr_to_sys_addr()
316 static u64 ehl_err_addr_to_imc_addr(u64 eaddr, int mc) in ehl_err_addr_to_imc_addr() argument
318 if (eaddr < igen6_tolud) in ehl_err_addr_to_imc_addr()
319 return eaddr; in ehl_err_addr_to_imc_addr()
322 return eaddr + igen6_tolud - _4GB; in ehl_err_addr_to_imc_addr()
324 if (eaddr >= igen6_tom) in ehl_err_addr_to_imc_addr()
325 return eaddr + igen6_tolud - igen6_tom; in ehl_err_addr_to_imc_addr()
[all …]
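
The Elkhart Lake helpers above turn a logged system error address into an internal memory-controller address by removing the MMIO hole that the address map opens between TOLUD (top of low usable DRAM) and 4 GiB; the other branch shown does the same relative to TOM (top of memory). A worked sketch of only the 4 GiB-hole branch, with a made-up TOLUD value:

    #include <stdint.h>
    #include <stdio.h>

    #define _4GB (1ULL << 32)

    /* Placeholder topology; on real hardware this comes from the host bridge registers. */
    static uint64_t tolud = 0x80000000ULL;    /* top of low usable DRAM: 2 GiB */

    /* Addresses below TOLUD map 1:1; addresses above the hole are shifted down by the
     * hole size, as in the eaddr + igen6_tolud - _4GB branch above. */
    static uint64_t sys_to_imc_addr(uint64_t eaddr)
    {
        if (eaddr < tolud)
            return eaddr;
        return eaddr + tolud - _4GB;          /* remove the [TOLUD, 4 GiB) MMIO hole */
    }

    int main(void)
    {
        printf("0x%llx -> 0x%llx\n", 0x40000000ULL,
               (unsigned long long)sys_to_imc_addr(0x40000000ULL));    /* below TOLUD: unchanged */
        printf("0x%llx -> 0x%llx\n", 0x100000000ULL,
               (unsigned long long)sys_to_imc_addr(0x100000000ULL));   /* 4 GiB -> 2 GiB */
        return 0;
    }
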
/linux/arch/arm64/kernel/
compat_alignment.c
118 unsigned long eaddr, newaddr; in do_alignment_ldmstm() local
125 newaddr = eaddr = regs->regs[rn]; in do_alignment_ldmstm()
131 eaddr = newaddr; in do_alignment_ldmstm()
134 eaddr += 4; in do_alignment_ldmstm()
140 if (get_user(val, (u32 __user *)eaddr)) in do_alignment_ldmstm()
155 if (put_user(val, (u32 __user *)eaddr)) in do_alignment_ldmstm()
158 eaddr += 4; in do_alignment_ldmstm()
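
The LDM/STM fixup above walks the instruction's register list and advances the effective address by four bytes per transferred register, reading or writing each word with get_user()/put_user(). A userspace sketch of that address walk over a 16-bit register list, with the user accessors replaced by memcpy into a local buffer:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        uint32_t regs[16] = { 0 };
        uint16_t reglist  = 0x8011;                 /* example register list: r0, r4, r15 */
        uint8_t  mem[64]  = { 0 };                  /* stand-in for the user memory being loaded */
        uintptr_t eaddr   = (uintptr_t)mem;         /* base address, as taken from regs->regs[rn] */

        memcpy(mem, (uint32_t[]){ 1, 2, 3 }, 12);   /* three words to be loaded */

        /* For each register named in the list, load one 32-bit word and step the effective
         * address forward by 4, mirroring the eaddr += 4 steps in do_alignment_ldmstm(). */
        for (int rd = 0; rd < 16; rd++) {
            if (!(reglist & (1u << rd)))
                continue;
            memcpy(&regs[rd], (const void *)eaddr, sizeof(uint32_t));
            eaddr += 4;
        }

        printf("r0=%u r4=%u r15=%u\n", regs[0], regs[4], regs[15]);
        return 0;
    }
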
/linux/arch/arm/mm/
alignment.c
503 unsigned long eaddr, newaddr; in do_alignment_ldmstm() local
517 newaddr = eaddr = regs->uregs[rn]; in do_alignment_ldmstm()
523 eaddr = newaddr; in do_alignment_ldmstm()
526 eaddr += 4; in do_alignment_ldmstm()
540 if (addr != eaddr) { in do_alignment_ldmstm()
543 instruction_pointer(regs), instr, addr, eaddr); in do_alignment_ldmstm()
555 get32t_unaligned_check(val, eaddr); in do_alignment_ldmstm()
558 put32t_unaligned_check(regs->uregs[rd], eaddr); in do_alignment_ldmstm()
559 eaddr += 4; in do_alignment_ldmstm()
568 get32_unaligned_check(val, eaddr); in do_alignment_ldmstm()
[all …]
/linux/fs/freevxfs/
vxfs_olt.c
58 char *oaddr, *eaddr; in vxfs_read_olt() local
81 eaddr = bp->b_data + (infp->vsi_oltsize * sbp->s_blocksize); in vxfs_read_olt()
83 while (oaddr < eaddr) { in vxfs_read_olt()
/linux/arch/powerpc/platforms/pseries/
ras.c
572 unsigned long eaddr = 0, paddr = 0; in mce_handle_err_virtmode() local
642 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
649 pfn = addr_to_pfn(regs, eaddr); in mce_handle_err_virtmode()
670 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
687 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
704 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
725 eaddr = be64_to_cpu(mce_log->effective_address); in mce_handle_err_virtmode()
734 &mce_err, regs->nip, eaddr, paddr); in mce_handle_err_virtmode()
/linux/drivers/slimbus/
core.c
176 struct slim_eaddr *eaddr, in slim_alloc_device() argument
186 sbdev->e_addr = *eaddr; in slim_alloc_device()
349 struct slim_eaddr *eaddr) in find_slim_device()
354 dev = device_find_child(ctrl->dev, eaddr, slim_match_dev); in find_slim_device()
348 find_slim_device(struct slim_controller * ctrl,struct slim_eaddr * eaddr) find_slim_device() argument
/linux/arch/powerpc/include/asm/
kvm_ppc.h
91 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
93 extern int kvmppc_st(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
110 extern int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
111 extern int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
113 gva_t eaddr);
116 extern int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr,
312 int (*load_from_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
314 int (*store_to_eaddr)(struct kvm_vcpu *vcpu, ulong *eaddr, void *ptr,
/linux/net/mac802154/
cfg.c
393 u64 eaddr; in mac802154_disassociate_from_parent() local
406 eaddr = swab64((__force u64)child->extended_addr); in mac802154_disassociate_from_parent()
409 &eaddr, ret); in mac802154_disassociate_from_parent()
418 eaddr = swab64((__force u64)wpan_dev->parent->extended_addr); in mac802154_disassociate_from_parent()
421 &eaddr, ret); in mac802154_disassociate_from_parent()
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
1805 uint64_t eaddr; in amdgpu_vm_bo_map() local
1813 eaddr = saddr + (size - 1) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_vm_bo_map()
1815 tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr); in amdgpu_vm_bo_map()
1819 "0x%010Lx-0x%010Lx\n", bo, saddr, eaddr, in amdgpu_vm_bo_map()
1829 mapping->last = eaddr; in amdgpu_vm_bo_map()
1863 uint64_t eaddr; in amdgpu_vm_bo_replace_map() local
1882 eaddr = saddr + (size - 1) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_vm_bo_replace_map()
1885 mapping->last = eaddr; in amdgpu_vm_bo_replace_map()
1968 uint64_t eaddr; in amdgpu_vm_bo_clear_mappings() local
1976 eaddr = saddr + (size - 1) / AMDGPU_GPU_PAGE_SIZE; in amdgpu_vm_bo_clear_mappings()
[all …]
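
In the amdgpu mappings above, eaddr is the inclusive index of the last GPU page covered by the mapping: dividing size - 1 (rather than size) by the page size keeps a mapping that ends exactly on a page boundary from claiming one extra page. A small worked sketch of that arithmetic, assuming AMDGPU_GPU_PAGE_SIZE is 4 KiB and that saddr has already been converted to page units at this point:

    #include <stdint.h>
    #include <stdio.h>

    #define GPU_PAGE_SIZE 4096ULL   /* assumed AMDGPU_GPU_PAGE_SIZE */

    int main(void)
    {
        uint64_t saddr = 0x100;                /* start of the mapping, in GPU pages */
        uint64_t size  = 2 * GPU_PAGE_SIZE;    /* mapping size in bytes */

        /* Inclusive last page: a size of exactly two pages gives eaddr = saddr + 1. */
        uint64_t eaddr = saddr + (size - 1) / GPU_PAGE_SIZE;

        printf("pages %llu..%llu\n", (unsigned long long)saddr, (unsigned long long)eaddr);
        return 0;
    }
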
/linux/kernel/
kexec_core.c
345 unsigned long pfn, epfn, addr, eaddr; in kimage_alloc_normal_control_pages() local
353 eaddr = (epfn << PAGE_SHIFT) - 1; in kimage_alloc_normal_control_pages()
355 kimage_is_destination_range(image, addr, eaddr)) { in kimage_alloc_normal_control_pages()
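
The control-page allocator above converts a candidate page range [pfn, epfn) into an inclusive byte range before asking whether it overlaps the kexec destination: the last covered byte is one below the end pfn shifted into a byte address. A small sketch of that conversion, assuming 4 KiB pages:

    #include <stdio.h>

    #define PAGE_SHIFT 12   /* assumed 4 KiB pages */

    int main(void)
    {
        unsigned long pfn  = 0x100;            /* first page frame of the candidate block */
        unsigned long epfn = 0x104;            /* one past the last page frame */

        unsigned long addr  = pfn << PAGE_SHIFT;           /* first byte covered */
        unsigned long eaddr = (epfn << PAGE_SHIFT) - 1;    /* last byte covered, inclusive */

        /* [0x100000, 0x103fff]: four pages, ready for the destination-range overlap check. */
        printf("[0x%lx, 0x%lx]\n", addr, eaddr);
        return 0;
    }
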
/linux/arch/arm64/kvm/vgic/
vgic-its.c
901 gpa_t *eaddr) in vgic_its_check_id() argument
931 if (eaddr) in vgic_its_check_id()
932 *eaddr = addr; in vgic_its_check_id()
961 if (eaddr) in vgic_its_check_id()
962 *eaddr = indirect_ptr; in vgic_its_check_id()
2388 gpa_t eaddr; in vgic_its_save_device_tables() local
2391 dev->device_id, &eaddr)) in vgic_its_save_device_tables()
2398 ret = vgic_its_save_dte(its, dev, eaddr); in vgic_its_save_device_tables()
/linux/lib/
debugobjects.c
1071 unsigned long flags, oaddr, saddr, eaddr, paddr, chunks; in __debug_check_no_obj_freed() local
1078 eaddr = saddr + size; in __debug_check_no_obj_freed()
1080 chunks = ((eaddr - paddr) + (ODEBUG_CHUNK_SIZE - 1)); in __debug_check_no_obj_freed()
1092 if (oaddr < saddr || oaddr >= eaddr) in __debug_check_no_obj_freed()
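
The freed-range scan above rounds the span from the chunk-aligned base paddr up to the exclusive end eaddr into whole chunks; adding ODEBUG_CHUNK_SIZE - 1 before the (elided) shift is the usual round-up so a partial trailing chunk still gets scanned. A sketch of that round-up with a hypothetical 1 KiB chunk size:

    #include <stdio.h>

    #define CHUNK_SHIFT 10                      /* hypothetical: 1 KiB chunks */
    #define CHUNK_SIZE  (1UL << CHUNK_SHIFT)
    #define CHUNK_MASK  (~(CHUNK_SIZE - 1))

    int main(void)
    {
        unsigned long saddr = 0x1234;           /* start of the freed region */
        unsigned long size  = 0x900;            /* its length in bytes */

        unsigned long eaddr = saddr + size;         /* exclusive end of the region */
        unsigned long paddr = saddr & CHUNK_MASK;   /* chunk-aligned base */

        /* Round the covered span up to whole chunks: 0xb34 bytes -> 3 chunks. */
        unsigned long chunks = ((eaddr - paddr) + (CHUNK_SIZE - 1)) >> CHUNK_SHIFT;

        printf("scan %lu chunk(s) from 0x%lx\n", chunks, paddr);
        return 0;
    }
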
/linux/drivers/mtd/nand/raw/atmel/
pmecc.c
164 u32 eaddr; member
403 user->cache.eaddr = req->ecc.ooboffset + req->ecc.bytes - 1; in atmel_pmecc_create_user()
785 writel(user->cache.eaddr, pmecc->regs.base + ATMEL_PMECC_EADDR); in atmel_pmecc_enable()
/linux/tools/testing/selftests/powerpc/ptrace/
ptrace-hwbreak.c
217 unsigned long eaddr = (saddr + len - 1) | 0x7; in check_success() local
228 (unsigned long)siginfo.si_addr > eaddr) { in check_success()
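
The selftest check above widens the inclusive end of the watched range to the last byte of its 8-byte doubleword (| 0x7), matching the granularity at which the hardware reports the hit, and then verifies that the reported si_addr does not fall past it. A sketch of that containment test (the lower-bound check, not shown in the excerpt, is included for completeness):

    #include <stdbool.h>
    #include <stdio.h>

    /* Is the reported fault address inside the watched range, widened to 8-byte granularity? */
    static bool hit_in_range(unsigned long saddr, unsigned long len, unsigned long si_addr)
    {
        unsigned long eaddr = (saddr + len - 1) | 0x7;   /* last byte of the final doubleword */

        return si_addr >= saddr && si_addr <= eaddr;
    }

    int main(void)
    {
        /* Watch 4 bytes at 0x1002: eaddr becomes 0x1007, so a hit at 0x1006 still counts. */
        printf("%d %d\n",
               hit_in_range(0x1002, 4, 0x1006),
               hit_in_range(0x1002, 4, 0x1008));
        return 0;
    }
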
/linux/mm/damon/tests/
core-kunit.h
81 unsigned long eaddr[][3] = {{15, 27, 40}, {31, 45, 55}, {23, 44, 66} }; in damon_test_aggregate() local
95 r = damon_new_region(saddr[it][ir], eaddr[it][ir]); in damon_test_aggregate()
