/linux/sound/soc/codecs/lpass-va-macro.c
  453  static int va_clk_rsc_fs_gen_request(struct va_macro *va, bool enable)  [in va_clk_rsc_fs_gen_request(), argument]
  455  struct regmap *regmap = va->regmap;  [in va_clk_rsc_fs_gen_request()]
  486  static int va_macro_mclk_enable(struct va_macro *va, bool mclk_enable)  [in va_macro_mclk_enable(), argument]
  488  struct regmap *regmap = va->regmap;  [in va_macro_mclk_enable()]
  491  va_clk_rsc_fs_gen_request(va, true);  [in va_macro_mclk_enable()]
  495  va_clk_rsc_fs_gen_request(va, false);  [in va_macro_mclk_enable()]
  505  struct va_macro *va = snd_soc_component_get_drvdata(comp);  [in va_macro_mclk_event(), local]
  509  return clk_prepare_enable(va->fsgen);  [in va_macro_mclk_event()]
  511  clk_disable_unprepare(va->fsgen);  [in va_macro_mclk_event()]
  568  struct va_macro *va = snd_soc_component_get_drvdata(component);  [in va_macro_tx_mixer_get(), local]
  [all …]

/linux/mm/vmalloc.c
  971  va_size(struct vmap_area *va)  [in va_size(), argument]
  973  return (va->va_end - va->va_start);  [in va_size()]
  979  struct vmap_area *va;  [in get_subtree_max_size(), local]
  981  va = rb_entry_safe(node, struct vmap_area, rb_node);  [in get_subtree_max_size()]
  982  return va ? va->subtree_max_size : 0;  [in get_subtree_max_size()]
 1007  struct vmap_area *va;  [in __find_vmap_area(), local]
 1009  va = rb_entry(n, struct vmap_area, rb_node);  [in __find_vmap_area()]
 1010  if (addr < va->va_start)  [in __find_vmap_area()]
 1012  else if (addr >= va->va_end)  [in __find_vmap_area()]
 1015  return va;  [in __find_vmap_area()]
  [all …]

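The __find_vmap_area() lines above descend an rb-tree of struct vmap_area nodes keyed on the half-open range [va_start, va_end): an address left of the node goes down one side, an address at or past va_end goes down the other, anything else is a hit. A minimal userspace sketch of that same comparison logic, using an invented struct range and a sorted array in place of the kernel's rb-tree:

/* Userspace analog of the __find_vmap_area() lookup: binary search over
 * [start, end) ranges sorted by address. Struct name and values are invented. */
#include <stdio.h>
#include <stddef.h>

struct range { unsigned long start, end; };   /* [start, end), like va_start/va_end */

static const struct range *find_range(const struct range *r, size_t n, unsigned long addr)
{
	size_t lo = 0, hi = n;

	while (lo < hi) {
		size_t mid = lo + (hi - lo) / 2;

		if (addr < r[mid].start)        /* left of this range: search lower */
			hi = mid;
		else if (addr >= r[mid].end)    /* right of this range: search higher */
			lo = mid + 1;
		else                            /* start <= addr < end: found it */
			return &r[mid];
	}
	return NULL;
}

int main(void)
{
	static const struct range map[] = {
		{ 0x1000, 0x3000 }, { 0x8000, 0x9000 }, { 0x10000, 0x14000 },
	};
	const struct range *hit = find_range(map, 3, 0x8123);

	printf("0x8123 -> %s\n", hit ? "found" : "not found");
	return 0;
}
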
/linux/drivers/gpu/drm/nouveau/nouveau_uvmm.c
   70  } va;  [member]
  174  u64 addr = reg->va.addr;  [in nouveau_uvma_region_sparse_unref()]
  175  u64 range = reg->va.range;  [in nouveau_uvma_region_sparse_unref()]
  183  u64 addr = uvma->va.va.addr;  [in nouveau_uvma_vmm_put()]
  184  u64 range = uvma->va.va.range;  [in nouveau_uvma_vmm_put()]
  193  u64 addr = uvma->va.va.addr;  [in nouveau_uvma_map()]
  194  u64 offset = uvma->va.gem.offset;  [in nouveau_uvma_map()]
  195  u64 range = uvma->va.va.range;  [in nouveau_uvma_map()]
  204  u64 addr = uvma->va.va.addr;  [in nouveau_uvma_unmap()]
  205  u64 range = uvma->va.va.range;  [in nouveau_uvma_unmap()]
  [all …]

/linux/drivers/gpu/drm/drm_gpuvm.c
  870  #define GPUVA_START(node) ((node)->va.addr)
  871  #define GPUVA_LAST(node) ((node)->va.addr + (node)->va.range - 1)
  881  struct drm_gpuva *va);
  882  static void __drm_gpuva_remove(struct drm_gpuva *va);
  913  u64 kstart = gpuvm->kernel_alloc_node.va.addr;  [in drm_gpuvm_in_kernel_node()]
  914  u64 krange = gpuvm->kernel_alloc_node.va.range;  [in drm_gpuvm_in_kernel_node()]
 1030  gpuvm->kernel_alloc_node.va.addr = reserve_offset;  [in drm_gpuvm_init()]
 1031  gpuvm->kernel_alloc_node.va.range = reserve_range;  [in drm_gpuvm_init()]
 1045  if (gpuvm->kernel_alloc_node.va.range)  [in drm_gpuvm_fini()]
 1215  struct drm_gpuva *va;  [in drm_gpuvm_prepare_range(), local]
  [all …]

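The GPUVA_START()/GPUVA_LAST() macros above turn a {addr, range} mapping into the inclusive interval [addr, addr + range - 1] that the interval tree indexes. A small sketch of that arithmetic and the overlap test it enables, with an invented struct span standing in for struct drm_gpuva (illustration only, not the DRM API):

/* Inclusive-last interval arithmetic, as GPUVA_LAST() computes it. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct span { uint64_t addr, range; };

static uint64_t span_last(const struct span *s)
{
	return s->addr + s->range - 1;    /* last byte covered by the mapping */
}

static bool spans_overlap(const struct span *a, const struct span *b)
{
	/* Two inclusive intervals overlap iff each starts at or before the other's last byte. */
	return a->addr <= span_last(b) && b->addr <= span_last(a);
}

int main(void)
{
	struct span a = { 0x100000, 0x1000 }, b = { 0x100800, 0x1000 };

	printf("last(a)=0x%llx overlap=%d\n",
	       (unsigned long long)span_last(&a), spans_overlap(&a, &b));
	return 0;
}
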
/linux/drivers/scsi/qedi/qedi_dbg.c
   14  va_list va;  [in qedi_dbg_err(), local]
   17  va_start(va, fmt);  [in qedi_dbg_err()]
   20  vaf.va = &va;  [in qedi_dbg_err()]
   28  va_end(va);  [in qedi_dbg_err()]
   35  va_list va;  [in qedi_dbg_warn(), local]
   38  va_start(va, fmt);  [in qedi_dbg_warn()]
   41  vaf.va = &va;  [in qedi_dbg_warn()]
   53  va_end(va);  [in qedi_dbg_warn()]
   60  va_list va;  [in qedi_dbg_notice(), local]
   63  va_start(va, fmt);  [in qedi_dbg_notice()]
  [all …]

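qedi_dbg_err()/warn()/notice() all follow the same shape: bind the variadic arguments with va_start(), hand the va_list to a printf-style consumer (in the kernel via struct va_format and the %pV specifier), then release it with va_end(). A standalone userspace sketch of that forwarding pattern, with an invented dbg_err() helper and vfprintf() standing in for printk():

/* Minimal va_list forwarding, mirroring the qedi_dbg_* structure. */
#include <stdarg.h>
#include <stdio.h>

static void dbg_err(const char *func, int line, const char *fmt, ...)
{
	va_list va;

	va_start(va, fmt);                      /* bind va to the variadic args */
	fprintf(stderr, "[%s:%d] ", func, line);
	vfprintf(stderr, fmt, va);              /* forward them to a v* consumer */
	va_end(va);                             /* always paired with va_start() */
}

int main(void)
{
	dbg_err(__func__, __LINE__, "bad status %d on lun %u\n", -5, 3u);
	return 0;
}
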
/linux/include/drm/drm_gpuvm.h
  103  } va;  [member]
  150  int drm_gpuva_insert(struct drm_gpuvm *gpuvm, struct drm_gpuva *va);
  151  void drm_gpuva_remove(struct drm_gpuva *va);
  153  void drm_gpuva_link(struct drm_gpuva *va, struct drm_gpuvm_bo *vm_bo);
  154  void drm_gpuva_unlink(struct drm_gpuva *va);
  163  static inline void drm_gpuva_init(struct drm_gpuva *va, u64 addr, u64 range,  [in drm_gpuva_init(), argument]
  166  va->va.addr = addr;  [in drm_gpuva_init()]
  167  va->va.range = range;  [in drm_gpuva_init()]
  168  va->gem.obj = obj;  [in drm_gpuva_init()]
  169  va->gem.offset = offset;  [in drm_gpuva_init()]
  [all …]

/linux/drivers/scsi/qedf/qedf_dbg.c
   13  va_list va;  [in qedf_dbg_err(), local]
   16  va_start(va, fmt);  [in qedf_dbg_err()]
   19  vaf.va = &va;  [in qedf_dbg_err()]
   27  va_end(va);  [in qedf_dbg_err()]
   34  va_list va;  [in qedf_dbg_warn(), local]
   37  va_start(va, fmt);  [in qedf_dbg_warn()]
   40  vaf.va = &va;  [in qedf_dbg_warn()]
   52  va_end(va);  [in qedf_dbg_warn()]
   59  va_list va;  [in qedf_dbg_notice(), local]
   62  va_start(va, fmt);  [in qedf_dbg_notice()]
  [all …]

/linux/drivers/gpu/drm/i915/gt/selftest_tlb.c
   33  struct i915_vma *va,  [in pte_tlbinv(), argument]
   68  va->size, align);  [in pte_tlbinv()]
   69  err = i915_vma_pin(va, 0, 0, addr | PIN_OFFSET_FIXED | PIN_USER);  [in pte_tlbinv()]
   71  pr_err("Cannot pin at %llx+%llx\n", addr, va->size);  [in pte_tlbinv()]
   74  GEM_BUG_ON(i915_vma_offset(va) != addr);  [in pte_tlbinv()]
   75  if (vb != va) {  [in pte_tlbinv()]
   77  vb->node = va->node; /* overwrites the _same_ PTE */  [in pte_tlbinv()]
   93  if (va != vb)  [in pte_tlbinv()]
   95  ce->engine->name, va->obj->mm.region->name ?: "smem",  [in pte_tlbinv()]
   96  addr, align, va->resource->page_sizes_gtt,  [in pte_tlbinv()]
  [all …]

/linux/drivers/dio/dio.c
  127  void *va;  [in dio_find(), local]
  139  va = (void *)(pa + DIO_VIRADDRBASE);  [in dio_find()]
  141  va = ioremap(pa, PAGE_SIZE);  [in dio_find()]
  144  (unsigned char *)va + DIO_IDOFF, 1)) {  [in dio_find()]
  146  iounmap(va);  [in dio_find()]
  150  prid = DIO_ID(va);  [in dio_find()]
  153  secid = DIO_SECID(va);  [in dio_find()]
  160  iounmap(va);  [in dio_find()]
  200  u_char *va;  [in dio_init(), local]
  212  va = (void *)(pa + DIO_VIRADDRBASE);  [in dio_init()]
  [all …]

/linux/tools/testing/selftests/kvm/lib/ucall_common.c
   86  va_list va;  [in ucall_assert(), local]
   95  va_start(va, fmt);  [in ucall_assert()]
   96  guest_vsnprintf(uc->buffer, UCALL_BUFFER_LEN, fmt, va);  [in ucall_assert()]
   97  va_end(va);  [in ucall_assert()]
  107  va_list va;  [in ucall_fmt(), local]
  112  va_start(va, fmt);  [in ucall_fmt()]
  113  guest_vsnprintf(uc->buffer, UCALL_BUFFER_LEN, fmt, va);  [in ucall_fmt()]
  114  va_end(va);  [in ucall_fmt()]
  124  va_list va;  [in ucall(), local]
  133  va_start(va, nargs);  [in ucall()]
  [all …]

/linux/arch/alpha/kernel/traps.c
  368  unsigned long count, va, pc;  [member]
  377  do_entUna(void * va, unsigned long opcode, unsigned long reg,  [in do_entUna(), argument]
  386  unaligned[0].va = (unsigned long) va;  [in do_entUna()]
  404  : "r"(va), "0"(0));  [in do_entUna()]
  420  : "r"(va), "0"(0));  [in do_entUna()]
  436  : "r"(va), "0"(0));  [in do_entUna()]
  464  : "r"(va), "r"(una_reg(reg)), "0"(0));  [in do_entUna()]
  488  : "r"(va), "r"(una_reg(reg)), "0"(0));  [in do_entUna()]
  512  : "r"(va), "r"(una_reg(reg)), "0"(0));  [in do_entUna()]
  519  pc, va, opcode, reg);  [in do_entUna()]
  [all …]

/linux/arch/powerpc/mm/book3s64/hash_native.c
   72  unsigned long va;  [in ___tlbie(), local]
   83  va = vpn << VPN_SHIFT;  [in ___tlbie()]
   90  va &= ~(0xffffULL << 48);  [in ___tlbie()]
   95  va &= ~((1ul << (64 - 52)) - 1);  [in ___tlbie()]
   96  va |= ssize << 8;  [in ___tlbie()]
   98  va |= sllp << 5;  [in ___tlbie()]
  100  : : "r" (va), "r"(0), "i" (CPU_FTR_ARCH_206)  [in ___tlbie()]
  106  va &= ~((1ul << mmu_psize_defs[apsize].shift) - 1);  [in ___tlbie()]
  107  va |= penc << 12;  [in ___tlbie()]
  108  va |= ssize << 8;  [in ___tlbie()]
  [all …]

/linux/arch/powerpc/mm/book3s64/radix_tlb.c
  158  static __always_inline void __tlbiel_va(unsigned long va, unsigned long pid,  [in __tlbiel_va(), argument]
  163  rb = va & ~(PPC_BITMASK(52, 63));  [in __tlbiel_va()]
  174  static __always_inline void __tlbie_va(unsigned long va, unsigned long pid,  [in __tlbie_va(), argument]
  179  rb = va & ~(PPC_BITMASK(52, 63));  [in __tlbie_va()]
  190  static __always_inline void __tlbie_lpid_va(unsigned long va, unsigned long lpid,  [in __tlbie_lpid_va(), argument]
  195  rb = va & ~(PPC_BITMASK(52, 63));  [in __tlbie_lpid_va()]
  207  static inline void fixup_tlbie_va(unsigned long va, unsigned long pid,  [in fixup_tlbie_va(), argument]
  212  __tlbie_va(va, 0, ap, RIC_FLUSH_TLB);  [in fixup_tlbie_va()]
  217  __tlbie_va(va, pid, ap, RIC_FLUSH_TLB);  [in fixup_tlbie_va()]
  221  static inline void fixup_tlbie_va_range(unsigned long va, unsigned long pid,  [in fixup_tlbie_va_range(), argument]
  [all …]

/linux/fs/ceph/ceph_frag.c
   10  unsigned va = ceph_frag_value(a);  [in ceph_frag_compare(), local]
   12  if (va < vb)  [in ceph_frag_compare()]
   14  if (va > vb)  [in ceph_frag_compare()]
   16  va = ceph_frag_bits(a);  [in ceph_frag_compare()]
   18  if (va < vb)  [in ceph_frag_compare()]
   20  if (va > vb)  [in ceph_frag_compare()]

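ceph_frag_compare() above orders fragments by value first and only falls back to the bit count on a tie. The same two-key comparator shape, sketched as a userspace qsort() callback with invented fields standing in for ceph_frag_value()/ceph_frag_bits():

/* Two-key comparator: primary key "value", tie-breaker "bits". */
#include <stdio.h>
#include <stdlib.h>

struct frag { unsigned value, bits; };   /* invented stand-ins for illustration */

static int frag_compare(const void *pa, const void *pb)
{
	const struct frag *a = pa, *b = pb;

	if (a->value < b->value)   /* primary key */
		return -1;
	if (a->value > b->value)
		return 1;
	if (a->bits < b->bits)     /* tie-breaker, as in ceph_frag_compare() */
		return -1;
	if (a->bits > b->bits)
		return 1;
	return 0;
}

int main(void)
{
	struct frag f[] = { { 2, 4 }, { 1, 8 }, { 2, 1 } };

	qsort(f, 3, sizeof(f[0]), frag_compare);
	printf("%u/%u %u/%u %u/%u\n", f[0].value, f[0].bits,
	       f[1].value, f[1].bits, f[2].value, f[2].bits);
	return 0;
}
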
/linux/arch/powerpc/mm/nohash/8xx.c
   26  phys_addr_t v_block_mapped(unsigned long va)  [in v_block_mapped(), argument]
   30  if (va >= VIRT_IMMR_BASE && va < VIRT_IMMR_BASE + IMMR_SIZE)  [in v_block_mapped()]
   31  return p + va - VIRT_IMMR_BASE;  [in v_block_mapped()]
   32  if (va >= PAGE_OFFSET && va < PAGE_OFFSET + block_mapped_ram)  [in v_block_mapped()]
   33  return __pa(va);  [in v_block_mapped()]
   52  static int __ref __early_map_kernel_hugepage(unsigned long va, phys_addr_t pa,  [in __early_map_kernel_hugepage(), argument]
   55  pmd_t *pmdp = pmd_off_k(va);  [in __early_map_kernel_hugepage()]
   66  ptep = early_pte_alloc_kernel(pmdp, va);  [in __early_map_kernel_hugepage()]
   84  ptep = pte_offset_kernel(pmdp, va);  [in __early_map_kernel_hugepage()]
   92  set_huge_pte_at(&init_mm, va, ptep,  [in __early_map_kernel_hugepage()]

/linux/lib/test_debug_virtual.c
   25  void *va;  [in test_debug_virtual_init(), local]
   27  va = (void *)VMALLOC_START;  [in test_debug_virtual_init()]
   28  pa = virt_to_phys(va);  [in test_debug_virtual_init()]
   30  pr_info("PA: %pa for VA: 0x%lx\n", &pa, (unsigned long)va);  [in test_debug_virtual_init()]
   37  va = foo;  [in test_debug_virtual_init()]
   38  pr_info("PA: %pa for VA: 0x%lx\n", &pa, (unsigned long)va);  [in test_debug_virtual_init()]

/linux/arch/riscv/include/asm/sections.h
   18  static inline bool is_va_kernel_text(uintptr_t va)  [in is_va_kernel_text(), argument]
   23  return va >= start && va < end;  [in is_va_kernel_text()]
   26  static inline bool is_va_kernel_lm_alias_text(uintptr_t va)  [in is_va_kernel_lm_alias_text(), argument]
   31  return va >= start && va < end;  [in is_va_kernel_lm_alias_text()]

/linux/tools/testing/selftests/proc/proc-self-map-files-002.c
   55  unsigned long va;  [in main(), local]
   64  for (va = 0; va < va_max; va += PAGE_SIZE) {  [in main()]
   65  p = mmap((void *)va, PAGE_SIZE, PROT_NONE, MAP_PRIVATE|MAP_FILE|MAP_FIXED, fd, 0);  [in main()]
   66  if (p == (void *)va)  [in main()]
   69  if (va == va_max) {  [in main()]

/linux/drivers/media/platform/mediatek/vcodec/decoder/vdec/vdec_vp9_if.c
   37  unsigned long va;  [member]
  227  if (fb->base_y.va == addr) {  [in vp9_rm_from_fb_use_list()]
  278  vsi->frm_bufs[ref_idx].buf.fb->base_y.va);  [in vp9_ref_cnt_fb()]
  296  if (vsi->sf_ref_fb[i].fb.base_y.va) {  [in vp9_free_all_sf_ref_fb()]
  321  if (vsi->sf_ref_fb[idx].fb.base_y.va &&  [in vp9_get_sf_ref_fb()]
  330  if (vsi->sf_ref_fb[idx].fb.base_y.va == NULL)  [in vp9_get_sf_ref_fb()]
  391  if (mem->va)  [in vp9_alloc_work_buf()]
  403  vsi->mv_buf.va = (unsigned long)mem->va;  [in vp9_alloc_work_buf()]
  409  if (mem->va)  [in vp9_alloc_work_buf()]
  420  vsi->seg_id_buf.va = (unsigned long)mem->va;  [in vp9_alloc_work_buf()]
  [all …]

/linux/arch/arm/mach-omap1/irq.c
   59  void __iomem *va;  [member]
   71  return readl_relaxed(irq_banks[bank].va + offset);  [in irq_bank_readl()]
   75  writel_relaxed(value, irq_banks[bank].va + offset);  [in irq_bank_writel()]
   81  writel_relaxed(0x1, irq_banks[1].va + IRQ_CONTROL_REG_OFFSET);  [in omap_ack_irq()]
   83  writel_relaxed(0x1, irq_banks[0].va + IRQ_CONTROL_REG_OFFSET);  [in omap_ack_irq()]
  137  void __iomem *l1 = irq_banks[0].va;  [in omap1_handle_irq()]
  138  void __iomem *l2 = irq_banks[1].va;  [in omap1_handle_irq()]
  208  irq_banks[i].va = ioremap(irq_banks[i].base_reg, 0xff);  [in omap1_init_irq()]
  209  if (WARN_ON(!irq_banks[i].va))  [in omap1_init_irq()]
  248  omap_alloc_gc(irq_banks[i].va, irq_base + i * 32, 32);  [in omap1_init_irq()]

/linux/drivers/tee/amdtee/shm_pool.c
   15  unsigned long va;  [in pool_op_alloc(), local]
   22  va = __get_free_pages(GFP_KERNEL | __GFP_ZERO, order);  [in pool_op_alloc()]
   23  if (!va)  [in pool_op_alloc()]
   26  shm->kaddr = (void *)va;  [in pool_op_alloc()]
   27  shm->paddr = __psp_pa((void *)va);  [in pool_op_alloc()]
   33  free_pages(va, order);  [in pool_op_alloc()]

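pool_op_alloc() above hands __get_free_pages() an order, i.e. a power-of-two count of contiguous pages, and must pass the same order back to free_pages(). A userspace sketch of how a byte size maps to such an order (the PAGE_SIZE value and helper name are assumptions for illustration; in the kernel this rounding is what get_order() provides):

/* Round a byte size up to the smallest order n with 2^n pages >= size. */
#include <stdio.h>

#define PAGE_SIZE 4096UL

static unsigned int size_to_order(unsigned long size)
{
	unsigned long pages = (size + PAGE_SIZE - 1) / PAGE_SIZE;   /* whole pages needed */
	unsigned int order = 0;

	while ((1UL << order) < pages)    /* smallest power of two covering the request */
		order++;
	return order;
}

int main(void)
{
	printf("8 KiB -> order %u, 20 KiB -> order %u\n",
	       size_to_order(8192), size_to_order(20480));
	return 0;
}
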
/linux/arch/arm64/crypto/aes-ce-ccm-core.S
   26  .macro dround, va, vb, vk
   27  aese \va\().16b, \vk\().16b
   28  aesmc \va\().16b, \va\().16b
   33  .macro aes_encrypt, va, vb, nr
   35  dround \va, \vb, v10
   36  dround \va, \vb, v11
   38  dround \va, \vb, v12
   39  dround \va, \vb, v13
   41  dround \va, \vb, \v
   43  aese \va\().16b, v4.16b

/linux/arch/powerpc/math-emu/math_efp.c
  183  union dw_union vc, va, vb;  [in do_spe_mathemu(), local]
  202  va.wp[0] = current->thread.evr[fa];  [in do_spe_mathemu()]
  203  va.wp[1] = regs->gpr[fa];  [in do_spe_mathemu()]
  211  pr_debug("va: %08x %08x\n", va.wp[0], va.wp[1]);  [in do_spe_mathemu()]
  221  FP_UNPACK_SP(SA, va.wp + 1);  [in do_spe_mathemu()]
  227  FP_UNPACK_SP(SA, va.wp + 1);  [in do_spe_mathemu()]
  236  vc.wp[1] = va.wp[1] & ~SIGN_BIT_S;  [in do_spe_mathemu()]
  240  vc.wp[1] = va.wp[1] | SIGN_BIT_S;  [in do_spe_mathemu()]
  244  vc.wp[1] = va.wp[1] ^ SIGN_BIT_S;  [in do_spe_mathemu()]
  350  FP_UNPACK_DP(DA, va.dp);  [in do_spe_mathemu()]
  [all …]

/linux/drivers/infiniband/sw/rxe/rxe_mr.c
  252  void *va;  [in rxe_mr_copy_xarray(), local]
  261  va = kmap_local_page(page);  [in rxe_mr_copy_xarray()]
  263  memcpy(addr, va + page_offset, bytes);  [in rxe_mr_copy_xarray()]
  265  memcpy(va + page_offset, addr, bytes);  [in rxe_mr_copy_xarray()]
  266  kunmap_local(va);  [in rxe_mr_copy_xarray()]
  283  u8 *va;  [in rxe_mr_copy_dma(), local]
  289  va = kmap_local_page(page);  [in rxe_mr_copy_dma()]
  292  memcpy(va + page_offset, addr, bytes);  [in rxe_mr_copy_dma()]
  294  memcpy(addr, va + page_offset, bytes);  [in rxe_mr_copy_dma()]
  296  kunmap_local(va);  [in rxe_mr_copy_dma()]
  [all …]

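rxe_mr_copy_xarray() and rxe_mr_copy_dma() above both rely on the short-lived kmap_local_page()/kunmap_local() pair to reach page contents through a temporary kernel mapping. A kernel-context sketch of that map, copy-at-offset, unmap sequence (not standalone-runnable; the helper and its arguments are invented for illustration):

/* Illustrative helper: copy len bytes out of a page at a given offset. */
#include <linux/highmem.h>
#include <linux/string.h>

static void copy_from_page(struct page *page, unsigned int offset,
			   void *dst, unsigned int len)
{
	void *va = kmap_local_page(page);   /* short-lived, context-local mapping */

	memcpy(dst, va + offset, len);
	kunmap_local(va);                   /* drop the mapping as soon as the copy is done */
}
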
/linux/arch/xtensa/mm/ioremap.c
   25  unsigned long va = (unsigned long) addr;  [in iounmap(), local]
   27  if ((va >= XCHAL_KIO_CACHED_VADDR &&  [in iounmap()]
   28  va - XCHAL_KIO_CACHED_VADDR < XCHAL_KIO_SIZE) ||  [in iounmap()]
   29  (va >= XCHAL_KIO_BYPASS_VADDR &&  [in iounmap()]
   30  va - XCHAL_KIO_BYPASS_VADDR < XCHAL_KIO_SIZE))  [in iounmap()]