/linux/arch/powerpc/mm/book3s64/

  iommu_api.c
    in mm_iommu_do_alloc():
       60  struct mm_iommu_table_group_mem_t *mem, *mem2;   (local)
       73  mem = kzalloc(sizeof(*mem), GFP_KERNEL);
       74  if (!mem) {
       80  mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));
       81  mem->dev_hpa = dev_hpa;
       84  mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA;
       91  mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));
       92  mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0])));
       93  if (!mem->hpas) {
       94  kfree(mem);
    [all …]

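The iommu_api.c hits show a common kernel allocation pattern: kzalloc() for a small fixed-size descriptor, vzalloc(array_size(...)) for the potentially large per-entry table, and kfree() of the descriptor if the second allocation fails. A minimal kernel-context sketch of that pattern follows; the struct and function names are invented for illustration, not the real mm_iommu_* API.

    #include <linux/overflow.h>
    #include <linux/slab.h>
    #include <linux/vmalloc.h>

    /* Hypothetical descriptor; not the real mm_iommu_table_group_mem_t. */
    struct demo_mem {
        unsigned long entries;
        u64 *hpas;                      /* one host physical address per entry */
    };

    static struct demo_mem *demo_mem_alloc(unsigned long entries)
    {
        struct demo_mem *mem;

        mem = kzalloc(sizeof(*mem), GFP_KERNEL);        /* small, fixed size */
        if (!mem)
            return NULL;

        mem->entries = entries;
        /* array_size() protects against multiplication overflow */
        mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0])));
        if (!mem->hpas) {
            kfree(mem);                 /* unwind the first allocation */
            return NULL;
        }
        return mem;
    }

    static void demo_mem_free(struct demo_mem *mem)
    {
        vfree(mem->hpas);
        kfree(mem);
    }
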
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/

  mem.c
       23  #include "mem.h"
       35  struct page **mem;   (member)
    in nvkm_mem_addr():
       57  struct nvkm_mem *mem = nvkm_mem(memory);   (local)
       58  if (mem->pages == 1 && mem->mem)
       59  return mem->dma[0];
    in nvkm_mem_map_dma():
       73  struct nvkm_mem *mem = nvkm_mem(memory);   (local)
       75  .memory = &mem->memory,
       77  .dma = mem->dma,
    in nvkm_mem_dtor():
       85  struct nvkm_mem *mem = nvkm_mem(memory);   (local)
       86  if (mem->mem) {
    [all …]

/linux/drivers/infiniband/sw/siw/

  siw_mem.c
    in siw_mem_id2obj():
       29  struct siw_mem *mem;   (local)
       32  mem = xa_load(&sdev->mem_xa, stag_index);
       33  if (likely(mem && kref_get_unless_zero(&mem->ref))) {
       35  return mem;
    in siw_mr_add_mem():
       61  struct siw_mem *mem = kzalloc(sizeof(*mem), GFP_KERNEL);   (local)
       65  if (!mem)
       68  mem->mem_obj = mem_obj;
       69  mem->stag_valid = 0;
       70  mem->sdev = sdev;
       71  mem->va = start;
    [all …]

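siw_mem_id2obj() illustrates the usual XArray lookup idiom: fetch the object with xa_load() and hand it out only if kref_get_unless_zero() succeeds, so an object whose last reference is already being dropped is never returned. A hedged sketch with invented names; lookups of this kind are normally done under rcu_read_lock(), as shown.

    #include <linux/container_of.h>
    #include <linux/kref.h>
    #include <linux/rcupdate.h>
    #include <linux/slab.h>
    #include <linux/xarray.h>

    /* Hypothetical refcounted object; stands in for struct siw_mem. */
    struct demo_obj {
        struct kref ref;
        u32 index;
    };

    static DEFINE_XARRAY(demo_xa);

    /* Return a referenced object for 'index', or NULL if absent or dying. */
    static struct demo_obj *demo_lookup(u32 index)
    {
        struct demo_obj *obj;

        rcu_read_lock();
        obj = xa_load(&demo_xa, index);
        if (obj && !kref_get_unless_zero(&obj->ref))
            obj = NULL;         /* found, but its last reference is going away */
        rcu_read_unlock();

        return obj;
    }

    static void demo_release(struct kref *ref)
    {
        kfree(container_of(ref, struct demo_obj, ref));
    }

    static void demo_put(struct demo_obj *obj)
    {
        kref_put(&obj->ref, demo_release);
    }
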
/linux/drivers/gpu/drm/nouveau/

  nouveau_mem.c
    in nouveau_mem_map():
       37  nouveau_mem_map(struct nouveau_mem *mem,   (argument)
       53  args.nv50.kind = mem->kind;
       54  args.nv50.comp = mem->comp;
       61  if (mem->mem.type & NVIF_MEM_VRAM)
       67  args.gf100.kind = mem->kind;
       75  return nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem, 0);
    in nouveau_mem_fini():
       79  nouveau_mem_fini(struct nouveau_mem *mem)   (argument)
       81  nvif_vmm_put(&mem->drm->client.vmm.vmm, &mem->vma[1]);
       82  nvif_vmm_put(&mem->drm->client.vmm.vmm, &mem->vma[0]);
       83  mutex_lock(&mem->drm->client_mutex);
    [all …]

  nouveau_vmm.c
    in nouveau_vma_unmap():
       31  if (vma->mem) {
       33  vma->mem = NULL;
    in nouveau_vma_map():
       38  nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)   (argument)
       41  int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
       44  vma->mem = mem;
    in nouveau_vma_new():
       80  struct nouveau_mem *mem = nouveau_mem(nvbo->bo.resource);   (local)
       95  vma->mem = NULL;
      100  mem->mem.page == nvbo->page) {
      101  ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,
      102  mem->mem.size, &tmp);
    [all …]

/linux/kernel/dma/

  coherent.c
    in dma_get_device_base():
       31  struct dma_coherent_mem * mem)   (argument)
       33  if (mem->use_dev_dma_pfn_offset)
       34  return phys_to_dma(dev, PFN_PHYS(mem->pfn_base));
       35  return mem->device_base;
    in _dma_release_coherent_memory():
       77  static void _dma_release_coherent_memory(struct dma_coherent_mem *mem)   (argument)
       79  if (!mem)
       82  memunmap(mem->virt_base);
       83  bitmap_free(mem->bitmap);
       84  kfree(mem);
    in dma_assign_coherent_memory():
       88  struct dma_coherent_mem *mem)   (argument)
    [all …]

  swiotlb.c
    in swiotlb_print_info():
      231  struct io_tlb_pool *mem = &io_tlb_default_mem.defpool;   (local)
      233  if (!mem->nslabs) {
      234  pr_warn("No low mem\n");
      238  pr_info("mapped [mem %pa-%pa] (%luMB)\n", &mem->start, &mem->end,
      239  (mem->nslabs << IO_TLB_SHIFT) >> 20);
    in swiotlb_update_mem_attributes():
      260  struct io_tlb_pool *mem = &io_tlb_default_mem.defpool;   (local)
      263  if (!mem->nslabs || mem->late_alloc)
      265  bytes = PAGE_ALIGN(mem->nslabs << IO_TLB_SHIFT);
      266  set_memory_decrypted((unsigned long)mem->vaddr, bytes >> PAGE_SHIFT);
    in swiotlb_init_io_tlb_pool():
      269  static void swiotlb_init_io_tlb_pool(struct io_tlb_pool *mem, phys_addr_t start,   (argument)
    [all …]

/linux/tools/testing/selftests/mm/

  pagemap_ioctl.c
    in gethugetlb_mem():
      163  char *mem;   (local)
      170  mem = shmat(*shmid, 0, 0);
      171  if (mem == (char *)-1) {
      176  mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
      178  if (mem == MAP_FAILED)
      182  return mem;
    in userfaultfd_tests():
      188  char *mem, *vec;   (local)
      191  mem = mmap(NULL, mem_size, PROT_NONE, MAP_PRIVATE | MAP_ANON, -1, 0);
      192  if (mem == MAP_FAILED)
      195  wp_init(mem, mem_size);
    [all …]

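gethugetlb_mem() in the selftest tries SysV shared memory backed by huge pages first (shmget() with SHM_HUGETLB, then shmat()) and falls back to an anonymous MAP_HUGETLB mapping. A small userspace sketch of the same two-step approach; it assumes huge pages are reserved on the system (e.g. /proc/sys/vm/nr_hugepages > 0) and a 2 MiB huge page size.

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <sys/ipc.h>
    #include <sys/mman.h>
    #include <sys/shm.h>

    /* Map 'size' bytes of huge-page memory: prefer SysV SHM, fall back to mmap. */
    static char *get_hugetlb_mem(size_t size, int *shmid)
    {
        char *mem;

        *shmid = shmget(IPC_PRIVATE, size, IPC_CREAT | SHM_HUGETLB | 0600);
        if (*shmid >= 0) {
            mem = shmat(*shmid, NULL, 0);
            if (mem != (char *)-1)
                return mem;
            shmctl(*shmid, IPC_RMID, NULL);     /* shmat failed, drop the segment */
        }

        *shmid = -1;
        mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS | MAP_HUGETLB, -1, 0);
        return mem == MAP_FAILED ? NULL : mem;
    }

    int main(void)
    {
        int shmid;
        char *mem = get_hugetlb_mem(2UL << 20, &shmid);     /* one 2 MiB huge page */

        if (!mem) {
            perror("hugetlb");
            return 1;
        }
        mem[0] = 1;             /* touch it so a huge page is actually faulted in */
        puts("got hugetlb memory");
        return 0;
    }
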
  mkdirty.c
    in do_test_write_sigsegv():
       41  static void do_test_write_sigsegv(char *mem)   (argument)
       43  char orig = *mem;
       53  *mem = orig + 1;
       58  ksft_test_result(ret == 1 && *mem == orig,
    in mmap_thp_range():
       65  char *mem, *mmap_mem;   (local)
       73  mem = (char *)(((uintptr_t)mmap_mem + thpsize) & ~(thpsize - 1));
       75  if (madvise(mem, thpsize, MADV_HUGEPAGE)) {
       83  return mem;
    in test_ptrace_write():
       89  char *mem;   (local)
       94  mem = mmap(NULL, pagesize, PROT_READ, MAP_PRIVATE|MAP_ANON, -1, 0);
    [all …]

  cow.c
    in child_memcmp_fn():
      137  static int child_memcmp_fn(char *mem, size_t size,   (argument)
      144  memcpy(old, mem, size);
      152  return memcmp(old, mem, size);
    in child_vmsplice_memcmp_fn():
      155  static int child_vmsplice_memcmp_fn(char *mem, size_t size,   (argument)
      159  .iov_base = mem,
      171  memcpy(old, mem, size);
      184  if (munmap(mem, size) < 0)
      202  typedef int (*child_fn)(char *mem, size_t size, struct comm_pipes *comm_pipes);
    in do_test_cow_in_parent():
      204  static void do_test_cow_in_parent(char *mem, size_t size, bool do_mprotect,   (argument)
      223  exit(fn(mem, size, &comm_pipes));
    [all …]

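The cow.c hits outline the shape of the COW tests: the child snapshots the shared anonymous mapping (memcpy(), or pins it with vmsplice() in the vmsplice variant), the parent then writes to it, and the child verifies with memcmp() that its view did not change, i.e. the write really was copy-on-write. A stripped-down userspace sketch of the fork-and-memcmp variant; unlike the real test it has no comm_pipes handshake, just a crude sleep, so it is only illustrative.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <sys/types.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void)
    {
        size_t size = 4096;
        static char old[4096];
        char *mem;
        pid_t pid;
        int status;

        mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (mem == MAP_FAILED)
            return 1;
        memset(mem, 0xaa, size);

        pid = fork();
        if (pid == 0) {
            /* Child: snapshot, give the parent time to write, then compare. */
            memcpy(old, mem, size);
            sleep(1);
            exit(memcmp(old, mem, size) ? 1 : 0);
        }

        memset(mem, 0xbb, size);        /* parent write must not leak into the child */
        waitpid(pid, &status, 0);
        printf("COW %s\n", WEXITSTATUS(status) == 0 ? "works" : "is broken");
        return 0;
    }
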
  memfd_secret.c
    in test_mlock_limit():
       63  char *mem;   (local)
       69  mem = mmap(NULL, len, prot, mode, fd, 0);
       70  if (mem == MAP_FAILED) {
       74  munmap(mem, len);
       77  mem = mmap(NULL, len, prot, mode, fd, 0);
       78  if (mem != MAP_FAILED) {
       80  munmap(mem, len);
    in test_vmsplice():
       92  char *mem;   (local)
       99  mem = mmap(NULL, page_size, prot, mode, fd, 0);
      100  if (mem == MAP_FAILED) {
    [all …]

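memfd_secret.c exercises the memfd_secret(2) system call, whose pages are locked and removed from the kernel direct map; the tests then check that the mlock limit is enforced and that vmsplice() cannot leak the contents. There is no glibc wrapper, so it is invoked through syscall(). A minimal sketch, assuming a kernel built with CONFIG_SECRETMEM; the fallback __NR_memfd_secret value below is the x86_64 number.

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    #ifndef __NR_memfd_secret
    #define __NR_memfd_secret 447           /* x86_64; differs on other architectures */
    #endif

    int main(void)
    {
        long len = sysconf(_SC_PAGESIZE);
        int fd = syscall(__NR_memfd_secret, 0);
        char *mem;

        if (fd < 0) {
            perror("memfd_secret");         /* old kernel or CONFIG_SECRETMEM=n */
            return 1;
        }
        if (ftruncate(fd, len) < 0)
            return 1;

        /* Pages are locked and dropped from the kernel's direct map. */
        mem = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
        if (mem == MAP_FAILED) {
            perror("mmap");                 /* e.g. RLIMIT_MEMLOCK too low */
            return 1;
        }
        strcpy(mem, "secret");
        munmap(mem, len);
        close(fd);
        return 0;
    }
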
/linux/drivers/gpu/drm/i915/

  intel_region_ttm.c
    in intel_region_to_ttm_type():
       54  int intel_region_to_ttm_type(const struct intel_memory_region *mem)   (argument)
       58  GEM_BUG_ON(mem->type != INTEL_MEMORY_LOCAL &&
       59  mem->type != INTEL_MEMORY_MOCK &&
       60  mem->type != INTEL_MEMORY_SYSTEM);
       62  if (mem->type == INTEL_MEMORY_SYSTEM)
       65  type = mem->instance + TTM_PL_PRIV;
       73  * @mem: The region to initialize.
    in intel_region_ttm_init():
       82  int intel_region_ttm_init(struct intel_memory_region *mem)   (argument)
       84  struct ttm_device *bdev = &mem->i915->bdev;
       85  int mem_type = intel_region_to_ttm_type(mem);
    [all …]

/linux/drivers/base/

  memory.c
    in memory_block_release():
      107  struct memory_block *mem = to_memory_block(dev);   (local)
      109  WARN_ON(mem->altmap);
      110  kfree(mem);
    in phys_index_show():
      174  struct memory_block *mem = to_memory_block(dev);   (local)
      176  return sysfs_emit(buf, "%08lx\n", memory_block_id(mem->start_section_nr));
    in state_show():
      195  struct memory_block *mem = to_memory_block(dev);   (local)
      202  switch (mem->state) {
      214  return sysfs_emit(buf, "ERROR-UNKNOWN-%ld\n", mem->state);
      226  static unsigned long memblk_nr_poison(struct memory_block *mem);
    in memblk_nr_poison():
      228  static inline unsigned long memblk_nr_poison(struct memory_block *mem)   (argument)
    [all …]

/linux/drivers/spi/

  spi-mem.c
       12  #include <linux/spi/spi-mem.h>
    in spi_check_buswidth_req():
      104  static int spi_check_buswidth_req(struct spi_mem *mem, u8 buswidth, bool tx)   (argument)
      106  u32 mode = mem->spi->mode;
    in spi_mem_check_buswidth():
      142  static bool spi_mem_check_buswidth(struct spi_mem *mem,   (argument)
      145  if (spi_check_buswidth_req(mem, op->cmd.buswidth, true))
      149  spi_check_buswidth_req(mem, op->addr.buswidth, true))
      153  spi_check_buswidth_req(mem, op->dummy.buswidth, true))
      157  spi_check_buswidth_req(mem, op->data.buswidth,
    in spi_mem_default_supports_op():
      164  bool spi_mem_default_supports_op(struct spi_mem *mem,   (argument)
      167  struct spi_controller *ctlr = mem->spi->controller;
    [all …]

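spi-mem.c validates a struct spi_mem_op phase by phase (cmd/addr/dummy/data bus widths) against the SPI mode and the controller before the operation is run. From a SPI memory driver the API is used roughly as in this sketch, which reads a 3-byte JEDEC ID with a plain single-bit 0x9F command; the function name and error handling are illustrative only.

    #include <linux/errno.h>
    #include <linux/spi/spi-mem.h>

    /* Read a 3-byte JEDEC ID with a single-bit (1-1-1) 0x9F operation. */
    static int demo_read_jedec_id(struct spi_mem *mem, u8 *id /* 3 bytes */)
    {
        struct spi_mem_op op = SPI_MEM_OP(SPI_MEM_OP_CMD(0x9f, 1),
                                          SPI_MEM_OP_NO_ADDR,
                                          SPI_MEM_OP_NO_DUMMY,
                                          SPI_MEM_OP_DATA_IN(3, id, 1));

        /* Let the core reject bus widths the controller or SPI mode cannot do. */
        if (!spi_mem_supports_op(mem, &op))
            return -EOPNOTSUPP;

        return spi_mem_exec_op(mem, &op);
    }
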
/linux/drivers/firmware/efi/

  cper.c
    in cper_mem_err_location():
      241  int cper_mem_err_location(struct cper_mem_err_compact *mem, char *msg)   (argument)
      250  if (mem->validation_bits & CPER_MEM_VALID_NODE)
      251  n += scnprintf(msg + n, len - n, "node:%d ", mem->node);
      252  if (mem->validation_bits & CPER_MEM_VALID_CARD)
      253  n += scnprintf(msg + n, len - n, "card:%d ", mem->card);
      254  if (mem->validation_bits & CPER_MEM_VALID_MODULE)
      255  n += scnprintf(msg + n, len - n, "module:%d ", mem->module);
      256  if (mem->validation_bits & CPER_MEM_VALID_RANK_NUMBER)
      257  n += scnprintf(msg + n, len - n, "rank:%d ", mem->rank);
      258  if (mem->validation_bits & CPER_MEM_VALID_BANK)
    [all …]

/linux/drivers/media/platform/mediatek/vcodec/common/

  mtk_vcodec_util.c
    in mtk_vcodec_mem_alloc():
       48  int mtk_vcodec_mem_alloc(void *priv, struct mtk_vcodec_mem *mem)   (argument)
       66  mem->va = dma_alloc_attrs(&plat_dev->dev, mem->size, &mem->dma_addr,
       68  if (!mem->va) {
       70  __func__, mem->size);
       74  mtk_v4l2_debug(plat_dev, 3, "[%d] - va = %p dma = 0x%lx size = 0x%zx", id, mem->va,
       75  (unsigned long)mem->dma_addr, mem->size);
    in mtk_vcodec_mem_free():
       81  void mtk_vcodec_mem_free(void *priv, struct mtk_vcodec_mem *mem)   (argument)
       99  if (!mem->va) {
      101  if (mem->size)
      102  mtk_v4l2_err(plat_dev, "Failed to free %zu bytes", mem->size);
    [all …]

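mtk_vcodec_mem_alloc() is a thin wrapper around the DMA API: dma_alloc_attrs() returns a CPU virtual address and a device (DMA) address for the same buffer, and the free path must hand the same size/va/dma_addr triple back to dma_free_attrs(). A generic kernel-context sketch of that pairing with invented names:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    struct demo_buf {
        void *va;                       /* CPU virtual address */
        dma_addr_t dma_addr;            /* bus address as seen by the device */
        size_t size;
    };

    static int demo_buf_alloc(struct device *dev, struct demo_buf *buf, size_t size)
    {
        buf->size = size;
        buf->va = dma_alloc_attrs(dev, size, &buf->dma_addr, GFP_KERNEL, 0);
        if (!buf->va)
            return -ENOMEM;

        dev_dbg(dev, "va=%p dma=%pad size=%zu\n", buf->va, &buf->dma_addr, size);
        return 0;
    }

    static void demo_buf_free(struct device *dev, struct demo_buf *buf)
    {
        dma_free_attrs(dev, buf->size, buf->va, buf->dma_addr, 0);
        buf->va = NULL;
    }
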
/linux/drivers/gpu/drm/nouveau/nvif/

  mem.c
       22  #include <nvif/mem.h>
    in nvif_mem_ctor_map():
       29  struct nvif_mem *mem)   (argument)
       31  int ret = nvif_mem_ctor(mmu, name, mmu->mem, NVIF_MEM_MAPPABLE | type,
       32  0, size, NULL, 0, mem);
       34  ret = nvif_object_map(&mem->object, NULL, 0);
       36  nvif_mem_dtor(mem);
    in nvif_mem_dtor():
       42  nvif_mem_dtor(struct nvif_mem *mem)   (argument)
       44  nvif_object_dtor(&mem->object);
    in nvif_mem_ctor_type():
       50  struct nvif_mem *mem)   (argument)
       56  mem->object.client = NULL;
    [all …]

/linux/drivers/gpu/drm/ttm/

  ttm_agp_backend.c
       46  struct agp_memory *mem;   (member)
    in ttm_agp_bind():
       54  struct agp_memory *mem;   (local)
       58  if (agp_be->mem)
       61  mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);
       62  if (unlikely(mem == NULL))
       65  mem->page_count = 0;
       72  mem->pages[mem->page_count++] = page;
       74  agp_be->mem = mem;
       76  mem->is_flushed = 1;
       77  mem->type = (cached) ? AGP_USER_CACHED_MEMORY : AGP_USER_MEMORY;
    [all …]

/linux/drivers/net/ethernet/amd/

  sun3lance.c
      107  #define PKTBUF_ADDR(head) (void *)((unsigned long)(MEM) | (head)->base)
      153  struct lance_memory *mem;   (member)
      163  #define MEM lp->mem   (macro)
    in lance_probe():
      344  MEM = dvma_malloc_align(sizeof(struct lance_memory), 0x10000);
      345  if (!MEM) {
      362  dvma_free((void *)MEM);
      369  printk("%s: SUN3 Lance at io %#lx, mem %#lx, irq %d, hwaddr ",
      372  (unsigned long)MEM,
      379  MEM->init.hwaddr[0] = dev->dev_addr[1];
      380  MEM->init.hwaddr[1] = dev->dev_addr[0];
    [all …]

/linux/drivers/uio/

  uio_mf624.c
    in mf624_disable_interrupt():
       35  void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
    in mf624_enable_interrupt():
       63  void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
    in mf624_irq_handler():
       90  void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
    in mf624_setup_mem():
      117  static int mf624_setup_mem(struct pci_dev *dev, int bar, struct uio_mem *mem, const char *name)   (argument)
      122  mem->name = name;
      123  mem->addr = start & PAGE_MASK;
      124  mem->offs = start & ~PAGE_MASK;
      125  if (!mem->addr)
      127  mem->size = ((start & ~PAGE_MASK) + len + PAGE_SIZE - 1) & PAGE_MASK;
      128  mem->memtype = UIO_MEM_PHYS;
    [all …]

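mf624_setup_mem() fills in one struct uio_mem per PCI BAR: page-aligned physical address, offset within the page, rounded-up size and UIO_MEM_PHYS. Userspace then maps BAR i by calling mmap() on /dev/uioN with an offset of i * page_size, and a blocking 4-byte read() delivers the interrupt event count. A hedged userspace sketch; the device node and the register access are placeholders.

    #include <fcntl.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <sys/mman.h>
    #include <unistd.h>

    int main(void)
    {
        long page = sysconf(_SC_PAGESIZE);
        int fd = open("/dev/uio0", O_RDWR);     /* hypothetical UIO device node */
        uint32_t irq_count;
        volatile uint32_t *regs;

        if (fd < 0) {
            perror("open");
            return 1;
        }

        /* Mapping i is selected by an mmap offset of i * page_size; 0 = BAR 0. */
        regs = mmap(NULL, page, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0 * page);
        if (regs == MAP_FAILED) {
            perror("mmap");
            return 1;
        }

        /* read() blocks until the next interrupt, then returns the event count. */
        if (read(fd, &irq_count, sizeof(irq_count)) == sizeof(irq_count))
            printf("irq count %u, first register 0x%08x\n",
                   (unsigned)irq_count, (unsigned)regs[0]);
        return 0;
    }
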
/linux/drivers/char/hw_random/

  intel-rng.c
    in hwstatus_get():
      155  static inline u8 hwstatus_get(void __iomem *mem)   (argument)
      157  return readb(mem + INTEL_RNG_HW_STATUS);
    in hwstatus_set():
      160  static inline u8 hwstatus_set(void __iomem *mem,   (argument)
      163  writeb(hw_status, mem + INTEL_RNG_HW_STATUS);
      164  return hwstatus_get(mem);
    in intel_rng_data_present():
      169  void __iomem *mem = (void __iomem *)rng->priv;   (local)
      173  data = !!(readb(mem + INTEL_RNG_STATUS) &
    in intel_rng_data_read():
      184  void __iomem *mem = (void __iomem *)rng->priv;   (local)
      186  *data = readb(mem + INTEL_RNG_DATA);
    in intel_rng_init():
      193  void __iomem *mem = (void __iomem *)rng->priv;   (local)
    [all …]

  xiphera-trng.c
       31  void __iomem *mem;   (member)
    in xiphera_trng_read():
       42  if (readl(trng->mem + STATUS_REG) == TRNG_NEW_RAND_AVAILABLE) {
       43  *(u32 *)buf = readl(trng->mem + RAND_REG);
       48  writel(HOST_TO_TRNG_READ, trng->mem + CONTROL_REG);
       49  writel(HOST_TO_TRNG_ENABLE, trng->mem + CONTROL_REG);
    in xiphera_trng_probe():
       70  trng->mem = devm_platform_ioremap_resource(pdev, 0);
       71  if (IS_ERR(trng->mem))
       72  return PTR_ERR(trng->mem);
       78  writel(HOST_TO_TRNG_RESET, trng->mem + CONTROL_REG);
       81  if (readl(trng->mem + STATUS_REG) != TRNG_ACK_RESET) {
    [all …]

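xiphera-trng.c is close to the minimal shape of a hwrng driver: ioremap the MMIO window with devm_platform_ioremap_resource(), poke control/status registers with writel()/readl(), and expose a read callback via devm_hwrng_register(). A skeletal sketch of that structure; the register offsets, bit values and names are invented (not the Xiphera layout), and the platform_driver boilerplate that would call the probe is omitted.

    #include <linux/container_of.h>
    #include <linux/err.h>
    #include <linux/hw_random.h>
    #include <linux/io.h>
    #include <linux/platform_device.h>

    #define DEMO_STATUS     0x0             /* invented register layout */
    #define DEMO_DATA       0x4
    #define DEMO_READY      0x1

    struct demo_trng {
        void __iomem *mem;
        struct hwrng rng;
    };

    /* hwrng read callback: copy out one 32-bit word when the core has data. */
    static int demo_trng_read(struct hwrng *rng, void *buf, size_t max, bool wait)
    {
        struct demo_trng *trng = container_of(rng, struct demo_trng, rng);

        if (max < sizeof(u32))
            return 0;
        if (!(readl(trng->mem + DEMO_STATUS) & DEMO_READY))
            return 0;                   /* nothing available (ignores 'wait') */

        *(u32 *)buf = readl(trng->mem + DEMO_DATA);
        return sizeof(u32);
    }

    static int demo_trng_probe(struct platform_device *pdev)
    {
        struct demo_trng *trng;

        trng = devm_kzalloc(&pdev->dev, sizeof(*trng), GFP_KERNEL);
        if (!trng)
            return -ENOMEM;

        trng->mem = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(trng->mem))
            return PTR_ERR(trng->mem);

        trng->rng.name = "demo-trng";
        trng->rng.read = demo_trng_read;

        return devm_hwrng_register(&pdev->dev, &trng->rng);
    }
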
/linux/drivers/net/ipa/

  ipa_qmi.c
    in init_modem_driver_req():
      285  const struct ipa_mem *mem;   (local)
      298  mem = ipa_mem_find(ipa, IPA_MEM_MODEM_HEADER);
      299  if (mem->size) {
      301  req.hdr_tbl_info.start = ipa->mem_offset + mem->offset;
      302  req.hdr_tbl_info.end = req.hdr_tbl_info.start + mem->size - 1;
      305  mem = ipa_mem_find(ipa, IPA_MEM_V4_ROUTE);
      307  req.v4_route_tbl_info.start = ipa->mem_offset + mem->offset;
      310  mem = ipa_mem_find(ipa, IPA_MEM_V6_ROUTE);
      312  req.v6_route_tbl_info.start = ipa->mem_offset + mem->offset;
      315  mem = ipa_mem_find(ipa, IPA_MEM_V4_FILTER);
    [all …]

/linux/arch/arm/boot/dts/samsung/

  exynos5422-samsung-k3g.dts
      lines 95, 107, 119, 131, 143, 155, 201, 212, 224, 242:  regulator-state-mem {
    [all …]

/linux/arch/arm64/boot/dts/rockchip/

  rk3588-fet3588-c.dtsi
      lines 107, 112:  mem-supply = <&vdd_cpu_big0_s0>;
      lines 117, 122:  mem-supply = <&vdd_cpu_big1_s0>;
      lines 127, 132, 137, 142:  mem-supply = <&vdd_cpu_lit_mem_s0>;
      lines 162, 179:  regulator-state-mem {
    [all …]