/linux/drivers/parisc/iommu-helpers.h
    35: (unsigned long)sg_dma_address(startsg),  in iommu_fill_pdir()
    44: if (sg_dma_address(startsg) & PIDE_FLAG) {  in iommu_fill_pdir()
    45: u32 pide = sg_dma_address(startsg) & ~PIDE_FLAG;  in iommu_fill_pdir()
    57: sg_dma_address(dma_sg) = pide | ioc->ibase;  in iommu_fill_pdir()
    62: sg_dma_address(dma_sg) = pide;  in iommu_fill_pdir()
   124: sg_dma_address(startsg) = 0;  in iommu_coalesce_chunks()
   141: sg_dma_address(startsg) = 0;  in iommu_coalesce_chunks()
   173: sg_dma_address(contig_sg) =  in iommu_coalesce_chunks()

/linux/drivers/gpu/drm/i915/i915_scatterlist.c
    30: sg_dma_address(new_sg) = sg_dma_address(sg);  in i915_sg_trim()
   125: sg_dma_address(sg) = region_start + offset;  in i915_rsgt_from_mm_node()
   126: GEM_BUG_ON(!IS_ALIGNED(sg_dma_address(sg),  in i915_rsgt_from_mm_node()
   215: sg_dma_address(sg) = region_start + offset;  in i915_rsgt_from_buddy_resource()
   216: GEM_BUG_ON(!IS_ALIGNED(sg_dma_address(sg),  in i915_rsgt_from_buddy_resource()

/linux/drivers/media/pci/tw68/tw68-risc.c
    65: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    80: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in tw68_risc_field()
    88: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in tw68_risc_field()
    98: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in tw68_risc_field()

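The tw68, bt8xx and cx25821 hits in this index all have the same shape: walk a DMA-mapped scatterlist and emit little-endian bus addresses into a card-specific "RISC" DMA program. A minimal sketch of that walk follows; the opcode word and helper name are invented, not the tw68_risc_field() encoding.

#include <linux/types.h>
#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <asm/byteorder.h>

/* Emit one line of video: skip already-consumed segments, then write
 * (opcode, bus address) pairs until 'bytes' of the line are covered.
 */
static __le32 *example_emit_line(__le32 *rp, struct scatterlist *sg,
                                 unsigned int offset, unsigned int bytes)
{
        while (offset && offset >= sg_dma_len(sg)) {
                offset -= sg_dma_len(sg);
                sg = sg_next(sg);
        }

        while (bytes) {
                unsigned int todo = min(bytes, sg_dma_len(sg) - offset);

                *(rp++) = cpu_to_le32(0x10000000 /* hypothetical WRITE opcode */ | todo);
                *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);

                bytes -= todo;
                offset = 0;
                sg = sg_next(sg);
        }
        return rp;
}
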
/linux/lib/sg_split.c
    94: sg_dma_address(out_sg) = 0;  in sg_split_phys()
   113: sg_dma_address(out_sg) = sg_dma_address(in_sg);  in sg_split_mapped()
   116: sg_dma_address(out_sg) += split->skip_sg0;  in sg_split_mapped()

/linux/drivers/scsi/qla2xxx/qla_dsd.h
    14: put_unaligned_le32(sg_dma_address(sg), &(*dsd)->address);  in append_dsd32()
    27: put_unaligned_le64(sg_dma_address(sg), &(*dsd)->address);  in append_dsd64()

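The two hits above pack a mapped segment's bus address into an on-the-wire descriptor. A hedged sketch of the same move with a made-up descriptor layout; the real qla2xxx DSD format is not reproduced here.

#include <linux/types.h>
#include <linux/scatterlist.h>
#include <linux/unaligned.h>     /* <asm/unaligned.h> on older trees */

struct example_dsd64 {
        __le64 address;
        __le32 length;
} __packed;

/* Append one scatterlist segment to a descriptor array and advance
 * the cursor, mirroring the append_dsd64() style shown above.
 */
static void example_append_dsd64(struct example_dsd64 **dsd,
                                 struct scatterlist *sg)
{
        put_unaligned_le64(sg_dma_address(sg), &(*dsd)->address);
        put_unaligned_le32(sg_dma_len(sg), &(*dsd)->length);
        (*dsd)++;
}
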
/linux/drivers/media/pci/intel/ipu6/ipu6-dma.c
   337: PHYS_PFN(sg_dma_address(sglist)));  in ipu6_dma_unmap_sg()
   356: &sg_dma_address(sg), sg_dma_len(sg));  in ipu6_dma_unmap_sg()
   358: sg_dma_address(sg));  in ipu6_dma_unmap_sg()
   361: sg_dma_address(sg) = pci_dma_addr;  in ipu6_dma_unmap_sg()
   415: i, &iova_pa, &sg_dma_address(sg), sg_dma_len(sg));  in ipu6_dma_map_sg()
   418: sg_dma_address(sg),  in ipu6_dma_map_sg()
   423: sg_dma_address(sg) = PFN_PHYS(iova_addr);  in ipu6_dma_map_sg()

/linux/drivers/iommu/dma-iommu.c
  1140: iommu_dma_sync_single_for_cpu(dev, sg_dma_address(sg),  in iommu_dma_sync_sg_for_cpu()
  1156: sg_dma_address(sg),  in iommu_dma_sync_sg_for_device()
  1258: dma_addr_t s_dma_addr = sg_dma_address(s);  in __finalise_sg()
  1259: unsigned int s_iova_off = sg_dma_address(s);  in __finalise_sg()
  1263: sg_dma_address(s) = DMA_MAPPING_ERROR;  in __finalise_sg()
  1271: sg_dma_address(cur) = s_dma_addr;  in __finalise_sg()
  1300: sg_dma_address(cur) = dma_addr + s_iova_off;  in __finalise_sg()
  1325: if (sg_dma_address(s) != DMA_MAPPING_ERROR)  in __invalidate_sg()
  1326: s->offset += sg_dma_address(s);  in __invalidate_sg()
  1330: sg_dma_address(s) = DMA_MAPPING_ERROR;  in __invalidate_sg()
  [all …]

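Most of the call sites in this index are consumers of what dma-iommu.c produces. As a reminder of the consumer-side contract (not the __finalise_sg() internals above): after dma_map_sg() only the returned number of entries carry valid DMA segments, and sg_dma_address()/sg_dma_len() are what the device must use. A minimal sketch; the helper name and DMA direction are illustrative.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int example_map_and_walk(struct device *dev,
                                struct scatterlist *sgl, int nents)
{
        struct scatterlist *sg;
        int mapped, i;

        mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
        if (!mapped)
                return -EIO;

        /* Only the first 'mapped' entries describe DMA segments; they
         * may be fewer than nents if the IOMMU coalesced entries.
         */
        for_each_sg(sgl, sg, mapped, i)
                dev_dbg(dev, "seg %d: %pad + %u\n",
                        i, &sg_dma_address(sg), sg_dma_len(sg));

        /* Unmap with the original nents, not the returned count. */
        dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
        return 0;
}
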
/linux/drivers/spi/spi-dw-dma.c
   584: sg_dma_address(&tx_tmp) = sg_dma_address(tx_sg);  in dw_spi_dma_transfer_one()
   591: sg_dma_address(&rx_tmp) = sg_dma_address(rx_sg);  in dw_spi_dma_transfer_one()
   627: sg_dma_address(&tx_tmp) += len;  in dw_spi_dma_transfer_one()
   628: sg_dma_address(&rx_tmp) += len;  in dw_spi_dma_transfer_one()

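These hits copy an sg entry into a temporary and then slide its DMA address forward between submissions. A sketch of that chunking idea, with an illustrative max_len cap and the dmaengine submission elided:

#include <linux/minmax.h>
#include <linux/scatterlist.h>

static void example_submit_in_chunks(struct scatterlist *sg, u32 max_len)
{
        struct scatterlist tmp = *sg;   /* local copy, original stays intact */
        u32 left = sg_dma_len(sg);

        while (left) {
                u32 len = min(left, max_len);

                sg_dma_len(&tmp) = len;
                /* ... build and submit one dmaengine descriptor for &tmp ... */
                sg_dma_address(&tmp) += len;
                left -= len;
        }
}
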
/linux/drivers/hwtracing/intel_th/msu-sink.c
    72: PAGE_SIZE, &sg_dma_address(sg_ptr),  in msu_sink_alloc_window()
    92: sg_virt(sg_ptr), sg_dma_address(sg_ptr));  in msu_sink_free_window()

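Here dma_alloc_coherent() writes the bus address of each block straight into the entry's dma_address field by taking &sg_dma_address(). A hedged sketch of that allocation loop, with error unwinding omitted:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int example_alloc_window(struct device *dev, struct sg_table *sgt,
                                unsigned int nr_blocks)
{
        struct scatterlist *sg;
        unsigned int i;

        if (sg_alloc_table(sgt, nr_blocks, GFP_KERNEL))
                return -ENOMEM;

        for_each_sg(sgt->sgl, sg, nr_blocks, i) {
                void *block = dma_alloc_coherent(dev, PAGE_SIZE,
                                                 &sg_dma_address(sg),
                                                 GFP_KERNEL);
                if (!block)
                        return -ENOMEM; /* real code would unwind */
                sg_set_buf(sg, block, PAGE_SIZE);
        }
        return 0;
}
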
/linux/arch/powerpc/platforms/512x/mpc512x_lpbfifo.c
   250: sg_dma_address(&sg) = dma_map_single(dma_dev->dev,  in mpc512x_lpbfifo_kick()
   252: if (dma_mapping_error(dma_dev->dev, sg_dma_address(&sg)))  in mpc512x_lpbfifo_kick()
   255: lpbfifo.ram_bus_addr = sg_dma_address(&sg); /* For freeing later */  in mpc512x_lpbfifo_kick()
   327: dma_unmap_single(dma_dev->dev, sg_dma_address(&sg),  in mpc512x_lpbfifo_kick()

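sg_dma_address() and sg_dma_len() are writable lvalues, so a single linear buffer can be mapped with dma_map_single() and dropped into a one-entry scatterlist for a dmaengine API, which is what the hits above do. A sketch with illustrative names; the actual dmaengine submission is elided.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int example_kick(struct device *dev, void *buf, unsigned int len,
                        struct scatterlist *sg)
{
        sg_init_table(sg, 1);
        sg_dma_len(sg) = len;
        sg_dma_address(sg) = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, sg_dma_address(sg)))
                return -ENOMEM;

        /* ... submit sg to the dmaengine channel; then, on completion: */
        dma_unmap_single(dev, sg_dma_address(sg), len, DMA_TO_DEVICE);
        return 0;
}
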
/linux/include/rdma/ib_umem.h
    58: return (sg_dma_address(umem->sgt_append.sgt.sgl) + ib_umem_offset(umem)) &  in ib_umem_dma_offset()
   141: dma_addr = sg_dma_address(sg) + (umem->address & ~PAGE_MASK);  in ib_umem_find_best_pgoff()

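Both helpers recover a sub-page offset from the first mapped segment. The arithmetic, restated as a hedged standalone helper; names are illustrative and this is not the full ib_umem_dma_offset() definition, whose mask is truncated above.

#include <linux/mm.h>
#include <linux/scatterlist.h>

/* Sub-page offset of the buffer start, as seen on the DMA side:
 * the first segment is page-aligned, the CPU address supplies the
 * offset into that page.
 */
static inline unsigned long
example_dma_page_offset(struct scatterlist *first_sg, unsigned long cpu_addr)
{
        return (sg_dma_address(first_sg) + (cpu_addr & ~PAGE_MASK)) & ~PAGE_MASK;
}
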
/linux/drivers/media/pci/bt8xx/bttv-risc.c
    81: *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset);  in bttv_risc_packed()
    88: *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset);  in bttv_risc_packed()
    95: *(rp++)=cpu_to_le32(sg_dma_address(sg));  in bttv_risc_packed()
   101: *(rp++)=cpu_to_le32(sg_dma_address(sg));  in bttv_risc_packed()
   212: *(rp++)=cpu_to_le32(sg_dma_address(ysg)+yoffset);  in bttv_risc_planar()
   215: *(rp++)=cpu_to_le32(sg_dma_address(usg)+uoffset);  in bttv_risc_planar()
   217: *(rp++)=cpu_to_le32(sg_dma_address(vsg)+voffset);  in bttv_risc_planar()

/linux/drivers/mtd/nand/ecc-mxic.c
   583: writel(sg_dma_address(&ctx->sg[0]) + (step * ctx->data_step_sz),  in mxic_ecc_prepare_io_req_external()
   585: writel(sg_dma_address(&ctx->sg[1]) + (step * (ctx->oob_step_sz + STAT_BYTES)),  in mxic_ecc_prepare_io_req_external()
   640: writel(sg_dma_address(&ctx->sg[0]) + (step * ctx->data_step_sz),  in mxic_ecc_finish_io_req_external()
   642: writel(sg_dma_address(&ctx->sg[1]) + (step * (ctx->oob_step_sz + STAT_BYTES)),  in mxic_ecc_finish_io_req_external()
   694: writel(sg_dma_address(&ctx->sg[0]), mxic->regs + SDMA_MAIN_ADDR);  in mxic_ecc_prepare_io_req_pipelined()
   695: writel(sg_dma_address(&ctx->sg[1]), mxic->regs + SDMA_SPARE_ADDR);  in mxic_ecc_prepare_io_req_pipelined()

/linux/drivers/scsi/aic94xx/aic94xx_task.c
    85: sg->bus_addr = cpu_to_le64((u64)sg_dma_address(sc));  in asd_map_scatterlist()
    93: cpu_to_le64((u64)sg_dma_address(sc));  in asd_map_scatterlist()
   105: cpu_to_le64((u64)sg_dma_address(sc));  in asd_map_scatterlist()
   430: cpu_to_le64((u64)sg_dma_address(&task->smp_task.smp_req));  in asd_build_smp_ascb()
   435: cpu_to_le64((u64)sg_dma_address(&task->smp_task.smp_resp));  in asd_build_smp_ascb()

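The pattern here is filling one hardware scatter/gather element per mapped segment. A hedged sketch with an invented element layout; the real aic94xx sg_el format is not reproduced.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct example_hw_sge {
        __le64 bus_addr;
        __le32 size;
        __le32 flags;
};

static int example_fill_sges(struct device *dev, struct scatterlist *sgl,
                             int nents, struct example_hw_sge *sge)
{
        struct scatterlist *sc;
        int num_sg, i;

        num_sg = dma_map_sg(dev, sgl, nents, DMA_BIDIRECTIONAL);
        if (!num_sg)
                return -ENOMEM;

        for_each_sg(sgl, sc, num_sg, i) {
                sge[i].bus_addr = cpu_to_le64((u64)sg_dma_address(sc));
                sge[i].size = cpu_to_le32(sg_dma_len(sc));
                sge[i].flags = 0;
        }
        return num_sg;
}
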
/linux/drivers/media/common/videobuf2/videobuf2-dma-contig.c
    56: dma_addr_t expected = sg_dma_address(sgt->sgl);  in vb2_dc_get_contiguous_size()
    61: if (sg_dma_address(s) != expected)  in vb2_dc_get_contiguous_size()
   222: buf->dma_addr = sg_dma_address(buf->dma_sgt->sgl);  in vb2_dc_alloc_non_coherent()
   659: buf->dma_addr = sg_dma_address(sgt->sgl);  in vb2_dc_get_userptr()
   723: buf->dma_addr = sg_dma_address(sgt->sgl);  in vb2_dc_map_dmabuf()

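The first two hits implement a contiguity check: a vb2 DMA-contig buffer is only usable if the mapping produced one continuous IOVA range. A minimal restatement of that walk, assuming the sg table has already been mapped with dma_map_sgtable():

#include <linux/scatterlist.h>

static unsigned long example_contiguous_size(struct sg_table *sgt)
{
        struct scatterlist *s;
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        unsigned long size = 0;
        unsigned int i;

        /* Stop at the first DMA segment that does not continue the
         * previous one; 'size' is the usable contiguous prefix.
         */
        for_each_sgtable_dma_sg(sgt, s, i) {
                if (sg_dma_address(s) != expected)
                        break;
                expected += sg_dma_len(s);
                size += sg_dma_len(s);
        }
        return size;
}
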
/linux/drivers/target/iscsi/cxgbit/cxgbit_ddp.c
    22: addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    34: addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()
    57: addr = sg_dma_address(sg);  in cxgbit_set_one_ppod()

/linux/drivers/media/pci/cx25821/cx25821-core.c
  1017: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in cx25821_risc_field()
  1025: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in cx25821_risc_field()
  1033: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in cx25821_risc_field()
  1039: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in cx25821_risc_field()
  1128: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in cx25821_risc_field_audio()
  1136: *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);  in cx25821_risc_field_audio()
  1144: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in cx25821_risc_field_audio()
  1150: *(rp++) = cpu_to_le32(sg_dma_address(sg));  in cx25821_risc_field_audio()

/linux/drivers/media/platform/nvidia/tegra-vde/dmabuf-cache.c
    93: *addrp = sg_dma_address(entry->sgt->sgl);  in tegra_vde_dmabuf_cache_map()
   131: *addrp = sg_dma_address(sgt->sgl);  in tegra_vde_dmabuf_cache_map()

/linux/include/trace/events/dma.h
   183: __entry->dma_addr = sg_dma_address(sgt->sgl);
   263: __entry->dma_addr = sg_dma_address(sgt->sgl);
   301: sg_dma_address(sg);
   445: sg_dma_address(sg);

/linux/drivers/crypto/ccree/cc_buffer_mgr.c
   170: rc = cc_render_buff_to_mlli(dev, sg_dma_address(curr_sgl) +  in cc_render_sg_to_mlli()
   302: &sg_dma_address(&areq_ctx->ccm_adata_sg),  in cc_set_aead_conf_buf()
   327: &sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg),  in cc_set_hash_buf()
   671: areq_ctx->icv_dma_addr = sg_dma_address(sg) + offset;  in cc_prepare_aead_data_dlli()
   721: areq_ctx->icv_dma_addr = sg_dma_address(sg) +  in cc_prepare_aead_data_mlli()
   753: areq_ctx->icv_dma_addr = sg_dma_address(sg) +  in cc_prepare_aead_data_mlli()
   777: areq_ctx->icv_dma_addr = sg_dma_address(sg) +  in cc_prepare_aead_data_mlli()
  1351: sg_virt(src), &sg_dma_address(src), sg_dma_len(src));  in cc_unmap_hash_request()
  1359: &sg_dma_address(areq_ctx->buff_sg),  in cc_unmap_hash_request()

/linux/drivers/gpu/drm/i915/gem/i915_gem_phys.c
    62: sg_dma_address(sg) = dma;  in i915_gem_object_get_pages_phys()
   101: dma_addr_t dma = sg_dma_address(pages->sgl);  in i915_gem_object_put_pages_phys()

/linux/drivers/gpu/drm/virtio/virtgpu_vram.c
   105: sg_dma_address(sgt->sgl) = addr;  in virtio_gpu_vram_map_dma_buf()
   120: dma_unmap_resource(dev, sg_dma_address(sgt->sgl),  in virtio_gpu_vram_unmap_dma_buf()

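VRAM has no struct pages, so the exporter maps the physical window with dma_map_resource() and stores the result into a hand-built one-entry sg table. A sketch of that setup under those assumptions; teardown mirrors it with dma_unmap_resource(), as line 120 shows.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int example_map_vram(struct device *dev, struct sg_table *sgt,
                            phys_addr_t vram_start, size_t size)
{
        dma_addr_t addr;
        int ret;

        ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
        if (ret)
                return ret;

        addr = dma_map_resource(dev, vram_start, size,
                                DMA_BIDIRECTIONAL, 0);
        if (dma_mapping_error(dev, addr)) {
                sg_free_table(sgt);
                return -ENOMEM;
        }

        /* No backing page; fill the DMA fields by hand. */
        sg_set_page(sgt->sgl, NULL, size, 0);
        sg_dma_address(sgt->sgl) = addr;
        sg_dma_len(sgt->sgl) = size;
        return 0;
}
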
/linux/drivers/crypto/caam/sg_sw_qm2.h
    36: dma_to_qm_sg_one(qm_sg_ptr, sg_dma_address(sg), ent_len,  in sg_to_qm_sg()

/linux/drivers/gpu/drm/i915/selftests/i915_vma.c
   399: if (sg_dma_address(sg) != src) {  in assert_rotated()
   425: if (sg_dma_address(sg) != 0) {  in assert_rotated()
   427: &sg_dma_address(sg), x, y);  in assert_rotated()
   480: if (sg_dma_address(sg) + offset != src) {  in assert_remapped()
   519: if (sg_dma_address(sg) != 0) {  in assert_remapped()
   521: &sg_dma_address(sg),  in assert_remapped()

/linux/drivers/infiniband/core/umem_dmabuf.c
    50: sg_dma_address(sg) += offset;  in ib_umem_dmabuf_map_pages()
    95: sg_dma_address(umem_dmabuf->first_sg) -=  in ib_umem_dmabuf_unmap_pages()

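These two hits trim and later restore the leading offset of a dma-buf mapping in place. A hedged sketch of that adjust/undo pair; the names are invented and the real code also trims the tail segment.

#include <linux/scatterlist.h>

/* Nudge the first segment forward so it starts at the region of
 * interest, remembering the offset so the adjustment can be undone
 * before the sg table is handed back to the exporter.
 */
static void example_trim_first_sg(struct scatterlist *first_sg,
                                  unsigned long offset,
                                  unsigned long *saved_offset)
{
        *saved_offset = offset;
        sg_dma_address(first_sg) += offset;
        sg_dma_len(first_sg) -= offset;
}

static void example_untrim_first_sg(struct scatterlist *first_sg,
                                    unsigned long saved_offset)
{
        sg_dma_address(first_sg) -= saved_offset;
        sg_dma_len(first_sg) += saved_offset;
}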