| /linux/drivers/misc/genwqe/ |
| card_utils.c |
    291  int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,  in genwqe_alloc_sync_sgl() argument
    297  sgl->fpage_offs = offset_in_page((unsigned long)user_addr);  in genwqe_alloc_sync_sgl()
    298  sgl->fpage_size = min_t(size_t, PAGE_SIZE-sgl->fpage_offs, user_size);  in genwqe_alloc_sync_sgl()
    299  sgl->nr_pages = DIV_ROUND_UP(sgl->fpage_offs + user_size, PAGE_SIZE);  in genwqe_alloc_sync_sgl()
    300  sgl->lpage_size = (user_size - sgl->fpage_size) % PAGE_SIZE;  in genwqe_alloc_sync_sgl()
    303  __func__, user_addr, user_size, sgl->nr_pages,  in genwqe_alloc_sync_sgl()
    304  sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size);  in genwqe_alloc_sync_sgl()
    306  sgl->user_addr = user_addr;  in genwqe_alloc_sync_sgl()
    307  sgl->user_size = user_size;  in genwqe_alloc_sync_sgl()
    308  sgl->write = write;  in genwqe_alloc_sync_sgl()
    [all …]
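The matches above show how the genwqe driver carves an arbitrary user buffer into a partial first page, whole middle pages, and a partial last page before pinning it. A minimal sketch of that bookkeeping, with an illustrative struct in place of struct genwqe_sgl:

```c
#include <linux/kernel.h>	/* DIV_ROUND_UP, min_t */
#include <linux/mm.h>		/* offset_in_page, PAGE_SIZE */

/* Illustrative layout descriptor, not the driver's struct genwqe_sgl. */
struct user_buf_layout {
	unsigned long fpage_offs;	/* offset into the first page */
	size_t fpage_size;		/* bytes used in the first page */
	size_t lpage_size;		/* bytes in the last partial page */
	unsigned int nr_pages;		/* total pages spanned */
};

static void layout_user_buf(unsigned long user_addr, size_t user_size,
			    struct user_buf_layout *l)
{
	l->fpage_offs = offset_in_page(user_addr);
	/* the first page holds at most PAGE_SIZE - offset bytes */
	l->fpage_size = min_t(size_t, PAGE_SIZE - l->fpage_offs, user_size);
	/* pages spanned = (offset + size) rounded up to whole pages */
	l->nr_pages = DIV_ROUND_UP(l->fpage_offs + user_size, PAGE_SIZE);
	/* what remains after the first page, modulo a page, fills the last */
	l->lpage_size = (user_size - l->fpage_size) % PAGE_SIZE;
}
```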
|
| /linux/drivers/dma-buf/ |
| dma-buf-mapping.c |
    9    static struct scatterlist *fill_sg_entry(struct scatterlist *sgl, size_t length,  in fill_sg_entry() argument
    26   sg_set_page(sgl, NULL, 0, 0);  in fill_sg_entry()
    27   sg_dma_address(sgl) = addr + (dma_addr_t)i * UINT_MAX;  in fill_sg_entry()
    28   sg_dma_len(sgl) = len;  in fill_sg_entry()
    29   sgl = sg_next(sgl);  in fill_sg_entry()
    32   return sgl;  in fill_sg_entry()
    100  struct scatterlist *sgl;  in dma_buf_phys_vec_to_sgt() local
    140  sgl = dma->sgt.sgl;  in dma_buf_phys_vec_to_sgt()
    165  sgl = fill_sg_entry(sgl, phys_vec[i].len, addr);  in dma_buf_phys_vec_to_sgt()
    174  sgl = fill_sg_entry(sgl, mapped_len, dma->state->addr);  in dma_buf_phys_vec_to_sgt()
    [all …]
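sg_dma_len() is a 32-bit field, so fill_sg_entry() has to split one DMA-contiguous range into UINT_MAX-capped entries, stepping the address per entry. A self-contained restatement of that idea (fill_dma_range() is an illustrative name, not the dma-buf API):

```c
#include <linux/kernel.h>
#include <linux/limits.h>
#include <linux/scatterlist.h>

static struct scatterlist *fill_dma_range(struct scatterlist *sgl,
					  dma_addr_t addr, size_t length)
{
	unsigned int nents = DIV_ROUND_UP(length, (size_t)UINT_MAX);
	unsigned int i;

	for (i = 0; i < nents; i++) {
		unsigned int len = min_t(size_t, length, UINT_MAX);

		/* no struct page backs this entry; only DMA fields matter */
		sg_set_page(sgl, NULL, 0, 0);
		sg_dma_address(sgl) = addr + (dma_addr_t)i * UINT_MAX;
		sg_dma_len(sgl) = len;
		length -= len;
		sgl = sg_next(sgl);
	}
	return sgl;	/* next free entry, as in fill_sg_entry() */
}
```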
|
| /linux/drivers/crypto/hisilicon/ |
| sgl.c |
    37   struct hisi_acc_hw_sgl *sgl;  member
    92   block[i].sgl = dma_alloc_coherent(dev, block_size,  in hisi_acc_create_sgl_pool()
    95   if (!block[i].sgl) {  in hisi_acc_create_sgl_pool()
    104  block[i].sgl = dma_alloc_coherent(dev, remain_sgl * sgl_size,  in hisi_acc_create_sgl_pool()
    107  if (!block[i].sgl) {  in hisi_acc_create_sgl_pool()
    125  dma_free_coherent(dev, block_size, block[j].sgl,  in hisi_acc_create_sgl_pool()
    151  dma_free_coherent(dev, block[i].size, block[i].sgl,  in hisi_acc_free_sgl_pool()
    169  return (void *)block[block_index].sgl + pool->sgl_size * offset;  in acc_get_sgl()
    172  static void sg_map_to_hw_sg(struct scatterlist *sgl,  in sg_map_to_hw_sg() argument
    175  hw_sge->buf = sg_dma_address(sgl);  in sg_map_to_hw_sg()
    [all …]
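acc_get_sgl() resolves an index into a pool of fixed-size hardware SGLs carved out of a few large coherent blocks. The lookup arithmetic, sketched with illustrative names (struct sgl_block stands in for the driver's block bookkeeping):

```c
#include <linux/types.h>

struct sgl_block {
	void *sgl;		/* dma_alloc_coherent() base of this block */
	dma_addr_t dma;
	size_t size;
};

/* index -> (block, offset) within a pool of sgls_per_block per block */
static void *pool_get_sgl(struct sgl_block *blocks, u32 sgls_per_block,
			  u32 sgl_size, u32 index)
{
	u32 block_index = index / sgls_per_block;
	u32 offset = index % sgls_per_block;

	return blocks[block_index].sgl + (size_t)sgl_size * offset;
}
```

The same split drives allocation: full blocks of sgls_per_block entries, plus one smaller trailing block for the remainder, as the create/free pool matches above show.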
|
| /linux/mm/ |
| page_reporting.c |
    106  struct scatterlist *sgl, unsigned int nents, bool reported)  in page_reporting_drain() argument
    108  struct scatterlist *sg = sgl;  in page_reporting_drain()
    137  sg_init_table(sgl, nents);  in page_reporting_drain()
    148  struct scatterlist *sgl, unsigned int *offset)  in page_reporting_cycle() argument
    208  sg_set_page(&sgl[*offset], page, page_len, 0);  in page_reporting_cycle()
    225  err = prdev->report(prdev, sgl, PAGE_REPORTING_CAPACITY);  in page_reporting_cycle()
    237  page_reporting_drain(prdev, sgl, PAGE_REPORTING_CAPACITY, !err);  in page_reporting_cycle()
    261  struct scatterlist *sgl, struct zone *zone)  in page_reporting_process_zone() argument
    286  sgl, &offset);  in page_reporting_process_zone()
    295  sgl = &sgl[offset];  in page_reporting_process_zone()
    [all …]
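page_reporting_cycle() batches free pages into a fixed-capacity scatterlist and hands the batch to the device each time it fills. A simplified sketch of that loop; get_next_free_page() and report() are assumed callbacks, not kernel API, and a real implementation (like the one above) would also flush a partial final batch:

```c
#include <linux/mm.h>
#include <linux/scatterlist.h>

#define REPORT_CAPACITY 32	/* stand-in for PAGE_REPORTING_CAPACITY */

static int report_free_pages(struct scatterlist *sgl,
			     struct page *(*get_next_free_page)(void),
			     int (*report)(struct scatterlist *, unsigned int))
{
	unsigned int offset = 0;
	struct page *page;
	int err = 0;

	sg_init_table(sgl, REPORT_CAPACITY);

	while ((page = get_next_free_page())) {
		sg_set_page(&sgl[offset], page, PAGE_SIZE, 0);
		if (++offset < REPORT_CAPACITY)
			continue;

		err = report(sgl, REPORT_CAPACITY);	/* hand batch off */
		sg_init_table(sgl, REPORT_CAPACITY);	/* reset for next */
		offset = 0;
		if (err)
			break;
	}
	return err;
}
```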
|
| /linux/lib/ |
| scatterlist.c |
    81   struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents)  in sg_last() argument
    86   for_each_sg(sgl, sg, nents, i)  in sg_last()
    104  void sg_init_table(struct scatterlist *sgl, unsigned int nents)  in sg_init_table() argument
    106  memset(sgl, 0, sizeof(*sgl) * nents);  in sg_init_table()
    107  sg_init_marker(sgl, nents);  in sg_init_table()
    177  struct scatterlist *sgl, *next;  in __sg_free_table() local
    180  if (unlikely(!table->sgl))  in __sg_free_table()
    183  sgl = table->sgl;  in __sg_free_table()
    195  next = sg_chain_ptr(&sgl[curr_max_ents - 1]);  in __sg_free_table()
    207  free_fn(sgl, alloc_size);  in __sg_free_table()
    [all …]
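A minimal end-to-end use of the lib/scatterlist.c helpers matched above — sg_init_table(), sg_set_buf(), dma_map_sg(), for_each_sg() — assuming dev points at a DMA-capable device and both buffers come from kmalloc() (sg_set_buf() needs lowmem virtual addresses):

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_two_buffers(struct device *dev, void *buf_a, void *buf_b,
			   size_t len)
{
	struct scatterlist sgl[2], *sg;
	int i, nents;

	sg_init_table(sgl, 2);		/* zeroes entries, marks sgl[1] end */
	sg_set_buf(&sgl[0], buf_a, len);
	sg_set_buf(&sgl[1], buf_b, len);

	nents = dma_map_sg(dev, sgl, 2, DMA_TO_DEVICE);
	if (!nents)
		return -EIO;

	/* iterate only the nents the mapping produced (may be coalesced) */
	for_each_sg(sgl, sg, nents, i)
		pr_info("ent %d: dma %pad len %u\n", i,
			&sg_dma_address(sg), sg_dma_len(sg));

	dma_unmap_sg(dev, sgl, 2, DMA_TO_DEVICE);
	return 0;
}
```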
|
| kfifo.c |
    309  static unsigned int setup_sgl_buf(struct __kfifo *fifo, struct scatterlist *sgl,  in setup_sgl_buf() argument
    318  sg_set_buf(sgl, buf, len);  in setup_sgl_buf()
    321  sg_dma_address(sgl) = dma + data_offset;  in setup_sgl_buf()
    322  sg_dma_len(sgl) = len;  in setup_sgl_buf()
    328  static unsigned int setup_sgl(struct __kfifo *fifo, struct scatterlist *sgl,  in setup_sgl() argument
    344  n = setup_sgl_buf(fifo, sgl, off, nents, len_to_end, dma);  in setup_sgl()
    345  n += setup_sgl_buf(fifo, sgl + n, 0, nents - n, len - len_to_end, dma);  in setup_sgl()
    351  struct scatterlist *sgl, int nents, unsigned int len,  in __kfifo_dma_in_prepare() argument
    360  return setup_sgl(fifo, sgl, nents, len, fifo->in, dma);  in __kfifo_dma_in_prepare()
    365  struct scatterlist *sgl,  in __kfifo_dma_in_prepare()
    295  setup_sgl_buf(struct scatterlist *sgl, void *buf, int nents, unsigned int len)  setup_sgl_buf() argument
    334  setup_sgl(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len, unsigned int off)  setup_sgl() argument
    357  __kfifo_dma_in_prepare(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len)  __kfifo_dma_in_prepare() argument
    370  __kfifo_dma_out_prepare(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len)  __kfifo_dma_out_prepare() argument
    549  __kfifo_dma_in_prepare_r(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len, size_t recsize)  __kfifo_dma_in_prepare_r() argument
    572  __kfifo_dma_out_prepare_r(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len, size_t recsize)  __kfifo_dma_out_prepare_r() argument
    [all …]
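setup_sgl() maps a kfifo region that may wrap around the ring into at most two scatterlist entries: from the read/write offset to the end of the buffer, then from the start. The split logic, sketched for the CPU-address case (kfifo's mapped variant also fills the sg_dma_* fields); the array is assumed to have been sg_init_table()'d by the caller:

```c
#include <linux/minmax.h>
#include <linux/scatterlist.h>

/*
 * Illustrative version of the kfifo split: a region of 'len' bytes starting
 * at 'off' inside a ring of 'size' bytes maps to one or two entries.
 */
static unsigned int ring_to_sgl(struct scatterlist *sgl, void *base,
				unsigned int size, unsigned int off,
				unsigned int len)
{
	unsigned int len_to_end = min(len, size - off);
	unsigned int n = 0;

	sg_set_buf(&sgl[n++], base + off, len_to_end);
	if (len > len_to_end)		/* wrapped: second segment */
		sg_set_buf(&sgl[n++], base, len - len_to_end);

	sg_mark_end(&sgl[n - 1]);
	return n;			/* entries used: 1 or 2 */
}
```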
| /linux/include/linux/ |
| scatterlist.h |
    40   struct scatterlist *sgl;  /* the list */  member
    225  for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
    233  for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
    236  struct scatterlist *sgl)  in __sg_chain() argument
    248  chain_sg->page_link = ((unsigned long) sgl | SG_CHAIN) & ~SG_END;  in __sg_chain()
    262  struct scatterlist *sgl)  in sg_chain() argument
    264  __sg_chain(&prv[prv_nents - 1], sgl);  in sg_chain()
    436  static inline void sg_init_marker(struct scatterlist *sgl,  in sg_init_marker() argument
    439  sg_mark_end(&sgl[nents - 1]);  in sg_init_marker()
    509  void sgl_free_n_order(struct scatterlist *sgl, int nents, int order);
    [all …]
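sg_chain() links two scatterlist arrays by converting the last entry of the first into a pointer: page_link holds the target address with SG_CHAIN set and SG_END cleared, as line 248 shows. A short sketch; note the link entry itself no longer carries data, so the first array contributes one fewer usable entry:

```c
#include <linux/scatterlist.h>

static void chain_two_arrays(struct scatterlist *first, unsigned int n_first,
			     struct scatterlist *second, unsigned int n_second)
{
	sg_init_table(first, n_first);
	sg_init_table(second, n_second);

	/*
	 * first[n_first - 1] now carries (second | SG_CHAIN) in page_link,
	 * so sg_next() walkers hop transparently into 'second'.
	 */
	sg_chain(first, n_first, second);
}
```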
|
| /linux/drivers/gpu/drm/nouveau/nvkm/core/ |
| firmware.c |
    121  case NVKM_FIRMWARE_IMG_DMA: return &fw->mem.sgl;  in nvkm_firmware_mem_sgl()
    122  case NVKM_FIRMWARE_IMG_SGT: return fw->mem.sgt.sgl;  in nvkm_firmware_mem_sgl()
    139  .sgl = nvkm_firmware_mem_sgl(memory),  in nvkm_firmware_mem_map()
    142  if (!map.sgl)  in nvkm_firmware_mem_map()
    151  struct scatterlist *sgl = nvkm_firmware_mem_sgl(memory);  in nvkm_firmware_mem_size() local
    153  return sgl ? sg_dma_len(sgl) : 0;  in nvkm_firmware_mem_size()
    208  dma_free_noncoherent(fw->device->dev, sg_dma_len(&fw->mem.sgl),  in nvkm_firmware_dtor()
    251  sg_init_one(&fw->mem.sgl, fw->img, len);  in nvkm_firmware_ctor()
    252  sg_dma_address(&fw->mem.sgl) = fw->phys;  in nvkm_firmware_ctor()
    253  sg_dma_len(&fw->mem.sgl) = len;  in nvkm_firmware_ctor()
    [all …]
|
| /linux/drivers/target/iscsi/cxgbit/ |
| cxgbit_ddp.c |
    133  struct scatterlist *sg = ttinfo->sgl;  in cxgbit_ddp_set_map()
    176  struct scatterlist *sgl = ttinfo->sgl;  in cxgbit_ddp_reserve() local
    178  unsigned int sg_offset = sgl->offset;  in cxgbit_ddp_reserve()
    188  if (cxgbit_ddp_sgl_check(sgl, sgcnt) < 0)  in cxgbit_ddp_reserve()
    191  ttinfo->nr_pages = (xferlen + sgl->offset +  in cxgbit_ddp_reserve()
    203  sgl->offset = 0;  in cxgbit_ddp_reserve()
    204  ret = dma_map_sg(&ppm->pdev->dev, sgl, sgcnt, DMA_FROM_DEVICE);  in cxgbit_ddp_reserve()
    205  sgl->offset = sg_offset;  in cxgbit_ddp_reserve()
    212  cxgbi_ppm_make_ppod_hdr(ppm, ttinfo->tag, csk->tid, sgl->offset,  in cxgbit_ddp_reserve()
    218  dma_unmap_sg(&ppm->pdev->dev, sgl, sgcnt, DMA_FROM_DEVICE);  in cxgbit_ddp_reserve()
    [all …]
|
| /linux/crypto/ |
| algif_hash.c |
    20   struct af_alg_sgl sgl;  member
    106  ctx->sgl.sgt.sgl = ctx->sgl.sgl;  in hash_sendmsg()
    107  ctx->sgl.sgt.nents = 0;  in hash_sendmsg()
    108  ctx->sgl.sgt.orig_nents = 0;  in hash_sendmsg()
    115  sg_init_table(ctx->sgl.sgl, npages);  in hash_sendmsg()
    117  ctx->sgl.need_unpin = iov_iter_extract_will_pin(&msg->msg_iter);  in hash_sendmsg()
    120  &ctx->sgl.sgt, npages, 0);  in hash_sendmsg()
    124  sg_mark_end(ctx->sgl.sgt.sgl + ctx->sgl.sgt.nents - 1);  in hash_sendmsg()
    132  ahash_request_set_crypt(&ctx->req, ctx->sgl.sgt.sgl,  in hash_sendmsg()
    160  af_alg_free_sg(&ctx->sgl);  in hash_sendmsg()
    [all …]
|
| /linux/tools/virtio/linux/ |
| scatterlist.h |
    92   struct scatterlist *sgl)  in sg_chain() argument
    104  prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02;  in sg_chain()
    150  static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents)  in sg_init_table() argument
    152  memset(sgl, 0, sizeof(*sgl) * nents);  in sg_init_table()
    153  sg_mark_end(&sgl[nents - 1]);  in sg_init_table()
|
| /linux/include/trace/events/ |
| dma.h |
    181  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i)
    183  __entry->dma_addr = sg_dma_address(sgt->sgl);
    261  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i)
    263  __entry->dma_addr = sg_dma_address(sgt->sgl);
    279  TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
    281  TP_ARGS(dev, sgl, nents, ents, dir, attrs),
    297  for_each_sg(sgl, sg, nents, i)
    299  for_each_sg(sgl, sg, ents, i) {
    325  TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
    327  TP_ARGS(dev, sgl, nents, err, dir, attrs),
    [all …]
|
| /linux/drivers/vfio/pci/pds/ |
| cmds.c |
    206  struct pds_lm_sg_elem *sgl, *sge;  in pds_vfio_dma_map_lm_file() local
    225  sgl = kzalloc(sgl_size, GFP_KERNEL);  in pds_vfio_dma_map_lm_file()
    226  if (!sgl) {  in pds_vfio_dma_map_lm_file()
    232  sge = sgl;  in pds_vfio_dma_map_lm_file()
    240  sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);  in pds_vfio_dma_map_lm_file()
    246  lm_file->sgl = sgl;  in pds_vfio_dma_map_lm_file()
    252  kfree(sgl);  in pds_vfio_dma_map_lm_file()
    267  if (lm_file->sgl) {  in pds_vfio_dma_unmap_lm_file()
    269  lm_file->num_sge * sizeof(*lm_file->sgl),  in pds_vfio_dma_unmap_lm_file()
    271  kfree(lm_file->sgl);  in pds_vfio_dma_unmap_lm_file()
    [all …]
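This driver shows a second common pattern: the scatter list the device consumes is a plain kernel array of device-defined elements, and that array itself is mapped with dma_map_single() so the hardware can fetch it. A sketch with struct dev_sg_elem standing in for struct pds_lm_sg_elem (field layout is illustrative):

```c
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/types.h>

struct dev_sg_elem {		/* hypothetical device SG element */
	__le64 addr;
	__le32 len;
	__le32 rsvd;
};

static struct dev_sg_elem *alloc_mapped_sgl(struct device *dev, int num_sge,
					    dma_addr_t *sgl_addr)
{
	size_t sgl_size = num_sge * sizeof(struct dev_sg_elem);
	struct dev_sg_elem *sgl;

	sgl = kzalloc(sgl_size, GFP_KERNEL);
	if (!sgl)
		return NULL;

	/* map the descriptor array itself for the device to read */
	*sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *sgl_addr)) {
		kfree(sgl);
		return NULL;
	}
	return sgl;	/* caller fills entries, syncing as needed */
}
```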
|
| dirty.c |
    121  kfree(region->sgl);  in __pds_vfio_dirty_free_sgl()
    124  region->sgl = NULL;  in __pds_vfio_dirty_free_sgl()
    138  if (region->sgl)  in pds_vfio_dirty_free_sgl()
    149  struct pds_lm_sg_elem *sgl;  in pds_vfio_dirty_alloc_sgl() local
    157  sgl = kzalloc(sgl_size, GFP_KERNEL);  in pds_vfio_dirty_alloc_sgl()
    158  if (!sgl)  in pds_vfio_dirty_alloc_sgl()
    161  sgl_addr = dma_map_single(pdsc_dev, sgl, sgl_size, DMA_BIDIRECTIONAL);  in pds_vfio_dirty_alloc_sgl()
    163  kfree(sgl);  in pds_vfio_dirty_alloc_sgl()
    167  region->sgl = sgl;  in pds_vfio_dirty_alloc_sgl()
    427  struct pds_lm_sg_elem *sg_elem = &region->sgl[i];  in pds_vfio_dirty_seq_ack()
|
| /linux/drivers/xen/ |
| swiotlb-xen.c |
    346  xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,  in xen_swiotlb_unmap_sg() argument
    354  for_each_sg(sgl, sg, nelems, i)  in xen_swiotlb_unmap_sg()
    361  xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems,  in xen_swiotlb_map_sg() argument
    369  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_map_sg()
    379  xen_swiotlb_unmap_sg(dev, sgl, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC);  in xen_swiotlb_map_sg()
    380  sg_dma_len(sgl) = 0;  in xen_swiotlb_map_sg()
    385  xen_swiotlb_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl,  in xen_swiotlb_sync_sg_for_cpu() argument
    391  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_sync_sg_for_cpu()
    398  xen_swiotlb_sync_sg_for_device(struct device *dev, struct scatterlist *sgl,  in xen_swiotlb_sync_sg_for_device() argument
    404  for_each_sg(sgl, sg, nelems, i) {  in xen_swiotlb_sync_sg_for_device()
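xen_swiotlb_map_sg() maps entries one at a time and, on failure, unmaps the i entries it already mapped and signals the error by zeroing sg_dma_len() of the first entry. A generic sketch of that map/unwind shape; map_one()/unmap_one() are assumed per-entry backend hooks, not kernel API:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_sg_with_unwind(struct device *dev, struct scatterlist *sgl,
			      int nelems, enum dma_data_direction dir,
			      dma_addr_t (*map_one)(struct device *dev,
						    struct scatterlist *sg,
						    enum dma_data_direction dir),
			      void (*unmap_one)(struct device *dev,
						struct scatterlist *sg,
						enum dma_data_direction dir))
{
	struct scatterlist *sg;
	int i, j;

	for_each_sg(sgl, sg, nelems, i) {
		sg->dma_address = map_one(dev, sg, dir);
		if (dma_mapping_error(dev, sg->dma_address))
			goto out_unmap;
		sg_dma_len(sg) = sg->length;
	}
	return nelems;

out_unmap:
	for_each_sg(sgl, sg, i, j)	/* undo the i entries mapped so far */
		unmap_one(dev, sg, dir);
	sg_dma_len(sgl) = 0;		/* classic map_sg failure signal */
	return 0;
}
```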
|
| /linux/drivers/dma/ |
| loongson1-apb-dma.c |
    217  struct scatterlist *sgl, unsigned int sg_len,  in ls1x_dma_prep_lli() argument
    244  for_each_sg(sgl, sg, sg_len, i) {  in ls1x_dma_prep_lli()
    296  ls1x_dma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl,  in ls1x_dma_prep_slave_sg() argument
    309  if (ls1x_dma_prep_lli(dchan, desc, sgl, sg_len, dir, false)) {  in ls1x_dma_prep_slave_sg()
    323  struct scatterlist *sgl;  in ls1x_dma_prep_dma_cyclic() local
    338  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);  in ls1x_dma_prep_dma_cyclic()
    339  if (!sgl)  in ls1x_dma_prep_dma_cyclic()
    342  sg_init_table(sgl, sg_len);  in ls1x_dma_prep_dma_cyclic()
    344  sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(buf_addr)),  in ls1x_dma_prep_dma_cyclic()
    346  sg_dma_address(&sgl[i]) = buf_addr;  in ls1x_dma_prep_dma_cyclic()
    [all …]
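The cyclic path fakes up a temporary scatterlist over a physically contiguous cyclic buffer, one entry per period, so the existing slave-sg descriptor builder can be reused. Approximately (a sketch of the pattern, not the driver's exact code):

```c
#include <linux/mm.h>		/* pfn_to_page, offset_in_page */
#include <linux/pfn.h>		/* PFN_DOWN */
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *cyclic_buf_to_sgl(dma_addr_t buf_addr,
					     size_t period_len,
					     unsigned int periods)
{
	struct scatterlist *sgl;
	unsigned int i;

	sgl = kmalloc_array(periods, sizeof(*sgl), GFP_NOWAIT);
	if (!sgl)
		return NULL;

	sg_init_table(sgl, periods);
	for (i = 0; i < periods; i++) {
		sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(buf_addr)),
			    period_len, offset_in_page(buf_addr));
		/* buffer is already DMA-addressable: fill dma fields by hand */
		sg_dma_address(&sgl[i]) = buf_addr;
		sg_dma_len(&sgl[i]) = period_len;
		buf_addr += period_len;
	}
	return sgl;	/* caller kfree()s after building its descriptors */
}
```

shdma-base.c (further down) uses the same trick for its cyclic transfers.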
|
| /linux/drivers/net/ethernet/intel/ixgbe/ |
| ixgbe_fcoe.c |
    29   ddp->sgl = NULL;  in ixgbe_fcoe_clear_ddp()
    110  if (ddp->sgl)  in ixgbe_fcoe_ddp_put()
    111  dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc,  in ixgbe_fcoe_ddp_put()
    134  struct scatterlist *sgl, unsigned int sgc,  in ixgbe_fcoe_ddp_setup() argument
    153  if (!netdev || !sgl)  in ixgbe_fcoe_ddp_setup()
    169  if (ddp->sgl) {  in ixgbe_fcoe_ddp_setup()
    171  xid, ddp->sgl, ddp->sgc);  in ixgbe_fcoe_ddp_setup()
    189  dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE);  in ixgbe_fcoe_ddp_setup()
    202  ddp->sgl = sgl;  in ixgbe_fcoe_ddp_setup()
    206  for_each_sg(sgl, sg, dmacount, i) {  in ixgbe_fcoe_ddp_setup()
    [all …]
|
| /linux/drivers/iio/buffer/ |
| industrialio-buffer-dmaengine.c |
    68   struct scatterlist *sgl;  in iio_dmaengine_buffer_submit_block() local
    85   sgl = block->sg_table->sgl;  in iio_dmaengine_buffer_submit_block()
    86   nents = sg_nents_for_len(sgl, block->bytes_used);  in iio_dmaengine_buffer_submit_block()
    97   vecs[i].addr = sg_dma_address(sgl);  in iio_dmaengine_buffer_submit_block()
    98   vecs[i].len = min(sg_dma_len(sgl), len_total);  in iio_dmaengine_buffer_submit_block()
    101  sgl = sg_next(sgl);  in iio_dmaengine_buffer_submit_block()
|
| /linux/drivers/scsi/esas2r/ |
| esas2r_io.c |
    224  struct esas2r_mem_desc *sgl;  in esas2r_build_sg_list_sge() local
    231  sgl = esas2r_alloc_sgl(a);  in esas2r_build_sg_list_sge()
    233  if (unlikely(sgl == NULL))  in esas2r_build_sg_list_sge()
    244  memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen);  in esas2r_build_sg_list_sge()
    248  (struct atto_vda_sge *)((u8 *)sgl->virt_addr +  in esas2r_build_sg_list_sge()
    253  (struct atto_vda_sge *)((u8 *)sgl->virt_addr  in esas2r_build_sg_list_sge()
    260  cpu_to_le64(sgl->phys_addr);  in esas2r_build_sg_list_sge()
    302  list_add(&sgl->next_desc, &rq->sg_table_head);  in esas2r_build_sg_list_sge()
    376  struct esas2r_mem_desc *sgl;  in esas2r_build_prd_iblk() local
    449  sgl = esas2r_alloc_sgl(a);  in esas2r_build_prd_iblk()
    [all …]
|
| /linux/drivers/gpu/drm/i915/ |
| i915_scatterlist.h |
    31   } __sgt_iter(struct scatterlist *sgl, bool dma) {  in __sgt_iter() argument
    32   struct sgt_iter s = { .sgp = sgl };  in __sgt_iter()
    90   for ((__iter) = __sgt_iter((__sgt)->sgl, true); \
    112  for ((__iter) = __sgt_iter((__sgt)->sgl, false); \
    225  rsgt->table.sgl = NULL;  in __i915_refct_sgt_init()
|
| /linux/drivers/tty/serial/8250/ |
| 8250_dma.c |
    93   struct scatterlist sgl[2];  in serial8250_tx_dma() local
    115  sg_init_table(sgl, ARRAY_SIZE(sgl));  in serial8250_tx_dma()
    117  ret = kfifo_dma_out_prepare_mapped(&tport->xmit_fifo, sgl, ARRAY_SIZE(sgl),  in serial8250_tx_dma()
    122  for_each_sg(sgl, sg, ret, i)  in serial8250_tx_dma()
    125  desc = dmaengine_prep_slave_sg(dma->txchan, sgl, ret,  in serial8250_tx_dma()
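This path ties the kfifo and dmaengine halves together: the fifo contributes up to two pre-mapped segments (the wrap case from kfifo.c above), which go straight into dmaengine_prep_slave_sg(). A sketch assuming a kernel recent enough to provide kfifo_dma_out_prepare_mapped(); fifo, chan and the fifo buffer's DMA mapping (dma_addr) are assumed to be set up elsewhere:

```c
#include <linux/dmaengine.h>
#include <linux/kfifo.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
tx_fifo_by_dma(struct kfifo *fifo, struct dma_chan *chan,
	       dma_addr_t dma_addr, unsigned int len)
{
	struct scatterlist sgl[2];
	unsigned int nents;

	sg_init_table(sgl, ARRAY_SIZE(sgl));

	/* up to 2 entries: the fifo contents may wrap once */
	nents = kfifo_dma_out_prepare_mapped(fifo, sgl, ARRAY_SIZE(sgl),
					     len, dma_addr);
	if (!nents)
		return NULL;

	return dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}
```

After the transfer completes, the caller would advance the fifo with kfifo_dma_out_finish(), mirroring what the 8250 completion handler does.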
|
| /linux/drivers/scsi/qedf/ |
| drv_scsi_fw_funcs.c |
    34   val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_addr.lo);  in init_scsi_sgl_context()
    36   val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_addr.hi);  in init_scsi_sgl_context()
    38   val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_len);  in init_scsi_sgl_context()
|
| /linux/drivers/gpu/drm/v3d/ |
| v3d_mmu.c |
    88   struct scatterlist *sgl;  in v3d_mmu_insert_ptes() local
    91   for_each_sgtable_dma_sg(shmem_obj->sgt, sgl, count) {  in v3d_mmu_insert_ptes()
    92   dma_addr_t dma_addr = sg_dma_address(sgl);  in v3d_mmu_insert_ptes()
    94   unsigned int len = sg_dma_len(sgl);  in v3d_mmu_insert_ptes()
|
| /linux/drivers/scsi/bnx2fc/ |
| bnx2fc_hwi.c |
    1496  struct fcoe_ext_mul_sges_ctx *sgl;  in bnx2fc_init_seq_cleanup_task() local
    1543  task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo =  in bnx2fc_init_seq_cleanup_task()
    1545  task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi =  in bnx2fc_init_seq_cleanup_task()
    1547  task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size =  in bnx2fc_init_seq_cleanup_task()
    1549  task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off =  in bnx2fc_init_seq_cleanup_task()
    1551  task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i;  in bnx2fc_init_seq_cleanup_task()
    1555  sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl;  in bnx2fc_init_seq_cleanup_task()
    1556  sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr;  in bnx2fc_init_seq_cleanup_task()
    1557  sgl->mul_sgl.cur_sge_addr.hi = (u32)((u64)phys_addr >> 32);  in bnx2fc_init_seq_cleanup_task()
    1558  sgl->mul_sgl.sgl_size = bd_count;  in bnx2fc_init_seq_cleanup_task()
    [all …]
|
| /linux/drivers/dma/sh/ |
| shdma-base.c |
    574  struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr,  in shdma_prep_sg() argument
    584  for_each_sg(sgl, sg, sg_len, i)  in shdma_prep_sg()
    601  for_each_sg(sgl, sg, sg_len, i) {  in shdma_prep_sg()
    675  struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,  in shdma_prep_slave_sg() argument
    698  return shdma_prep_sg(schan, sgl, sg_len, &slave_addr,  in shdma_prep_slave_sg()
    716  struct scatterlist *sgl;  in shdma_prep_dma_cyclic() local
    744  sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_KERNEL);  in shdma_prep_dma_cyclic()
    745  if (!sgl)  in shdma_prep_dma_cyclic()
    748  sg_init_table(sgl, sg_len);  in shdma_prep_dma_cyclic()
    753  sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len,  in shdma_prep_dma_cyclic()
    [all …]
|