Lines matching refs:dmaengine_buffer
32 struct dmaengine_buffer { struct
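
The member accesses that recur through this listing (queue.buffer, queue.list_lock, chan, active, align, max_size) outline the wrapper structure itself. A minimal sketch, reconstructed from those references (the comments are interpretation, not taken from the source):

struct dmaengine_buffer {
        struct iio_dma_buffer_queue queue;  /* generic IIO DMA buffer queue, embedded first */

        struct dma_chan *chan;              /* dmaengine channel the buffer drives */
        struct list_head active;            /* blocks currently submitted to the channel */

        size_t align;                       /* required length alignment of a transfer */
        size_t max_size;                    /* largest single segment the channel accepts */
};
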
42 static struct dmaengine_buffer *iio_buffer_to_dmaengine_buffer( in iio_buffer_to_dmaengine_buffer() argument
45 return container_of(buffer, struct dmaengine_buffer, queue.buffer); in iio_buffer_to_dmaengine_buffer()
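
The two refs above are the usual container_of() accessor, mapping the embedded struct iio_buffer back to its wrapper; completed from the fragments shown:

static struct dmaengine_buffer *iio_buffer_to_dmaengine_buffer(
                struct iio_buffer *buffer)
{
        return container_of(buffer, struct dmaengine_buffer, queue.buffer);
}
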
64 struct dmaengine_buffer *dmaengine_buffer = in iio_dmaengine_buffer_submit_block() local
76 max_size = min(block->size, dmaengine_buffer->max_size); in iio_dmaengine_buffer_submit_block()
77 max_size = round_down(max_size, dmaengine_buffer->align); in iio_dmaengine_buffer_submit_block()
104 desc = dmaengine_prep_peripheral_dma_vec(dmaengine_buffer->chan, in iio_dmaengine_buffer_submit_block()
109 max_size = min(block->size, dmaengine_buffer->max_size); in iio_dmaengine_buffer_submit_block()
110 max_size = round_down(max_size, dmaengine_buffer->align); in iio_dmaengine_buffer_submit_block()
118 desc = dmaengine_prep_slave_single(dmaengine_buffer->chan, in iio_dmaengine_buffer_submit_block()
134 spin_lock_irq(&dmaengine_buffer->queue.list_lock); in iio_dmaengine_buffer_submit_block()
135 list_add_tail(&block->head, &dmaengine_buffer->active); in iio_dmaengine_buffer_submit_block()
136 spin_unlock_irq(&dmaengine_buffer->queue.list_lock); in iio_dmaengine_buffer_submit_block()
138 dma_async_issue_pending(dmaengine_buffer->chan); in iio_dmaengine_buffer_submit_block()
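
The submit refs above follow the standard dmaengine flow: clamp the block to the channel's maximum segment size and alignment, prepare a descriptor (either the peripheral DMA-vec variant or a plain slave transfer), queue the block on the active list under the queue's list_lock, and kick the channel. A condensed sketch of the slave-single path only, assuming an input buffer (DMA_DEV_TO_MEM) and omitting the DMA-vec branch, the completion callback wiring, and error unwinding; block->phys_addr and the prep flags are assumptions, not visible in the refs:

static int iio_dmaengine_buffer_submit_block(struct iio_dma_buffer_queue *queue,
                                             struct iio_dma_buffer_block *block)
{
        struct dmaengine_buffer *dmaengine_buffer =
                iio_buffer_to_dmaengine_buffer(&queue->buffer);
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;
        size_t max_size;
        int ret;

        /* Clamp the transfer length to what the channel can do in one go. */
        max_size = min(block->size, dmaengine_buffer->max_size);
        max_size = round_down(max_size, dmaengine_buffer->align);

        desc = dmaengine_prep_slave_single(dmaengine_buffer->chan,
                                           block->phys_addr, max_size,
                                           DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!desc)
                return -ENOMEM;

        cookie = dmaengine_submit(desc);
        ret = dma_submit_error(cookie);
        if (ret)
                return ret;

        /* Track the in-flight block so abort/completion can find it. */
        spin_lock_irq(&dmaengine_buffer->queue.list_lock);
        list_add_tail(&block->head, &dmaengine_buffer->active);
        spin_unlock_irq(&dmaengine_buffer->queue.list_lock);

        dma_async_issue_pending(dmaengine_buffer->chan);

        return 0;
}
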
145 struct dmaengine_buffer *dmaengine_buffer = in iio_dmaengine_buffer_abort() local
148 dmaengine_terminate_sync(dmaengine_buffer->chan); in iio_dmaengine_buffer_abort()
149 iio_dma_buffer_block_list_abort(queue, &dmaengine_buffer->active); in iio_dmaengine_buffer_abort()
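
Abort stops the channel synchronously and then hands every block still on the active list back to the core for cleanup; a minimal sketch built from the two refs:

static void iio_dmaengine_buffer_abort(struct iio_dma_buffer_queue *queue)
{
        struct dmaengine_buffer *dmaengine_buffer =
                iio_buffer_to_dmaengine_buffer(&queue->buffer);

        /* Terminate all in-flight descriptors, then flush the active list. */
        dmaengine_terminate_sync(dmaengine_buffer->chan);
        iio_dma_buffer_block_list_abort(queue, &dmaengine_buffer->active);
}
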
154 struct dmaengine_buffer *dmaengine_buffer = in iio_dmaengine_buffer_release() local
157 iio_dma_buffer_release(&dmaengine_buffer->queue); in iio_dmaengine_buffer_release()
158 kfree(dmaengine_buffer); in iio_dmaengine_buffer_release()
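
Release tears down the embedded DMA queue before freeing the wrapper allocated in iio_dmaengine_buffer_alloc(); a sketch, with the parameter name assumed:

static void iio_dmaengine_buffer_release(struct iio_buffer *buf)
{
        struct dmaengine_buffer *dmaengine_buffer =
                iio_buffer_to_dmaengine_buffer(buf);

        iio_dma_buffer_release(&dmaengine_buffer->queue);
        kfree(dmaengine_buffer);
}
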
193 struct dmaengine_buffer *dmaengine_buffer = in iio_dmaengine_buffer_get_length_align() local
196 return sysfs_emit(buf, "%zu\n", dmaengine_buffer->align); in iio_dmaengine_buffer_get_length_align()
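
These refs expose the alignment to user space as a sysfs attribute; a sketch of the show routine, assuming the buffer pointer is recovered from the attribute via to_iio_dev_attr():

static ssize_t iio_dmaengine_buffer_get_length_align(struct device *dev,
                struct device_attribute *attr, char *buf)
{
        struct iio_buffer *buffer = to_iio_dev_attr(attr)->buffer;
        struct dmaengine_buffer *dmaengine_buffer =
                iio_buffer_to_dmaengine_buffer(buffer);

        return sysfs_emit(buf, "%zu\n", dmaengine_buffer->align);
}
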
222 struct dmaengine_buffer *dmaengine_buffer; in iio_dmaengine_buffer_alloc() local
228 dmaengine_buffer = kzalloc(sizeof(*dmaengine_buffer), GFP_KERNEL); in iio_dmaengine_buffer_alloc()
229 if (!dmaengine_buffer) in iio_dmaengine_buffer_alloc()
253 INIT_LIST_HEAD(&dmaengine_buffer->active); in iio_dmaengine_buffer_alloc()
254 dmaengine_buffer->chan = chan; in iio_dmaengine_buffer_alloc()
255 dmaengine_buffer->align = width; in iio_dmaengine_buffer_alloc()
256 dmaengine_buffer->max_size = dma_get_max_seg_size(chan->device->dev); in iio_dmaengine_buffer_alloc()
258 iio_dma_buffer_init(&dmaengine_buffer->queue, chan->device->dev, in iio_dmaengine_buffer_alloc()
261 dmaengine_buffer->queue.buffer.attrs = iio_dmaengine_buffer_attrs; in iio_dmaengine_buffer_alloc()
262 dmaengine_buffer->queue.buffer.access = &iio_dmaengine_buffer_ops; in iio_dmaengine_buffer_alloc()
264 return &dmaengine_buffer->queue.buffer; in iio_dmaengine_buffer_alloc()
269 kfree(dmaengine_buffer); in iio_dmaengine_buffer_alloc()
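
Taken together, the allocation refs show the constructor: allocate the wrapper, record the channel with its alignment and maximum segment size, initialise the generic IIO DMA queue on the channel's device, and install the attributes and access ops. A condensed sketch; the function signature, the width derivation from dma_get_slave_caps(), and the iio_dmaengine_default_ops name are assumptions, since the refs only show the dmaengine_buffer accesses:

static struct iio_buffer *iio_dmaengine_buffer_alloc(struct dma_chan *chan)
{
        struct dmaengine_buffer *dmaengine_buffer;
        struct dma_slave_caps caps;
        size_t src_width, dest_width, width;
        int ret;

        dmaengine_buffer = kzalloc(sizeof(*dmaengine_buffer), GFP_KERNEL);
        if (!dmaengine_buffer)
                return ERR_PTR(-ENOMEM);

        ret = dma_get_slave_caps(chan, &caps);
        if (ret < 0)
                goto err_free;

        /* Align transfers to the widest of the channel's minimum bus widths
         * (assumption: how 'width' is derived is not visible in the refs). */
        src_width = caps.src_addr_widths ? __ffs(caps.src_addr_widths) : 1;
        dest_width = caps.dst_addr_widths ? __ffs(caps.dst_addr_widths) : 1;
        width = max(src_width, dest_width);

        INIT_LIST_HEAD(&dmaengine_buffer->active);
        dmaengine_buffer->chan = chan;
        dmaengine_buffer->align = width;
        dmaengine_buffer->max_size = dma_get_max_seg_size(chan->device->dev);

        iio_dma_buffer_init(&dmaengine_buffer->queue, chan->device->dev,
                            &iio_dmaengine_default_ops);

        dmaengine_buffer->queue.buffer.attrs = iio_dmaengine_buffer_attrs;
        dmaengine_buffer->queue.buffer.access = &iio_dmaengine_buffer_ops;

        return &dmaengine_buffer->queue.buffer;

err_free:
        kfree(dmaengine_buffer);
        return ERR_PTR(ret);
}
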
281 struct dmaengine_buffer *dmaengine_buffer = in iio_dmaengine_buffer_free() local
284 iio_dma_buffer_exit(&dmaengine_buffer->queue); in iio_dmaengine_buffer_free()
285 dma_release_channel(dmaengine_buffer->chan); in iio_dmaengine_buffer_free()
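
The free path mirrors the constructor: shut down the DMA queue, then return the channel to the dmaengine core; a minimal sketch from the two refs:

static void iio_dmaengine_buffer_free(struct iio_buffer *buffer)
{
        struct dmaengine_buffer *dmaengine_buffer =
                iio_buffer_to_dmaengine_buffer(buffer);

        iio_dma_buffer_exit(&dmaengine_buffer->queue);
        dma_release_channel(dmaengine_buffer->chan);
}
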