Lines Matching +full:multi +full:- +full:instance

1 /* SPDX-License-Identifier: GPL-2.0-or-later */
3 * Memory-to-memory device framework for Video for Linux 2.
16 #include <media/videobuf2-v4l2.h>
19 * struct v4l2_m2m_ops - mem-to-mem device driver callbacks
52 * struct v4l2_m2m_queue_ctx - represents a queue for buffers ready to be
56 * @rdy_queue: List of V4L2 mem-to-mem queues
62 * instance receives access to the device.
75 * struct v4l2_m2m_ctx - Memory to memory context structure
97 * @job_flags: Job queue flags, used internally by v4l2-mem2mem.c:
100 * @priv: Instance private data
133 * struct v4l2_m2m_buffer - Memory to memory buffer
144 * v4l2_m2m_get_curr_priv() - return driver private data for the currently
145 * running instance or NULL if no instance is running
152 * v4l2_m2m_get_vq() - return vb2_queue for the given type
154 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
161 * v4l2_m2m_try_schedule() - check whether an instance is ready to be added to
164 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
166 * There are three basic requirements an instance has to meet to be able to run:
177 * return 1 if the instance is ready.
178 * An example of the above could be an instance that requires more than one
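A driver expresses such an extra requirement through the optional job_ready callback of struct v4l2_m2m_ops, which v4l2_m2m_try_schedule() consults before queueing the job. A minimal sketch, assuming a hypothetical driver context struct my_ctx (with an embedded struct v4l2_fh fh) and a hypothetical SRC_BUFS_PER_JOB constant:

static int my_job_ready(void *priv)
{
        struct my_ctx *ctx = priv;      /* drv_priv passed to v4l2_m2m_ctx_init() */

        /* Do not schedule a job until a full job's worth of source buffers is queued. */
        if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < SRC_BUFS_PER_JOB)
                return 0;

        return 1;
}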
184 * v4l2_m2m_job_finish() - inform the framework that a job has been finished
188 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
194 * This function has to be called only after &v4l2_m2m_ops->device_run
196 * not be called directly from the &v4l2_m2m_ops->device_run callback though.
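In practice the call usually lives in the interrupt handler or completion work that fires when the hardware finishes the transaction, after the source and destination buffers have been returned to vb2. A sketch, again assuming hypothetical my_dev/my_ctx structures (an m2m_dev pointer in the device, fh.m2m_ctx in the context):

static irqreturn_t my_irq_handler(int irq, void *priv)
{
        struct my_dev *dev = priv;
        struct my_ctx *ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
        struct vb2_v4l2_buffer *src, *dst;

        if (!ctx)
                return IRQ_HANDLED;

        src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
        dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

        v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
        v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);

        /* Lets the framework schedule the next ready instance. */
        v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);

        return IRQ_HANDLED;
}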
202 * v4l2_m2m_buf_done_and_job_finish() - return source/destination buffers with
207 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
231 vb2_buffer_done(&buf->vb2_buf, state); in v4l2_m2m_buf_done()
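v4l2_m2m_buf_done_and_job_finish() combines both steps: it returns the current source and destination buffers and finishes the job in one call, letting the framework decide whether the capture buffer may actually be completed (relevant for drivers that support holding the capture buffer). A sketch of a completion helper built on it, with the same hypothetical structures:

static void my_job_done(struct my_dev *dev, struct my_ctx *ctx, bool error)
{
        v4l2_m2m_buf_done_and_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx,
                                         error ? VB2_BUF_STATE_ERROR
                                               : VB2_BUF_STATE_DONE);
}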
235 * v4l2_m2m_clear_state() - clear encoding/decoding state
237 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
242 m2m_ctx->next_buf_last = false; in v4l2_m2m_clear_state()
243 m2m_ctx->is_draining = false; in v4l2_m2m_clear_state()
244 m2m_ctx->has_stopped = false; in v4l2_m2m_clear_state()
248 * v4l2_m2m_mark_stopped() - set current encoding/decoding state as stopped
250 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
255 m2m_ctx->next_buf_last = false; in v4l2_m2m_mark_stopped()
256 m2m_ctx->is_draining = false; in v4l2_m2m_mark_stopped()
257 m2m_ctx->has_stopped = true; in v4l2_m2m_mark_stopped()
261 * v4l2_m2m_dst_buf_is_last() - return the current encoding/decoding session
267 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
272 return m2m_ctx->is_draining && m2m_ctx->next_buf_last; in v4l2_m2m_dst_buf_is_last()
276 * v4l2_m2m_has_stopped() - return the current encoding/decoding session
279 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
284 return m2m_ctx->has_stopped; in v4l2_m2m_has_stopped()
288 * v4l2_m2m_is_last_draining_src_buf() - return the output buffer draining
295 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
302 return m2m_ctx->is_draining && vbuf == m2m_ctx->last_src_buf; in v4l2_m2m_is_last_draining_src_buf()
306 * v4l2_m2m_last_buffer_done() - marks the buffer with LAST flag and DONE
308 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
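Taken together, these helpers let a completion path implement draining: when the source buffer that just finished is the last one queued before a stop command, the capture buffer must be flagged LAST. A sketch, with my_ctx hypothetical and the buffers coming from the driver's completion path:

static void my_finish_buffers(struct my_ctx *ctx,
                              struct vb2_v4l2_buffer *src,
                              struct vb2_v4l2_buffer *dst)
{
        if (v4l2_m2m_is_last_draining_src_buf(ctx->fh.m2m_ctx, src))
                /* Marks dst with V4L2_BUF_FLAG_LAST and returns it as DONE. */
                v4l2_m2m_last_buffer_done(ctx->fh.m2m_ctx, dst);
        else
                v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);

        v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
}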
315 * v4l2_m2m_suspend() - stop new jobs from being run and wait for current job
326 * v4l2_m2m_resume() - resume job running and try to run a queued job
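These two calls pair naturally with the driver's power-management hooks: suspend waits for the running job and blocks new ones, resume lets scheduling continue. A sketch, assuming a hypothetical my_dev holding the m2m_dev pointer as driver data:

static int __maybe_unused my_pm_suspend(struct device *dev)
{
        struct my_dev *mydev = dev_get_drvdata(dev);

        /* Waits for a running job to finish and keeps new jobs from starting. */
        v4l2_m2m_suspend(mydev->m2m_dev);
        return 0;
}

static int __maybe_unused my_pm_resume(struct device *dev)
{
        struct my_dev *mydev = dev_get_drvdata(dev);

        /* Re-enables job scheduling and tries to run a queued job. */
        v4l2_m2m_resume(mydev->m2m_dev);
        return 0;
}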
337 * v4l2_m2m_reqbufs() - multi-queue-aware REQBUFS multiplexer
340 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
347 * v4l2_m2m_querybuf() - multi-queue-aware QUERYBUF multiplexer
350 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
359 * v4l2_m2m_qbuf() - enqueue a source or destination buffer, depending on
363 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
370 * v4l2_m2m_dqbuf() - dequeue a source or destination buffer, depending on
374 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
381 * v4l2_m2m_prepare_buf() - prepare a source or destination buffer, depending on
385 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
392 * v4l2_m2m_create_bufs() - create a source or destination buffer, depending
396 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
403 * v4l2_m2m_expbuf() - export a source or destination buffer, depending on
407 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
414 * v4l2_m2m_streamon() - turn on streaming for a video queue
417 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
424 * v4l2_m2m_streamoff() - turn off streaming for a video queue
427 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
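All of these multiplexers take the file handle plus the m2m context, so a driver can call them from its own ioctl handlers; when the driver stores a struct v4l2_fh with m2m_ctx set as the file's private data, the v4l2_m2m_ioctl_*() wrappers declared further down in this header can be used directly. A sketch of the latter, with the rest of the ops table omitted:

static const struct v4l2_ioctl_ops my_ioctl_ops = {
        .vidioc_reqbufs         = v4l2_m2m_ioctl_reqbufs,
        .vidioc_querybuf        = v4l2_m2m_ioctl_querybuf,
        .vidioc_qbuf            = v4l2_m2m_ioctl_qbuf,
        .vidioc_dqbuf           = v4l2_m2m_ioctl_dqbuf,
        .vidioc_prepare_buf     = v4l2_m2m_ioctl_prepare_buf,
        .vidioc_create_bufs     = v4l2_m2m_ioctl_create_bufs,
        .vidioc_expbuf          = v4l2_m2m_ioctl_expbuf,
        .vidioc_streamon        = v4l2_m2m_ioctl_streamon,
        .vidioc_streamoff       = v4l2_m2m_ioctl_streamoff,
        /* format, control and event handlers omitted */
};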
434 * v4l2_m2m_update_start_streaming_state() - update the encoding/decoding
437 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
444 * v4l2_m2m_update_stop_streaming_state() - update the encoding/decoding
447 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
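Both updaters are meant to be called from the vb2 start_streaming/stop_streaming callbacks so that the drain/stop bookkeeping follows the queues. A sketch, with my_ctx hypothetical (see also the buffer-flushing sketch near the end of this listing):

static int my_start_streaming(struct vb2_queue *q, unsigned int count)
{
        struct my_ctx *ctx = vb2_get_drv_priv(q);

        v4l2_m2m_update_start_streaming_state(ctx->fh.m2m_ctx, q);
        return 0;
}

static void my_stop_streaming(struct vb2_queue *q)
{
        struct my_ctx *ctx = vb2_get_drv_priv(q);

        v4l2_m2m_update_stop_streaming_state(ctx->fh.m2m_ctx, q);
        /* Buffers still on the ready list of this queue must be returned too. */
}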
454 * v4l2_m2m_encoder_cmd() - execute an encoder command
457 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
464 * v4l2_m2m_decoder_cmd() - execute a decoder command
467 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
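The command helpers implement the usual STOP (drain) and START semantics on top of the state handled above, so a driver handler can validate the command and then forward it. A decoder-side sketch, with my_ctx and its fh member hypothetical:

static int my_decoder_cmd(struct file *file, void *fh,
                          struct v4l2_decoder_cmd *dc)
{
        struct my_ctx *ctx = container_of(fh, struct my_ctx, fh);
        int ret;

        /* Let the framework validate the command (STOP/START) first. */
        ret = v4l2_m2m_ioctl_try_decoder_cmd(file, fh, dc);
        if (ret)
                return ret;

        return v4l2_m2m_decoder_cmd(file, ctx->fh.m2m_ctx, dc);
}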
474 * v4l2_m2m_poll() - poll replacement, for destination buffers only
477 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
482 * indicate that a non-blocking write can be performed, while read will be
489 * v4l2_m2m_mmap() - source and destination queues-aware mmap multiplexer
492 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
496 * seamlessly for the video buffer, which will receive normal per-queue offsets
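On the file-operations side, drivers that keep the m2m context in the struct v4l2_fh can plug in the framework's fop helpers, which call v4l2_m2m_poll() and v4l2_m2m_mmap() internally. A sketch, where my_open and my_release are hypothetical:

static const struct v4l2_file_operations my_fops = {
        .owner          = THIS_MODULE,
        .open           = my_open,              /* hypothetical, sets up fh.m2m_ctx */
        .release        = my_release,           /* hypothetical */
        .poll           = v4l2_m2m_fop_poll,    /* calls v4l2_m2m_poll() on fh->m2m_ctx */
        .unlocked_ioctl = video_ioctl2,
        .mmap           = v4l2_m2m_fop_mmap,    /* calls v4l2_m2m_mmap() on fh->m2m_ctx */
};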
511 * v4l2_m2m_init() - initialize per-driver m2m data
540 * v4l2_m2m_release() - cleans up and frees a m2m_dev structure
549 * v4l2_m2m_ctx_init() - allocate and initialize a m2m context
552 * @drv_priv: driver's instance private data
553 * @queue_init: a callback for queue type-specific initialization function
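The usual wiring is v4l2_m2m_init() once at probe time with the struct v4l2_m2m_ops, then one v4l2_m2m_ctx_init() per opened file, with queue_init filling in the source and destination vb2 queues. A sketch of the open() path, with my_dev, my_ctx and my_queue_init hypothetical:

/* At probe time: dev->m2m_dev = v4l2_m2m_init(&my_m2m_ops); */
static int my_open(struct file *file)
{
        struct my_dev *dev = video_drvdata(file);
        struct my_ctx *ctx;

        ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        if (!ctx)
                return -ENOMEM;

        v4l2_fh_init(&ctx->fh, video_devdata(file));
        file->private_data = &ctx->fh;

        ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, my_queue_init);
        if (IS_ERR(ctx->fh.m2m_ctx)) {
                int ret = PTR_ERR(ctx->fh.m2m_ctx);

                v4l2_fh_exit(&ctx->fh);
                kfree(ctx);
                return ret;
        }

        v4l2_fh_add(&ctx->fh);
        return 0;
}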
565 m2m_ctx->out_q_ctx.buffered = buffered; in v4l2_m2m_set_src_buffered()
571 m2m_ctx->cap_q_ctx.buffered = buffered; in v4l2_m2m_set_dst_buffered()
575 * v4l2_m2m_ctx_release() - release m2m context
577 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
584 * v4l2_m2m_buf_queue() - add a buffer to the proper ready buffers list.
586 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
589 * Call from the &vb2_ops->buf_queue callback.
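The corresponding vb2 callback is then little more than a hand-off to the ready list of the matching queue. A sketch, reusing the hypothetical my_ctx:

static void my_buf_queue(struct vb2_buffer *vb)
{
        struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
        struct my_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

        v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}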
595 * v4l2_m2m_num_src_bufs_ready() - return the number of source buffers ready for
598 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
606 spin_lock_irqsave(&m2m_ctx->out_q_ctx.rdy_spinlock, flags); in v4l2_m2m_num_src_bufs_ready()
607 num_buf_rdy = m2m_ctx->out_q_ctx.num_rdy; in v4l2_m2m_num_src_bufs_ready()
608 spin_unlock_irqrestore(&m2m_ctx->out_q_ctx.rdy_spinlock, flags); in v4l2_m2m_num_src_bufs_ready()
614 * v4l2_m2m_num_dst_bufs_ready() - return the number of destination buffers
617 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
625 spin_lock_irqsave(&m2m_ctx->cap_q_ctx.rdy_spinlock, flags); in v4l2_m2m_num_dst_bufs_ready()
626 num_buf_rdy = m2m_ctx->cap_q_ctx.num_rdy; in v4l2_m2m_num_dst_bufs_ready()
627 spin_unlock_irqrestore(&m2m_ctx->cap_q_ctx.rdy_spinlock, flags); in v4l2_m2m_num_dst_bufs_ready()
633 * v4l2_m2m_next_buf() - return next buffer from the list of ready buffers
640 * v4l2_m2m_next_src_buf() - return next source buffer from the list of ready
643 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
648 return v4l2_m2m_next_buf(&m2m_ctx->out_q_ctx); in v4l2_m2m_next_src_buf()
652 * v4l2_m2m_next_dst_buf() - return next destination buffer from the list of
655 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
660 return v4l2_m2m_next_buf(&m2m_ctx->cap_q_ctx); in v4l2_m2m_next_dst_buf()
664 * v4l2_m2m_last_buf() - return last buffer from the list of ready buffers
671 * v4l2_m2m_last_src_buf() - return last source buffer from the list of
674 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
679 return v4l2_m2m_last_buf(&m2m_ctx->out_q_ctx); in v4l2_m2m_last_src_buf()
683 * v4l2_m2m_last_dst_buf() - return last destination buffer from the list of
686 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
691 return v4l2_m2m_last_buf(&m2m_ctx->cap_q_ctx); in v4l2_m2m_last_dst_buf()
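A device_run() implementation normally peeks at the next source and destination buffers with these accessors, copies the metadata across and starts the hardware; the buffers are only taken off the ready lists once the job completes. A sketch, with my_ctx and my_hw_start() hypothetical:

static void my_device_run(void *priv)
{
        struct my_ctx *ctx = priv;
        struct vb2_v4l2_buffer *src, *dst;

        src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
        dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);

        /* Propagate timestamp and frame-related flags to the capture buffer. */
        v4l2_m2m_buf_copy_metadata(src, dst, true);

        my_hw_start(ctx, src, dst);     /* hypothetical hardware kick-off */
}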
695 * v4l2_m2m_for_each_dst_buf() - iterate over a list of destination ready
698 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
702 list_for_each_entry(b, &m2m_ctx->cap_q_ctx.rdy_queue, list)
705 * v4l2_m2m_for_each_src_buf() - iterate over a list of source ready buffers
707 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
711 list_for_each_entry(b, &m2m_ctx->out_q_ctx.rdy_queue, list)
714 * v4l2_m2m_for_each_dst_buf_safe() - iterate over a list of destination ready
717 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
722 list_for_each_entry_safe(b, n, &m2m_ctx->cap_q_ctx.rdy_queue, list)
725 * v4l2_m2m_for_each_src_buf_safe() - iterate over a list of source ready
728 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
733 list_for_each_entry_safe(b, n, &m2m_ctx->out_q_ctx.rdy_queue, list)
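The iterators walk struct v4l2_m2m_buffer entries on the ready lists; since those lists are protected by the per-queue rdy_spinlock (as the inline helpers above show), a driver should hold that lock while iterating. A sketch that scans the ready capture buffers for a matching timestamp, with my_ctx hypothetical:

static struct vb2_v4l2_buffer *my_find_dst_buf(struct my_ctx *ctx, u64 timestamp)
{
        struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
        struct vb2_v4l2_buffer *found = NULL;
        struct v4l2_m2m_buffer *b;
        unsigned long flags;

        spin_lock_irqsave(&m2m_ctx->cap_q_ctx.rdy_spinlock, flags);
        v4l2_m2m_for_each_dst_buf(m2m_ctx, b) {
                if (b->vb.vb2_buf.timestamp == timestamp) {
                        found = &b->vb;
                        break;
                }
        }
        spin_unlock_irqrestore(&m2m_ctx->cap_q_ctx.rdy_spinlock, flags);

        return found;
}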
736 * v4l2_m2m_get_src_vq() - return vb2_queue for source buffers
738 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
743 return &m2m_ctx->out_q_ctx.q; in v4l2_m2m_get_src_vq()
747 * v4l2_m2m_get_dst_vq() - return vb2_queue for destination buffers
749 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
754 return &m2m_ctx->cap_q_ctx.q; in v4l2_m2m_get_dst_vq()
758 * v4l2_m2m_buf_remove() - take off a buffer from the list of ready buffers and
766 * v4l2_m2m_src_buf_remove() - take off a source buffer from the list of ready
769 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
774 return v4l2_m2m_buf_remove(&m2m_ctx->out_q_ctx); in v4l2_m2m_src_buf_remove()
778 * v4l2_m2m_dst_buf_remove() - take off a destination buffer from the list of
781 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
786 return v4l2_m2m_buf_remove(&m2m_ctx->cap_q_ctx); in v4l2_m2m_dst_buf_remove()
790 * v4l2_m2m_buf_remove_by_buf() - take off exact buffer from the list of ready
800 * v4l2_m2m_src_buf_remove_by_buf() - take off exact source buffer from the list
803 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
809 v4l2_m2m_buf_remove_by_buf(&m2m_ctx->out_q_ctx, vbuf); in v4l2_m2m_src_buf_remove_by_buf()
813 * v4l2_m2m_dst_buf_remove_by_buf() - take off exact destination buffer from the
816 * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
822 v4l2_m2m_buf_remove_by_buf(&m2m_ctx->cap_q_ctx, vbuf); in v4l2_m2m_dst_buf_remove_by_buf()
831 return v4l2_m2m_buf_remove_by_idx(&m2m_ctx->out_q_ctx, idx); in v4l2_m2m_src_buf_remove_by_idx()
837 return v4l2_m2m_buf_remove_by_idx(&m2m_ctx->cap_q_ctx, idx); in v4l2_m2m_dst_buf_remove_by_idx()
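The remove helpers are the usual way to flush the ready lists when streaming stops: every buffer still queued on the stopping side must be returned to vb2 in the ERROR state. A sketch of such a flush helper, typically called from the vb2 stop_streaming callback, with my_ctx hypothetical:

static void my_return_all_buffers(struct my_ctx *ctx, struct vb2_queue *q,
                                  enum vb2_buffer_state state)
{
        struct vb2_v4l2_buffer *vbuf;

        for (;;) {
                if (V4L2_TYPE_IS_OUTPUT(q->type))
                        vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
                else
                        vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
                if (!vbuf)
                        break;
                v4l2_m2m_buf_done(vbuf, state);
        }
}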
841 * v4l2_m2m_buf_copy_metadata() - copy buffer metadata from