// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2017 Rockchip Electronics Co., Ltd.
 * Author: Jacob Chen <jacob-chen@iotwrt.com>
 */

#include <linux/pm_runtime.h>
#include <linux/scatterlist.h>

#include <media/v4l2-common.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-dma-sg.h>
#include <media/videobuf2-v4l2.h>

#include "rga-hw.h"
#include "rga.h"

/*
 * Fill the RGA MMU descriptor table with the DMA address of every page in
 * @sgt. Returns the number of descriptors written, or -EINVAL if the
 * scatterlist does not fit in @max_desc entries.
 */
static ssize_t fill_descriptors(struct rga_dma_desc *desc, size_t max_desc,
				struct sg_table *sgt)
{
	struct sg_dma_page_iter iter;
	struct rga_dma_desc *tmp = desc;
	size_t n_desc = 0;
	dma_addr_t addr;

	for_each_sgtable_dma_page(sgt, &iter, 0) {
		if (n_desc > max_desc)
			return -EINVAL;
		addr = sg_page_iter_dma_address(&iter);
		tmp->addr = lower_32_bits(addr);
		tmp++;
		n_desc++;
	}

	return n_desc;
}

static int
rga_queue_setup(struct vb2_queue *vq,
		unsigned int *nbuffers, unsigned int *nplanes,
		unsigned int sizes[], struct device *alloc_devs[])
{
	struct rga_ctx *ctx = vb2_get_drv_priv(vq);
	struct rga_frame *f = rga_get_frame(ctx, vq->type);
	const struct v4l2_pix_format_mplane *pix_fmt;
	int i;

	if (IS_ERR(f))
		return PTR_ERR(f);

	pix_fmt = &f->pix;

	if (*nplanes) {
		if (*nplanes != pix_fmt->num_planes)
			return -EINVAL;

		for (i = 0; i < pix_fmt->num_planes; i++)
			if (sizes[i] < pix_fmt->plane_fmt[i].sizeimage)
				return -EINVAL;

		return 0;
	}

	*nplanes = pix_fmt->num_planes;

	for (i = 0; i < pix_fmt->num_planes; i++)
		sizes[i] = pix_fmt->plane_fmt[i].sizeimage;

	return 0;
}

static int rga_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct rga_vb_buffer *rbuf = vb_to_rga(vbuf);
	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct rockchip_rga *rga = ctx->rga;
	struct rga_frame *f = rga_get_frame(ctx, vb->vb2_queue->type);
	size_t n_desc = 0;

	/* One descriptor per page is enough to map the whole frame. */
	n_desc = DIV_ROUND_UP(f->size, PAGE_SIZE);

	rbuf->n_desc = n_desc;
	rbuf->dma_desc = dma_alloc_coherent(rga->dev,
					    rbuf->n_desc * sizeof(*rbuf->dma_desc),
					    &rbuf->dma_desc_pa, GFP_KERNEL);
	if (!rbuf->dma_desc)
		return -ENOMEM;

	return 0;
}

/*
 * Offset of component plane @plane from the start of the frame, derived
 * from the frame geometry and the format's uv_factor.
 */
static int get_plane_offset(struct rga_frame *f, int plane)
{
	if (plane == 0)
		return 0;
	if (plane == 1)
		return f->width * f->height;
	if (plane == 2)
		return f->width * f->height + (f->width * f->height / f->fmt->uv_factor);

	return -EINVAL;
}

static int rga_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct rga_vb_buffer *rbuf = vb_to_rga(vbuf);
	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct rga_frame *f = rga_get_frame(ctx, vb->vb2_queue->type);
	ssize_t n_desc = 0;
	size_t curr_desc = 0;
	int i;
	const struct v4l2_format_info *info;
	unsigned int offsets[VIDEO_MAX_PLANES];

	if (IS_ERR(f))
		return PTR_ERR(f);

	if (V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
		if (vbuf->field == V4L2_FIELD_ANY)
			vbuf->field = V4L2_FIELD_NONE;
		if (vbuf->field != V4L2_FIELD_NONE)
			return -EINVAL;
	}

	for (i = 0; i < vb->num_planes; i++) {
		vb2_set_plane_payload(vb, i, f->pix.plane_fmt[i].sizeimage);

		/* Create local MMU table for RGA */
		n_desc = fill_descriptors(&rbuf->dma_desc[curr_desc],
					  rbuf->n_desc - curr_desc,
					  vb2_dma_sg_plane_desc(vb, i));
		if (n_desc < 0) {
			v4l2_err(&ctx->rga->v4l2_dev,
				 "Failed to map video buffer to RGA\n");
			return n_desc;
		}
		offsets[i] = curr_desc << PAGE_SHIFT;
		curr_desc += n_desc;
	}

	/* Fill the remaining planes */
	info = v4l2_format_info(f->fmt->fourcc);
	for (i = info->mem_planes; i < info->comp_planes; i++)
		offsets[i] = get_plane_offset(f, i);

	rbuf->offset.y_off = offsets[0];
	rbuf->offset.u_off = offsets[1];
	rbuf->offset.v_off = offsets[2];

	return 0;
}

static void rga_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static void rga_buf_cleanup(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct rga_vb_buffer *rbuf = vb_to_rga(vbuf);
	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct rockchip_rga *rga = ctx->rga;

	dma_free_coherent(rga->dev, rbuf->n_desc * sizeof(*rbuf->dma_desc),
			  rbuf->dma_desc, rbuf->dma_desc_pa);
}

static void rga_buf_return_buffers(struct vb2_queue *q,
				   enum vb2_buffer_state state)
{
	struct rga_ctx *ctx = vb2_get_drv_priv(q);
	struct vb2_v4l2_buffer *vbuf;

	for (;;) {
		if (V4L2_TYPE_IS_OUTPUT(q->type))
			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		else
			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			break;
		v4l2_m2m_buf_done(vbuf, state);
	}
}

static int rga_buf_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct rga_ctx *ctx = vb2_get_drv_priv(q);
	struct rockchip_rga *rga = ctx->rga;
	int ret;

	ret = pm_runtime_resume_and_get(rga->dev);
	if (ret < 0) {
		rga_buf_return_buffers(q, VB2_BUF_STATE_QUEUED);
		return ret;
	}

	if (V4L2_TYPE_IS_OUTPUT(q->type))
		ctx->osequence = 0;
	else
		ctx->csequence = 0;

	return 0;
}

static void rga_buf_stop_streaming(struct vb2_queue *q)
{
	struct rga_ctx *ctx = vb2_get_drv_priv(q);
	struct rockchip_rga *rga = ctx->rga;

	rga_buf_return_buffers(q, VB2_BUF_STATE_ERROR);
	pm_runtime_put(rga->dev);
}

const struct vb2_ops rga_qops = {
	.queue_setup = rga_queue_setup,
	.buf_init = rga_buf_init,
	.buf_prepare = rga_buf_prepare,
	.buf_queue = rga_buf_queue,
	.buf_cleanup = rga_buf_cleanup,
	.start_streaming = rga_buf_start_streaming,
	.stop_streaming = rga_buf_stop_streaming,
};
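/*
 * Illustrative sketch only, compiled out: roughly how rga_qops is expected
 * to be wired into the mem2mem vb2 queues. The real hookup lives in the
 * driver's queue_init callback in rga.c; the function name below is
 * hypothetical and the field assignments (io_modes, buf_struct_size,
 * timestamp handling) are assumptions meant to show the contract implied
 * by the ops above, not the exact upstream code. Lock and queue-device
 * setup are omitted here.
 */
#if 0
static int rga_queue_init_sketch(void *priv, struct vb2_queue *src_vq,
				 struct vb2_queue *dst_vq)
{
	struct rga_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &rga_qops;
	/* dma-sg memops: rga_buf_prepare() relies on vb2_dma_sg_plane_desc() */
	src_vq->mem_ops = &vb2_dma_sg_memops;
	/* per-buffer state (descriptor table, plane offsets) from rga.h */
	src_vq->buf_struct_size = sizeof(struct rga_vb_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &rga_qops;
	dst_vq->mem_ops = &vb2_dma_sg_memops;
	dst_vq->buf_struct_size = sizeof(struct rga_vb_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	return vb2_queue_init(dst_vq);
}
#endif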