// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
/*
 * Wave5 series multi-standard codec IP - helper functions
 *
 * Copyright (C) 2021-2023 CHIPS&MEDIA INC
 */

#include "wave5-helper.h"

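/* Map a vpu_instance_state value to a human-readable name for logging. */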
const char *state_to_str(enum vpu_instance_state state)
{
	switch (state) {
	case VPU_INST_STATE_NONE:
		return "NONE";
	case VPU_INST_STATE_OPEN:
		return "OPEN";
	case VPU_INST_STATE_INIT_SEQ:
		return "INIT_SEQ";
	case VPU_INST_STATE_PIC_RUN:
		return "PIC_RUN";
	case VPU_INST_STATE_STOP:
		return "STOP";
	default:
		return "UNKNOWN";
	}
}

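/*
 * Free all per-instance resources: framebuffers, the bitstream buffer and
 * the control handler, unregister the V4L2 file handle if it was added,
 * then drop the instance from the device list and release its ID and memory.
 */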
void wave5_cleanup_instance(struct vpu_instance *inst)
{
	int i;

	/*
	 * For Wave515 SRAM memory is allocated at
	 * wave5_vpu_dec_register_device() and freed at
	 * wave5_vpu_dec_unregister_device().
	 */
	if (list_is_singular(&inst->list) &&
	    inst->dev->product_code != WAVE515_CODE)
		wave5_vdi_free_sram(inst->dev);

	for (i = 0; i < inst->fbc_buf_count; i++)
		wave5_vpu_dec_reset_framebuffer(inst, i);

	wave5_vdi_free_dma_memory(inst->dev, &inst->bitstream_vbuf);
	v4l2_ctrl_handler_free(&inst->v4l2_ctrl_hdl);
	if (inst->v4l2_fh.vdev) {
		v4l2_fh_del(&inst->v4l2_fh);
		v4l2_fh_exit(&inst->v4l2_fh);
	}
	list_del_init(&inst->list);
	ida_free(&inst->dev->inst_ida, inst->id);
	kfree(inst->codec_info);
	kfree(inst);
}

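/*
 * Common file release path for the decoder and encoder nodes: release the
 * m2m context, close the hardware instance through @close_func when it was
 * opened, clean up the instance, and cancel the device hrtimer once the
 * last instance is gone on devices without an IRQ line.
 */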
int wave5_vpu_release_device(struct file *filp,
			     int (*close_func)(struct vpu_instance *inst, u32 *fail_res),
			     char *name)
{
	struct vpu_instance *inst = wave5_to_vpu_inst(filp->private_data);
	struct vpu_device *dev = inst->dev;
	int ret = 0;

	v4l2_m2m_ctx_release(inst->v4l2_fh.m2m_ctx);
	if (inst->state != VPU_INST_STATE_NONE) {
		u32 fail_res;

		ret = close_func(inst, &fail_res);
		if (fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_err(inst->dev->dev, "%s close failed, device is still running\n",
				name);
			return -EBUSY;
		}
		if (ret && ret != -EIO) {
			dev_err(inst->dev->dev, "%s close, fail: %d\n", name, ret);
			return ret;
		}
	}

	wave5_cleanup_instance(inst);
	if (dev->irq < 0) {
		ret = mutex_lock_interruptible(&dev->dev_lock);
		if (ret)
			return ret;

		if (list_empty(&dev->instances)) {
			dev_dbg(dev->dev, "Disabling the hrtimer\n");
			hrtimer_cancel(&dev->hrtimer);
		}

		mutex_unlock(&dev->dev_lock);
	}

	return ret;
}

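/*
 * Shared vb2 queue setup for the m2m OUTPUT and CAPTURE queues: both use
 * dma-contig memory, copy timestamps from source to destination buffers and
 * are serialized by the per-device lock.
 */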
int wave5_vpu_queue_init(void *priv, struct vb2_queue *src_vq, struct vb2_queue *dst_vq,
			 const struct vb2_ops *ops)
{
	struct vpu_instance *inst = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->ops = ops;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->buf_struct_size = sizeof(struct vpu_src_buffer);
	src_vq->drv_priv = inst;
	src_vq->lock = &inst->dev->dev_lock;
	src_vq->dev = inst->dev->v4l2_dev.dev;
	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->ops = ops;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->buf_struct_size = sizeof(struct vpu_src_buffer);
	dst_vq->drv_priv = inst;
	dst_vq->lock = &inst->dev->dev_lock;
	dst_vq->dev = inst->dev->v4l2_dev.dev;
	ret = vb2_queue_init(dst_vq);
	if (ret)
		return ret;

	return 0;
}

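/*
 * Event subscription shared by decoder and encoder: EOS and CTRL events are
 * always allowed, SOURCE_CHANGE only for decoder instances.
 */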
int wave5_vpu_subscribe_event(struct v4l2_fh *fh, const struct v4l2_event_subscription *sub)
{
	struct vpu_instance *inst = wave5_to_vpu_inst(fh);
	bool is_decoder = inst->type == VPU_INST_TYPE_DEC;

	dev_dbg(inst->dev->dev, "%s: [%s] type: %u id: %u | flags: %u\n", __func__,
		is_decoder ? "decoder" : "encoder", sub->type, sub->id, sub->flags);

	switch (sub->type) {
	case V4L2_EVENT_EOS:
		return v4l2_event_subscribe(fh, sub, 0, NULL);
	case V4L2_EVENT_SOURCE_CHANGE:
		if (is_decoder)
			return v4l2_src_change_event_subscribe(fh, sub);
		return -EINVAL;
	case V4L2_EVENT_CTRL:
		return v4l2_ctrl_subscribe_event(fh, sub);
	default:
		return -EINVAL;
	}
}

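/* VIDIOC_G_FMT handler for the OUTPUT queue: report the current source format. */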
int wave5_vpu_g_fmt_out(struct file *file, void *fh, struct v4l2_format *f)
{
	struct vpu_instance *inst = wave5_to_vpu_inst(fh);
	int i;

	f->fmt.pix_mp.width = inst->src_fmt.width;
	f->fmt.pix_mp.height = inst->src_fmt.height;
	f->fmt.pix_mp.pixelformat = inst->src_fmt.pixelformat;
	f->fmt.pix_mp.field = inst->src_fmt.field;
	f->fmt.pix_mp.flags = inst->src_fmt.flags;
	f->fmt.pix_mp.num_planes = inst->src_fmt.num_planes;
	for (i = 0; i < f->fmt.pix_mp.num_planes; i++) {
		f->fmt.pix_mp.plane_fmt[i].bytesperline = inst->src_fmt.plane_fmt[i].bytesperline;
		f->fmt.pix_mp.plane_fmt[i].sizeimage = inst->src_fmt.plane_fmt[i].sizeimage;
	}

	f->fmt.pix_mp.colorspace = inst->colorspace;
	f->fmt.pix_mp.ycbcr_enc = inst->ycbcr_enc;
	f->fmt.pix_mp.quantization = inst->quantization;
	f->fmt.pix_mp.xfer_func = inst->xfer_func;

	return 0;
}

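/* Find a format entry by V4L2 pixel format; NULL if it is not in the list. */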
const struct vpu_format *wave5_find_vpu_fmt(unsigned int v4l2_pix_fmt,
					    const struct vpu_format fmt_list[MAX_FMTS])
{
	unsigned int index;

	for (index = 0; index < MAX_FMTS; index++) {
		if (fmt_list[index].v4l2_pix_fmt == v4l2_pix_fmt)
			return &fmt_list[index];
	}

	return NULL;
}

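/* Find a format entry by index; NULL past the end of the populated list. */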
const struct vpu_format *wave5_find_vpu_fmt_by_idx(unsigned int idx,
						   const struct vpu_format fmt_list[MAX_FMTS])
{
	if (idx >= MAX_FMTS)
		return NULL;

	if (!fmt_list[idx].v4l2_pix_fmt)
		return NULL;

	return &fmt_list[idx];
}

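/* Map a compressed V4L2 pixel format to the wave_std codec for the instance type. */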
enum wave_std wave5_to_vpu_std(unsigned int v4l2_pix_fmt, enum vpu_instance_type type)
{
	switch (v4l2_pix_fmt) {
	case V4L2_PIX_FMT_H264:
		return type == VPU_INST_TYPE_DEC ? W_AVC_DEC : W_AVC_ENC;
	case V4L2_PIX_FMT_HEVC:
		return type == VPU_INST_TYPE_DEC ? W_HEVC_DEC : W_HEVC_ENC;
	default:
		return STD_UNKNOWN;
	}
}

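/*
 * Remove every buffer from the m2m source or destination queue (depending
 * on the queue type) and mark it done in the given state, completing any
 * control request attached to the buffer first.
 */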
void wave5_return_bufs(struct vb2_queue *q, u32 state)
{
	struct vpu_instance *inst = vb2_get_drv_priv(q);
	struct v4l2_m2m_ctx *m2m_ctx = inst->v4l2_fh.m2m_ctx;
	struct v4l2_ctrl_handler v4l2_ctrl_hdl = inst->v4l2_ctrl_hdl;
	struct vb2_v4l2_buffer *vbuf;

	for (;;) {
		if (q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
			vbuf = v4l2_m2m_src_buf_remove(m2m_ctx);
		else
			vbuf = v4l2_m2m_dst_buf_remove(m2m_ctx);
		if (!vbuf)
			return;
		v4l2_ctrl_request_complete(vbuf->vb2_buf.req_obj.req, &v4l2_ctrl_hdl);
		v4l2_m2m_buf_done(vbuf, state);
	}
}
233