// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
 */

#include <media/v4l2-event.h>
#include <media/v4l2-mem2mem.h>

#include "iris_buffer.h"
#include "iris_instance.h"
#include "iris_power.h"
#include "iris_vpu_buffer.h"

#define PIXELS_4K 4096
#define MAX_WIDTH 4096
#define MAX_HEIGHT 2304
#define Y_STRIDE_ALIGN 128
#define UV_STRIDE_ALIGN 128
#define Y_SCANLINE_ALIGN 32
#define UV_SCANLINE_ALIGN 16
#define UV_SCANLINE_ALIGN_QC08C 32
#define META_STRIDE_ALIGNED 64
#define META_SCANLINE_ALIGNED 16
#define NUM_MBS_4K (DIV_ROUND_UP(MAX_WIDTH, 16) * DIV_ROUND_UP(MAX_HEIGHT, 16))

/*
 * NV12:
 * YUV 4:2:0 image with a plane of 8 bit Y samples followed
 * by an interleaved U/V plane containing 8 bit 2x2 subsampled
 * colour difference samples.
 *
 * <-Y/UV_Stride (aligned to 128)->
 * <------- Width ------->
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  ^           ^
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  Height      |
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |          y_scanlines (aligned to 32)
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
 * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  V           |
 * . . . . . . . . . . . . . . . .              |
 * . . . . . . . . . . . . . . . .              |
 * . . . . . . . . . . . . . . . .              |
 * . . . . . . . . . . . . . . . .              V
 * U V U V U V U V U V U V . . . .  ^
 * U V U V U V U V U V U V . . . .  |
 * U V U V U V U V U V U V . . . .  |
 * U V U V U V U V U V U V . . . .  uv_scanlines (aligned to 16)
 * . . . . . . . . . . . . . . . .  |
 * . . . . . . . . . . . . . . . .  V
 * . . . . . . . . . . . . . . . .  --> Buffer size aligned to 4K
 *
 * y_stride : Width aligned to 128
 * uv_stride : Width aligned to 128
 * y_scanlines: Height aligned to 32
 * uv_scanlines: Height/2 aligned to 16
 * Total size = align(y_stride * y_scanlines +
 *              uv_stride * uv_scanlines, 4096)
 *
 * Note: All the alignments are hardware requirements.
 */
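/*
 * For illustration only (a sample evaluation of the formula above, not an
 * additional hardware requirement): a 1920x1080 NV12 frame gives
 *   y_stride = uv_stride = 1920, y_scanlines = 1088, uv_scanlines = 544
 *   y_plane = 1920 * 1088 = 2088960, uv_plane = 1920 * 544 = 1044480
 *   total   = align(2088960 + 1044480, 4096) = 3133440 bytes (~3 MiB)
 */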
static u32 iris_yuv_buffer_size_nv12(struct iris_inst *inst)
{
	u32 y_plane, uv_plane, y_stride, uv_stride, y_scanlines, uv_scanlines;
	struct v4l2_format *f;

	if (inst->domain == DECODER)
		f = inst->fmt_dst;
	else
		f = inst->fmt_src;

	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
	y_scanlines = ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN);
	uv_scanlines = ALIGN((f->fmt.pix_mp.height + 1) >> 1, UV_SCANLINE_ALIGN);
	y_plane = y_stride * y_scanlines;
	uv_plane = uv_stride * uv_scanlines;

	return ALIGN(y_plane + uv_plane, PIXELS_4K);
}

/*
 * QC08C:
 * Compressed Macro-tile format for NV12.
 * Contains 4 planes in the following order -
 * (A) Y_Meta_Plane
 * (B) Y_UBWC_Plane
 * (C) UV_Meta_Plane
 * (D) UV_UBWC_Plane
 *
 * Y_Meta_Plane consists of meta information to decode compressed
 * tile data in Y_UBWC_Plane.
 * Y_UBWC_Plane consists of Y data in compressed macro-tile format.
 * UBWC decoder block will use the Y_Meta_Plane data together with
 * Y_UBWC_Plane data to produce loss-less uncompressed 8 bit Y samples.
 *
 * UV_Meta_Plane consists of meta information to decode compressed
 * tile data in UV_UBWC_Plane.
 * UV_UBWC_Plane consists of UV data in compressed macro-tile format.
 * UBWC decoder block will use UV_Meta_Plane data together with
 * UV_UBWC_Plane data to produce loss-less uncompressed 8 bit 2x2
 * subsampled color difference samples.
 *
 * Each tile in Y_UBWC_Plane/UV_UBWC_Plane is independently decodable
 * and randomly accessible. There is no dependency between tiles.
 *
 * <----- y_meta_stride ----> (aligned to 64)
 * <-------- Width ------>
 * M M M M M M M M M M M M . .      ^           ^
 * M M M M M M M M M M M M . .      |           |
 * M M M M M M M M M M M M . .      Height      |
 * M M M M M M M M M M M M . .      |         y_meta_scanlines  (aligned to 16)
 * M M M M M M M M M M M M . .      |           |
 * M M M M M M M M M M M M . .      |           |
 * M M M M M M M M M M M M . .      |           |
 * M M M M M M M M M M M M . .      V           |
 * . . . . . . . . . . . . . .                  |
 * . . . . . . . . . . . . . .                  |
 * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
 * . . . . . . . . . . . . . .                  V
 * <--Compressed tile y_stride---> (aligned to 128)
 * <------- Width ------->
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  ^           ^
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  Height      |
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |        Macro_tile y_scanlines (aligned to 32)
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
 * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  V           |
 * . . . . . . . . . . . . . . . .              |
 * . . . . . . . . . . . . . . . .              |
 * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
 * . . . . . . . . . . . . . . . .              V
 * <----- uv_meta_stride ---->  (aligned to 64)
 * M M M M M M M M M M M M . .      ^
 * M M M M M M M M M M M M . .      |
 * M M M M M M M M M M M M . .      |
 * M M M M M M M M M M M M . .      uv_meta_scanlines (aligned to 16)
 * . . . . . . . . . . . . . .      |
 * . . . . . . . . . . . . . .      V
 * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
 * <--Compressed tile uv_stride---> (aligned to 128)
 * U* V* U* V* U* V* U* V* . . . .  ^
 * U* V* U* V* U* V* U* V* . . . .  |
 * U* V* U* V* U* V* U* V* . . . .  |
 * U* V* U* V* U* V* U* V* . . . .  uv_scanlines (aligned to 32)
 * . . . . . . . . . . . . . . . .  |
 * . . . . . . . . . . . . . . . .  V
 * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
 *
 * y_stride: width aligned to 128
 * uv_stride: width aligned to 128
 * y_scanlines: height aligned to 32
 * uv_scanlines: height aligned to 32
 * y_plane: buffer size aligned to 4096
 * uv_plane: buffer size aligned to 4096
 * y_meta_stride: width aligned to 64
 * y_meta_scanlines: height aligned to 16
 * y_meta_plane: buffer size aligned to 4096
 * uv_meta_stride: width aligned to 64
 * uv_meta_scanlines: height aligned to 16
 * uv_meta_plane: buffer size aligned to 4096
 *
 * Total size = align(y_plane + uv_plane +
 *              y_meta_plane + uv_meta_plane, 4096)
 *
 * Note: All the alignments are hardware requirements.
 */
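/*
 * For illustration only (a sample evaluation of the formulas above): a
 * 1920x1080 QC08C frame gives
 *   y_meta_stride  = 64, y_meta_plane  = align(64 * 144, 4096) = 12288
 *   uv_meta_stride = 64, uv_meta_plane = align(64 * 80, 4096)  = 8192
 *   y_plane  = align(1920 * 1088, 4096) = 2088960
 *   uv_plane = align(1920 * 544, 4096)  = 1044480
 *   total    = align(12288 + 2088960 + 8192 + 1044480, 4096) = 3153920 bytes
 */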
static u32 iris_yuv_buffer_size_qc08c(struct iris_inst *inst)
{
	u32 y_plane, uv_plane, y_stride, uv_stride;
	struct v4l2_format *f = inst->fmt_dst;
	u32 uv_meta_stride, uv_meta_plane;
	u32 y_meta_stride, y_meta_plane;

	y_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width, META_STRIDE_ALIGNED >> 1),
			      META_STRIDE_ALIGNED);
	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height,
							  META_SCANLINE_ALIGNED >> 1),
					     META_SCANLINE_ALIGNED);
	y_meta_plane = ALIGN(y_meta_plane, PIXELS_4K);

	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
	y_plane = ALIGN(y_stride * ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN), PIXELS_4K);

	uv_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width / 2, META_STRIDE_ALIGNED >> 2),
			       META_STRIDE_ALIGNED);
	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height / 2,
							    META_SCANLINE_ALIGNED >> 1),
					       META_SCANLINE_ALIGNED);
	uv_meta_plane = ALIGN(uv_meta_plane, PIXELS_4K);

	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
	uv_plane = ALIGN(uv_stride * ALIGN(f->fmt.pix_mp.height / 2, UV_SCANLINE_ALIGN_QC08C),
			 PIXELS_4K);

	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane, PIXELS_4K);
}

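/*
 * Size of the buffer holding the compressed bitstream fed to the decoder,
 * estimated as a fraction of the uncompressed YUV size of the base
 * resolution. For illustration only: for streams up to 4K,
 * base_res_mbs = 256 * 144 = 36864 macroblocks, so
 * frame_size = 36864 * 256 * 3 / 2 / 2 = 7077888 bytes (~6.75 MiB) with a
 * div_factor of 2, or twice that for VP9 (div_factor of 1).
 */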
static u32 iris_dec_bitstream_buffer_size(struct iris_inst *inst)
{
	struct platform_inst_caps *caps = inst->core->iris_platform_data->inst_caps;
	u32 base_res_mbs = NUM_MBS_4K;
	u32 frame_size, num_mbs;
	u32 div_factor = 2;

	num_mbs = iris_get_mbpf(inst);
	if (num_mbs > NUM_MBS_4K) {
		div_factor = 4;
		base_res_mbs = caps->max_mbpf;
	} else {
		if (inst->codec == V4L2_PIX_FMT_VP9)
			div_factor = 1;
	}

	/*
	 * frame_size = YUVsize / div_factor
	 * where YUVsize = resolution_in_MBs * pixels_per_MB * 3 / 2
	 */
	frame_size = base_res_mbs * (16 * 16) * 3 / 2 / div_factor;

	return ALIGN(frame_size, PIXELS_4K);
}

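/*
 * Size of the buffer that receives the encoder's compressed bitstream,
 * estimated from the 32-aligned capture resolution and then reduced for
 * larger frames. For illustration only: at 1920x1080 (aligned 1920x1088),
 * bitstream_size starts at 3 * 1920 * 1088 = 6266880, is reduced to 1566720
 * (half the YUV size) because the frame exceeds 1280x720, and is aligned up
 * to 1568768 bytes; with frame rate control disabled or in CQ mode it is
 * doubled to 3133440 bytes instead.
 */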
static u32 iris_enc_bitstream_buffer_size(struct iris_inst *inst)
{
	u32 aligned_width, aligned_height, bitstream_size, yuv_size;
	int bitrate_mode, frame_rc;
	struct v4l2_format *f;

	f = inst->fmt_dst;

	bitrate_mode = inst->fw_caps[BITRATE_MODE].value;
	frame_rc = inst->fw_caps[FRAME_RC_ENABLE].value;

	aligned_width = ALIGN(f->fmt.pix_mp.width, 32);
	aligned_height = ALIGN(f->fmt.pix_mp.height, 32);
	bitstream_size = aligned_width * aligned_height * 3;
	yuv_size = (aligned_width * aligned_height * 3) >> 1;
	if (aligned_width * aligned_height > (4096 * 2176))
		/* bitstream_size = 0.25 * yuv_size; */
		bitstream_size = (bitstream_size >> 3);
	else if (aligned_width * aligned_height > (1280 * 720))
		/* bitstream_size = 0.5 * yuv_size; */
		bitstream_size = (bitstream_size >> 2);

	if ((!frame_rc || bitrate_mode == V4L2_MPEG_VIDEO_BITRATE_MODE_CQ) &&
	    bitstream_size < yuv_size)
		bitstream_size = (bitstream_size << 1);

	return ALIGN(bitstream_size, 4096);
}

int iris_get_buffer_size(struct iris_inst *inst,
			 enum iris_buffer_type buffer_type)
{
	if (inst->domain == DECODER) {
		switch (buffer_type) {
		case BUF_INPUT:
			return iris_dec_bitstream_buffer_size(inst);
		case BUF_OUTPUT:
			return iris_yuv_buffer_size_nv12(inst);
		case BUF_DPB:
			return iris_yuv_buffer_size_qc08c(inst);
		default:
			return 0;
		}
	} else {
		switch (buffer_type) {
		case BUF_INPUT:
			return iris_yuv_buffer_size_nv12(inst);
		case BUF_OUTPUT:
			return iris_enc_bitstream_buffer_size(inst);
		default:
			return 0;
		}
	}
}

static void iris_fill_internal_buf_info(struct iris_inst *inst,
					enum iris_buffer_type buffer_type)
{
	struct iris_buffers *buffers = &inst->buffers[buffer_type];

	buffers->size = inst->core->iris_platform_data->get_vpu_buffer_size(inst, buffer_type);
	buffers->min_count = iris_vpu_buf_count(inst, buffer_type);
}

void iris_get_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	const u32 *internal_buf_type;
	u32 internal_buffer_count, i;

	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
			for (i = 0; i < internal_buffer_count; i++)
				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
			for (i = 0; i < internal_buffer_count; i++)
				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
			for (i = 0; i < internal_buffer_count; i++)
				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
			for (i = 0; i < internal_buffer_count; i++)
				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
		}
	}
}

static int iris_create_internal_buffer(struct iris_inst *inst,
				       enum iris_buffer_type buffer_type, u32 index)
{
	struct iris_buffers *buffers = &inst->buffers[buffer_type];
	struct iris_core *core = inst->core;
	struct iris_buffer *buffer;

	if (!buffers->size)
		return 0;

	buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	INIT_LIST_HEAD(&buffer->list);
	buffer->type = buffer_type;
	buffer->index = index;
	buffer->buffer_size = buffers->size;
	buffer->dma_attrs = DMA_ATTR_WRITE_COMBINE | DMA_ATTR_NO_KERNEL_MAPPING;
	list_add_tail(&buffer->list, &buffers->list);

	buffer->kvaddr = dma_alloc_attrs(core->dev, buffer->buffer_size,
					 &buffer->device_addr, GFP_KERNEL, buffer->dma_attrs);
	if (!buffer->kvaddr)
		return -ENOMEM;

	return 0;
}

int iris_create_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	u32 internal_buffer_count, i, j;
	struct iris_buffers *buffers;
	const u32 *internal_buf_type;
	int ret;

	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
		}
	}

	for (i = 0; i < internal_buffer_count; i++) {
		buffers = &inst->buffers[internal_buf_type[i]];
		for (j = 0; j < buffers->min_count; j++) {
			ret = iris_create_internal_buffer(inst, internal_buf_type[i], j);
			if (ret)
				return ret;
		}
	}

	return 0;
}

int iris_queue_buffer(struct iris_inst *inst, struct iris_buffer *buf)
{
	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
	int ret;

	ret = hfi_ops->session_queue_buf(inst, buf);
	if (ret)
		return ret;

	buf->attr &= ~BUF_ATTR_DEFERRED;
	buf->attr |= BUF_ATTR_QUEUED;

	return 0;
}

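/*
 * Queue to firmware the internal buffers of the given type that were
 * previously marked BUF_ATTR_DEFERRED (e.g. DPB buffers held back until the
 * instance reaches the streaming state), skipping buffers that are already
 * queued or pending release.
 */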
int iris_queue_internal_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buffer_type)
{
	struct iris_buffer *buffer, *next;
	struct iris_buffers *buffers;
	int ret = 0;

	buffers = &inst->buffers[buffer_type];
	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
			continue;
		if (buffer->attr & BUF_ATTR_QUEUED)
			continue;

		if (buffer->attr & BUF_ATTR_DEFERRED) {
			ret = iris_queue_buffer(inst, buffer);
			if (ret)
				return ret;
		}
	}

	return ret;
}

int iris_queue_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	struct iris_buffer *buffer, *next;
	struct iris_buffers *buffers;
	const u32 *internal_buf_type;
	u32 internal_buffer_count, i;
	int ret;

	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
		}
	}

	for (i = 0; i < internal_buffer_count; i++) {
		buffers = &inst->buffers[internal_buf_type[i]];
		list_for_each_entry_safe(buffer, next, &buffers->list, list) {
			if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
				continue;
			if (buffer->attr & BUF_ATTR_QUEUED)
				continue;
			if (buffer->type == BUF_DPB && inst->state != IRIS_INST_STREAMING) {
				buffer->attr |= BUF_ATTR_DEFERRED;
				continue;
			}
			ret = iris_queue_buffer(inst, buffer);
			if (ret)
				return ret;
		}
	}

	return 0;
}

int iris_destroy_internal_buffer(struct iris_inst *inst, struct iris_buffer *buffer)
{
	struct iris_core *core = inst->core;

	list_del(&buffer->list);
	dma_free_attrs(core->dev, buffer->buffer_size, buffer->kvaddr,
		       buffer->device_addr, buffer->dma_attrs);
	kfree(buffer);

	return 0;
}

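/*
 * Free the internal buffers associated with the given plane. With force set,
 * every buffer is freed, including the BUF_PERSIST (decoder) or BUF_ARP
 * (encoder) buffers; otherwise only buffers that are no longer queued with
 * the firmware are freed.
 */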
static int iris_destroy_internal_buffers(struct iris_inst *inst, u32 plane, bool force)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	struct iris_buffer *buf, *next;
	struct iris_buffers *buffers;
	const u32 *internal_buf_type;
	u32 i, len;
	int ret;

	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			len = platform_data->dec_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			len = platform_data->dec_op_int_buf_tbl_size;
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			len = platform_data->enc_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			len = platform_data->enc_op_int_buf_tbl_size;
		}
	}

	for (i = 0; i < len; i++) {
		buffers = &inst->buffers[internal_buf_type[i]];
		list_for_each_entry_safe(buf, next, &buffers->list, list) {
			/*
			 * During stream-on, skip destroying internal (DPB)
			 * buffers that the firmware has not yet returned.
			 * During close, destroy all buffers unconditionally.
			 */
			if (!force && buf->attr & BUF_ATTR_QUEUED)
				continue;

			ret = iris_destroy_internal_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	if (force) {
		if (inst->domain == DECODER)
			buffers = &inst->buffers[BUF_PERSIST];
		else
			buffers = &inst->buffers[BUF_ARP];

		list_for_each_entry_safe(buf, next, &buffers->list, list) {
			ret = iris_destroy_internal_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	return 0;
}

int iris_destroy_all_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, true);
}

int iris_destroy_dequeued_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, false);
}

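/*
 * Request that firmware release every internal buffer of the given type that
 * is currently queued, marking each one BUF_ATTR_PENDING_RELEASE so it is
 * skipped by subsequent queue and release operations.
 */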
static int iris_release_internal_buffers(struct iris_inst *inst,
					 enum iris_buffer_type buffer_type)
{
	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
	struct iris_buffers *buffers = &inst->buffers[buffer_type];
	struct iris_buffer *buffer, *next;
	int ret;

	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
			continue;
		if (!(buffer->attr & BUF_ATTR_QUEUED))
			continue;
		ret = hfi_ops->session_release_buf(inst, buffer);
		if (ret)
			return ret;
		buffer->attr |= BUF_ATTR_PENDING_RELEASE;
	}

	return 0;
}

static int iris_release_input_internal_buffers(struct iris_inst *inst)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	const u32 *internal_buf_type;
	u32 internal_buffer_count, i;
	int ret;

	if (inst->domain == DECODER) {
		internal_buf_type = platform_data->dec_ip_int_buf_tbl;
		internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
	} else {
		internal_buf_type = platform_data->enc_ip_int_buf_tbl;
		internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
	}

	for (i = 0; i < internal_buffer_count; i++) {
		ret = iris_release_internal_buffers(inst, internal_buf_type[i]);
		if (ret)
			return ret;
	}

	return 0;
}

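/*
 * Allocate and queue the buffers of the given type (e.g. BUF_PERSIST for the
 * decoder or BUF_ARP for the encoder) once per session: if the list already
 * holds buffers nothing is done; otherwise the buffer info is filled,
 * min_count buffers are allocated and all of them are queued to firmware.
 */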
int iris_alloc_and_queue_persist_bufs(struct iris_inst *inst, enum iris_buffer_type buffer_type)
{
	struct iris_buffers *buffers = &inst->buffers[buffer_type];
	struct iris_buffer *buffer, *next;
	int ret;
	u32 i;

	if (!list_empty(&buffers->list))
		return 0;

	iris_fill_internal_buf_info(inst, buffer_type);

	for (i = 0; i < buffers->min_count; i++) {
		ret = iris_create_internal_buffer(inst, buffer_type, i);
		if (ret)
			return ret;
	}

	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
			continue;
		if (buffer->attr & BUF_ATTR_QUEUED)
			continue;
		ret = iris_queue_buffer(inst, buffer);
		if (ret)
			return ret;
	}

	return 0;
}

int iris_alloc_and_queue_input_int_bufs(struct iris_inst *inst)
{
	int ret;

	iris_get_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);

	ret = iris_release_input_internal_buffers(inst);
	if (ret)
		return ret;

	ret = iris_create_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	if (ret)
		return ret;

	return iris_queue_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
}

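/*
 * Queue to firmware the client (vb2) buffers on the source or destination
 * queue that are still marked BUF_ATTR_DEFERRED, after refreshing power
 * scaling via iris_scale_power().
 */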
int iris_queue_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buf_type)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct v4l2_m2m_buffer *buffer, *n;
	struct iris_buffer *buf;
	int ret;

	iris_scale_power(inst);

	if (buf_type == BUF_INPUT) {
		v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buffer, n) {
			buf = to_iris_buffer(&buffer->vb);
			if (!(buf->attr & BUF_ATTR_DEFERRED))
				continue;
			ret = iris_queue_buffer(inst, buf);
			if (ret)
				return ret;
		}
	} else {
		v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buffer, n) {
			buf = to_iris_buffer(&buffer->vb);
			if (!(buf->attr & BUF_ATTR_DEFERRED))
				continue;
			ret = iris_queue_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	return 0;
}

void iris_vb2_queue_error(struct iris_inst *inst)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct vb2_queue *q;

	q = v4l2_m2m_get_src_vq(m2m_ctx);
	vb2_queue_error(q);
	q = v4l2_m2m_get_dst_vq(m2m_ctx);
	vb2_queue_error(q);
}

static struct vb2_v4l2_buffer *
iris_helper_find_buf(struct iris_inst *inst, u32 type, u32 idx)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;

	if (V4L2_TYPE_IS_OUTPUT(type))
		return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
	else
		return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
}

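/*
 * Restore the timecode and timestamp-related flags saved for timestamp_ns
 * into vbuf; if no stored entry matches, fall back to the entry at
 * inst->metadata_idx.
 */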
static void iris_get_ts_metadata(struct iris_inst *inst, u64 timestamp_ns,
				 struct vb2_v4l2_buffer *vbuf)
{
	u32 mask = V4L2_BUF_FLAG_TIMECODE | V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	u32 i;

	for (i = 0; i < ARRAY_SIZE(inst->tss); ++i) {
		if (inst->tss[i].ts_ns != timestamp_ns)
			continue;

		vbuf->flags &= ~mask;
		vbuf->flags |= inst->tss[i].flags;
		vbuf->timecode = inst->tss[i].tc;
		return;
	}

	vbuf->flags &= ~mask;
	vbuf->flags |= inst->tss[inst->metadata_idx].flags;
	vbuf->timecode = inst->tss[inst->metadata_idx].tc;
}

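/*
 * Return a buffer completed by firmware to the v4l2-mem2mem framework.
 * Internal buffers (e.g. DPB) are ignored, buffers flagged with
 * V4L2_BUF_FLAG_ERROR are returned with a zero payload, capture buffers get
 * their payload, sequence number and timestamp metadata filled in, and a
 * V4L2_EVENT_EOS event is raised when the buffer flagged V4L2_BUF_FLAG_LAST
 * is dequeued.
 */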
int iris_vb2_buffer_done(struct iris_inst *inst, struct iris_buffer *buf)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct vb2_v4l2_buffer *vbuf;
	struct vb2_buffer *vb2;
	u32 type, state;

	switch (buf->type) {
	case BUF_INPUT:
		type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		break;
	case BUF_OUTPUT:
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		break;
	default:
		return 0; /* Internal DPB Buffers */
	}

	vbuf = iris_helper_find_buf(inst, type, buf->index);
	if (!vbuf)
		return -EINVAL;

	vb2 = &vbuf->vb2_buf;

	vbuf->flags |= buf->flags;

	if (buf->flags & V4L2_BUF_FLAG_ERROR) {
		state = VB2_BUF_STATE_ERROR;
		vb2_set_plane_payload(vb2, 0, 0);
		vb2->timestamp = 0;
		v4l2_m2m_buf_done(vbuf, state);
		return 0;
	}

	if (V4L2_TYPE_IS_CAPTURE(type)) {
		vb2_set_plane_payload(vb2, 0, buf->data_size);
		vbuf->sequence = inst->sequence_cap++;
		iris_get_ts_metadata(inst, buf->timestamp, vbuf);
	} else {
		vbuf->sequence = inst->sequence_out++;
	}

	if (vbuf->flags & V4L2_BUF_FLAG_LAST) {
		if (!v4l2_m2m_has_stopped(m2m_ctx)) {
			const struct v4l2_event ev = { .type = V4L2_EVENT_EOS };

			v4l2_event_queue_fh(&inst->fh, &ev);
			v4l2_m2m_mark_stopped(m2m_ctx);
		}
		inst->last_buffer_dequeued = true;
	}

	state = VB2_BUF_STATE_DONE;
	vb2->timestamp = buf->timestamp;
	v4l2_m2m_buf_done(vbuf, state);

	return 0;
}
785