xref: /linux/drivers/media/platform/qcom/iris/iris_buffer.c (revision cfd71b14b0d6f62d97338a524c858c63cf699c3f)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
4  */
5 
6 #include <media/v4l2-event.h>
7 #include <media/v4l2-mem2mem.h>
8 
9 #include "iris_buffer.h"
10 #include "iris_instance.h"
11 #include "iris_power.h"
12 #include "iris_vpu_buffer.h"
13 
14 #define PIXELS_4K 4096
15 #define MAX_WIDTH 4096
16 #define MAX_HEIGHT 2304
17 #define Y_STRIDE_ALIGN 128
18 #define UV_STRIDE_ALIGN 128
19 #define Y_SCANLINE_ALIGN 32
20 #define UV_SCANLINE_ALIGN 16
21 #define UV_SCANLINE_ALIGN_QC08C 32
22 #define META_STRIDE_ALIGNED 64
23 #define META_SCANLINE_ALIGNED 16
24 #define NUM_MBS_4K (DIV_ROUND_UP(MAX_WIDTH, 16) * DIV_ROUND_UP(MAX_HEIGHT, 16))
25 
26 /*
27  * NV12:
28  * YUV 4:2:0 image with a plane of 8 bit Y samples followed
29  * by an interleaved U/V plane containing 8 bit 2x2 subsampled
30  * colour difference samples.
31  *
32  * <-Y/UV_Stride (aligned to 128)->
33  * <------- Width ------->
34  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  ^           ^
35  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
36  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  Height      |
37  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |          y_scanlines (aligned to 32)
38  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
39  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
40  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
41  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  V           |
42  * . . . . . . . . . . . . . . . .              |
43  * . . . . . . . . . . . . . . . .              |
44  * . . . . . . . . . . . . . . . .              |
45  * . . . . . . . . . . . . . . . .              V
46  * U V U V U V U V U V U V . . . .  ^
47  * U V U V U V U V U V U V . . . .  |
48  * U V U V U V U V U V U V . . . .  |
49  * U V U V U V U V U V U V . . . .  uv_scanlines (aligned to 16)
50  * . . . . . . . . . . . . . . . .  |
51  * . . . . . . . . . . . . . . . .  V
52  * . . . . . . . . . . . . . . . .  --> Buffer size aligned to 4K
53  *
54  * y_stride : Width aligned to 128
55  * uv_stride : Width aligned to 128
56  * y_scanlines: Height aligned to 32
57  * uv_scanlines: Height/2 aligned to 16
 * Total size = align(y_stride * y_scanlines
 *          + uv_stride * uv_scanlines, 4096)
60  *
61  * Note: All the alignments are hardware requirements.
62  */
63 static u32 iris_yuv_buffer_size_nv12(struct iris_inst *inst)
64 {
65 	u32 y_plane, uv_plane, y_stride, uv_stride, y_scanlines, uv_scanlines;
66 	struct v4l2_format *f;
67 
68 	if (inst->domain == DECODER)
69 		f = inst->fmt_dst;
70 	else
71 		f = inst->fmt_src;
72 
73 	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
74 	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
75 	y_scanlines = ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN);
76 	uv_scanlines = ALIGN((f->fmt.pix_mp.height + 1) >> 1, UV_SCANLINE_ALIGN);
77 	y_plane = y_stride * y_scanlines;
78 	uv_plane = uv_stride * uv_scanlines;
79 
80 	return ALIGN(y_plane + uv_plane, PIXELS_4K);
81 }
82 
83 /*
84  * QC08C:
85  * Compressed Macro-tile format for NV12.
86  * Contains 4 planes in the following order -
87  * (A) Y_Meta_Plane
88  * (B) Y_UBWC_Plane
89  * (C) UV_Meta_Plane
90  * (D) UV_UBWC_Plane
91  *
92  * Y_Meta_Plane consists of meta information to decode compressed
93  * tile data in Y_UBWC_Plane.
94  * Y_UBWC_Plane consists of Y data in compressed macro-tile format.
95  * UBWC decoder block will use the Y_Meta_Plane data together with
96  * Y_UBWC_Plane data to produce loss-less uncompressed 8 bit Y samples.
97  *
98  * UV_Meta_Plane consists of meta information to decode compressed
99  * tile data in UV_UBWC_Plane.
100  * UV_UBWC_Plane consists of UV data in compressed macro-tile format.
101  * UBWC decoder block will use UV_Meta_Plane data together with
102  * UV_UBWC_Plane data to produce loss-less uncompressed 8 bit 2x2
103  * subsampled color difference samples.
104  *
105  * Each tile in Y_UBWC_Plane/UV_UBWC_Plane is independently decodable
106  * and randomly accessible. There is no dependency between tiles.
107  *
108  * <----- y_meta_stride ----> (aligned to 64)
109  * <-------- Width ------>
110  * M M M M M M M M M M M M . .      ^           ^
111  * M M M M M M M M M M M M . .      |           |
112  * M M M M M M M M M M M M . .      Height      |
113  * M M M M M M M M M M M M . .      |         y_meta_scanlines  (aligned to 16)
114  * M M M M M M M M M M M M . .      |           |
115  * M M M M M M M M M M M M . .      |           |
116  * M M M M M M M M M M M M . .      |           |
117  * M M M M M M M M M M M M . .      V           |
118  * . . . . . . . . . . . . . .                  |
119  * . . . . . . . . . . . . . .                  |
120  * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
121  * . . . . . . . . . . . . . .                  V
122  * <--Compressed tile y_stride---> (aligned to 128)
123  * <------- Width ------->
124  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  ^           ^
125  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
126  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  Height      |
127  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |        Macro_tile y_scanlines (aligned to 32)
128  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
129  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
130  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
131  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  V           |
132  * . . . . . . . . . . . . . . . .              |
133  * . . . . . . . . . . . . . . . .              |
134  * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
135  * . . . . . . . . . . . . . . . .              V
136  * <----- uv_meta_stride ---->  (aligned to 64)
137  * M M M M M M M M M M M M . .      ^
138  * M M M M M M M M M M M M . .      |
139  * M M M M M M M M M M M M . .      |
140  * M M M M M M M M M M M M . .      uv_meta_scanlines (aligned to 16)
141  * . . . . . . . . . . . . . .      |
142  * . . . . . . . . . . . . . .      V
143  * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
144  * <--Compressed tile uv_stride---> (aligned to 128)
145  * U* V* U* V* U* V* U* V* . . . .  ^
146  * U* V* U* V* U* V* U* V* . . . .  |
147  * U* V* U* V* U* V* U* V* . . . .  |
148  * U* V* U* V* U* V* U* V* . . . .  uv_scanlines (aligned to 32)
149  * . . . . . . . . . . . . . . . .  |
150  * . . . . . . . . . . . . . . . .  V
151  * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
152  *
153  * y_stride: width aligned to 128
154  * uv_stride: width aligned to 128
155  * y_scanlines: height aligned to 32
156  * uv_scanlines: height aligned to 32
157  * y_plane: buffer size aligned to 4096
158  * uv_plane: buffer size aligned to 4096
159  * y_meta_stride: width aligned to 64
160  * y_meta_scanlines: height aligned to 16
161  * y_meta_plane: buffer size aligned to 4096
162  * uv_meta_stride: width aligned to 64
163  * uv_meta_scanlines: height aligned to 16
164  * uv_meta_plane: buffer size aligned to 4096
165  *
166  * Total size = align( y_plane + uv_plane +
167  *           y_meta_plane + uv_meta_plane, 4096)
168  *
169  * Note: All the alignments are hardware requirements.
170  */
171 static u32 iris_yuv_buffer_size_qc08c(struct iris_inst *inst)
172 {
173 	u32 y_plane, uv_plane, y_stride, uv_stride;
174 	struct v4l2_format *f = inst->fmt_dst;
175 	u32 uv_meta_stride, uv_meta_plane;
176 	u32 y_meta_stride, y_meta_plane;
177 
178 	y_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width, META_STRIDE_ALIGNED >> 1),
179 			      META_STRIDE_ALIGNED);
180 	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height,
181 							  META_SCANLINE_ALIGNED >> 1),
182 					     META_SCANLINE_ALIGNED);
183 	y_meta_plane = ALIGN(y_meta_plane, PIXELS_4K);
184 
185 	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
186 	y_plane = ALIGN(y_stride * ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN), PIXELS_4K);
187 
188 	uv_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width / 2, META_STRIDE_ALIGNED >> 2),
189 			       META_STRIDE_ALIGNED);
190 	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height / 2,
191 							    META_SCANLINE_ALIGNED >> 1),
192 					       META_SCANLINE_ALIGNED);
193 	uv_meta_plane = ALIGN(uv_meta_plane, PIXELS_4K);
194 
195 	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
196 	uv_plane = ALIGN(uv_stride * ALIGN(f->fmt.pix_mp.height / 2, UV_SCANLINE_ALIGN_QC08C),
197 			 PIXELS_4K);
198 
199 	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane, PIXELS_4K);
200 }
201 
202 static u32 iris_dec_bitstream_buffer_size(struct iris_inst *inst)
203 {
204 	struct platform_inst_caps *caps = inst->core->iris_platform_data->inst_caps;
205 	u32 base_res_mbs = NUM_MBS_4K;
206 	u32 frame_size, num_mbs;
207 	u32 div_factor = 2;
208 
209 	num_mbs = iris_get_mbpf(inst);
210 	if (num_mbs > NUM_MBS_4K) {
211 		div_factor = 4;
212 		base_res_mbs = caps->max_mbpf;
213 	} else {
214 		if (inst->codec == V4L2_PIX_FMT_VP9)
215 			div_factor = 1;
216 	}
217 
218 	/*
219 	 * frame_size = YUVsize / div_factor
220 	 * where YUVsize = resolution_in_MBs * MBs_in_pixel * 3 / 2
221 	 */
222 	frame_size = base_res_mbs * (16 * 16) * 3 / 2 / div_factor;
223 
224 	return ALIGN(frame_size, PIXELS_4K);
225 }
226 
227 static u32 iris_enc_bitstream_buffer_size(struct iris_inst *inst)
228 {
229 	u32 aligned_width, aligned_height, bitstream_size, yuv_size;
230 	int bitrate_mode, frame_rc;
231 	struct v4l2_format *f;
232 
233 	f = inst->fmt_dst;
234 
235 	bitrate_mode = inst->fw_caps[BITRATE_MODE].value;
236 	frame_rc = inst->fw_caps[FRAME_RC_ENABLE].value;
237 
238 	aligned_width = ALIGN(f->fmt.pix_mp.width, 32);
239 	aligned_height = ALIGN(f->fmt.pix_mp.height, 32);
240 	bitstream_size = aligned_width * aligned_height * 3;
241 	yuv_size = (aligned_width * aligned_height * 3) >> 1;
242 	if (aligned_width * aligned_height > (4096 * 2176))
243 		/* bitstream_size = 0.25 * yuv_size; */
244 		bitstream_size = (bitstream_size >> 3);
245 	else if (aligned_width * aligned_height > (1280 * 720))
246 		/* bitstream_size = 0.5 * yuv_size; */
247 		bitstream_size = (bitstream_size >> 2);
248 
249 	if ((!frame_rc || bitrate_mode == V4L2_MPEG_VIDEO_BITRATE_MODE_CQ) &&
250 	    bitstream_size < yuv_size)
251 		bitstream_size = (bitstream_size << 1);
252 
253 	return ALIGN(bitstream_size, 4096);
254 }
255 
256 int iris_get_buffer_size(struct iris_inst *inst,
257 			 enum iris_buffer_type buffer_type)
258 {
259 	if (inst->domain == DECODER) {
260 		switch (buffer_type) {
261 		case BUF_INPUT:
262 			return iris_dec_bitstream_buffer_size(inst);
263 		case BUF_OUTPUT:
264 			if (inst->fmt_dst->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_QC08C)
265 				return iris_yuv_buffer_size_qc08c(inst);
266 			else
267 				return iris_yuv_buffer_size_nv12(inst);
268 		case BUF_DPB:
269 			return iris_yuv_buffer_size_qc08c(inst);
270 		default:
271 			return 0;
272 		}
273 	} else {
274 		switch (buffer_type) {
275 		case BUF_INPUT:
276 			return iris_yuv_buffer_size_nv12(inst);
277 		case BUF_OUTPUT:
278 			return iris_enc_bitstream_buffer_size(inst);
279 		default:
280 			return 0;
281 		}
282 	}
283 }
284 
285 static void iris_fill_internal_buf_info(struct iris_inst *inst,
286 					enum iris_buffer_type buffer_type)
287 {
288 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
289 
290 	buffers->size = inst->core->iris_platform_data->get_vpu_buffer_size(inst, buffer_type);
291 	buffers->min_count = iris_vpu_buf_count(inst, buffer_type);
292 }
293 
294 void iris_get_internal_buffers(struct iris_inst *inst, u32 plane)
295 {
296 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
297 	const u32 *internal_buf_type;
298 	u32 internal_buffer_count, i;
299 
300 	if (inst->domain == DECODER) {
301 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
302 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
303 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
304 			for (i = 0; i < internal_buffer_count; i++)
305 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
306 		} else {
307 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
308 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
309 			for (i = 0; i < internal_buffer_count; i++)
310 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
311 		}
312 	} else {
313 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
314 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
315 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
316 			for (i = 0; i < internal_buffer_count; i++)
317 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
318 		} else {
319 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
320 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
321 			for (i = 0; i < internal_buffer_count; i++)
322 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
323 		}
324 	}
325 }
326 
327 static int iris_create_internal_buffer(struct iris_inst *inst,
328 				       enum iris_buffer_type buffer_type, u32 index)
329 {
330 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
331 	struct iris_core *core = inst->core;
332 	struct iris_buffer *buffer;
333 
334 	if (!buffers->size)
335 		return 0;
336 
337 	buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
338 	if (!buffer)
339 		return -ENOMEM;
340 
341 	INIT_LIST_HEAD(&buffer->list);
342 	buffer->type = buffer_type;
343 	buffer->index = index;
344 	buffer->buffer_size = buffers->size;
345 	buffer->dma_attrs = DMA_ATTR_WRITE_COMBINE | DMA_ATTR_NO_KERNEL_MAPPING;
346 	list_add_tail(&buffer->list, &buffers->list);
347 
348 	buffer->kvaddr = dma_alloc_attrs(core->dev, buffer->buffer_size,
349 					 &buffer->device_addr, GFP_KERNEL, buffer->dma_attrs);
350 	if (!buffer->kvaddr)
351 		return -ENOMEM;
352 
353 	return 0;
354 }
355 
356 int iris_create_internal_buffers(struct iris_inst *inst, u32 plane)
357 {
358 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
359 	u32 internal_buffer_count, i, j;
360 	struct iris_buffers *buffers;
361 	const u32 *internal_buf_type;
362 	int ret;
363 
364 	if (inst->domain == DECODER) {
365 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
366 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
367 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
368 		} else {
369 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
370 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
371 		}
372 	} else {
373 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
374 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
375 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
376 		} else {
377 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
378 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
379 		}
380 	}
381 
382 	for (i = 0; i < internal_buffer_count; i++) {
383 		buffers = &inst->buffers[internal_buf_type[i]];
384 		for (j = 0; j < buffers->min_count; j++) {
385 			ret = iris_create_internal_buffer(inst, internal_buf_type[i], j);
386 			if (ret)
387 				return ret;
388 		}
389 	}
390 
391 	return 0;
392 }
393 
394 int iris_queue_buffer(struct iris_inst *inst, struct iris_buffer *buf)
395 {
396 	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
397 	int ret;
398 
399 	ret = hfi_ops->session_queue_buf(inst, buf);
400 	if (ret)
401 		return ret;
402 
403 	buf->attr &= ~BUF_ATTR_DEFERRED;
404 	buf->attr |= BUF_ATTR_QUEUED;
405 
406 	return 0;
407 }
408 
409 int iris_queue_internal_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buffer_type)
410 {
411 	struct iris_buffer *buffer, *next;
412 	struct iris_buffers *buffers;
413 	int ret = 0;
414 
415 	buffers = &inst->buffers[buffer_type];
416 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
417 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
418 			continue;
419 		if (buffer->attr & BUF_ATTR_QUEUED)
420 			continue;
421 
422 		if (buffer->attr & BUF_ATTR_DEFERRED) {
423 			ret = iris_queue_buffer(inst, buffer);
424 			if (ret)
425 				return ret;
426 		}
427 	}
428 
429 	return ret;
430 }
431 
432 int iris_queue_internal_buffers(struct iris_inst *inst, u32 plane)
433 {
434 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
435 	struct iris_buffer *buffer, *next;
436 	struct iris_buffers *buffers;
437 	const u32 *internal_buf_type;
438 	u32 internal_buffer_count, i;
439 	int ret;
440 
441 	if (inst->domain == DECODER) {
442 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
443 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
444 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
445 		} else {
446 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
447 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
448 		}
449 	} else {
450 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
451 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
452 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
453 		} else {
454 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
455 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
456 		}
457 	}
458 
459 	for (i = 0; i < internal_buffer_count; i++) {
460 		buffers = &inst->buffers[internal_buf_type[i]];
461 		list_for_each_entry_safe(buffer, next, &buffers->list, list) {
462 			if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
463 				continue;
464 			if (buffer->attr & BUF_ATTR_QUEUED)
465 				continue;
466 			if (buffer->type == BUF_DPB && inst->state != IRIS_INST_STREAMING) {
467 				buffer->attr |= BUF_ATTR_DEFERRED;
468 				continue;
469 			}
470 			ret = iris_queue_buffer(inst, buffer);
471 			if (ret)
472 				return ret;
473 		}
474 	}
475 
476 	return 0;
477 }
478 
479 int iris_destroy_internal_buffer(struct iris_inst *inst, struct iris_buffer *buffer)
480 {
481 	struct iris_core *core = inst->core;
482 
483 	list_del(&buffer->list);
484 	dma_free_attrs(core->dev, buffer->buffer_size, buffer->kvaddr,
485 		       buffer->device_addr, buffer->dma_attrs);
486 	kfree(buffer);
487 
488 	return 0;
489 }
490 
/*
 * iris_destroy_internal_buffers() - free internal buffers for @plane.
 * @force: when true, destroy even buffers still queued with firmware and
 *         additionally tear down the domain's PERSIST/ARP buffers (close
 *         path); when false, only buffers firmware has already returned.
 *
 * Return: 0 on success (iris_destroy_internal_buffer() currently cannot fail).
 */
static int iris_destroy_internal_buffers(struct iris_inst *inst, u32 plane, bool force)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	struct iris_buffer *buf, *next;
	struct iris_buffers *buffers;
	const u32 *internal_buf_type;
	u32 i, len;
	int ret;

	/* Select the buffer-type table for this domain and queue direction. */
	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			len = platform_data->dec_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			len = platform_data->dec_op_int_buf_tbl_size;
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			len = platform_data->enc_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			len = platform_data->enc_op_int_buf_tbl_size;
		}
	}

	for (i = 0; i < len; i++) {
		buffers = &inst->buffers[internal_buf_type[i]];
		list_for_each_entry_safe(buf, next, &buffers->list, list) {
			/*
			 * during stream on, skip destroying internal(DPB) buffer
			 * if firmware did not return it.
			 * during close, destroy all buffers irrespectively.
			 */
			if (!force && buf->attr & BUF_ATTR_QUEUED)
				continue;

			ret = iris_destroy_internal_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	/* Close path: also drop the session-lifetime buffers. */
	if (force) {
		if (inst->domain == DECODER)
			buffers = &inst->buffers[BUF_PERSIST];
		else
			buffers = &inst->buffers[BUF_ARP];

		list_for_each_entry_safe(buf, next, &buffers->list, list) {
			ret = iris_destroy_internal_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	return 0;
}
550 
/*
 * iris_destroy_all_internal_buffers() - destroy every internal buffer for
 * @plane, including those still queued with firmware (close path).
 */
int iris_destroy_all_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, true);
}
555 
/*
 * iris_destroy_dequeued_internal_buffers() - destroy only the internal
 * buffers for @plane that firmware has already returned (QUEUED buffers
 * are skipped).
 */
int iris_destroy_dequeued_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, false);
}
560 
561 static int iris_release_internal_buffers(struct iris_inst *inst,
562 					 enum iris_buffer_type buffer_type)
563 {
564 	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
565 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
566 	struct iris_buffer *buffer, *next;
567 	int ret;
568 
569 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
570 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
571 			continue;
572 		if (!(buffer->attr & BUF_ATTR_QUEUED))
573 			continue;
574 		ret = hfi_ops->session_release_buf(inst, buffer);
575 		if (ret)
576 			return ret;
577 		buffer->attr |= BUF_ATTR_PENDING_RELEASE;
578 	}
579 
580 	return 0;
581 }
582 
583 static int iris_release_input_internal_buffers(struct iris_inst *inst)
584 {
585 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
586 	const u32 *internal_buf_type;
587 	u32 internal_buffer_count, i;
588 	int ret;
589 
590 	if (inst->domain == DECODER) {
591 		internal_buf_type = platform_data->dec_ip_int_buf_tbl;
592 		internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
593 	} else {
594 		internal_buf_type = platform_data->enc_ip_int_buf_tbl;
595 		internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
596 	}
597 
598 	for (i = 0; i < internal_buffer_count; i++) {
599 		ret = iris_release_internal_buffers(inst, internal_buf_type[i]);
600 		if (ret)
601 			return ret;
602 	}
603 
604 	return 0;
605 }
606 
607 int iris_alloc_and_queue_persist_bufs(struct iris_inst *inst, enum iris_buffer_type buffer_type)
608 {
609 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
610 	struct iris_buffer *buffer, *next;
611 	int ret;
612 	u32 i;
613 
614 	if (!list_empty(&buffers->list))
615 		return 0;
616 
617 	iris_fill_internal_buf_info(inst, buffer_type);
618 
619 	for (i = 0; i < buffers->min_count; i++) {
620 		ret = iris_create_internal_buffer(inst, buffer_type, i);
621 		if (ret)
622 			return ret;
623 	}
624 
625 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
626 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
627 			continue;
628 		if (buffer->attr & BUF_ATTR_QUEUED)
629 			continue;
630 		ret = iris_queue_buffer(inst, buffer);
631 		if (ret)
632 			return ret;
633 	}
634 
635 	return 0;
636 }
637 
638 int iris_alloc_and_queue_input_int_bufs(struct iris_inst *inst)
639 {
640 	int ret;
641 
642 	iris_get_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
643 
644 	ret = iris_release_input_internal_buffers(inst);
645 	if (ret)
646 		return ret;
647 
648 	ret = iris_create_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
649 	if (ret)
650 		return ret;
651 
652 	return iris_queue_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
653 }
654 
655 int iris_queue_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buf_type)
656 {
657 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
658 	struct v4l2_m2m_buffer *buffer, *n;
659 	struct iris_buffer *buf;
660 	int ret;
661 
662 	iris_scale_power(inst);
663 
664 	if (buf_type == BUF_INPUT) {
665 		v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buffer, n) {
666 			buf = to_iris_buffer(&buffer->vb);
667 			if (!(buf->attr & BUF_ATTR_DEFERRED))
668 				continue;
669 			ret = iris_queue_buffer(inst, buf);
670 			if (ret)
671 				return ret;
672 		}
673 	} else {
674 		v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buffer, n) {
675 			buf = to_iris_buffer(&buffer->vb);
676 			if (!(buf->attr & BUF_ATTR_DEFERRED))
677 				continue;
678 			ret = iris_queue_buffer(inst, buf);
679 			if (ret)
680 				return ret;
681 		}
682 	}
683 
684 	return 0;
685 }
686 
687 void iris_vb2_queue_error(struct iris_inst *inst)
688 {
689 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
690 	struct vb2_queue *q;
691 
692 	q = v4l2_m2m_get_src_vq(m2m_ctx);
693 	vb2_queue_error(q);
694 	q = v4l2_m2m_get_dst_vq(m2m_ctx);
695 	vb2_queue_error(q);
696 }
697 
698 static struct vb2_v4l2_buffer *
699 iris_helper_find_buf(struct iris_inst *inst, u32 type, u32 idx)
700 {
701 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
702 
703 	if (V4L2_TYPE_IS_OUTPUT(type))
704 		return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
705 	else
706 		return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
707 }
708 
709 static void iris_get_ts_metadata(struct iris_inst *inst, u64 timestamp_ns,
710 				 struct vb2_v4l2_buffer *vbuf)
711 {
712 	u32 mask = V4L2_BUF_FLAG_TIMECODE | V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
713 	u32 i;
714 
715 	for (i = 0; i < ARRAY_SIZE(inst->tss); ++i) {
716 		if (inst->tss[i].ts_ns != timestamp_ns)
717 			continue;
718 
719 		vbuf->flags &= ~mask;
720 		vbuf->flags |= inst->tss[i].flags;
721 		vbuf->timecode = inst->tss[i].tc;
722 		return;
723 	}
724 
725 	vbuf->flags &= ~mask;
726 	vbuf->flags |= inst->tss[inst->metadata_idx].flags;
727 	vbuf->timecode = inst->tss[inst->metadata_idx].tc;
728 }
729 
/*
 * iris_vb2_buffer_done() - complete the vb2 buffer corresponding to @buf
 * returned by firmware: set payload/sequence/timestamp metadata, raise the
 * EOS event on the LAST buffer, and hand the buffer back to userspace.
 *
 * Return: 0 on success (internal buffer types are silently ignored),
 * -EINVAL if no matching vb2 buffer is found.
 */
int iris_vb2_buffer_done(struct iris_inst *inst, struct iris_buffer *buf)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct vb2_v4l2_buffer *vbuf;
	struct vb2_buffer *vb2;
	u32 type, state;

	switch (buf->type) {
	case BUF_INPUT:
		type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		break;
	case BUF_OUTPUT:
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		break;
	default:
		return 0; /* Internal DPB Buffers */
	}

	vbuf = iris_helper_find_buf(inst, type, buf->index);
	if (!vbuf)
		return -EINVAL;

	vb2 = &vbuf->vb2_buf;

	vbuf->flags |= buf->flags;

	/* Error buffers complete with an empty payload and zero timestamp. */
	if (buf->flags & V4L2_BUF_FLAG_ERROR) {
		state = VB2_BUF_STATE_ERROR;
		vb2_set_plane_payload(vb2, 0, 0);
		vb2->timestamp = 0;
		v4l2_m2m_buf_done(vbuf, state);
		return 0;
	}

	if (V4L2_TYPE_IS_CAPTURE(type)) {
		vb2_set_plane_payload(vb2, 0, buf->data_size);
		vbuf->sequence = inst->sequence_cap++;
		iris_get_ts_metadata(inst, buf->timestamp, vbuf);
	} else {
		vbuf->sequence = inst->sequence_out++;
	}

	/* LAST buffer: signal EOS to userspace once and mark m2m stopped. */
	if (vbuf->flags & V4L2_BUF_FLAG_LAST) {
		if (!v4l2_m2m_has_stopped(m2m_ctx)) {
			const struct v4l2_event ev = { .type = V4L2_EVENT_EOS };

			v4l2_event_queue_fh(&inst->fh, &ev);
			v4l2_m2m_mark_stopped(m2m_ctx);
		}
		inst->last_buffer_dequeued = true;
	}

	state = VB2_BUF_STATE_DONE;
	vb2->timestamp = buf->timestamp;
	v4l2_m2m_buf_done(vbuf, state);

	return 0;
}
788