1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
4 */
5
6 #include <media/v4l2-event.h>
7 #include <media/v4l2-mem2mem.h>
8
9 #include "iris_buffer.h"
10 #include "iris_instance.h"
11 #include "iris_power.h"
12 #include "iris_vpu_buffer.h"
13
/* Hardware-mandated alignment and worst-case sizing constants. */
#define PIXELS_4K 4096			/* buffer sizes are padded to 4 KiB */
#define MAX_WIDTH 4096
#define MAX_HEIGHT 2304
#define Y_STRIDE_ALIGN 128
#define UV_STRIDE_ALIGN 128
#define Y_SCANLINE_ALIGN 32
#define UV_SCANLINE_ALIGN 16
#define UV_SCANLINE_ALIGN_QC08C 32	/* QC08C chroma uses a stricter scanline align */
#define META_STRIDE_ALIGNED 64
#define META_SCANLINE_ALIGNED 16
/* Macroblock count of a full 4K (4096x2304) frame, in 16x16 MBs. */
#define NUM_MBS_4K (DIV_ROUND_UP(MAX_WIDTH, 16) * DIV_ROUND_UP(MAX_HEIGHT, 16))
25
26 /*
27 * NV12:
28 * YUV 4:2:0 image with a plane of 8 bit Y samples followed
29 * by an interleaved U/V plane containing 8 bit 2x2 subsampled
30 * colour difference samples.
31 *
32 * <-Y/UV_Stride (aligned to 128)->
33 * <------- Width ------->
34 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . ^ ^
35 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
36 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . Height |
37 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . | y_scanlines (aligned to 32)
38 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
39 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
40 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . | |
41 * Y Y Y Y Y Y Y Y Y Y Y Y . . . . V |
42 * . . . . . . . . . . . . . . . . |
43 * . . . . . . . . . . . . . . . . |
44 * . . . . . . . . . . . . . . . . |
45 * . . . . . . . . . . . . . . . . V
46 * U V U V U V U V U V U V . . . . ^
47 * U V U V U V U V U V U V . . . . |
48 * U V U V U V U V U V U V . . . . |
49 * U V U V U V U V U V U V . . . . uv_scanlines (aligned to 16)
50 * . . . . . . . . . . . . . . . . |
51 * . . . . . . . . . . . . . . . . V
52 * . . . . . . . . . . . . . . . . --> Buffer size aligned to 4K
53 *
54 * y_stride : Width aligned to 128
55 * uv_stride : Width aligned to 128
56 * y_scanlines: Height aligned to 32
57 * uv_scanlines: Height/2 aligned to 16
58 * Total size = align((y_stride * y_scanlines
59 * + uv_stride * uv_scanlines , 4096)
60 *
61 * Note: All the alignments are hardware requirements.
62 */
iris_yuv_buffer_size_nv12(struct iris_inst * inst)63 static u32 iris_yuv_buffer_size_nv12(struct iris_inst *inst)
64 {
65 u32 y_plane, uv_plane, y_stride, uv_stride, y_scanlines, uv_scanlines;
66 struct v4l2_format *f;
67
68 if (inst->domain == DECODER)
69 f = inst->fmt_dst;
70 else
71 f = inst->fmt_src;
72
73 y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
74 uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
75 y_scanlines = ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN);
76 uv_scanlines = ALIGN((f->fmt.pix_mp.height + 1) >> 1, UV_SCANLINE_ALIGN);
77 y_plane = y_stride * y_scanlines;
78 uv_plane = uv_stride * uv_scanlines;
79
80 return ALIGN(y_plane + uv_plane, PIXELS_4K);
81 }
82
83 /*
84 * QC08C:
85 * Compressed Macro-tile format for NV12.
86 * Contains 4 planes in the following order -
87 * (A) Y_Meta_Plane
88 * (B) Y_UBWC_Plane
89 * (C) UV_Meta_Plane
90 * (D) UV_UBWC_Plane
91 *
92 * Y_Meta_Plane consists of meta information to decode compressed
93 * tile data in Y_UBWC_Plane.
94 * Y_UBWC_Plane consists of Y data in compressed macro-tile format.
95 * UBWC decoder block will use the Y_Meta_Plane data together with
96 * Y_UBWC_Plane data to produce loss-less uncompressed 8 bit Y samples.
97 *
98 * UV_Meta_Plane consists of meta information to decode compressed
99 * tile data in UV_UBWC_Plane.
100 * UV_UBWC_Plane consists of UV data in compressed macro-tile format.
101 * UBWC decoder block will use UV_Meta_Plane data together with
102 * UV_UBWC_Plane data to produce loss-less uncompressed 8 bit 2x2
103 * subsampled color difference samples.
104 *
105 * Each tile in Y_UBWC_Plane/UV_UBWC_Plane is independently decodable
106 * and randomly accessible. There is no dependency between tiles.
107 *
108 * <----- y_meta_stride ----> (aligned to 64)
109 * <-------- Width ------>
110 * M M M M M M M M M M M M . . ^ ^
111 * M M M M M M M M M M M M . . | |
112 * M M M M M M M M M M M M . . Height |
113 * M M M M M M M M M M M M . . | y_meta_scanlines (aligned to 16)
114 * M M M M M M M M M M M M . . | |
115 * M M M M M M M M M M M M . . | |
116 * M M M M M M M M M M M M . . | |
117 * M M M M M M M M M M M M . . V |
118 * . . . . . . . . . . . . . . |
119 * . . . . . . . . . . . . . . |
120 * . . . . . . . . . . . . . . -------> Buffer size aligned to 4k
121 * . . . . . . . . . . . . . . V
122 * <--Compressed tile y_stride---> (aligned to 128)
123 * <------- Width ------->
124 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . ^ ^
125 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . | |
126 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . Height |
127 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . | Macro_tile y_scanlines (aligned to 32)
128 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . | |
129 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . | |
130 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . | |
131 * Y* Y* Y* Y* Y* Y* Y* Y* . . . . V |
132 * . . . . . . . . . . . . . . . . |
133 * . . . . . . . . . . . . . . . . |
134 * . . . . . . . . . . . . . . . . -------> Buffer size aligned to 4k
135 * . . . . . . . . . . . . . . . . V
136 * <----- uv_meta_stride ----> (aligned to 64)
137 * M M M M M M M M M M M M . . ^
138 * M M M M M M M M M M M M . . |
139 * M M M M M M M M M M M M . . |
140 * M M M M M M M M M M M M . . uv_meta_scanlines (aligned to 16)
141 * . . . . . . . . . . . . . . |
142 * . . . . . . . . . . . . . . V
143 * . . . . . . . . . . . . . . -------> Buffer size aligned to 4k
144 * <--Compressed tile uv_stride---> (aligned to 128)
145 * U* V* U* V* U* V* U* V* . . . . ^
146 * U* V* U* V* U* V* U* V* . . . . |
147 * U* V* U* V* U* V* U* V* . . . . |
148 * U* V* U* V* U* V* U* V* . . . . uv_scanlines (aligned to 32)
149 * . . . . . . . . . . . . . . . . |
150 * . . . . . . . . . . . . . . . . V
151 * . . . . . . . . . . . . . . . . -------> Buffer size aligned to 4k
152 *
153 * y_stride: width aligned to 128
154 * uv_stride: width aligned to 128
155 * y_scanlines: height aligned to 32
156 * uv_scanlines: height aligned to 32
157 * y_plane: buffer size aligned to 4096
158 * uv_plane: buffer size aligned to 4096
159 * y_meta_stride: width aligned to 64
160 * y_meta_scanlines: height aligned to 16
161 * y_meta_plane: buffer size aligned to 4096
162 * uv_meta_stride: width aligned to 64
163 * uv_meta_scanlines: height aligned to 16
164 * uv_meta_plane: buffer size aligned to 4096
165 *
166 * Total size = align( y_plane + uv_plane +
167 * y_meta_plane + uv_meta_plane, 4096)
168 *
169 * Note: All the alignments are hardware requirements.
170 */
/*
 * Compute the total QC08C (UBWC-compressed NV12) buffer size: the sum of
 * the Y meta, Y data, UV meta and UV data planes, each 4K-aligned, using
 * the layout described in the comment above.
 */
static u32 iris_yuv_buffer_size_qc08c(struct iris_inst *inst)
{
	u32 y_plane, uv_plane, y_stride, uv_stride;
	u32 uv_meta_stride, uv_meta_plane;
	u32 y_meta_stride, y_meta_plane;
	struct v4l2_format *f = NULL;

	/* Raw-pixel format: capture side for a decoder, output side for an encoder. */
	if (inst->domain == DECODER)
		f = inst->fmt_dst;
	else
		f = inst->fmt_src;

	/*
	 * Y meta plane: width counted in 32-pixel units (META_STRIDE_ALIGNED >> 1)
	 * with the stride aligned to 64; height counted in 8-line units
	 * (META_SCANLINE_ALIGNED >> 1) aligned to 16 scanlines; 4K-aligned.
	 */
	y_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width, META_STRIDE_ALIGNED >> 1),
			      META_STRIDE_ALIGNED);
	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height,
							  META_SCANLINE_ALIGNED >> 1),
					     META_SCANLINE_ALIGNED);
	y_meta_plane = ALIGN(y_meta_plane, PIXELS_4K);

	/* Compressed Y tile plane: stride aligned to 128, scanlines to 32. */
	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
	y_plane = ALIGN(y_stride * ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN), PIXELS_4K);

	/*
	 * UV meta plane: half-width counted in 16-pixel units
	 * (META_STRIDE_ALIGNED >> 2), half-height in 8-line units; 4K-aligned.
	 */
	uv_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width / 2, META_STRIDE_ALIGNED >> 2),
			       META_STRIDE_ALIGNED);
	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height / 2,
							    META_SCANLINE_ALIGNED >> 1),
					       META_SCANLINE_ALIGNED);
	uv_meta_plane = ALIGN(uv_meta_plane, PIXELS_4K);

	/* Compressed UV tile plane: half-height, scanlines aligned to 32. */
	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
	uv_plane = ALIGN(uv_stride * ALIGN(f->fmt.pix_mp.height / 2, UV_SCANLINE_ALIGN_QC08C),
			 PIXELS_4K);

	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane, PIXELS_4K);
}
206
iris_dec_bitstream_buffer_size(struct iris_inst * inst)207 static u32 iris_dec_bitstream_buffer_size(struct iris_inst *inst)
208 {
209 struct platform_inst_caps *caps = inst->core->iris_platform_data->inst_caps;
210 u32 base_res_mbs = NUM_MBS_4K;
211 u32 frame_size, num_mbs;
212 u32 div_factor = 2;
213
214 num_mbs = iris_get_mbpf(inst);
215 if (num_mbs > NUM_MBS_4K) {
216 div_factor = 4;
217 base_res_mbs = caps->max_mbpf;
218 } else {
219 if (inst->codec == V4L2_PIX_FMT_VP9)
220 div_factor = 1;
221 }
222
223 /*
224 * frame_size = YUVsize / div_factor
225 * where YUVsize = resolution_in_MBs * MBs_in_pixel * 3 / 2
226 */
227 frame_size = base_res_mbs * (16 * 16) * 3 / 2 / div_factor;
228
229 return ALIGN(frame_size, PIXELS_4K);
230 }
231
iris_enc_bitstream_buffer_size(struct iris_inst * inst)232 static u32 iris_enc_bitstream_buffer_size(struct iris_inst *inst)
233 {
234 u32 aligned_width, aligned_height, bitstream_size, yuv_size;
235 int bitrate_mode, frame_rc;
236 struct v4l2_format *f;
237
238 f = inst->fmt_dst;
239
240 bitrate_mode = inst->fw_caps[BITRATE_MODE].value;
241 frame_rc = inst->fw_caps[FRAME_RC_ENABLE].value;
242
243 aligned_width = ALIGN(f->fmt.pix_mp.width, 32);
244 aligned_height = ALIGN(f->fmt.pix_mp.height, 32);
245 bitstream_size = aligned_width * aligned_height * 3;
246 yuv_size = (aligned_width * aligned_height * 3) >> 1;
247 if (aligned_width * aligned_height > (4096 * 2176))
248 /* bitstream_size = 0.25 * yuv_size; */
249 bitstream_size = (bitstream_size >> 3);
250 else if (aligned_width * aligned_height > (1280 * 720))
251 /* bitstream_size = 0.5 * yuv_size; */
252 bitstream_size = (bitstream_size >> 2);
253
254 if ((!frame_rc || bitrate_mode == V4L2_MPEG_VIDEO_BITRATE_MODE_CQ) &&
255 bitstream_size < yuv_size)
256 bitstream_size = (bitstream_size << 1);
257
258 return ALIGN(bitstream_size, 4096);
259 }
260
iris_get_buffer_size(struct iris_inst * inst,enum iris_buffer_type buffer_type)261 int iris_get_buffer_size(struct iris_inst *inst,
262 enum iris_buffer_type buffer_type)
263 {
264 if (inst->domain == DECODER) {
265 switch (buffer_type) {
266 case BUF_INPUT:
267 return iris_dec_bitstream_buffer_size(inst);
268 case BUF_OUTPUT:
269 if (inst->fmt_dst->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_QC08C)
270 return iris_yuv_buffer_size_qc08c(inst);
271 else
272 return iris_yuv_buffer_size_nv12(inst);
273 case BUF_DPB:
274 return iris_yuv_buffer_size_qc08c(inst);
275 default:
276 return 0;
277 }
278 } else {
279 switch (buffer_type) {
280 case BUF_INPUT:
281 if (inst->fmt_src->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_QC08C)
282 return iris_yuv_buffer_size_qc08c(inst);
283 else
284 return iris_yuv_buffer_size_nv12(inst);
285 case BUF_OUTPUT:
286 return iris_enc_bitstream_buffer_size(inst);
287 default:
288 return 0;
289 }
290 }
291 }
292
/*
 * Cache the platform-computed size and the minimum required count for an
 * internal buffer type in inst->buffers[buffer_type].
 */
static void iris_fill_internal_buf_info(struct iris_inst *inst,
					enum iris_buffer_type buffer_type)
{
	struct iris_buffers *buffers = &inst->buffers[buffer_type];

	buffers->size = inst->core->iris_platform_data->get_vpu_buffer_size(inst, buffer_type);
	buffers->min_count = iris_vpu_buf_count(inst, buffer_type);
}
301
/*
 * Refresh the size/min_count bookkeeping for every internal buffer type
 * associated with @plane, using the per-domain, per-direction table from
 * the platform data.
 *
 * Fix: the identical fill loop was duplicated in all four branches;
 * select the table first and run the loop once.
 */
void iris_get_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
	const u32 *internal_buf_type;
	u32 internal_buffer_count, i;

	if (inst->domain == DECODER) {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->dec_op_int_buf_tbl;
			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
		}
	} else {
		if (V4L2_TYPE_IS_OUTPUT(plane)) {
			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
		} else {
			internal_buf_type = platform_data->enc_op_int_buf_tbl;
			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
		}
	}

	for (i = 0; i < internal_buffer_count; i++)
		iris_fill_internal_buf_info(inst, internal_buf_type[i]);
}
334
/*
 * Allocate one internal buffer of @buffer_type at position @index:
 * a bookkeeping struct plus a write-combined, unmapped DMA allocation
 * of the precomputed size, then link it onto the type's list.
 * A zero configured size is a no-op success.
 */
static int iris_create_internal_buffer(struct iris_inst *inst,
				       enum iris_buffer_type buffer_type, u32 index)
{
	struct iris_buffers *buffers = &inst->buffers[buffer_type];
	struct iris_core *core = inst->core;
	struct iris_buffer *buf;

	if (!buffers->size)
		return 0;

	buf = kzalloc_obj(*buf, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	INIT_LIST_HEAD(&buf->list);
	buf->index = index;
	buf->type = buffer_type;
	buf->buffer_size = buffers->size;
	/* Firmware-only memory: no kernel mapping needed. */
	buf->dma_attrs = DMA_ATTR_WRITE_COMBINE | DMA_ATTR_NO_KERNEL_MAPPING;

	buf->kvaddr = dma_alloc_attrs(core->dev, buf->buffer_size,
				      &buf->device_addr, GFP_KERNEL, buf->dma_attrs);
	if (!buf->kvaddr) {
		kfree(buf);
		return -ENOMEM;
	}

	list_add_tail(&buf->list, &buffers->list);

	return 0;
}
366
/*
 * Allocate min_count internal buffers for every buffer type listed in the
 * platform table matching this instance's domain and @plane direction.
 * Returns 0 on success or the first allocation error.
 */
int iris_create_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *pdata = inst->core->iris_platform_data;
	bool input = V4L2_TYPE_IS_OUTPUT(plane);
	const u32 *tbl;
	u32 count, i, j;
	int ret;

	if (inst->domain == DECODER) {
		tbl = input ? pdata->dec_ip_int_buf_tbl : pdata->dec_op_int_buf_tbl;
		count = input ? pdata->dec_ip_int_buf_tbl_size :
				pdata->dec_op_int_buf_tbl_size;
	} else {
		tbl = input ? pdata->enc_ip_int_buf_tbl : pdata->enc_op_int_buf_tbl;
		count = input ? pdata->enc_ip_int_buf_tbl_size :
				pdata->enc_op_int_buf_tbl_size;
	}

	for (i = 0; i < count; i++) {
		u32 min_count = inst->buffers[tbl[i]].min_count;

		for (j = 0; j < min_count; j++) {
			ret = iris_create_internal_buffer(inst, tbl[i], j);
			if (ret)
				return ret;
		}
	}

	return 0;
}
404
iris_queue_buffer(struct iris_inst * inst,struct iris_buffer * buf)405 int iris_queue_buffer(struct iris_inst *inst, struct iris_buffer *buf)
406 {
407 const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
408 int ret;
409
410 ret = hfi_ops->session_queue_buf(inst, buf);
411 if (ret)
412 return ret;
413
414 buf->attr &= ~BUF_ATTR_DEFERRED;
415 buf->attr |= BUF_ATTR_QUEUED;
416
417 return 0;
418 }
419
iris_queue_internal_deferred_buffers(struct iris_inst * inst,enum iris_buffer_type buffer_type)420 int iris_queue_internal_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buffer_type)
421 {
422 struct iris_buffer *buffer, *next;
423 struct iris_buffers *buffers;
424 int ret = 0;
425
426 buffers = &inst->buffers[buffer_type];
427 list_for_each_entry_safe(buffer, next, &buffers->list, list) {
428 if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
429 continue;
430 if (buffer->attr & BUF_ATTR_QUEUED)
431 continue;
432
433 if (buffer->attr & BUF_ATTR_DEFERRED) {
434 ret = iris_queue_buffer(inst, buffer);
435 if (ret)
436 return ret;
437 }
438 }
439
440 return ret;
441 }
442
/*
 * Queue to firmware all internal buffers for @plane that are neither
 * pending release nor already queued. DPB buffers are held back (marked
 * deferred) until the instance reaches the streaming state.
 */
int iris_queue_internal_buffers(struct iris_inst *inst, u32 plane)
{
	const struct iris_platform_data *pdata = inst->core->iris_platform_data;
	bool input = V4L2_TYPE_IS_OUTPUT(plane);
	struct iris_buffer *buf, *tmp;
	const u32 *tbl;
	u32 count, i;
	int ret;

	if (inst->domain == DECODER) {
		tbl = input ? pdata->dec_ip_int_buf_tbl : pdata->dec_op_int_buf_tbl;
		count = input ? pdata->dec_ip_int_buf_tbl_size :
				pdata->dec_op_int_buf_tbl_size;
	} else {
		tbl = input ? pdata->enc_ip_int_buf_tbl : pdata->enc_op_int_buf_tbl;
		count = input ? pdata->enc_ip_int_buf_tbl_size :
				pdata->enc_op_int_buf_tbl_size;
	}

	for (i = 0; i < count; i++) {
		struct iris_buffers *buffers = &inst->buffers[tbl[i]];

		list_for_each_entry_safe(buf, tmp, &buffers->list, list) {
			if (buf->attr & (BUF_ATTR_PENDING_RELEASE | BUF_ATTR_QUEUED))
				continue;
			if (buf->type == BUF_DPB && inst->state != IRIS_INST_STREAMING) {
				buf->attr |= BUF_ATTR_DEFERRED;
				continue;
			}
			ret = iris_queue_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	return 0;
}
489
/*
 * Unlink @buffer from its owning list, free its DMA allocation, and free
 * the bookkeeping struct. Always returns 0.
 */
int iris_destroy_internal_buffer(struct iris_inst *inst, struct iris_buffer *buffer)
{
	struct iris_core *core = inst->core;

	list_del(&buffer->list);
	dma_free_attrs(core->dev, buffer->buffer_size, buffer->kvaddr,
		       buffer->device_addr, buffer->dma_attrs);
	kfree(buffer);

	return 0;
}
501
/*
 * Tear down internal buffers for @plane. With @force false (stream-off),
 * buffers the firmware still holds (BUF_ATTR_QUEUED) are skipped; with
 * @force true (close), everything is destroyed, including the
 * session-lifetime PERSIST (decoder) or ARP (encoder) buffers.
 */
static int iris_destroy_internal_buffers(struct iris_inst *inst, u32 plane, bool force)
{
	const struct iris_platform_data *pdata = inst->core->iris_platform_data;
	bool input = V4L2_TYPE_IS_OUTPUT(plane);
	struct iris_buffers *buffers;
	struct iris_buffer *buf, *tmp;
	const u32 *tbl;
	u32 count, i;
	int ret;

	if (inst->domain == DECODER) {
		tbl = input ? pdata->dec_ip_int_buf_tbl : pdata->dec_op_int_buf_tbl;
		count = input ? pdata->dec_ip_int_buf_tbl_size :
				pdata->dec_op_int_buf_tbl_size;
	} else {
		tbl = input ? pdata->enc_ip_int_buf_tbl : pdata->enc_op_int_buf_tbl;
		count = input ? pdata->enc_ip_int_buf_tbl_size :
				pdata->enc_op_int_buf_tbl_size;
	}

	for (i = 0; i < count; i++) {
		buffers = &inst->buffers[tbl[i]];
		list_for_each_entry_safe(buf, tmp, &buffers->list, list) {
			/*
			 * during stream on, skip destroying internal(DPB) buffer
			 * if firmware did not return it.
			 * during close, destroy all buffers irrespectively.
			 */
			if (!force && (buf->attr & BUF_ATTR_QUEUED))
				continue;

			ret = iris_destroy_internal_buffer(inst, buf);
			if (ret)
				return ret;
		}
	}

	if (!force)
		return 0;

	/* Close path: also drop the session-lifetime buffers. */
	buffers = &inst->buffers[inst->domain == DECODER ? BUF_PERSIST : BUF_ARP];
	list_for_each_entry_safe(buf, tmp, &buffers->list, list) {
		ret = iris_destroy_internal_buffer(inst, buf);
		if (ret)
			return ret;
	}

	return 0;
}
561
/* Destroy every internal buffer for @plane, even ones firmware still holds. */
int iris_destroy_all_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, true);
}
566
/* Destroy only the internal buffers for @plane already returned by firmware. */
int iris_destroy_dequeued_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, false);
}
571
iris_release_internal_buffers(struct iris_inst * inst,enum iris_buffer_type buffer_type)572 static int iris_release_internal_buffers(struct iris_inst *inst,
573 enum iris_buffer_type buffer_type)
574 {
575 const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
576 struct iris_buffers *buffers = &inst->buffers[buffer_type];
577 struct iris_buffer *buffer, *next;
578 int ret;
579
580 list_for_each_entry_safe(buffer, next, &buffers->list, list) {
581 if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
582 continue;
583 if (!(buffer->attr & BUF_ATTR_QUEUED))
584 continue;
585 ret = hfi_ops->session_release_buf(inst, buffer);
586 if (ret)
587 return ret;
588 buffer->attr |= BUF_ATTR_PENDING_RELEASE;
589 }
590
591 return 0;
592 }
593
iris_release_input_internal_buffers(struct iris_inst * inst)594 static int iris_release_input_internal_buffers(struct iris_inst *inst)
595 {
596 const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
597 const u32 *internal_buf_type;
598 u32 internal_buffer_count, i;
599 int ret;
600
601 if (inst->domain == DECODER) {
602 internal_buf_type = platform_data->dec_ip_int_buf_tbl;
603 internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
604 } else {
605 internal_buf_type = platform_data->enc_ip_int_buf_tbl;
606 internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
607 }
608
609 for (i = 0; i < internal_buffer_count; i++) {
610 ret = iris_release_internal_buffers(inst, internal_buf_type[i]);
611 if (ret)
612 return ret;
613 }
614
615 return 0;
616 }
617
iris_alloc_and_queue_persist_bufs(struct iris_inst * inst,enum iris_buffer_type buffer_type)618 int iris_alloc_and_queue_persist_bufs(struct iris_inst *inst, enum iris_buffer_type buffer_type)
619 {
620 struct iris_buffers *buffers = &inst->buffers[buffer_type];
621 struct iris_buffer *buffer, *next;
622 int ret;
623 u32 i;
624
625 if (!list_empty(&buffers->list))
626 return 0;
627
628 iris_fill_internal_buf_info(inst, buffer_type);
629
630 for (i = 0; i < buffers->min_count; i++) {
631 ret = iris_create_internal_buffer(inst, buffer_type, i);
632 if (ret)
633 return ret;
634 }
635
636 list_for_each_entry_safe(buffer, next, &buffers->list, list) {
637 if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
638 continue;
639 if (buffer->attr & BUF_ATTR_QUEUED)
640 continue;
641 ret = iris_queue_buffer(inst, buffer);
642 if (ret)
643 return ret;
644 }
645
646 return 0;
647 }
648
/*
 * Refresh the input-side internal buffers: recompute sizes and counts,
 * ask firmware to release the previously queued set, then allocate and
 * queue a fresh set.
 */
int iris_alloc_and_queue_input_int_bufs(struct iris_inst *inst)
{
	int ret;

	iris_get_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);

	ret = iris_release_input_internal_buffers(inst);
	if (ret)
		return ret;

	ret = iris_create_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	if (ret)
		return ret;

	return iris_queue_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
}
665
iris_queue_deferred_buffers(struct iris_inst * inst,enum iris_buffer_type buf_type)666 int iris_queue_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buf_type)
667 {
668 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
669 struct v4l2_m2m_buffer *buffer, *n;
670 struct iris_buffer *buf;
671 int ret;
672
673 iris_scale_power(inst);
674
675 if (buf_type == BUF_INPUT) {
676 v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buffer, n) {
677 buf = to_iris_buffer(&buffer->vb);
678 if (!(buf->attr & BUF_ATTR_DEFERRED))
679 continue;
680 ret = iris_queue_buffer(inst, buf);
681 if (ret)
682 return ret;
683 }
684 } else {
685 v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buffer, n) {
686 buf = to_iris_buffer(&buffer->vb);
687 if (!(buf->attr & BUF_ATTR_DEFERRED))
688 continue;
689 ret = iris_queue_buffer(inst, buf);
690 if (ret)
691 return ret;
692 }
693 }
694
695 return 0;
696 }
697
iris_vb2_queue_error(struct iris_inst * inst)698 void iris_vb2_queue_error(struct iris_inst *inst)
699 {
700 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
701 struct vb2_queue *q;
702
703 q = v4l2_m2m_get_src_vq(m2m_ctx);
704 vb2_queue_error(q);
705 q = v4l2_m2m_get_dst_vq(m2m_ctx);
706 vb2_queue_error(q);
707 }
708
709 static struct vb2_v4l2_buffer *
iris_helper_find_buf(struct iris_inst * inst,u32 type,u32 idx)710 iris_helper_find_buf(struct iris_inst *inst, u32 type, u32 idx)
711 {
712 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
713
714 if (V4L2_TYPE_IS_OUTPUT(type))
715 return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
716 else
717 return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
718 }
719
/*
 * Copy stored timecode and timestamp-source flags onto a dequeued capture
 * buffer, matching on the buffer's timestamp against the inst->tss table.
 * If no entry matches, the slot at inst->metadata_idx is used instead
 * (NOTE(review): presumably the most recently stored entry — confirm
 * against the code that fills inst->tss).
 */
static void iris_get_ts_metadata(struct iris_inst *inst, u64 timestamp_ns,
				 struct vb2_v4l2_buffer *vbuf)
{
	/* Only timecode and timestamp-source bits are replaced. */
	u32 mask = V4L2_BUF_FLAG_TIMECODE | V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	u32 i;

	for (i = 0; i < ARRAY_SIZE(inst->tss); ++i) {
		if (inst->tss[i].ts_ns != timestamp_ns)
			continue;

		/* Exact timestamp match: take this entry's flags and timecode. */
		vbuf->flags &= ~mask;
		vbuf->flags |= inst->tss[i].flags;
		vbuf->timecode = inst->tss[i].tc;
		return;
	}

	/* Fallback: use the entry at metadata_idx. */
	vbuf->flags &= ~mask;
	vbuf->flags |= inst->tss[inst->metadata_idx].flags;
	vbuf->timecode = inst->tss[inst->metadata_idx].tc;
}
740
iris_vb2_buffer_done(struct iris_inst * inst,struct iris_buffer * buf)741 int iris_vb2_buffer_done(struct iris_inst *inst, struct iris_buffer *buf)
742 {
743 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
744 struct vb2_v4l2_buffer *vbuf;
745 struct vb2_buffer *vb2;
746 u32 type, state;
747
748 switch (buf->type) {
749 case BUF_INPUT:
750 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
751 break;
752 case BUF_OUTPUT:
753 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
754 break;
755 default:
756 return 0; /* Internal DPB Buffers */
757 }
758
759 vbuf = iris_helper_find_buf(inst, type, buf->index);
760 if (!vbuf)
761 return -EINVAL;
762
763 vb2 = &vbuf->vb2_buf;
764
765 vbuf->flags |= buf->flags;
766
767 if (buf->flags & V4L2_BUF_FLAG_ERROR) {
768 state = VB2_BUF_STATE_ERROR;
769 vb2_set_plane_payload(vb2, 0, 0);
770 vb2->timestamp = 0;
771 v4l2_m2m_buf_done(vbuf, state);
772 return 0;
773 }
774
775 if (V4L2_TYPE_IS_CAPTURE(type)) {
776 vb2_set_plane_payload(vb2, 0, buf->data_size);
777 vbuf->sequence = inst->sequence_cap++;
778 iris_get_ts_metadata(inst, buf->timestamp, vbuf);
779 } else {
780 vbuf->sequence = inst->sequence_out++;
781 }
782
783 if (vbuf->flags & V4L2_BUF_FLAG_LAST) {
784 if (!v4l2_m2m_has_stopped(m2m_ctx)) {
785 const struct v4l2_event ev = { .type = V4L2_EVENT_EOS };
786
787 v4l2_event_queue_fh(&inst->fh, &ev);
788 v4l2_m2m_mark_stopped(m2m_ctx);
789 }
790 inst->last_buffer_dequeued = true;
791 }
792
793 state = VB2_BUF_STATE_DONE;
794 vb2->timestamp = buf->timestamp;
795 v4l2_m2m_buf_done(vbuf, state);
796
797 return 0;
798 }
799