xref: /linux/drivers/media/v4l2-core/v4l2-ctrls-core.c (revision 6fd600d742744dc7ef7fc65ca26daa2b1163158a)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * V4L2 controls framework core implementation.
4  *
5  * Copyright (C) 2010-2021  Hans Verkuil <hverkuil-cisco@xs4all.nl>
6  */
7 
8 #include <linux/export.h>
9 #include <linux/mm.h>
10 #include <linux/slab.h>
11 #include <media/v4l2-ctrls.h>
12 #include <media/v4l2-event.h>
13 #include <media/v4l2-fwnode.h>
14 
15 #include "v4l2-ctrls-priv.h"
16 
17 static const union v4l2_ctrl_ptr ptr_null;
18 
19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
20 		       u32 changes)
21 {
22 	memset(ev, 0, sizeof(*ev));
23 	ev->type = V4L2_EVENT_CTRL;
24 	ev->id = ctrl->id;
25 	ev->u.ctrl.changes = changes;
26 	ev->u.ctrl.type = ctrl->type;
27 	ev->u.ctrl.flags = user_flags(ctrl);
28 	if (ctrl->is_ptr)
29 		ev->u.ctrl.value64 = 0;
30 	else
31 		ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
32 	ev->u.ctrl.minimum = ctrl->minimum;
33 	ev->u.ctrl.maximum = ctrl->maximum;
34 	if (ctrl->type == V4L2_CTRL_TYPE_MENU
35 	    || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
36 		ev->u.ctrl.step = 1;
37 	else
38 		ev->u.ctrl.step = ctrl->step;
39 	ev->u.ctrl.default_value = ctrl->default_value;
40 }
41 
42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
43 {
44 	struct v4l2_event ev;
45 	u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;
46 
47 	if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
48 		changes |= V4L2_EVENT_CTRL_CH_VALUE;
49 	fill_event(&ev, ctrl, changes);
50 	v4l2_event_queue_fh(fh, &ev);
51 }
52 
53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
54 {
55 	struct v4l2_event ev;
56 	struct v4l2_subscribed_event *sev;
57 
58 	if (list_empty(&ctrl->ev_subs))
59 		return;
60 	fill_event(&ev, ctrl, changes);
61 
62 	list_for_each_entry(sev, &ctrl->ev_subs, node)
63 		if (sev->fh != fh ||
64 		    (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
65 			v4l2_event_queue_fh(sev->fh, &ev);
66 }
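/*
 * Clarifying note on the loop above: a subscriber is skipped when it is the
 * same file handle that caused the change, so applications do not get their
 * own control changes echoed back unless they subscribed with the
 * V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK flag set.
 */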
67 
68 bool v4l2_ctrl_type_op_equal(const struct v4l2_ctrl *ctrl,
69 			     union v4l2_ctrl_ptr ptr1, union v4l2_ctrl_ptr ptr2)
70 {
71 	unsigned int i;
72 
73 	switch (ctrl->type) {
74 	case V4L2_CTRL_TYPE_BUTTON:
75 		return false;
76 	case V4L2_CTRL_TYPE_STRING:
77 		for (i = 0; i < ctrl->elems; i++) {
78 			unsigned int idx = i * ctrl->elem_size;
79 
80 			/* strings are always 0-terminated */
81 			if (strcmp(ptr1.p_char + idx, ptr2.p_char + idx))
82 				return false;
83 		}
84 		return true;
85 	default:
86 		return !memcmp(ptr1.p_const, ptr2.p_const,
87 			       ctrl->elems * ctrl->elem_size);
88 	}
89 }
90 EXPORT_SYMBOL(v4l2_ctrl_type_op_equal);
91 
92 /* Default intra MPEG-2 quantisation coefficients, from the specification. */
93 static const u8 mpeg2_intra_quant_matrix[64] = {
94 	8,  16, 16, 19, 16, 19, 22, 22,
95 	22, 22, 22, 22, 26, 24, 26, 27,
96 	27, 27, 26, 26, 26, 26, 27, 27,
97 	27, 29, 29, 29, 34, 34, 34, 29,
98 	29, 29, 27, 27, 29, 29, 32, 32,
99 	34, 34, 37, 38, 37, 35, 35, 34,
100 	35, 38, 38, 40, 40, 40, 48, 48,
101 	46, 46, 56, 56, 58, 69, 69, 83
102 };
103 
104 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
105 			      union v4l2_ctrl_ptr ptr)
106 {
107 	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
108 	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
109 	struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
110 	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
111 	struct v4l2_ctrl_vp9_frame *p_vp9_frame;
112 	struct v4l2_ctrl_fwht_params *p_fwht_params;
113 	struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
114 	struct v4l2_ctrl_av1_sequence *p_av1_sequence;
115 	void *p = ptr.p + idx * ctrl->elem_size;
116 
117 	if (ctrl->p_def.p_const)
118 		memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
119 	else
120 		memset(p, 0, ctrl->elem_size);
121 
122 	switch ((u32)ctrl->type) {
123 	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
124 		p_mpeg2_sequence = p;
125 
126 		/* 4:2:0 */
127 		p_mpeg2_sequence->chroma_format = 1;
128 		break;
129 	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
130 		p_mpeg2_picture = p;
131 
132 		/* interlaced top field */
133 		p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
134 		p_mpeg2_picture->picture_coding_type =
135 					V4L2_MPEG2_PIC_CODING_TYPE_I;
136 		break;
137 	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
138 		p_mpeg2_quant = p;
139 
140 		memcpy(p_mpeg2_quant->intra_quantiser_matrix,
141 		       mpeg2_intra_quant_matrix,
142 		       ARRAY_SIZE(mpeg2_intra_quant_matrix));
143 		/*
144 		 * The default non-intra MPEG-2 quantisation
145 		 * coefficients are all 16, as per the specification.
146 		 */
147 		memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
148 		       sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
149 		break;
150 	case V4L2_CTRL_TYPE_VP8_FRAME:
151 		p_vp8_frame = p;
152 		p_vp8_frame->num_dct_parts = 1;
153 		break;
154 	case V4L2_CTRL_TYPE_VP9_FRAME:
155 		p_vp9_frame = p;
156 		p_vp9_frame->profile = 0;
157 		p_vp9_frame->bit_depth = 8;
158 		p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
159 			V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
160 		break;
161 	case V4L2_CTRL_TYPE_AV1_SEQUENCE:
162 		p_av1_sequence = p;
163 		p_av1_sequence->bit_depth = 8;
164 		break;
165 	case V4L2_CTRL_TYPE_FWHT_PARAMS:
166 		p_fwht_params = p;
167 		p_fwht_params->version = V4L2_FWHT_VERSION;
168 		p_fwht_params->width = 1280;
169 		p_fwht_params->height = 720;
170 		p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
171 			(2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
172 		break;
173 	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
174 		p_h264_scaling_matrix = p;
175 		/*
176 		 * The default (flat) H.264 scaling matrix used when none is
177 		 * specified in the bitstream, according to formulas (7-8)
178 		 * and (7-9) of the specification.
179 		 */
180 		memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix));
181 		break;
182 	}
183 }
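/*
 * Note: for compound types not handled by the switch above, the control
 * simply keeps the driver-supplied default (p_def) or the all-zeroes value
 * from the memcpy/memset at the top of the function, so every compound
 * control starts out with a well-defined value.
 */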
184 
185 void v4l2_ctrl_type_op_init(const struct v4l2_ctrl *ctrl, u32 from_idx,
186 			    union v4l2_ctrl_ptr ptr)
187 {
188 	unsigned int i;
189 	u32 tot_elems = ctrl->elems;
190 	u32 elems = tot_elems - from_idx;
191 
192 	if (from_idx >= tot_elems)
193 		return;
194 
195 	switch (ctrl->type) {
196 	case V4L2_CTRL_TYPE_STRING:
197 		for (i = from_idx; i < tot_elems; i++) {
198 			unsigned int offset = i * ctrl->elem_size;
199 
200 			memset(ptr.p_char + offset, ' ', ctrl->minimum);
201 			ptr.p_char[offset + ctrl->minimum] = '\0';
202 		}
203 		break;
204 	case V4L2_CTRL_TYPE_INTEGER64:
205 		if (ctrl->default_value) {
206 			for (i = from_idx; i < tot_elems; i++)
207 				ptr.p_s64[i] = ctrl->default_value;
208 		} else {
209 			memset(ptr.p_s64 + from_idx, 0, elems * sizeof(s64));
210 		}
211 		break;
212 	case V4L2_CTRL_TYPE_INTEGER:
213 	case V4L2_CTRL_TYPE_INTEGER_MENU:
214 	case V4L2_CTRL_TYPE_MENU:
215 	case V4L2_CTRL_TYPE_BITMASK:
216 	case V4L2_CTRL_TYPE_BOOLEAN:
217 		if (ctrl->default_value) {
218 			for (i = from_idx; i < tot_elems; i++)
219 				ptr.p_s32[i] = ctrl->default_value;
220 		} else {
221 			memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
222 		}
223 		break;
224 	case V4L2_CTRL_TYPE_BUTTON:
225 	case V4L2_CTRL_TYPE_CTRL_CLASS:
226 		memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
227 		break;
228 	case V4L2_CTRL_TYPE_U8:
229 		memset(ptr.p_u8 + from_idx, ctrl->default_value, elems);
230 		break;
231 	case V4L2_CTRL_TYPE_U16:
232 		if (ctrl->default_value) {
233 			for (i = from_idx; i < tot_elems; i++)
234 				ptr.p_u16[i] = ctrl->default_value;
235 		} else {
236 			memset(ptr.p_u16 + from_idx, 0, elems * sizeof(u16));
237 		}
238 		break;
239 	case V4L2_CTRL_TYPE_U32:
240 		if (ctrl->default_value) {
241 			for (i = from_idx; i < tot_elems; i++)
242 				ptr.p_u32[i] = ctrl->default_value;
243 		} else {
244 			memset(ptr.p_u32 + from_idx, 0, elems * sizeof(u32));
245 		}
246 		break;
247 	default:
248 		for (i = from_idx; i < tot_elems; i++)
249 			std_init_compound(ctrl, i, ptr);
250 		break;
251 	}
252 }
253 EXPORT_SYMBOL(v4l2_ctrl_type_op_init);
254 
255 void v4l2_ctrl_type_op_log(const struct v4l2_ctrl *ctrl)
256 {
257 	union v4l2_ctrl_ptr ptr = ctrl->p_cur;
258 
259 	if (ctrl->is_array) {
260 		unsigned i;
261 
262 		for (i = 0; i < ctrl->nr_of_dims; i++)
263 			pr_cont("[%u]", ctrl->dims[i]);
264 		pr_cont(" ");
265 	}
266 
267 	switch (ctrl->type) {
268 	case V4L2_CTRL_TYPE_INTEGER:
269 		pr_cont("%d", *ptr.p_s32);
270 		break;
271 	case V4L2_CTRL_TYPE_BOOLEAN:
272 		pr_cont("%s", *ptr.p_s32 ? "true" : "false");
273 		break;
274 	case V4L2_CTRL_TYPE_MENU:
275 		pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
276 		break;
277 	case V4L2_CTRL_TYPE_INTEGER_MENU:
278 		pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
279 		break;
280 	case V4L2_CTRL_TYPE_BITMASK:
281 		pr_cont("0x%08x", *ptr.p_s32);
282 		break;
283 	case V4L2_CTRL_TYPE_INTEGER64:
284 		pr_cont("%lld", *ptr.p_s64);
285 		break;
286 	case V4L2_CTRL_TYPE_STRING:
287 		pr_cont("%s", ptr.p_char);
288 		break;
289 	case V4L2_CTRL_TYPE_U8:
290 		pr_cont("%u", (unsigned)*ptr.p_u8);
291 		break;
292 	case V4L2_CTRL_TYPE_U16:
293 		pr_cont("%u", (unsigned)*ptr.p_u16);
294 		break;
295 	case V4L2_CTRL_TYPE_U32:
296 		pr_cont("%u", (unsigned)*ptr.p_u32);
297 		break;
298 	case V4L2_CTRL_TYPE_AREA:
299 		pr_cont("%ux%u", ptr.p_area->width, ptr.p_area->height);
300 		break;
301 	case V4L2_CTRL_TYPE_H264_SPS:
302 		pr_cont("H264_SPS");
303 		break;
304 	case V4L2_CTRL_TYPE_H264_PPS:
305 		pr_cont("H264_PPS");
306 		break;
307 	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
308 		pr_cont("H264_SCALING_MATRIX");
309 		break;
310 	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
311 		pr_cont("H264_SLICE_PARAMS");
312 		break;
313 	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
314 		pr_cont("H264_DECODE_PARAMS");
315 		break;
316 	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
317 		pr_cont("H264_PRED_WEIGHTS");
318 		break;
319 	case V4L2_CTRL_TYPE_FWHT_PARAMS:
320 		pr_cont("FWHT_PARAMS");
321 		break;
322 	case V4L2_CTRL_TYPE_VP8_FRAME:
323 		pr_cont("VP8_FRAME");
324 		break;
325 	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
326 		pr_cont("HDR10_CLL_INFO");
327 		break;
328 	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
329 		pr_cont("HDR10_MASTERING_DISPLAY");
330 		break;
331 	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
332 		pr_cont("MPEG2_QUANTISATION");
333 		break;
334 	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
335 		pr_cont("MPEG2_SEQUENCE");
336 		break;
337 	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
338 		pr_cont("MPEG2_PICTURE");
339 		break;
340 	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
341 		pr_cont("VP9_COMPRESSED_HDR");
342 		break;
343 	case V4L2_CTRL_TYPE_VP9_FRAME:
344 		pr_cont("VP9_FRAME");
345 		break;
346 	case V4L2_CTRL_TYPE_HEVC_SPS:
347 		pr_cont("HEVC_SPS");
348 		break;
349 	case V4L2_CTRL_TYPE_HEVC_PPS:
350 		pr_cont("HEVC_PPS");
351 		break;
352 	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
353 		pr_cont("HEVC_SLICE_PARAMS");
354 		break;
355 	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
356 		pr_cont("HEVC_SCALING_MATRIX");
357 		break;
358 	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
359 		pr_cont("HEVC_DECODE_PARAMS");
360 		break;
361 	case V4L2_CTRL_TYPE_AV1_SEQUENCE:
362 		pr_cont("AV1_SEQUENCE");
363 		break;
364 	case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY:
365 		pr_cont("AV1_TILE_GROUP_ENTRY");
366 		break;
367 	case V4L2_CTRL_TYPE_AV1_FRAME:
368 		pr_cont("AV1_FRAME");
369 		break;
370 	case V4L2_CTRL_TYPE_AV1_FILM_GRAIN:
371 		pr_cont("AV1_FILM_GRAIN");
372 		break;
373 
374 	default:
375 		pr_cont("unknown type %d", ctrl->type);
376 		break;
377 	}
378 }
379 EXPORT_SYMBOL(v4l2_ctrl_type_op_log);
380 
381 /*
382  * Round towards the closest legal value. Be careful when we are
383  * close to the maximum range of the control type to prevent
384  * wrap-arounds.
385  */
386 #define ROUND_TO_RANGE(val, offset_type, ctrl)			\
387 ({								\
388 	offset_type offset;					\
389 	if ((ctrl)->maximum >= 0 &&				\
390 	    val >= (ctrl)->maximum - (s32)((ctrl)->step / 2))	\
391 		val = (ctrl)->maximum;				\
392 	else							\
393 		val += (s32)((ctrl)->step / 2);			\
394 	val = clamp_t(typeof(val), val,				\
395 		      (ctrl)->minimum, (ctrl)->maximum);	\
396 	offset = (val) - (ctrl)->minimum;			\
397 	offset = (ctrl)->step * (offset / (u32)(ctrl)->step);	\
398 	val = (ctrl)->minimum + offset;				\
399 	0;							\
400 })
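/*
 * A worked example of the macro above, with hypothetical control limits
 * minimum = 0, maximum = 128, step = 8:
 *
 *   - a requested value of 29 is not close to the maximum, so it becomes
 *     29 + 8 / 2 = 33, is clamped to 33 and is then snapped down to
 *     0 + 8 * (33 / 8) = 32;
 *   - a requested value of 126 satisfies 126 >= 128 - 8 / 2, so it is set
 *     to the maximum (128) instead of being rounded past it, which is the
 *     wrap-around the comment warns about.
 */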
401 
402 /* Validate a new control */
403 
404 #define zero_padding(s) \
405 	memset(&(s).padding, 0, sizeof((s).padding))
406 #define zero_reserved(s) \
407 	memset(&(s).reserved, 0, sizeof((s).reserved))
408 
409 static int
410 validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf)
411 {
412 	unsigned int i;
413 
414 	if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED |
415 			  V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE))
416 		return -EINVAL;
417 
418 	/* Check that all values are in the accepted range. */
419 	if (lf->level > GENMASK(5, 0))
420 		return -EINVAL;
421 
422 	if (lf->sharpness > GENMASK(2, 0))
423 		return -EINVAL;
424 
425 	for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++)
426 		if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63)
427 			return -EINVAL;
428 
429 	for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++)
430 		if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63)
431 			return -EINVAL;
432 
433 	zero_reserved(*lf);
434 	return 0;
435 }
436 
437 static int
438 validate_vp9_quant_params(struct v4l2_vp9_quantization *quant)
439 {
440 	if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 ||
441 	    quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 ||
442 	    quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15)
443 		return -EINVAL;
444 
445 	zero_reserved(*quant);
446 	return 0;
447 }
448 
449 static int
450 validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg)
451 {
452 	unsigned int i, j;
453 
454 	if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED |
455 			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP |
456 			   V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE |
457 			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA |
458 			   V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
459 		return -EINVAL;
460 
461 	for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) {
462 		if (seg->feature_enabled[i] &
463 		    ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK)
464 			return -EINVAL;
465 	}
466 
467 	for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) {
468 		static const int range[] = { 255, 63, 3, 0 };
469 
470 		for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) {
471 			if (seg->feature_data[i][j] < -range[j] ||
472 			    seg->feature_data[i][j] > range[j])
473 				return -EINVAL;
474 		}
475 	}
476 
477 	zero_reserved(*seg);
478 	return 0;
479 }
480 
481 static int
482 validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr)
483 {
484 	if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT)
485 		return -EINVAL;
486 
487 	return 0;
488 }
489 
490 static int
491 validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame)
492 {
493 	int ret;
494 
495 	/* Make sure we're not passed invalid flags. */
496 	if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
497 		  V4L2_VP9_FRAME_FLAG_SHOW_FRAME |
498 		  V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT |
499 		  V4L2_VP9_FRAME_FLAG_INTRA_ONLY |
500 		  V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV |
501 		  V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX |
502 		  V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE |
503 		  V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
504 		  V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING |
505 		  V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING))
506 		return -EINVAL;
507 
508 	if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT &&
509 	    frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX)
510 		return -EINVAL;
511 
512 	if (frame->profile > V4L2_VP9_PROFILE_MAX)
513 		return -EINVAL;
514 
515 	if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL)
516 		return -EINVAL;
517 
518 	if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX)
519 		return -EINVAL;
520 
521 	/*
522 	 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10
523 	 * and 12 bit depths.
524 	 */
525 	if ((frame->profile < 2 && frame->bit_depth != 8) ||
526 	    (frame->profile >= 2 &&
527 	     (frame->bit_depth != 10 && frame->bit_depth != 12)))
528 		return -EINVAL;
529 
530 	/* Profiles 0 and 2 only accept YUV 4:2:0. */
531 	if ((frame->profile == 0 || frame->profile == 2) &&
532 	    (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) ||
533 	     !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
534 		return -EINVAL;
535 
536 	/* Profiles 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */
537 	if ((frame->profile == 1 || frame->profile == 3) &&
538 	    ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) &&
539 	     (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
540 		return -EINVAL;
541 
542 	if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE)
543 		return -EINVAL;
544 
545 	/*
546 	 * According to the spec, tile_cols_log2 shall be less than or equal
547 	 * to 6.
548 	 */
549 	if (frame->tile_cols_log2 > 6)
550 		return -EINVAL;
551 
552 	if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT)
553 		return -EINVAL;
554 
555 	ret = validate_vp9_lf_params(&frame->lf);
556 	if (ret)
557 		return ret;
558 
559 	ret = validate_vp9_quant_params(&frame->quant);
560 	if (ret)
561 		return ret;
562 
563 	ret = validate_vp9_seg_params(&frame->seg);
564 	if (ret)
565 		return ret;
566 
567 	zero_reserved(*frame);
568 	return 0;
569 }
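/*
 * Quick reference for the per-profile checks above, derived directly from
 * validate_vp9_frame():
 *
 *   profile | bit depth | allowed subsampling
 *   --------+-----------+------------------------
 *      0    |     8     | 4:2:0 only
 *      1    |     8     | 4:2:2, 4:4:0 or 4:4:4
 *      2    | 10 or 12  | 4:2:0 only
 *      3    | 10 or 12  | 4:2:2, 4:4:0 or 4:4:4
 */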
570 
571 static int validate_av1_quantization(struct v4l2_av1_quantization *q)
572 {
573 	if (q->flags > GENMASK(2, 0))
574 		return -EINVAL;
575 
576 	if (q->delta_q_y_dc < -64 || q->delta_q_y_dc > 63 ||
577 	    q->delta_q_u_dc < -64 || q->delta_q_u_dc > 63 ||
578 	    q->delta_q_v_dc < -64 || q->delta_q_v_dc > 63 ||
579 	    q->delta_q_u_ac < -64 || q->delta_q_u_ac > 63 ||
580 	    q->delta_q_v_ac < -64 || q->delta_q_v_ac > 63 ||
581 	    q->delta_q_res > GENMASK(1, 0))
582 		return -EINVAL;
583 
584 	if (q->qm_y > GENMASK(3, 0) ||
585 	    q->qm_u > GENMASK(3, 0) ||
586 	    q->qm_v > GENMASK(3, 0))
587 		return -EINVAL;
588 
589 	return 0;
590 }
591 
592 static int validate_av1_segmentation(struct v4l2_av1_segmentation *s)
593 {
594 	u32 i;
595 	u32 j;
596 
597 	if (s->flags > GENMASK(4, 0))
598 		return -EINVAL;
599 
600 	for (i = 0; i < ARRAY_SIZE(s->feature_data); i++) {
601 		static const int segmentation_feature_signed[] = { 1, 1, 1, 1, 1, 0, 0, 0 };
602 		static const int segmentation_feature_max[] = { 255, 63, 63, 63, 63, 7, 0, 0};
603 
604 		for (j = 0; j < ARRAY_SIZE(s->feature_data[j]); j++) {
605 			s32 limit = segmentation_feature_max[j];
606 
607 			if (segmentation_feature_signed[j]) {
608 				if (s->feature_data[i][j] < -limit ||
609 				    s->feature_data[i][j] > limit)
610 					return -EINVAL;
611 			} else {
612 				if (s->feature_data[i][j] < 0 || s->feature_data[i][j] > limit)
613 					return -EINVAL;
614 			}
615 		}
616 	}
617 
618 	return 0;
619 }
620 
621 static int validate_av1_loop_filter(struct v4l2_av1_loop_filter *lf)
622 {
623 	u32 i;
624 
625 	if (lf->flags > GENMASK(3, 0))
626 		return -EINVAL;
627 
628 	for (i = 0; i < ARRAY_SIZE(lf->level); i++) {
629 		if (lf->level[i] > GENMASK(5, 0))
630 			return -EINVAL;
631 	}
632 
633 	if (lf->sharpness > GENMASK(2, 0))
634 		return -EINVAL;
635 
636 	for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
637 		if (lf->ref_deltas[i] < -64 || lf->ref_deltas[i] > 63)
638 			return -EINVAL;
639 	}
640 
641 	for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
642 		if (lf->mode_deltas[i] < -64 || lf->mode_deltas[i] > 63)
643 			return -EINVAL;
644 	}
645 
646 	return 0;
647 }
648 
649 static int validate_av1_cdef(struct v4l2_av1_cdef *cdef)
650 {
651 	u32 i;
652 
653 	if (cdef->damping_minus_3 > GENMASK(1, 0) ||
654 	    cdef->bits > GENMASK(1, 0))
655 		return -EINVAL;
656 
657 	for (i = 0; i < 1 << cdef->bits; i++) {
658 		if (cdef->y_pri_strength[i] > GENMASK(3, 0) ||
659 		    cdef->y_sec_strength[i] > 4 ||
660 		    cdef->uv_pri_strength[i] > GENMASK(3, 0) ||
661 		    cdef->uv_sec_strength[i] > 4)
662 			return -EINVAL;
663 	}
664 
665 	return 0;
666 }
667 
668 static int validate_av1_loop_restauration(struct v4l2_av1_loop_restoration *lr)
669 {
670 	if (lr->lr_unit_shift > 3 || lr->lr_uv_shift > 1)
671 		return -EINVAL;
672 
673 	return 0;
674 }
675 
676 static int validate_av1_film_grain(struct v4l2_ctrl_av1_film_grain *fg)
677 {
678 	u32 i;
679 
680 	if (fg->flags > GENMASK(4, 0))
681 		return -EINVAL;
682 
683 	if (fg->film_grain_params_ref_idx > GENMASK(2, 0) ||
684 	    fg->num_y_points > 14 ||
685 	    fg->num_cb_points > 10 ||
686 	    fg->num_cr_points > GENMASK(3, 0) ||
687 	    fg->grain_scaling_minus_8 > GENMASK(1, 0) ||
688 	    fg->ar_coeff_lag > GENMASK(1, 0) ||
689 	    fg->ar_coeff_shift_minus_6 > GENMASK(1, 0) ||
690 	    fg->grain_scale_shift > GENMASK(1, 0))
691 		return -EINVAL;
692 
693 	if (!(fg->flags & V4L2_AV1_FILM_GRAIN_FLAG_APPLY_GRAIN))
694 		return 0;
695 
696 	for (i = 1; i < fg->num_y_points; i++)
697 		if (fg->point_y_value[i] <= fg->point_y_value[i - 1])
698 			return -EINVAL;
699 
700 	for (i = 1; i < fg->num_cb_points; i++)
701 		if (fg->point_cb_value[i] <= fg->point_cb_value[i - 1])
702 			return -EINVAL;
703 
704 	for (i = 1; i < fg->num_cr_points; i++)
705 		if (fg->point_cr_value[i] <= fg->point_cr_value[i - 1])
706 			return -EINVAL;
707 
708 	return 0;
709 }
710 
711 static int validate_av1_frame(struct v4l2_ctrl_av1_frame *f)
712 {
713 	int ret = 0;
714 
715 	ret = validate_av1_quantization(&f->quantization);
716 	if (ret)
717 		return ret;
718 	ret = validate_av1_segmentation(&f->segmentation);
719 	if (ret)
720 		return ret;
721 	ret = validate_av1_loop_filter(&f->loop_filter);
722 	if (ret)
723 		return ret;
724 	ret = validate_av1_cdef(&f->cdef);
725 	if (ret)
726 		return ret;
727 	ret = validate_av1_loop_restauration(&f->loop_restoration);
728 	if (ret)
729 		return ret;
730 
731 	if (f->flags &
732 	~(V4L2_AV1_FRAME_FLAG_SHOW_FRAME |
733 	  V4L2_AV1_FRAME_FLAG_SHOWABLE_FRAME |
734 	  V4L2_AV1_FRAME_FLAG_ERROR_RESILIENT_MODE |
735 	  V4L2_AV1_FRAME_FLAG_DISABLE_CDF_UPDATE |
736 	  V4L2_AV1_FRAME_FLAG_ALLOW_SCREEN_CONTENT_TOOLS |
737 	  V4L2_AV1_FRAME_FLAG_FORCE_INTEGER_MV |
738 	  V4L2_AV1_FRAME_FLAG_ALLOW_INTRABC |
739 	  V4L2_AV1_FRAME_FLAG_USE_SUPERRES |
740 	  V4L2_AV1_FRAME_FLAG_ALLOW_HIGH_PRECISION_MV |
741 	  V4L2_AV1_FRAME_FLAG_IS_MOTION_MODE_SWITCHABLE |
742 	  V4L2_AV1_FRAME_FLAG_USE_REF_FRAME_MVS |
743 	  V4L2_AV1_FRAME_FLAG_DISABLE_FRAME_END_UPDATE_CDF |
744 	  V4L2_AV1_FRAME_FLAG_ALLOW_WARPED_MOTION |
745 	  V4L2_AV1_FRAME_FLAG_REFERENCE_SELECT |
746 	  V4L2_AV1_FRAME_FLAG_REDUCED_TX_SET |
747 	  V4L2_AV1_FRAME_FLAG_SKIP_MODE_ALLOWED |
748 	  V4L2_AV1_FRAME_FLAG_SKIP_MODE_PRESENT |
749 	  V4L2_AV1_FRAME_FLAG_FRAME_SIZE_OVERRIDE |
750 	  V4L2_AV1_FRAME_FLAG_BUFFER_REMOVAL_TIME_PRESENT |
751 	  V4L2_AV1_FRAME_FLAG_FRAME_REFS_SHORT_SIGNALING))
752 		return -EINVAL;
753 
754 	if (f->superres_denom > GENMASK(2, 0) + 9)
755 		return -EINVAL;
756 
757 	return 0;
758 }
759 
760 static int validate_av1_sequence(struct v4l2_ctrl_av1_sequence *s)
761 {
762 	if (s->flags &
763 	~(V4L2_AV1_SEQUENCE_FLAG_STILL_PICTURE |
764 	 V4L2_AV1_SEQUENCE_FLAG_USE_128X128_SUPERBLOCK |
765 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_FILTER_INTRA |
766 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTRA_EDGE_FILTER |
767 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTERINTRA_COMPOUND |
768 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_MASKED_COMPOUND |
769 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_WARPED_MOTION |
770 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_DUAL_FILTER |
771 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_ORDER_HINT |
772 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_JNT_COMP |
773 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_REF_FRAME_MVS |
774 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_SUPERRES |
775 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_CDEF |
776 	 V4L2_AV1_SEQUENCE_FLAG_ENABLE_RESTORATION |
777 	 V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME |
778 	 V4L2_AV1_SEQUENCE_FLAG_COLOR_RANGE |
779 	 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_X |
780 	 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_Y |
781 	 V4L2_AV1_SEQUENCE_FLAG_FILM_GRAIN_PARAMS_PRESENT |
782 	 V4L2_AV1_SEQUENCE_FLAG_SEPARATE_UV_DELTA_Q))
783 		return -EINVAL;
784 
785 	if (s->seq_profile == 1 && s->flags & V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME)
786 		return -EINVAL;
787 
788 	/* reserved */
789 	if (s->seq_profile > 2)
790 		return -EINVAL;
791 
792 	/* TODO: PROFILES */
793 	return 0;
794 }
795 
796 /*
797  * Compound controls validation requires setting unused fields/flags to zero
798  * in order to properly detect unchanged controls with v4l2_ctrl_type_op_equal's
799  * memcmp.
800  */
801 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
802 				 union v4l2_ctrl_ptr ptr)
803 {
804 	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
805 	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
806 	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
807 	struct v4l2_ctrl_fwht_params *p_fwht_params;
808 	struct v4l2_ctrl_h264_sps *p_h264_sps;
809 	struct v4l2_ctrl_h264_pps *p_h264_pps;
810 	struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
811 	struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
812 	struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
813 	struct v4l2_ctrl_hevc_sps *p_hevc_sps;
814 	struct v4l2_ctrl_hevc_pps *p_hevc_pps;
815 	struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
816 	struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
817 	struct v4l2_area *area;
818 	void *p = ptr.p + idx * ctrl->elem_size;
819 	unsigned int i;
820 
821 	switch ((u32)ctrl->type) {
822 	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
823 		p_mpeg2_sequence = p;
824 
825 		switch (p_mpeg2_sequence->chroma_format) {
826 		case 1: /* 4:2:0 */
827 		case 2: /* 4:2:2 */
828 		case 3: /* 4:4:4 */
829 			break;
830 		default:
831 			return -EINVAL;
832 		}
833 		break;
834 
835 	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
836 		p_mpeg2_picture = p;
837 
838 		switch (p_mpeg2_picture->intra_dc_precision) {
839 		case 0: /* 8 bits */
840 		case 1: /* 9 bits */
841 		case 2: /* 10 bits */
842 		case 3: /* 11 bits */
843 			break;
844 		default:
845 			return -EINVAL;
846 		}
847 
848 		switch (p_mpeg2_picture->picture_structure) {
849 		case V4L2_MPEG2_PIC_TOP_FIELD:
850 		case V4L2_MPEG2_PIC_BOTTOM_FIELD:
851 		case V4L2_MPEG2_PIC_FRAME:
852 			break;
853 		default:
854 			return -EINVAL;
855 		}
856 
857 		switch (p_mpeg2_picture->picture_coding_type) {
858 		case V4L2_MPEG2_PIC_CODING_TYPE_I:
859 		case V4L2_MPEG2_PIC_CODING_TYPE_P:
860 		case V4L2_MPEG2_PIC_CODING_TYPE_B:
861 			break;
862 		default:
863 			return -EINVAL;
864 		}
865 		zero_reserved(*p_mpeg2_picture);
866 		break;
867 
868 	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
869 		break;
870 
871 	case V4L2_CTRL_TYPE_FWHT_PARAMS:
872 		p_fwht_params = p;
873 		if (p_fwht_params->version < V4L2_FWHT_VERSION)
874 			return -EINVAL;
875 		if (!p_fwht_params->width || !p_fwht_params->height)
876 			return -EINVAL;
877 		break;
878 
879 	case V4L2_CTRL_TYPE_H264_SPS:
880 		p_h264_sps = p;
881 
882 		/* Some syntax elements are only conditionally valid */
883 		if (p_h264_sps->pic_order_cnt_type != 0) {
884 			p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
885 		} else if (p_h264_sps->pic_order_cnt_type != 1) {
886 			p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
887 			p_h264_sps->offset_for_non_ref_pic = 0;
888 			p_h264_sps->offset_for_top_to_bottom_field = 0;
889 			memset(&p_h264_sps->offset_for_ref_frame, 0,
890 			       sizeof(p_h264_sps->offset_for_ref_frame));
891 		}
892 
893 		if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
894 			p_h264_sps->chroma_format_idc = 1;
895 			p_h264_sps->bit_depth_luma_minus8 = 0;
896 			p_h264_sps->bit_depth_chroma_minus8 = 0;
897 
898 			p_h264_sps->flags &=
899 				~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;
900 
901 			if (p_h264_sps->chroma_format_idc < 3)
902 				p_h264_sps->flags &=
903 					~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
904 		}
905 
906 		if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
907 			p_h264_sps->flags &=
908 				~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;
909 
910 		/*
911 		 * Chroma 4:2:2 format requires at least the High 4:2:2 profile.
912 		 *
913 		 * The H264 specification and well-known parser implementations
914 		 * use profile-idc values directly, as that is clearer and
915 		 * less ambiguous. We do the same here.
916 		 */
917 		if (p_h264_sps->profile_idc < 122 &&
918 		    p_h264_sps->chroma_format_idc > 1)
919 			return -EINVAL;
920 		/* Chroma 4:4:4 format requires at least the High 4:4:4 profile */
921 		if (p_h264_sps->profile_idc < 244 &&
922 		    p_h264_sps->chroma_format_idc > 2)
923 			return -EINVAL;
924 		if (p_h264_sps->chroma_format_idc > 3)
925 			return -EINVAL;
926 
927 		if (p_h264_sps->bit_depth_luma_minus8 > 6)
928 			return -EINVAL;
929 		if (p_h264_sps->bit_depth_chroma_minus8 > 6)
930 			return -EINVAL;
931 		if (p_h264_sps->log2_max_frame_num_minus4 > 12)
932 			return -EINVAL;
933 		if (p_h264_sps->pic_order_cnt_type > 2)
934 			return -EINVAL;
935 		if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
936 			return -EINVAL;
937 		if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
938 			return -EINVAL;
939 		break;
940 
941 	case V4L2_CTRL_TYPE_H264_PPS:
942 		p_h264_pps = p;
943 
944 		if (p_h264_pps->num_slice_groups_minus1 > 7)
945 			return -EINVAL;
946 		if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
947 		    (V4L2_H264_REF_LIST_LEN - 1))
948 			return -EINVAL;
949 		if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
950 		    (V4L2_H264_REF_LIST_LEN - 1))
951 			return -EINVAL;
952 		if (p_h264_pps->weighted_bipred_idc > 2)
953 			return -EINVAL;
954 		/*
955 		 * pic_init_qp_minus26 shall be in the range of
956 		 * -(26 + QpBdOffset_y) to +25, inclusive,
957 		 *  where QpBdOffset_y is 6 * bit_depth_luma_minus8
958 		 */
959 		if (p_h264_pps->pic_init_qp_minus26 < -62 ||
960 		    p_h264_pps->pic_init_qp_minus26 > 25)
961 			return -EINVAL;
962 		if (p_h264_pps->pic_init_qs_minus26 < -26 ||
963 		    p_h264_pps->pic_init_qs_minus26 > 25)
964 			return -EINVAL;
965 		if (p_h264_pps->chroma_qp_index_offset < -12 ||
966 		    p_h264_pps->chroma_qp_index_offset > 12)
967 			return -EINVAL;
968 		if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
969 		    p_h264_pps->second_chroma_qp_index_offset > 12)
970 			return -EINVAL;
971 		break;
972 
973 	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
974 		break;
975 
976 	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
977 		p_h264_pred_weights = p;
978 
979 		if (p_h264_pred_weights->luma_log2_weight_denom > 7)
980 			return -EINVAL;
981 		if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
982 			return -EINVAL;
983 		break;
984 
985 	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
986 		p_h264_slice_params = p;
987 
988 		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
989 			p_h264_slice_params->flags &=
990 				~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;
991 
992 		if (p_h264_slice_params->colour_plane_id > 2)
993 			return -EINVAL;
994 		if (p_h264_slice_params->cabac_init_idc > 2)
995 			return -EINVAL;
996 		if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
997 			return -EINVAL;
998 		if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
999 		    p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
1000 			return -EINVAL;
1001 		if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
1002 		    p_h264_slice_params->slice_beta_offset_div2 > 6)
1003 			return -EINVAL;
1004 
1005 		if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
1006 		    p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
1007 			p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
1008 		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
1009 			p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;
1010 
1011 		if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
1012 		    (V4L2_H264_REF_LIST_LEN - 1))
1013 			return -EINVAL;
1014 		if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
1015 		    (V4L2_H264_REF_LIST_LEN - 1))
1016 			return -EINVAL;
1017 		zero_reserved(*p_h264_slice_params);
1018 		break;
1019 
1020 	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
1021 		p_h264_dec_params = p;
1022 
1023 		if (p_h264_dec_params->nal_ref_idc > 3)
1024 			return -EINVAL;
1025 		for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
1026 			struct v4l2_h264_dpb_entry *dpb_entry =
1027 				&p_h264_dec_params->dpb[i];
1028 
1029 			zero_reserved(*dpb_entry);
1030 		}
1031 		zero_reserved(*p_h264_dec_params);
1032 		break;
1033 
1034 	case V4L2_CTRL_TYPE_VP8_FRAME:
1035 		p_vp8_frame = p;
1036 
1037 		switch (p_vp8_frame->num_dct_parts) {
1038 		case 1:
1039 		case 2:
1040 		case 4:
1041 		case 8:
1042 			break;
1043 		default:
1044 			return -EINVAL;
1045 		}
1046 		zero_padding(p_vp8_frame->segment);
1047 		zero_padding(p_vp8_frame->lf);
1048 		zero_padding(p_vp8_frame->quant);
1049 		zero_padding(p_vp8_frame->entropy);
1050 		zero_padding(p_vp8_frame->coder_state);
1051 		break;
1052 
1053 	case V4L2_CTRL_TYPE_HEVC_SPS:
1054 		p_hevc_sps = p;
1055 
1056 		if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
1057 			p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
1058 			p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
1059 			p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
1060 			p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
1061 		}
1062 
1063 		if (!(p_hevc_sps->flags &
1064 		      V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
1065 			p_hevc_sps->num_long_term_ref_pics_sps = 0;
1066 		break;
1067 
1068 	case V4L2_CTRL_TYPE_HEVC_PPS:
1069 		p_hevc_pps = p;
1070 
1071 		if (!(p_hevc_pps->flags &
1072 		      V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
1073 			p_hevc_pps->diff_cu_qp_delta_depth = 0;
1074 
1075 		if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
1076 			p_hevc_pps->num_tile_columns_minus1 = 0;
1077 			p_hevc_pps->num_tile_rows_minus1 = 0;
1078 			memset(&p_hevc_pps->column_width_minus1, 0,
1079 			       sizeof(p_hevc_pps->column_width_minus1));
1080 			memset(&p_hevc_pps->row_height_minus1, 0,
1081 			       sizeof(p_hevc_pps->row_height_minus1));
1082 
1083 			p_hevc_pps->flags &=
1084 				~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
1085 		}
1086 
1087 		if (p_hevc_pps->flags &
1088 		    V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
1089 			p_hevc_pps->pps_beta_offset_div2 = 0;
1090 			p_hevc_pps->pps_tc_offset_div2 = 0;
1091 		}
1092 		break;
1093 
1094 	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
1095 		p_hevc_decode_params = p;
1096 
1097 		if (p_hevc_decode_params->num_active_dpb_entries >
1098 		    V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
1099 			return -EINVAL;
1100 		break;
1101 
1102 	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
1103 		break;
1104 
1105 	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
1106 		break;
1107 
1108 	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
1109 		p_hdr10_mastering = p;
1110 
1111 		for (i = 0; i < 3; ++i) {
1112 			if (p_hdr10_mastering->display_primaries_x[i] <
1113 				V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
1114 			    p_hdr10_mastering->display_primaries_x[i] >
1115 				V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
1116 			    p_hdr10_mastering->display_primaries_y[i] <
1117 				V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
1118 			    p_hdr10_mastering->display_primaries_y[i] >
1119 				V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
1120 				return -EINVAL;
1121 		}
1122 
1123 		if (p_hdr10_mastering->white_point_x <
1124 			V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
1125 		    p_hdr10_mastering->white_point_x >
1126 			V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
1127 		    p_hdr10_mastering->white_point_y <
1128 			V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
1129 		    p_hdr10_mastering->white_point_y >
1130 			V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
1131 			return -EINVAL;
1132 
1133 		if (p_hdr10_mastering->max_display_mastering_luminance <
1134 			V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
1135 		    p_hdr10_mastering->max_display_mastering_luminance >
1136 			V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
1137 		    p_hdr10_mastering->min_display_mastering_luminance <
1138 			V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
1139 		    p_hdr10_mastering->min_display_mastering_luminance >
1140 			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
1141 			return -EINVAL;
1142 
1143 		/* The following restriction comes from ITU-T Rec. H.265 spec */
1144 		if (p_hdr10_mastering->max_display_mastering_luminance ==
1145 			V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
1146 		    p_hdr10_mastering->min_display_mastering_luminance ==
1147 			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
1148 			return -EINVAL;
1149 
1150 		break;
1151 
1152 	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
1153 		break;
1154 
1155 	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
1156 		return validate_vp9_compressed_hdr(p);
1157 
1158 	case V4L2_CTRL_TYPE_VP9_FRAME:
1159 		return validate_vp9_frame(p);
1160 	case V4L2_CTRL_TYPE_AV1_FRAME:
1161 		return validate_av1_frame(p);
1162 	case V4L2_CTRL_TYPE_AV1_SEQUENCE:
1163 		return validate_av1_sequence(p);
1164 	case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY:
1165 		break;
1166 	case V4L2_CTRL_TYPE_AV1_FILM_GRAIN:
1167 		return validate_av1_film_grain(p);
1168 
1169 	case V4L2_CTRL_TYPE_AREA:
1170 		area = p;
1171 		if (!area->width || !area->height)
1172 			return -EINVAL;
1173 		break;
1174 
1175 	default:
1176 		return -EINVAL;
1177 	}
1178 
1179 	return 0;
1180 }
1181 
1182 static int std_validate_elem(const struct v4l2_ctrl *ctrl, u32 idx,
1183 			     union v4l2_ctrl_ptr ptr)
1184 {
1185 	size_t len;
1186 	u64 offset;
1187 	s64 val;
1188 
1189 	switch ((u32)ctrl->type) {
1190 	case V4L2_CTRL_TYPE_INTEGER:
1191 		return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
1192 	case V4L2_CTRL_TYPE_INTEGER64:
1193 		/*
1194 		 * We can't use the ROUND_TO_RANGE define here due to
1195 		 * the u64 divide that needs special care.
1196 		 */
1197 		val = ptr.p_s64[idx];
1198 		if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
1199 			val = ctrl->maximum;
1200 		else
1201 			val += (s64)(ctrl->step / 2);
1202 		val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
1203 		offset = val - ctrl->minimum;
1204 		do_div(offset, ctrl->step);
1205 		ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
1206 		return 0;
1207 	case V4L2_CTRL_TYPE_U8:
1208 		return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
1209 	case V4L2_CTRL_TYPE_U16:
1210 		return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
1211 	case V4L2_CTRL_TYPE_U32:
1212 		return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);
1213 
1214 	case V4L2_CTRL_TYPE_BOOLEAN:
1215 		ptr.p_s32[idx] = !!ptr.p_s32[idx];
1216 		return 0;
1217 
1218 	case V4L2_CTRL_TYPE_MENU:
1219 	case V4L2_CTRL_TYPE_INTEGER_MENU:
1220 		if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
1221 			return -ERANGE;
1222 		if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
1223 		    (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
1224 			return -EINVAL;
1225 		if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
1226 		    ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
1227 			return -EINVAL;
1228 		return 0;
1229 
1230 	case V4L2_CTRL_TYPE_BITMASK:
1231 		ptr.p_s32[idx] &= ctrl->maximum;
1232 		return 0;
1233 
1234 	case V4L2_CTRL_TYPE_BUTTON:
1235 	case V4L2_CTRL_TYPE_CTRL_CLASS:
1236 		ptr.p_s32[idx] = 0;
1237 		return 0;
1238 
1239 	case V4L2_CTRL_TYPE_STRING:
1240 		idx *= ctrl->elem_size;
1241 		len = strlen(ptr.p_char + idx);
1242 		if (len < ctrl->minimum)
1243 			return -ERANGE;
1244 		if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
1245 			return -ERANGE;
1246 		return 0;
1247 
1248 	default:
1249 		return std_validate_compound(ctrl, idx, ptr);
1250 	}
1251 }
1252 
1253 int v4l2_ctrl_type_op_validate(const struct v4l2_ctrl *ctrl,
1254 			       union v4l2_ctrl_ptr ptr)
1255 {
1256 	unsigned int i;
1257 	int ret = 0;
1258 
1259 	switch ((u32)ctrl->type) {
1260 	case V4L2_CTRL_TYPE_U8:
1261 		if (ctrl->maximum == 0xff && ctrl->minimum == 0 && ctrl->step == 1)
1262 			return 0;
1263 		break;
1264 	case V4L2_CTRL_TYPE_U16:
1265 		if (ctrl->maximum == 0xffff && ctrl->minimum == 0 && ctrl->step == 1)
1266 			return 0;
1267 		break;
1268 	case V4L2_CTRL_TYPE_U32:
1269 		if (ctrl->maximum == 0xffffffff && ctrl->minimum == 0 && ctrl->step == 1)
1270 			return 0;
1271 		break;
1272 
1273 	case V4L2_CTRL_TYPE_BUTTON:
1274 	case V4L2_CTRL_TYPE_CTRL_CLASS:
1275 		memset(ptr.p_s32, 0, ctrl->new_elems * sizeof(s32));
1276 		return 0;
1277 	}
1278 
1279 	for (i = 0; !ret && i < ctrl->new_elems; i++)
1280 		ret = std_validate_elem(ctrl, i, ptr);
1281 	return ret;
1282 }
1283 EXPORT_SYMBOL(v4l2_ctrl_type_op_validate);
1284 
1285 static const struct v4l2_ctrl_type_ops std_type_ops = {
1286 	.equal = v4l2_ctrl_type_op_equal,
1287 	.init = v4l2_ctrl_type_op_init,
1288 	.log = v4l2_ctrl_type_op_log,
1289 	.validate = v4l2_ctrl_type_op_validate,
1290 };
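/*
 * These are the default type operations used for controls created without
 * custom type_ops. Since the individual helpers are exported, a driver that
 * only needs to override one of them can reuse the rest; a minimal sketch
 * (the names below are hypothetical, and the ops would be passed through
 * the type_ops field of struct v4l2_ctrl_config):
 *
 *	static void my_log(const struct v4l2_ctrl *ctrl)
 *	{
 *		pr_cont("my compound payload");
 *	}
 *
 *	static const struct v4l2_ctrl_type_ops my_type_ops = {
 *		.equal = v4l2_ctrl_type_op_equal,
 *		.init = v4l2_ctrl_type_op_init,
 *		.log = my_log,
 *		.validate = v4l2_ctrl_type_op_validate,
 *	};
 */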
1291 
1292 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
1293 {
1294 	if (!ctrl)
1295 		return;
1296 	if (!notify) {
1297 		ctrl->call_notify = 0;
1298 		return;
1299 	}
1300 	if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
1301 		return;
1302 	ctrl->handler->notify = notify;
1303 	ctrl->handler->notify_priv = priv;
1304 	ctrl->call_notify = 1;
1305 }
1306 EXPORT_SYMBOL(v4l2_ctrl_notify);
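/*
 * Typical driver usage, as a minimal sketch (the callback and priv names are
 * hypothetical). The callback must match v4l2_ctrl_notify_fnc, i.e.
 * void (*)(struct v4l2_ctrl *ctrl, void *priv), and only one notify
 * function can be installed per handler:
 *
 *	static void my_ctrl_notify(struct v4l2_ctrl *ctrl, void *priv)
 *	{
 *		struct my_dev *dev = priv;
 *
 *		/* react to the new control value here */
 *	}
 *
 *	v4l2_ctrl_notify(my_ctrl, my_ctrl_notify, dev);
 *
 * Calling it again with a NULL notify simply clears call_notify for that
 * control.
 */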
1307 
1308 /* Copy one value to another. */
1309 static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
1310 		       union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to,
1311 		       unsigned int elems)
1312 {
1313 	if (ctrl == NULL)
1314 		return;
1315 	memcpy(to.p, from.p_const, elems * ctrl->elem_size);
1316 }
1317 
1318 /* Copy the new value to the current value. */
1319 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
1320 {
1321 	bool changed;
1322 
1323 	if (ctrl == NULL)
1324 		return;
1325 
1326 	/* has_changed is set by cluster_changed */
1327 	changed = ctrl->has_changed;
1328 	if (changed) {
1329 		if (ctrl->is_dyn_array)
1330 			ctrl->elems = ctrl->new_elems;
1331 		ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems);
1332 	}
1333 
1334 	if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
1335 		/* Note: CH_FLAGS is only set for auto clusters. */
1336 		ctrl->flags &=
1337 			~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
1338 		if (!is_cur_manual(ctrl->cluster[0])) {
1339 			ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
1340 			if (ctrl->cluster[0]->has_volatiles)
1341 				ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
1342 		}
1343 		fh = NULL;
1344 	}
1345 	if (changed || ch_flags) {
1346 		/* If a control was changed that was not one of the controls
1347 		   modified by the application, then send the event to all. */
1348 		if (!ctrl->is_new)
1349 			fh = NULL;
1350 		send_event(fh, ctrl,
1351 			(changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
1352 		if (ctrl->call_notify && changed && ctrl->handler->notify)
1353 			ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
1354 	}
1355 }
1356 
1357 /* Copy the current value to the new value */
1358 void cur_to_new(struct v4l2_ctrl *ctrl)
1359 {
1360 	if (ctrl == NULL)
1361 		return;
1362 	if (ctrl->is_dyn_array)
1363 		ctrl->new_elems = ctrl->elems;
1364 	ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
1365 }
1366 
1367 static bool req_alloc_array(struct v4l2_ctrl_ref *ref, u32 elems)
1368 {
1369 	void *tmp;
1370 
1371 	if (elems == ref->p_req_array_alloc_elems)
1372 		return true;
1373 	if (ref->ctrl->is_dyn_array &&
1374 	    elems < ref->p_req_array_alloc_elems)
1375 		return true;
1376 
1377 	tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL);
1378 
1379 	if (!tmp) {
1380 		ref->p_req_array_enomem = true;
1381 		return false;
1382 	}
1383 	ref->p_req_array_enomem = false;
1384 	kvfree(ref->p_req.p);
1385 	ref->p_req.p = tmp;
1386 	ref->p_req_array_alloc_elems = elems;
1387 	return true;
1388 }
1389 
1390 /* Copy the new value to the request value */
1391 void new_to_req(struct v4l2_ctrl_ref *ref)
1392 {
1393 	struct v4l2_ctrl *ctrl;
1394 
1395 	if (!ref)
1396 		return;
1397 
1398 	ctrl = ref->ctrl;
1399 	if (ctrl->is_array && !req_alloc_array(ref, ctrl->new_elems))
1400 		return;
1401 
1402 	ref->p_req_elems = ctrl->new_elems;
1403 	ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems);
1404 	ref->p_req_valid = true;
1405 }
1406 
1407 /* Copy the current value to the request value */
1408 void cur_to_req(struct v4l2_ctrl_ref *ref)
1409 {
1410 	struct v4l2_ctrl *ctrl;
1411 
1412 	if (!ref)
1413 		return;
1414 
1415 	ctrl = ref->ctrl;
1416 	if (ctrl->is_array && !req_alloc_array(ref, ctrl->elems))
1417 		return;
1418 
1419 	ref->p_req_elems = ctrl->elems;
1420 	ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems);
1421 	ref->p_req_valid = true;
1422 }
1423 
1424 /* Copy the request value to the new value */
1425 int req_to_new(struct v4l2_ctrl_ref *ref)
1426 {
1427 	struct v4l2_ctrl *ctrl;
1428 
1429 	if (!ref)
1430 		return 0;
1431 
1432 	ctrl = ref->ctrl;
1433 
1434 	/*
1435 	 * This control was never set in the request, so just use the current
1436 	 * value.
1437 	 */
1438 	if (!ref->p_req_valid) {
1439 		if (ctrl->is_dyn_array)
1440 			ctrl->new_elems = ctrl->elems;
1441 		ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
1442 		return 0;
1443 	}
1444 
1445 	/* Not an array, so just copy the request value */
1446 	if (!ctrl->is_array) {
1447 		ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
1448 		return 0;
1449 	}
1450 
1451 	/* Sanity check, should never happen */
1452 	if (WARN_ON(!ref->p_req_array_alloc_elems))
1453 		return -ENOMEM;
1454 
1455 	if (!ctrl->is_dyn_array &&
1456 	    ref->p_req_elems != ctrl->p_array_alloc_elems)
1457 		return -ENOMEM;
1458 
1459 	/*
1460 	 * Check if the number of elements in the request is more than the
1461 	 * elements in ctrl->p_array. If so, attempt to realloc ctrl->p_array.
1462 	 * Note that p_array is allocated with twice the number of elements
1463 	 * in the dynamic array since it has to store both the current and
1464 	 * new value of such a control.
1465 	 */
1466 	if (ref->p_req_elems > ctrl->p_array_alloc_elems) {
1467 		unsigned int sz = ref->p_req_elems * ctrl->elem_size;
1468 		void *old = ctrl->p_array;
1469 		void *tmp = kvzalloc(2 * sz, GFP_KERNEL);
1470 
1471 		if (!tmp)
1472 			return -ENOMEM;
1473 		memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size);
1474 		memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size);
1475 		ctrl->p_new.p = tmp;
1476 		ctrl->p_cur.p = tmp + sz;
1477 		ctrl->p_array = tmp;
1478 		ctrl->p_array_alloc_elems = ref->p_req_elems;
1479 		kvfree(old);
1480 	}
1481 
1482 	ctrl->new_elems = ref->p_req_elems;
1483 	ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
1484 	return 0;
1485 }
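/*
 * Layout of ctrl->p_array after the reallocation above, with
 * sz = ref->p_req_elems * ctrl->elem_size:
 *
 *	p_array:  [ new value (p_new.p) | current value (p_cur.p) ]
 *	          ^ tmp                  ^ tmp + sz
 */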
1486 
1487 /* Control range checking */
1488 int check_range(enum v4l2_ctrl_type type,
1489 		s64 min, s64 max, u64 step, s64 def)
1490 {
1491 	switch (type) {
1492 	case V4L2_CTRL_TYPE_BOOLEAN:
1493 		if (step != 1 || max > 1 || min < 0)
1494 			return -ERANGE;
1495 		fallthrough;
1496 	case V4L2_CTRL_TYPE_U8:
1497 	case V4L2_CTRL_TYPE_U16:
1498 	case V4L2_CTRL_TYPE_U32:
1499 	case V4L2_CTRL_TYPE_INTEGER:
1500 	case V4L2_CTRL_TYPE_INTEGER64:
1501 		if (step == 0 || min > max || def < min || def > max)
1502 			return -ERANGE;
1503 		return 0;
1504 	case V4L2_CTRL_TYPE_BITMASK:
1505 		if (step || min || !max || (def & ~max))
1506 			return -ERANGE;
1507 		return 0;
1508 	case V4L2_CTRL_TYPE_MENU:
1509 	case V4L2_CTRL_TYPE_INTEGER_MENU:
1510 		if (min > max || def < min || def > max ||
1511 		    min < 0 || (step && max >= BITS_PER_LONG_LONG))
1512 			return -ERANGE;
1513 		/* Note: step == menu_skip_mask for menu controls.
1514 		   So here we check if the default value is masked out. */
1515 		if (def < BITS_PER_LONG_LONG && (step & BIT_ULL(def)))
1516 			return -EINVAL;
1517 		return 0;
1518 	case V4L2_CTRL_TYPE_STRING:
1519 		if (min > max || min < 0 || step < 1 || def)
1520 			return -ERANGE;
1521 		return 0;
1522 	default:
1523 		return 0;
1524 	}
1525 }
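/*
 * Example of the menu handling above, with hypothetical values: a menu
 * control with min = 0, max = 3, def = 2 and step (the menu_skip_mask)
 * equal to BIT(2) is rejected with -EINVAL because its default entry is
 * masked out, while the same control with def = 1 passes the check.
 */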
1526 
1527 /* Set the handler's error code if it wasn't already set */
1528 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err)
1529 {
1530 	if (hdl->error == 0)
1531 		hdl->error = err;
1532 	return err;
1533 }
1534 
1535 /* Initialize the handler */
1536 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl,
1537 				 unsigned nr_of_controls_hint,
1538 				 struct lock_class_key *key, const char *name)
1539 {
1540 	mutex_init(&hdl->_lock);
1541 	hdl->lock = &hdl->_lock;
1542 	lockdep_set_class_and_name(hdl->lock, key, name);
1543 	INIT_LIST_HEAD(&hdl->ctrls);
1544 	INIT_LIST_HEAD(&hdl->ctrl_refs);
1545 	hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8;
1546 	hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]),
1547 				GFP_KERNEL);
1548 	hdl->error = hdl->buckets ? 0 : -ENOMEM;
1549 	v4l2_ctrl_handler_init_request(hdl);
1550 	return hdl->error;
1551 }
1552 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class);
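/*
 * Drivers normally invoke this through the v4l2_ctrl_handler_init() wrapper
 * macro from media/v4l2-ctrls.h, which supplies the lockdep class key and
 * lock name. A minimal sketch of typical setup (the handler and ops names
 * are hypothetical):
 *
 *	struct v4l2_ctrl_handler hdl;
 *
 *	v4l2_ctrl_handler_init(&hdl, 4);
 *	v4l2_ctrl_new_std(&hdl, &my_ctrl_ops, V4L2_CID_BRIGHTNESS,
 *			  0, 255, 1, 128);
 *	if (hdl.error) {
 *		v4l2_ctrl_handler_free(&hdl);
 *		return hdl.error;
 *	}
 */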
1553 
1554 /* Free all controls and control refs */
1555 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl)
1556 {
1557 	struct v4l2_ctrl_ref *ref, *next_ref;
1558 	struct v4l2_ctrl *ctrl, *next_ctrl;
1559 	struct v4l2_subscribed_event *sev, *next_sev;
1560 
1561 	if (hdl == NULL || hdl->buckets == NULL)
1562 		return;
1563 
1564 	v4l2_ctrl_handler_free_request(hdl);
1565 
1566 	mutex_lock(hdl->lock);
1567 	/* Free all nodes */
1568 	list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) {
1569 		list_del(&ref->node);
1570 		if (ref->p_req_array_alloc_elems)
1571 			kvfree(ref->p_req.p);
1572 		kfree(ref);
1573 	}
1574 	/* Free all controls owned by the handler */
1575 	list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) {
1576 		list_del(&ctrl->node);
1577 		list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node)
1578 			list_del(&sev->node);
1579 		kvfree(ctrl->p_array);
1580 		kvfree(ctrl);
1581 	}
1582 	kvfree(hdl->buckets);
1583 	hdl->buckets = NULL;
1584 	hdl->cached = NULL;
1585 	hdl->error = 0;
1586 	mutex_unlock(hdl->lock);
1587 	mutex_destroy(&hdl->_lock);
1588 }
1589 EXPORT_SYMBOL(v4l2_ctrl_handler_free);
1590 
1591 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer
1592    be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing
1593    with applications that do not use the NEXT_CTRL flag.
1594 
1595    We just find the n-th private user control. It's O(N), but that should not
1596    be an issue in this particular case. */
1597 static struct v4l2_ctrl_ref *find_private_ref(
1598 		struct v4l2_ctrl_handler *hdl, u32 id)
1599 {
1600 	struct v4l2_ctrl_ref *ref;
1601 
1602 	id -= V4L2_CID_PRIVATE_BASE;
1603 	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1604 		/* Search for private user controls that are compatible with
1605 		   VIDIOC_G/S_CTRL. */
1606 		if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER &&
1607 		    V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) {
1608 			if (!ref->ctrl->is_int)
1609 				continue;
1610 			if (id == 0)
1611 				return ref;
1612 			id--;
1613 		}
1614 	}
1615 	return NULL;
1616 }
1617 
1618 /* Find a control with the given ID. */
1619 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
1620 {
1621 	struct v4l2_ctrl_ref *ref;
1622 	int bucket;
1623 
1624 	id &= V4L2_CTRL_ID_MASK;
1625 
1626 	/* Old-style private controls need special handling */
1627 	if (id >= V4L2_CID_PRIVATE_BASE)
1628 		return find_private_ref(hdl, id);
1629 	bucket = id % hdl->nr_of_buckets;
1630 
1631 	/* Simple optimization: cache the last control found */
1632 	if (hdl->cached && hdl->cached->ctrl->id == id)
1633 		return hdl->cached;
1634 
1635 	/* Not in cache, search the hash */
1636 	ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
1637 	while (ref && ref->ctrl->id != id)
1638 		ref = ref->next;
1639 
1640 	if (ref)
1641 		hdl->cached = ref; /* cache it! */
1642 	return ref;
1643 }
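/*
 * Example of the bucket lookup above, with hypothetical numbers: a handler
 * created with nr_of_controls_hint = 16 has 1 + 16 / 8 = 3 buckets, so a
 * control ID hashes to bucket (id % 3) and is found by walking that
 * bucket's singly-linked "next" chain, with the last hit cached in
 * hdl->cached.
 */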
1644 
1645 /* Find a control with the given ID. Take the handler's lock first. */
1646 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id)
1647 {
1648 	struct v4l2_ctrl_ref *ref = NULL;
1649 
1650 	if (hdl) {
1651 		mutex_lock(hdl->lock);
1652 		ref = find_ref(hdl, id);
1653 		mutex_unlock(hdl->lock);
1654 	}
1655 	return ref;
1656 }
1657 
1658 /* Find a control with the given ID. */
1659 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id)
1660 {
1661 	struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id);
1662 
1663 	return ref ? ref->ctrl : NULL;
1664 }
1665 EXPORT_SYMBOL(v4l2_ctrl_find);
1666 
1667 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */
1668 int handler_new_ref(struct v4l2_ctrl_handler *hdl,
1669 		    struct v4l2_ctrl *ctrl,
1670 		    struct v4l2_ctrl_ref **ctrl_ref,
1671 		    bool from_other_dev, bool allocate_req)
1672 {
1673 	struct v4l2_ctrl_ref *ref;
1674 	struct v4l2_ctrl_ref *new_ref;
1675 	u32 id = ctrl->id;
1676 	u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
1677 	int bucket = id % hdl->nr_of_buckets;	/* which bucket to use */
1678 	unsigned int size_extra_req = 0;
1679 
1680 	if (ctrl_ref)
1681 		*ctrl_ref = NULL;
1682 
1683 	/*
1684 	 * Automatically add the control class if it is not yet present and
1685 	 * the new control is not a compound control.
1686 	 */
1687 	if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
1688 	    id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
1689 		if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
1690 			return hdl->error;
1691 
1692 	if (hdl->error)
1693 		return hdl->error;
1694 
1695 	if (allocate_req && !ctrl->is_array)
1696 		size_extra_req = ctrl->elems * ctrl->elem_size;
1697 	new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
1698 	if (!new_ref)
1699 		return handler_set_err(hdl, -ENOMEM);
1700 	new_ref->ctrl = ctrl;
1701 	new_ref->from_other_dev = from_other_dev;
1702 	if (size_extra_req)
1703 		new_ref->p_req.p = &new_ref[1];
1704 
1705 	INIT_LIST_HEAD(&new_ref->node);
1706 
1707 	mutex_lock(hdl->lock);
1708 
1709 	/* Add immediately at the end of the list if the list is empty, or if
1710 	   the last element in the list has a lower ID.
1711 	   This ensures that when elements are added in ascending order the
1712 	   insertion is an O(1) operation. */
1713 	if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
1714 		list_add_tail(&new_ref->node, &hdl->ctrl_refs);
1715 		goto insert_in_hash;
1716 	}
1717 
1718 	/* Find insert position in sorted list */
1719 	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1720 		if (ref->ctrl->id < id)
1721 			continue;
1722 		/* Don't add duplicates */
1723 		if (ref->ctrl->id == id) {
1724 			kfree(new_ref);
1725 			goto unlock;
1726 		}
1727 		list_add(&new_ref->node, ref->node.prev);
1728 		break;
1729 	}
1730 
1731 insert_in_hash:
1732 	/* Insert the control node in the hash */
1733 	new_ref->next = hdl->buckets[bucket];
1734 	hdl->buckets[bucket] = new_ref;
1735 	if (ctrl_ref)
1736 		*ctrl_ref = new_ref;
1737 	if (ctrl->handler == hdl) {
1738 		/* By default each control starts in a cluster of its own.
1739 		 * new_ref->ctrl is basically a cluster array with one
1740 		 * element, so that's perfect to use as the cluster pointer.
1741 		 * But only do this for the handler that owns the control.
1742 		 */
1743 		ctrl->cluster = &new_ref->ctrl;
1744 		ctrl->ncontrols = 1;
1745 	}
1746 
1747 unlock:
1748 	mutex_unlock(hdl->lock);
1749 	return 0;
1750 }
1751 
1752 /* Add a new control */
1753 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
1754 			const struct v4l2_ctrl_ops *ops,
1755 			const struct v4l2_ctrl_type_ops *type_ops,
1756 			u32 id, const char *name, enum v4l2_ctrl_type type,
1757 			s64 min, s64 max, u64 step, s64 def,
1758 			const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
1759 			u32 flags, const char * const *qmenu,
1760 			const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
1761 			void *priv)
1762 {
1763 	struct v4l2_ctrl *ctrl;
1764 	unsigned sz_extra;
1765 	unsigned nr_of_dims = 0;
1766 	unsigned elems = 1;
1767 	bool is_array;
1768 	unsigned tot_ctrl_size;
1769 	void *data;
1770 	int err;
1771 
1772 	if (hdl->error)
1773 		return NULL;
1774 
1775 	while (dims && dims[nr_of_dims]) {
1776 		elems *= dims[nr_of_dims];
1777 		nr_of_dims++;
1778 		if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
1779 			break;
1780 	}
1781 	is_array = nr_of_dims > 0;
1782 
1783 	/* Prefill elem_size for all types handled by std_type_ops */
1784 	switch ((u32)type) {
1785 	case V4L2_CTRL_TYPE_INTEGER64:
1786 		elem_size = sizeof(s64);
1787 		break;
1788 	case V4L2_CTRL_TYPE_STRING:
1789 		elem_size = max + 1;
1790 		break;
1791 	case V4L2_CTRL_TYPE_U8:
1792 		elem_size = sizeof(u8);
1793 		break;
1794 	case V4L2_CTRL_TYPE_U16:
1795 		elem_size = sizeof(u16);
1796 		break;
1797 	case V4L2_CTRL_TYPE_U32:
1798 		elem_size = sizeof(u32);
1799 		break;
1800 	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
1801 		elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
1802 		break;
1803 	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
1804 		elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
1805 		break;
1806 	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
1807 		elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
1808 		break;
1809 	case V4L2_CTRL_TYPE_FWHT_PARAMS:
1810 		elem_size = sizeof(struct v4l2_ctrl_fwht_params);
1811 		break;
1812 	case V4L2_CTRL_TYPE_H264_SPS:
1813 		elem_size = sizeof(struct v4l2_ctrl_h264_sps);
1814 		break;
1815 	case V4L2_CTRL_TYPE_H264_PPS:
1816 		elem_size = sizeof(struct v4l2_ctrl_h264_pps);
1817 		break;
1818 	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
1819 		elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
1820 		break;
1821 	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
1822 		elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
1823 		break;
1824 	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
1825 		elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
1826 		break;
1827 	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
1828 		elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
1829 		break;
1830 	case V4L2_CTRL_TYPE_VP8_FRAME:
1831 		elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
1832 		break;
1833 	case V4L2_CTRL_TYPE_HEVC_SPS:
1834 		elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
1835 		break;
1836 	case V4L2_CTRL_TYPE_HEVC_PPS:
1837 		elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
1838 		break;
1839 	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
1840 		elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
1841 		break;
1842 	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
1843 		elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix);
1844 		break;
1845 	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
1846 		elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
1847 		break;
1848 	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
1849 		elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
1850 		break;
1851 	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
1852 		elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
1853 		break;
1854 	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
1855 		elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr);
1856 		break;
1857 	case V4L2_CTRL_TYPE_VP9_FRAME:
1858 		elem_size = sizeof(struct v4l2_ctrl_vp9_frame);
1859 		break;
1860 	case V4L2_CTRL_TYPE_AV1_SEQUENCE:
1861 		elem_size = sizeof(struct v4l2_ctrl_av1_sequence);
1862 		break;
1863 	case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY:
1864 		elem_size = sizeof(struct v4l2_ctrl_av1_tile_group_entry);
1865 		break;
1866 	case V4L2_CTRL_TYPE_AV1_FRAME:
1867 		elem_size = sizeof(struct v4l2_ctrl_av1_frame);
1868 		break;
1869 	case V4L2_CTRL_TYPE_AV1_FILM_GRAIN:
1870 		elem_size = sizeof(struct v4l2_ctrl_av1_film_grain);
1871 		break;
1872 	case V4L2_CTRL_TYPE_AREA:
1873 		elem_size = sizeof(struct v4l2_area);
1874 		break;
1875 	default:
1876 		if (type < V4L2_CTRL_COMPOUND_TYPES)
1877 			elem_size = sizeof(s32);
1878 		break;
1879 	}
1880 
1881 	/* Sanity checks */
1882 	if (id == 0 || name == NULL || !elem_size ||
1883 	    id >= V4L2_CID_PRIVATE_BASE ||
1884 	    (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
1885 	    (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
1886 		handler_set_err(hdl, -ERANGE);
1887 		return NULL;
1888 	}
1889 	err = check_range(type, min, max, step, def);
1890 	if (err) {
1891 		handler_set_err(hdl, err);
1892 		return NULL;
1893 	}
1894 	if (is_array &&
1895 	    (type == V4L2_CTRL_TYPE_BUTTON ||
1896 	     type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
1897 		handler_set_err(hdl, -EINVAL);
1898 		return NULL;
1899 	}
1900 	if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) {
1901 		/*
1902 		 * For now this is only supported for one-dimensional arrays.
1903 		 *
1904 		 * This can be relaxed in the future, but it will
1905 		 * require more effort.
1906 		 */
1907 		if (nr_of_dims != 1) {
1908 			handler_set_err(hdl, -EINVAL);
1909 			return NULL;
1910 		}
1911 		/* Start with just 1 element */
1912 		elems = 1;
1913 	}
1914 
1915 	tot_ctrl_size = elem_size * elems;
1916 	sz_extra = 0;
1917 	if (type == V4L2_CTRL_TYPE_BUTTON)
1918 		flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
1919 			V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
1920 	else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
1921 		flags |= V4L2_CTRL_FLAG_READ_ONLY;
1922 	else if (!is_array &&
1923 		 (type == V4L2_CTRL_TYPE_INTEGER64 ||
1924 		  type == V4L2_CTRL_TYPE_STRING ||
1925 		  type >= V4L2_CTRL_COMPOUND_TYPES))
1926 		sz_extra += 2 * tot_ctrl_size;
1927 
1928 	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
1929 		sz_extra += elem_size;
1930 
1931 	ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
1932 	if (ctrl == NULL) {
1933 		handler_set_err(hdl, -ENOMEM);
1934 		return NULL;
1935 	}
1936 
1937 	INIT_LIST_HEAD(&ctrl->node);
1938 	INIT_LIST_HEAD(&ctrl->ev_subs);
1939 	ctrl->handler = hdl;
1940 	ctrl->ops = ops;
1941 	ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
1942 	ctrl->id = id;
1943 	ctrl->name = name;
1944 	ctrl->type = type;
1945 	ctrl->flags = flags;
1946 	ctrl->minimum = min;
1947 	ctrl->maximum = max;
1948 	ctrl->step = step;
1949 	ctrl->default_value = def;
1950 	ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
1951 	ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
1952 	ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
1953 	ctrl->is_array = is_array;
1954 	ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY);
1955 	ctrl->elems = elems;
1956 	ctrl->new_elems = elems;
1957 	ctrl->nr_of_dims = nr_of_dims;
1958 	if (nr_of_dims)
1959 		memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
1960 	ctrl->elem_size = elem_size;
1961 	if (type == V4L2_CTRL_TYPE_MENU)
1962 		ctrl->qmenu = qmenu;
1963 	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
1964 		ctrl->qmenu_int = qmenu_int;
1965 	ctrl->priv = priv;
1966 	ctrl->cur.val = ctrl->val = def;
1967 	data = &ctrl[1];
1968 
1969 	if (ctrl->is_array) {
1970 		ctrl->p_array_alloc_elems = elems;
1971 		ctrl->p_array = kvzalloc(2 * elems * elem_size, GFP_KERNEL);
1972 		if (!ctrl->p_array) {
1973 			kvfree(ctrl);
1974 			return NULL;
1975 		}
1976 		data = ctrl->p_array;
1977 	}
1978 
1979 	if (!ctrl->is_int) {
1980 		ctrl->p_new.p = data;
1981 		ctrl->p_cur.p = data + tot_ctrl_size;
1982 	} else {
1983 		ctrl->p_new.p = &ctrl->val;
1984 		ctrl->p_cur.p = &ctrl->cur.val;
1985 	}
1986 
1987 	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
1988 		if (ctrl->is_array)
1989 			ctrl->p_def.p = &ctrl[1];
1990 		else
1991 			ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
1992 		memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
1993 	}
1994 
1995 	ctrl->type_ops->init(ctrl, 0, ctrl->p_cur);
1996 	cur_to_new(ctrl);
1997 
1998 	if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
1999 		kvfree(ctrl->p_array);
2000 		kvfree(ctrl);
2001 		return NULL;
2002 	}
2003 	mutex_lock(hdl->lock);
2004 	list_add_tail(&ctrl->node, &hdl->ctrls);
2005 	mutex_unlock(hdl->lock);
2006 	return ctrl;
2007 }
2008 
2009 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
2010 			const struct v4l2_ctrl_config *cfg, void *priv)
2011 {
2012 	bool is_menu;
2013 	struct v4l2_ctrl *ctrl;
2014 	const char *name = cfg->name;
2015 	const char * const *qmenu = cfg->qmenu;
2016 	const s64 *qmenu_int = cfg->qmenu_int;
2017 	enum v4l2_ctrl_type type = cfg->type;
2018 	u32 flags = cfg->flags;
2019 	s64 min = cfg->min;
2020 	s64 max = cfg->max;
2021 	u64 step = cfg->step;
2022 	s64 def = cfg->def;
2023 
2024 	if (name == NULL)
2025 		v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
2026 								&def, &flags);
2027 
2028 	is_menu = (type == V4L2_CTRL_TYPE_MENU ||
2029 		   type == V4L2_CTRL_TYPE_INTEGER_MENU);
2030 	if (is_menu)
2031 		WARN_ON(step);
2032 	else
2033 		WARN_ON(cfg->menu_skip_mask);
2034 	if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
2035 		qmenu = v4l2_ctrl_get_menu(cfg->id);
2036 	} else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
2037 		handler_set_err(hdl, -EINVAL);
2038 		return NULL;
2039 	}
2040 
2041 	ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
2042 			type, min, max,
2043 			is_menu ? cfg->menu_skip_mask : step, def,
2044 			cfg->dims, cfg->elem_size,
2045 			flags, qmenu, qmenu_int, cfg->p_def, priv);
2046 	if (ctrl)
2047 		ctrl->is_private = cfg->is_private;
2048 	return ctrl;
2049 }
2050 EXPORT_SYMBOL(v4l2_ctrl_new_custom);
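
/*
 * Usage sketch (illustrative): a driver-private integer control described
 * by a v4l2_ctrl_config. The CID offset, name and ops are hypothetical.
 *
 *	static const struct v4l2_ctrl_config foo_filter_strength = {
 *		.ops = &foo_ctrl_ops,
 *		.id = V4L2_CID_USER_BASE + 0x1000,
 *		.name = "Filter Strength",
 *		.type = V4L2_CTRL_TYPE_INTEGER,
 *		.min = 0,
 *		.max = 15,
 *		.step = 1,
 *		.def = 8,
 *	};
 *
 *	v4l2_ctrl_new_custom(&foo->ctrl_handler, &foo_filter_strength, NULL);
 */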
2051 
2052 /* Helper function for standard non-menu controls */
2053 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
2054 			const struct v4l2_ctrl_ops *ops,
2055 			u32 id, s64 min, s64 max, u64 step, s64 def)
2056 {
2057 	const char *name;
2058 	enum v4l2_ctrl_type type;
2059 	u32 flags;
2060 
2061 	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
2062 	if (type == V4L2_CTRL_TYPE_MENU ||
2063 	    type == V4L2_CTRL_TYPE_INTEGER_MENU ||
2064 	    type >= V4L2_CTRL_COMPOUND_TYPES) {
2065 		handler_set_err(hdl, -EINVAL);
2066 		return NULL;
2067 	}
2068 	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
2069 			     min, max, step, def, NULL, 0,
2070 			     flags, NULL, NULL, ptr_null, NULL);
2071 }
2072 EXPORT_SYMBOL(v4l2_ctrl_new_std);
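
/*
 * Usage sketch (illustrative): typical control setup in a driver's probe.
 * Errors accumulate in the handler, so they only need to be checked once
 * after all controls were added. "foo_ctrl_ops" and "foo" are hypothetical.
 *
 *	v4l2_ctrl_handler_init(&foo->ctrl_handler, 2);
 *	v4l2_ctrl_new_std(&foo->ctrl_handler, &foo_ctrl_ops,
 *			  V4L2_CID_BRIGHTNESS, 0, 255, 1, 128);
 *	v4l2_ctrl_new_std(&foo->ctrl_handler, &foo_ctrl_ops,
 *			  V4L2_CID_CONTRAST, 0, 255, 1, 64);
 *	if (foo->ctrl_handler.error)
 *		return foo->ctrl_handler.error;
 */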
2073 
2074 /* Helper function for standard menu controls */
2075 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
2076 			const struct v4l2_ctrl_ops *ops,
2077 			u32 id, u8 _max, u64 mask, u8 _def)
2078 {
2079 	const char * const *qmenu = NULL;
2080 	const s64 *qmenu_int = NULL;
2081 	unsigned int qmenu_int_len = 0;
2082 	const char *name;
2083 	enum v4l2_ctrl_type type;
2084 	s64 min;
2085 	s64 max = _max;
2086 	s64 def = _def;
2087 	u64 step;
2088 	u32 flags;
2089 
2090 	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
2091 
2092 	if (type == V4L2_CTRL_TYPE_MENU)
2093 		qmenu = v4l2_ctrl_get_menu(id);
2094 	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
2095 		qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);
2096 
2097 	if ((!qmenu && !qmenu_int) || (qmenu_int && max >= qmenu_int_len)) {
2098 		handler_set_err(hdl, -EINVAL);
2099 		return NULL;
2100 	}
2101 	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
2102 			     0, max, mask, def, NULL, 0,
2103 			     flags, qmenu, qmenu_int, ptr_null, NULL);
2104 }
2105 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);
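
/*
 * Usage sketch (illustrative): for menu controls _max is the highest valid
 * menu index, mask marks indices to skip (0 = none) and _def is the
 * default index.
 *
 *	v4l2_ctrl_new_std_menu(&foo->ctrl_handler, &foo_ctrl_ops,
 *			       V4L2_CID_POWER_LINE_FREQUENCY,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_AUTO, 0,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_50HZ);
 */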
2106 
2107 /* Helper function for standard menu controls with driver defined menu */
2108 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
2109 			const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
2110 			u64 mask, u8 _def, const char * const *qmenu)
2111 {
2112 	enum v4l2_ctrl_type type;
2113 	const char *name;
2114 	u32 flags;
2115 	u64 step;
2116 	s64 min;
2117 	s64 max = _max;
2118 	s64 def = _def;
2119 
2120 	/* v4l2_ctrl_new_std_menu_items() should only be called for
2121 	 * standard controls without a standard menu.
2122 	 */
2123 	if (v4l2_ctrl_get_menu(id)) {
2124 		handler_set_err(hdl, -EINVAL);
2125 		return NULL;
2126 	}
2127 
2128 	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
2129 	if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
2130 		handler_set_err(hdl, -EINVAL);
2131 		return NULL;
2132 	}
2133 	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
2134 			     0, max, mask, def, NULL, 0,
2135 			     flags, qmenu, NULL, ptr_null, NULL);
2136 
2137 }
2138 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);
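
/*
 * Usage sketch (illustrative): V4L2_CID_TEST_PATTERN is a standard menu
 * control whose entries are defined per driver, which is the case this
 * helper covers. The menu strings below are hypothetical.
 *
 *	static const char * const foo_test_pattern_menu[] = {
 *		"Disabled",
 *		"Vertical Color Bars",
 *	};
 *
 *	v4l2_ctrl_new_std_menu_items(&foo->ctrl_handler, &foo_ctrl_ops,
 *				     V4L2_CID_TEST_PATTERN,
 *				     ARRAY_SIZE(foo_test_pattern_menu) - 1,
 *				     0, 0, foo_test_pattern_menu);
 */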
2139 
2140 /* Helper function for standard compound controls */
2141 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
2142 				const struct v4l2_ctrl_ops *ops, u32 id,
2143 				const union v4l2_ctrl_ptr p_def)
2144 {
2145 	const char *name;
2146 	enum v4l2_ctrl_type type;
2147 	u32 flags;
2148 	s64 min, max, step, def;
2149 
2150 	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
2151 	if (type < V4L2_CTRL_COMPOUND_TYPES) {
2152 		handler_set_err(hdl, -EINVAL);
2153 		return NULL;
2154 	}
2155 	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
2156 			     min, max, step, def, NULL, 0,
2157 			     flags, NULL, NULL, p_def, NULL);
2158 }
2159 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);
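
/*
 * Usage sketch (illustrative): compound controls receive their default
 * value through p_def; v4l2_ctrl_ptr_create() wraps a plain pointer.
 * The FWHT default structure below is only an assumed example.
 *
 *	static const struct v4l2_ctrl_fwht_params foo_fwht_params_def = {
 *		.version = V4L2_FWHT_VERSION,
 *	};
 *
 *	v4l2_ctrl_new_std_compound(&foo->ctrl_handler, &foo_ctrl_ops,
 *				   V4L2_CID_STATELESS_FWHT_PARAMS,
 *				   v4l2_ctrl_ptr_create((void *)&foo_fwht_params_def));
 */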
2160 
2161 /* Helper function for standard integer menu controls */
2162 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
2163 			const struct v4l2_ctrl_ops *ops,
2164 			u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
2165 {
2166 	const char *name;
2167 	enum v4l2_ctrl_type type;
2168 	s64 min;
2169 	u64 step;
2170 	s64 max = _max;
2171 	s64 def = _def;
2172 	u32 flags;
2173 
2174 	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
2175 	if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
2176 		handler_set_err(hdl, -EINVAL);
2177 		return NULL;
2178 	}
2179 	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
2180 			     0, max, 0, def, NULL, 0,
2181 			     flags, NULL, qmenu_int, ptr_null, NULL);
2182 }
2183 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);
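
/*
 * Usage sketch (illustrative): V4L2_CID_LINK_FREQ is the classic integer
 * menu control; the driver supplies the array of valid values. The
 * frequencies below are hypothetical.
 *
 *	static const s64 foo_link_freq_menu[] = {
 *		297000000,
 *		148500000,
 *	};
 *
 *	v4l2_ctrl_new_int_menu(&foo->ctrl_handler, NULL, V4L2_CID_LINK_FREQ,
 *			       ARRAY_SIZE(foo_link_freq_menu) - 1, 0,
 *			       foo_link_freq_menu);
 */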
2184 
2185 /* Add the controls from another handler to our own. */
2186 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
2187 			  struct v4l2_ctrl_handler *add,
2188 			  bool (*filter)(const struct v4l2_ctrl *ctrl),
2189 			  bool from_other_dev)
2190 {
2191 	struct v4l2_ctrl_ref *ref;
2192 	int ret = 0;
2193 
2194 	/* Do nothing if either handler is NULL or if they are the same */
2195 	if (!hdl || !add || hdl == add)
2196 		return 0;
2197 	if (hdl->error)
2198 		return hdl->error;
2199 	mutex_lock(add->lock);
2200 	list_for_each_entry(ref, &add->ctrl_refs, node) {
2201 		struct v4l2_ctrl *ctrl = ref->ctrl;
2202 
2203 		/* Skip handler-private controls. */
2204 		if (ctrl->is_private)
2205 			continue;
2206 		/* And control classes */
2207 		if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
2208 			continue;
2209 		/* Filter any unwanted controls */
2210 		if (filter && !filter(ctrl))
2211 			continue;
2212 		ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
2213 		if (ret)
2214 			break;
2215 	}
2216 	mutex_unlock(add->lock);
2217 	return ret;
2218 }
2219 EXPORT_SYMBOL(v4l2_ctrl_add_handler);
2220 
2221 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
2222 {
2223 	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
2224 		return true;
2225 	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
2226 		return true;
2227 	switch (ctrl->id) {
2228 	case V4L2_CID_AUDIO_MUTE:
2229 	case V4L2_CID_AUDIO_VOLUME:
2230 	case V4L2_CID_AUDIO_BALANCE:
2231 	case V4L2_CID_AUDIO_BASS:
2232 	case V4L2_CID_AUDIO_TREBLE:
2233 	case V4L2_CID_AUDIO_LOUDNESS:
2234 		return true;
2235 	default:
2236 		break;
2237 	}
2238 	return false;
2239 }
2240 EXPORT_SYMBOL(v4l2_ctrl_radio_filter);
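
/*
 * Usage sketch (illustrative): a bridge driver merging a subdevice's
 * controls into the handler of its radio device node, keeping only the
 * radio-relevant ones. "foo" and "sd" are hypothetical.
 *
 *	ret = v4l2_ctrl_add_handler(&foo->radio_ctrl_handler,
 *				    sd->ctrl_handler,
 *				    v4l2_ctrl_radio_filter, true);
 */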
2241 
2242 /* Cluster controls */
2243 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
2244 {
2245 	bool has_volatiles = false;
2246 	int i;
2247 
2248 	/* The first control is the master control and it must not be NULL */
2249 	if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
2250 		return;
2251 
2252 	for (i = 0; i < ncontrols; i++) {
2253 		if (controls[i]) {
2254 			controls[i]->cluster = controls;
2255 			controls[i]->ncontrols = ncontrols;
2256 			if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
2257 				has_volatiles = true;
2258 		}
2259 	}
2260 	controls[0]->has_volatiles = has_volatiles;
2261 }
2262 EXPORT_SYMBOL(v4l2_ctrl_cluster);
2263 
2264 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
2265 			    u8 manual_val, bool set_volatile)
2266 {
2267 	struct v4l2_ctrl *master = controls[0];
2268 	u32 flag = 0;
2269 	int i;
2270 
2271 	v4l2_ctrl_cluster(ncontrols, controls);
2272 	WARN_ON(ncontrols <= 1);
2273 	WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
2274 	WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
2275 	master->is_auto = true;
2276 	master->has_volatiles = set_volatile;
2277 	master->manual_mode_value = manual_val;
2278 	master->flags |= V4L2_CTRL_FLAG_UPDATE;
2279 
2280 	if (!is_cur_manual(master))
2281 		flag = V4L2_CTRL_FLAG_INACTIVE |
2282 			(set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);
2283 
2284 	for (i = 1; i < ncontrols; i++)
2285 		if (controls[i])
2286 			controls[i]->flags |= flag;
2287 }
2288 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);
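
/*
 * Usage sketch (illustrative): an autogain/gain autocluster. The control
 * pointers must be stored consecutively with the auto control first;
 * manual mode corresponds to autogain == 0 here. "foo" is hypothetical.
 *
 *	foo->autogain = v4l2_ctrl_new_std(hdl, &foo_ctrl_ops,
 *					  V4L2_CID_AUTOGAIN, 0, 1, 1, 1);
 *	foo->gain = v4l2_ctrl_new_std(hdl, &foo_ctrl_ops,
 *				      V4L2_CID_GAIN, 0, 255, 1, 32);
 *	v4l2_ctrl_auto_cluster(2, &foo->autogain, 0, true);
 */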
2289 
2290 /*
2291  * Obtain the current volatile values of an autocluster and mark them
2292  * as new.
2293  */
2294 void update_from_auto_cluster(struct v4l2_ctrl *master)
2295 {
2296 	int i;
2297 
2298 	for (i = 1; i < master->ncontrols; i++)
2299 		cur_to_new(master->cluster[i]);
2300 	if (!call_op(master, g_volatile_ctrl))
2301 		for (i = 1; i < master->ncontrols; i++)
2302 			if (master->cluster[i])
2303 				master->cluster[i]->is_new = 1;
2304 }
2305 
2306 /*
2307  * Return non-zero if one or more of the controls in the cluster has a new
2308  * value that differs from the current value.
2309  */
2310 static int cluster_changed(struct v4l2_ctrl *master)
2311 {
2312 	bool changed = false;
2313 	int i;
2314 
2315 	for (i = 0; i < master->ncontrols; i++) {
2316 		struct v4l2_ctrl *ctrl = master->cluster[i];
2317 		bool ctrl_changed = false;
2318 
2319 		if (!ctrl)
2320 			continue;
2321 
2322 		if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
2323 			changed = true;
2324 			ctrl_changed = true;
2325 		}
2326 
2327 		/*
2328 		 * Set has_changed to false to avoid generating
2329 		 * the event V4L2_EVENT_CTRL_CH_VALUE
2330 		 */
2331 		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
2332 			ctrl->has_changed = false;
2333 			continue;
2334 		}
2335 
2336 		if (ctrl->elems != ctrl->new_elems)
2337 			ctrl_changed = true;
2338 		if (!ctrl_changed)
2339 			ctrl_changed = !ctrl->type_ops->equal(ctrl,
2340 				ctrl->p_cur, ctrl->p_new);
2341 		ctrl->has_changed = ctrl_changed;
2342 		changed |= ctrl->has_changed;
2343 	}
2344 	return changed;
2345 }
2346 
2347 /*
2348  * Core function that calls try/s_ctrl and ensures that the new value is
2349  * copied to the current value on a set.
2350  * Must be called with ctrl->handler->lock held.
2351  */
2352 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
2353 		       bool set, u32 ch_flags)
2354 {
2355 	bool update_flag;
2356 	int ret;
2357 	int i;
2358 
2359 	/*
2360 	 * Go through the cluster and either validate the new value or
2361 	 * (if no new value was set), copy the current value to the new
2362 	 * value, ensuring a consistent view for the control ops when
2363 	 * called.
2364 	 */
2365 	for (i = 0; i < master->ncontrols; i++) {
2366 		struct v4l2_ctrl *ctrl = master->cluster[i];
2367 
2368 		if (!ctrl)
2369 			continue;
2370 
2371 		if (!ctrl->is_new) {
2372 			cur_to_new(ctrl);
2373 			continue;
2374 		}
2375 		/*
2376 		 * Check again: it may have changed since the
2377 		 * previous check in try_or_set_ext_ctrls().
2378 		 */
2379 		if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
2380 			return -EBUSY;
2381 	}
2382 
2383 	ret = call_op(master, try_ctrl);
2384 
2385 	/* Don't set if there is no change */
2386 	if (ret || !set || !cluster_changed(master))
2387 		return ret;
2388 	ret = call_op(master, s_ctrl);
2389 	if (ret)
2390 		return ret;
2391 
2392 	/* If OK, then make the new values permanent. */
2393 	update_flag = is_cur_manual(master) != is_new_manual(master);
2394 
2395 	for (i = 0; i < master->ncontrols; i++) {
2396 		/*
2397 		 * If we switch from auto to manual mode, and this cluster
2398 		 * contains volatile controls, then all non-master controls
2399 		 * have to be marked as changed. The 'new' value contains
2400 		 * the volatile value (obtained by update_from_auto_cluster),
2401 		 * which now has to become the current value.
2402 		 */
2403 		if (i && update_flag && is_new_manual(master) &&
2404 		    master->has_volatiles && master->cluster[i])
2405 			master->cluster[i]->has_changed = true;
2406 
2407 		new_to_cur(fh, master->cluster[i], ch_flags |
2408 			((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
2409 	}
2410 	return 0;
2411 }
2412 
2413 /* Activate/deactivate a control. */
2414 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
2415 {
2416 	/* invert since the actual flag is called 'inactive' */
2417 	bool inactive = !active;
2418 	bool old;
2419 
2420 	if (ctrl == NULL)
2421 		return;
2422 
2423 	if (inactive)
2424 		/* set V4L2_CTRL_FLAG_INACTIVE */
2425 		old = test_and_set_bit(4, &ctrl->flags);
2426 	else
2427 		/* clear V4L2_CTRL_FLAG_INACTIVE */
2428 		old = test_and_clear_bit(4, &ctrl->flags);
2429 	if (old != inactive)
2430 		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
2431 }
2432 EXPORT_SYMBOL(v4l2_ctrl_activate);
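
/*
 * Usage sketch (illustrative): typically called from the driver's s_ctrl
 * op, where the handler lock is already held, to grey out controls that
 * only apply in manual mode. "foo->gain" is a hypothetical control.
 *
 *	case V4L2_CID_AUTOGAIN:
 *		v4l2_ctrl_activate(foo->gain, !ctrl->val);
 *		break;
 */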
2433 
2434 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
2435 {
2436 	bool old;
2437 
2438 	if (ctrl == NULL)
2439 		return;
2440 
2441 	lockdep_assert_held(ctrl->handler->lock);
2442 
2443 	if (grabbed)
2444 		/* set V4L2_CTRL_FLAG_GRABBED */
2445 		old = test_and_set_bit(1, &ctrl->flags);
2446 	else
2447 		/* clear V4L2_CTRL_FLAG_GRABBED */
2448 		old = test_and_clear_bit(1, &ctrl->flags);
2449 	if (old != grabbed)
2450 		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
2451 }
2452 EXPORT_SYMBOL(__v4l2_ctrl_grab);
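
/*
 * Usage sketch (illustrative): drivers normally use the locked
 * v4l2_ctrl_grab() wrapper from media/v4l2-ctrls.h, which takes the
 * handler lock and then calls __v4l2_ctrl_grab(). A typical pattern is
 * to grab mode controls for the duration of streaming:
 *
 *	v4l2_ctrl_grab(foo->hflip, true);	(in start_streaming)
 *	v4l2_ctrl_grab(foo->hflip, false);	(in stop_streaming)
 */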
2453 
2454 /* Call s_ctrl for all controls owned by the handler */
2455 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
2456 {
2457 	struct v4l2_ctrl *ctrl;
2458 	int ret = 0;
2459 
2460 	if (hdl == NULL)
2461 		return 0;
2462 
2463 	lockdep_assert_held(hdl->lock);
2464 
2465 	list_for_each_entry(ctrl, &hdl->ctrls, node)
2466 		ctrl->done = false;
2467 
2468 	list_for_each_entry(ctrl, &hdl->ctrls, node) {
2469 		struct v4l2_ctrl *master = ctrl->cluster[0];
2470 		int i;
2471 
2472 		/* Skip if this control was already handled by a cluster. */
2473 		/* Skip button controls and read-only controls. */
2474 		if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
2475 		    (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
2476 			continue;
2477 
2478 		for (i = 0; i < master->ncontrols; i++) {
2479 			if (master->cluster[i]) {
2480 				cur_to_new(master->cluster[i]);
2481 				master->cluster[i]->is_new = 1;
2482 				master->cluster[i]->done = true;
2483 			}
2484 		}
2485 		ret = call_op(master, s_ctrl);
2486 		if (ret)
2487 			break;
2488 	}
2489 
2490 	return ret;
2491 }
2492 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);
2493 
2494 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
2495 {
2496 	int ret;
2497 
2498 	if (hdl == NULL)
2499 		return 0;
2500 
2501 	mutex_lock(hdl->lock);
2502 	ret = __v4l2_ctrl_handler_setup(hdl);
2503 	mutex_unlock(hdl->lock);
2504 
2505 	return ret;
2506 }
2507 EXPORT_SYMBOL(v4l2_ctrl_handler_setup);
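
/*
 * Usage sketch (illustrative): usually called once at the end of probe,
 * after all controls were added, so the hardware is brought in sync with
 * the framework's initial control values.
 *
 *	ret = v4l2_ctrl_handler_setup(&foo->ctrl_handler);
 *	if (ret) {
 *		v4l2_ctrl_handler_free(&foo->ctrl_handler);
 *		return ret;
 *	}
 */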
2508 
2509 /* Log the control name and value */
2510 static void log_ctrl(const struct v4l2_ctrl *ctrl,
2511 		     const char *prefix, const char *colon)
2512 {
2513 	if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
2514 		return;
2515 	if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
2516 		return;
2517 
2518 	pr_info("%s%s%s: ", prefix, colon, ctrl->name);
2519 
2520 	ctrl->type_ops->log(ctrl);
2521 
2522 	if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
2523 			   V4L2_CTRL_FLAG_GRABBED |
2524 			   V4L2_CTRL_FLAG_VOLATILE)) {
2525 		if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
2526 			pr_cont(" inactive");
2527 		if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
2528 			pr_cont(" grabbed");
2529 		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
2530 			pr_cont(" volatile");
2531 	}
2532 	pr_cont("\n");
2533 }
2534 
2535 /* Log all controls owned by the handler */
2536 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
2537 				  const char *prefix)
2538 {
2539 	struct v4l2_ctrl *ctrl;
2540 	const char *colon = "";
2541 	int len;
2542 
2543 	if (!hdl)
2544 		return;
2545 	if (!prefix)
2546 		prefix = "";
2547 	len = strlen(prefix);
2548 	if (len && prefix[len - 1] != ' ')
2549 		colon = ": ";
2550 	mutex_lock(hdl->lock);
2551 	list_for_each_entry(ctrl, &hdl->ctrls, node)
2552 		if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
2553 			log_ctrl(ctrl, prefix, colon);
2554 	mutex_unlock(hdl->lock);
2555 }
2556 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);
2557 
2558 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
2559 				    const struct v4l2_ctrl_ops *ctrl_ops,
2560 				    const struct v4l2_fwnode_device_properties *p)
2561 {
2562 	if (hdl->error)
2563 		return hdl->error;
2564 
2565 	if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
2566 		u32 orientation_ctrl;
2567 
2568 		switch (p->orientation) {
2569 		case V4L2_FWNODE_ORIENTATION_FRONT:
2570 			orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
2571 			break;
2572 		case V4L2_FWNODE_ORIENTATION_BACK:
2573 			orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
2574 			break;
2575 		case V4L2_FWNODE_ORIENTATION_EXTERNAL:
2576 			orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
2577 			break;
2578 		default:
2579 			return -EINVAL;
2580 		}
2581 		if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
2582 					    V4L2_CID_CAMERA_ORIENTATION,
2583 					    V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
2584 					    orientation_ctrl))
2585 			return hdl->error;
2586 	}
2587 
2588 	if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
2589 		if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
2590 				       V4L2_CID_CAMERA_SENSOR_ROTATION,
2591 				       p->rotation, p->rotation, 1,
2592 				       p->rotation))
2593 			return hdl->error;
2594 	}
2595 
2596 	return hdl->error;
2597 }
2598 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);
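
/*
 * Usage sketch (illustrative): a sensor driver parsing the standard
 * rotation/orientation firmware properties with v4l2_fwnode_device_parse()
 * (declared in media/v4l2-fwnode.h) and registering the matching controls.
 * "dev", "foo" and "foo_ctrl_ops" are hypothetical names.
 *
 *	struct v4l2_fwnode_device_properties props;
 *	int ret;
 *
 *	ret = v4l2_fwnode_device_parse(dev, &props);
 *	if (ret)
 *		return ret;
 *	ret = v4l2_ctrl_new_fwnode_properties(&foo->ctrl_handler,
 *					      &foo_ctrl_ops, &props);
 */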
2599