1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * V4L2 controls framework core implementation. 4 * 5 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl> 6 */ 7 8 #include <linux/export.h> 9 #include <linux/mm.h> 10 #include <linux/slab.h> 11 #include <media/v4l2-ctrls.h> 12 #include <media/v4l2-event.h> 13 #include <media/v4l2-fwnode.h> 14 15 #include "v4l2-ctrls-priv.h" 16 17 static const union v4l2_ctrl_ptr ptr_null; 18 19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl, 20 u32 changes) 21 { 22 memset(ev, 0, sizeof(*ev)); 23 ev->type = V4L2_EVENT_CTRL; 24 ev->id = ctrl->id; 25 ev->u.ctrl.changes = changes; 26 ev->u.ctrl.type = ctrl->type; 27 ev->u.ctrl.flags = user_flags(ctrl); 28 if (ctrl->is_ptr) 29 ev->u.ctrl.value64 = 0; 30 else 31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64; 32 ev->u.ctrl.minimum = ctrl->minimum; 33 ev->u.ctrl.maximum = ctrl->maximum; 34 if (ctrl->type == V4L2_CTRL_TYPE_MENU 35 || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU) 36 ev->u.ctrl.step = 1; 37 else 38 ev->u.ctrl.step = ctrl->step; 39 ev->u.ctrl.default_value = ctrl->default_value; 40 } 41 42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl) 43 { 44 struct v4l2_event ev; 45 u32 changes = V4L2_EVENT_CTRL_CH_FLAGS; 46 47 if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY)) 48 changes |= V4L2_EVENT_CTRL_CH_VALUE; 49 fill_event(&ev, ctrl, changes); 50 v4l2_event_queue_fh(fh, &ev); 51 } 52 53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes) 54 { 55 struct v4l2_event ev; 56 struct v4l2_subscribed_event *sev; 57 58 if (list_empty(&ctrl->ev_subs)) 59 return; 60 fill_event(&ev, ctrl, changes); 61 62 list_for_each_entry(sev, &ctrl->ev_subs, node) 63 if (sev->fh != fh || 64 (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK)) 65 v4l2_event_queue_fh(sev->fh, &ev); 66 } 67 68 bool v4l2_ctrl_type_op_equal(const struct v4l2_ctrl *ctrl, 69 union v4l2_ctrl_ptr ptr1, union v4l2_ctrl_ptr ptr2) 70 { 71 unsigned int i; 72 73 switch (ctrl->type) { 74 case V4L2_CTRL_TYPE_BUTTON: 75 return false; 76 case V4L2_CTRL_TYPE_STRING: 77 for (i = 0; i < ctrl->elems; i++) { 78 unsigned int idx = i * ctrl->elem_size; 79 80 /* strings are always 0-terminated */ 81 if (strcmp(ptr1.p_char + idx, ptr2.p_char + idx)) 82 return false; 83 } 84 return true; 85 default: 86 return !memcmp(ptr1.p_const, ptr2.p_const, 87 ctrl->elems * ctrl->elem_size); 88 } 89 } 90 EXPORT_SYMBOL(v4l2_ctrl_type_op_equal); 91 92 /* Default intra MPEG-2 quantisation coefficients, from the specification. */ 93 static const u8 mpeg2_intra_quant_matrix[64] = { 94 8, 16, 16, 19, 16, 19, 22, 22, 95 22, 22, 22, 22, 26, 24, 26, 27, 96 27, 27, 26, 26, 26, 26, 27, 27, 97 27, 29, 29, 29, 34, 34, 34, 29, 98 29, 29, 27, 27, 29, 29, 32, 32, 99 34, 34, 37, 38, 37, 35, 35, 34, 100 35, 38, 38, 40, 40, 40, 48, 48, 101 46, 46, 56, 56, 58, 69, 69, 83 102 }; 103 104 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx, 105 union v4l2_ctrl_ptr ptr) 106 { 107 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 108 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 109 struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant; 110 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 111 struct v4l2_ctrl_vp9_frame *p_vp9_frame; 112 struct v4l2_ctrl_fwht_params *p_fwht_params; 113 struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix; 114 struct v4l2_ctrl_av1_sequence *p_av1_sequence; 115 void *p = ptr.p + idx * ctrl->elem_size; 116 117 if (ctrl->p_def.p_const) 118 memcpy(p, ctrl->p_def.p_const, ctrl->elem_size); 119 else 120 memset(p, 0, ctrl->elem_size); 121 122 switch ((u32)ctrl->type) { 123 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 124 p_mpeg2_sequence = p; 125 126 /* 4:2:0 */ 127 p_mpeg2_sequence->chroma_format = 1; 128 break; 129 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 130 p_mpeg2_picture = p; 131 132 /* interlaced top field */ 133 p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD; 134 p_mpeg2_picture->picture_coding_type = 135 V4L2_MPEG2_PIC_CODING_TYPE_I; 136 break; 137 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 138 p_mpeg2_quant = p; 139 140 memcpy(p_mpeg2_quant->intra_quantiser_matrix, 141 mpeg2_intra_quant_matrix, 142 ARRAY_SIZE(mpeg2_intra_quant_matrix)); 143 /* 144 * The default non-intra MPEG-2 quantisation 145 * coefficients are all 16, as per the specification. 146 */ 147 memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16, 148 sizeof(p_mpeg2_quant->non_intra_quantiser_matrix)); 149 break; 150 case V4L2_CTRL_TYPE_VP8_FRAME: 151 p_vp8_frame = p; 152 p_vp8_frame->num_dct_parts = 1; 153 break; 154 case V4L2_CTRL_TYPE_VP9_FRAME: 155 p_vp9_frame = p; 156 p_vp9_frame->profile = 0; 157 p_vp9_frame->bit_depth = 8; 158 p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 159 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING; 160 break; 161 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 162 p_av1_sequence = p; 163 p_av1_sequence->bit_depth = 8; 164 break; 165 case V4L2_CTRL_TYPE_FWHT_PARAMS: 166 p_fwht_params = p; 167 p_fwht_params->version = V4L2_FWHT_VERSION; 168 p_fwht_params->width = 1280; 169 p_fwht_params->height = 720; 170 p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV | 171 (2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET); 172 break; 173 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 174 p_h264_scaling_matrix = p; 175 /* 176 * The default (flat) H.264 scaling matrix when none are 177 * specified in the bitstream, this is according to formulas 178 * (7-8) and (7-9) of the specification. 179 */ 180 memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix)); 181 break; 182 } 183 } 184 185 void v4l2_ctrl_type_op_init(const struct v4l2_ctrl *ctrl, u32 from_idx, 186 union v4l2_ctrl_ptr ptr) 187 { 188 unsigned int i; 189 u32 tot_elems = ctrl->elems; 190 u32 elems = tot_elems - from_idx; 191 192 if (from_idx >= tot_elems) 193 return; 194 195 switch (ctrl->type) { 196 case V4L2_CTRL_TYPE_STRING: 197 for (i = from_idx; i < tot_elems; i++) { 198 unsigned int offset = i * ctrl->elem_size; 199 200 memset(ptr.p_char + offset, ' ', ctrl->minimum); 201 ptr.p_char[offset + ctrl->minimum] = '\0'; 202 } 203 break; 204 case V4L2_CTRL_TYPE_INTEGER64: 205 if (ctrl->default_value) { 206 for (i = from_idx; i < tot_elems; i++) 207 ptr.p_s64[i] = ctrl->default_value; 208 } else { 209 memset(ptr.p_s64 + from_idx, 0, elems * sizeof(s64)); 210 } 211 break; 212 case V4L2_CTRL_TYPE_INTEGER: 213 case V4L2_CTRL_TYPE_INTEGER_MENU: 214 case V4L2_CTRL_TYPE_MENU: 215 case V4L2_CTRL_TYPE_BITMASK: 216 case V4L2_CTRL_TYPE_BOOLEAN: 217 if (ctrl->default_value) { 218 for (i = from_idx; i < tot_elems; i++) 219 ptr.p_s32[i] = ctrl->default_value; 220 } else { 221 memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32)); 222 } 223 break; 224 case V4L2_CTRL_TYPE_BUTTON: 225 case V4L2_CTRL_TYPE_CTRL_CLASS: 226 memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32)); 227 break; 228 case V4L2_CTRL_TYPE_U8: 229 memset(ptr.p_u8 + from_idx, ctrl->default_value, elems); 230 break; 231 case V4L2_CTRL_TYPE_U16: 232 if (ctrl->default_value) { 233 for (i = from_idx; i < tot_elems; i++) 234 ptr.p_u16[i] = ctrl->default_value; 235 } else { 236 memset(ptr.p_u16 + from_idx, 0, elems * sizeof(u16)); 237 } 238 break; 239 case V4L2_CTRL_TYPE_U32: 240 if (ctrl->default_value) { 241 for (i = from_idx; i < tot_elems; i++) 242 ptr.p_u32[i] = ctrl->default_value; 243 } else { 244 memset(ptr.p_u32 + from_idx, 0, elems * sizeof(u32)); 245 } 246 break; 247 default: 248 for (i = from_idx; i < tot_elems; i++) 249 std_init_compound(ctrl, i, ptr); 250 break; 251 } 252 } 253 EXPORT_SYMBOL(v4l2_ctrl_type_op_init); 254 255 void v4l2_ctrl_type_op_log(const struct v4l2_ctrl *ctrl) 256 { 257 union v4l2_ctrl_ptr ptr = ctrl->p_cur; 258 259 if (ctrl->is_array) { 260 unsigned i; 261 262 for (i = 0; i < ctrl->nr_of_dims; i++) 263 pr_cont("[%u]", ctrl->dims[i]); 264 pr_cont(" "); 265 } 266 267 switch (ctrl->type) { 268 case V4L2_CTRL_TYPE_INTEGER: 269 pr_cont("%d", *ptr.p_s32); 270 break; 271 case V4L2_CTRL_TYPE_BOOLEAN: 272 pr_cont("%s", *ptr.p_s32 ? "true" : "false"); 273 break; 274 case V4L2_CTRL_TYPE_MENU: 275 pr_cont("%s", ctrl->qmenu[*ptr.p_s32]); 276 break; 277 case V4L2_CTRL_TYPE_INTEGER_MENU: 278 pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]); 279 break; 280 case V4L2_CTRL_TYPE_BITMASK: 281 pr_cont("0x%08x", *ptr.p_s32); 282 break; 283 case V4L2_CTRL_TYPE_INTEGER64: 284 pr_cont("%lld", *ptr.p_s64); 285 break; 286 case V4L2_CTRL_TYPE_STRING: 287 pr_cont("%s", ptr.p_char); 288 break; 289 case V4L2_CTRL_TYPE_U8: 290 pr_cont("%u", (unsigned)*ptr.p_u8); 291 break; 292 case V4L2_CTRL_TYPE_U16: 293 pr_cont("%u", (unsigned)*ptr.p_u16); 294 break; 295 case V4L2_CTRL_TYPE_U32: 296 pr_cont("%u", (unsigned)*ptr.p_u32); 297 break; 298 case V4L2_CTRL_TYPE_H264_SPS: 299 pr_cont("H264_SPS"); 300 break; 301 case V4L2_CTRL_TYPE_H264_PPS: 302 pr_cont("H264_PPS"); 303 break; 304 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 305 pr_cont("H264_SCALING_MATRIX"); 306 break; 307 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 308 pr_cont("H264_SLICE_PARAMS"); 309 break; 310 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 311 pr_cont("H264_DECODE_PARAMS"); 312 break; 313 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 314 pr_cont("H264_PRED_WEIGHTS"); 315 break; 316 case V4L2_CTRL_TYPE_FWHT_PARAMS: 317 pr_cont("FWHT_PARAMS"); 318 break; 319 case V4L2_CTRL_TYPE_VP8_FRAME: 320 pr_cont("VP8_FRAME"); 321 break; 322 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 323 pr_cont("HDR10_CLL_INFO"); 324 break; 325 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 326 pr_cont("HDR10_MASTERING_DISPLAY"); 327 break; 328 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 329 pr_cont("MPEG2_QUANTISATION"); 330 break; 331 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 332 pr_cont("MPEG2_SEQUENCE"); 333 break; 334 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 335 pr_cont("MPEG2_PICTURE"); 336 break; 337 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 338 pr_cont("VP9_COMPRESSED_HDR"); 339 break; 340 case V4L2_CTRL_TYPE_VP9_FRAME: 341 pr_cont("VP9_FRAME"); 342 break; 343 case V4L2_CTRL_TYPE_HEVC_SPS: 344 pr_cont("HEVC_SPS"); 345 break; 346 case V4L2_CTRL_TYPE_HEVC_PPS: 347 pr_cont("HEVC_PPS"); 348 break; 349 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 350 pr_cont("HEVC_SLICE_PARAMS"); 351 break; 352 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 353 pr_cont("HEVC_SCALING_MATRIX"); 354 break; 355 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 356 pr_cont("HEVC_DECODE_PARAMS"); 357 break; 358 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 359 pr_cont("AV1_SEQUENCE"); 360 break; 361 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 362 pr_cont("AV1_TILE_GROUP_ENTRY"); 363 break; 364 case V4L2_CTRL_TYPE_AV1_FRAME: 365 pr_cont("AV1_FRAME"); 366 break; 367 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 368 pr_cont("AV1_FILM_GRAIN"); 369 break; 370 371 default: 372 pr_cont("unknown type %d", ctrl->type); 373 break; 374 } 375 } 376 EXPORT_SYMBOL(v4l2_ctrl_type_op_log); 377 378 /* 379 * Round towards the closest legal value. Be careful when we are 380 * close to the maximum range of the control type to prevent 381 * wrap-arounds. 382 */ 383 #define ROUND_TO_RANGE(val, offset_type, ctrl) \ 384 ({ \ 385 offset_type offset; \ 386 if ((ctrl)->maximum >= 0 && \ 387 val >= (ctrl)->maximum - (s32)((ctrl)->step / 2)) \ 388 val = (ctrl)->maximum; \ 389 else \ 390 val += (s32)((ctrl)->step / 2); \ 391 val = clamp_t(typeof(val), val, \ 392 (ctrl)->minimum, (ctrl)->maximum); \ 393 offset = (val) - (ctrl)->minimum; \ 394 offset = (ctrl)->step * (offset / (u32)(ctrl)->step); \ 395 val = (ctrl)->minimum + offset; \ 396 0; \ 397 }) 398 399 /* Validate a new control */ 400 401 #define zero_padding(s) \ 402 memset(&(s).padding, 0, sizeof((s).padding)) 403 #define zero_reserved(s) \ 404 memset(&(s).reserved, 0, sizeof((s).reserved)) 405 406 static int 407 validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf) 408 { 409 unsigned int i; 410 411 if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED | 412 V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE)) 413 return -EINVAL; 414 415 /* That all values are in the accepted range. */ 416 if (lf->level > GENMASK(5, 0)) 417 return -EINVAL; 418 419 if (lf->sharpness > GENMASK(2, 0)) 420 return -EINVAL; 421 422 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) 423 if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63) 424 return -EINVAL; 425 426 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) 427 if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63) 428 return -EINVAL; 429 430 zero_reserved(*lf); 431 return 0; 432 } 433 434 static int 435 validate_vp9_quant_params(struct v4l2_vp9_quantization *quant) 436 { 437 if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 || 438 quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 || 439 quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15) 440 return -EINVAL; 441 442 zero_reserved(*quant); 443 return 0; 444 } 445 446 static int 447 validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg) 448 { 449 unsigned int i, j; 450 451 if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED | 452 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP | 453 V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE | 454 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA | 455 V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE)) 456 return -EINVAL; 457 458 for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) { 459 if (seg->feature_enabled[i] & 460 ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK) 461 return -EINVAL; 462 } 463 464 for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) { 465 static const int range[] = { 255, 63, 3, 0 }; 466 467 for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) { 468 if (seg->feature_data[i][j] < -range[j] || 469 seg->feature_data[i][j] > range[j]) 470 return -EINVAL; 471 } 472 } 473 474 zero_reserved(*seg); 475 return 0; 476 } 477 478 static int 479 validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr) 480 { 481 if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT) 482 return -EINVAL; 483 484 return 0; 485 } 486 487 static int 488 validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame) 489 { 490 int ret; 491 492 /* Make sure we're not passed invalid flags. */ 493 if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME | 494 V4L2_VP9_FRAME_FLAG_SHOW_FRAME | 495 V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT | 496 V4L2_VP9_FRAME_FLAG_INTRA_ONLY | 497 V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV | 498 V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX | 499 V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE | 500 V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 501 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING | 502 V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING)) 503 return -EINVAL; 504 505 if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT && 506 frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX) 507 return -EINVAL; 508 509 if (frame->profile > V4L2_VP9_PROFILE_MAX) 510 return -EINVAL; 511 512 if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL) 513 return -EINVAL; 514 515 if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX) 516 return -EINVAL; 517 518 /* 519 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10 520 * and 12 bit depths. 521 */ 522 if ((frame->profile < 2 && frame->bit_depth != 8) || 523 (frame->profile >= 2 && 524 (frame->bit_depth != 10 && frame->bit_depth != 12))) 525 return -EINVAL; 526 527 /* Profile 0 and 2 only accept YUV 4:2:0. */ 528 if ((frame->profile == 0 || frame->profile == 2) && 529 (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) || 530 !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 531 return -EINVAL; 532 533 /* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */ 534 if ((frame->profile == 1 || frame->profile == 3) && 535 ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) && 536 (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 537 return -EINVAL; 538 539 if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE) 540 return -EINVAL; 541 542 /* 543 * According to the spec, tile_cols_log2 shall be less than or equal 544 * to 6. 545 */ 546 if (frame->tile_cols_log2 > 6) 547 return -EINVAL; 548 549 if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT) 550 return -EINVAL; 551 552 ret = validate_vp9_lf_params(&frame->lf); 553 if (ret) 554 return ret; 555 556 ret = validate_vp9_quant_params(&frame->quant); 557 if (ret) 558 return ret; 559 560 ret = validate_vp9_seg_params(&frame->seg); 561 if (ret) 562 return ret; 563 564 zero_reserved(*frame); 565 return 0; 566 } 567 568 static int validate_av1_quantization(struct v4l2_av1_quantization *q) 569 { 570 if (q->flags > GENMASK(2, 0)) 571 return -EINVAL; 572 573 if (q->delta_q_y_dc < -64 || q->delta_q_y_dc > 63 || 574 q->delta_q_u_dc < -64 || q->delta_q_u_dc > 63 || 575 q->delta_q_v_dc < -64 || q->delta_q_v_dc > 63 || 576 q->delta_q_u_ac < -64 || q->delta_q_u_ac > 63 || 577 q->delta_q_v_ac < -64 || q->delta_q_v_ac > 63 || 578 q->delta_q_res > GENMASK(1, 0)) 579 return -EINVAL; 580 581 if (q->qm_y > GENMASK(3, 0) || 582 q->qm_u > GENMASK(3, 0) || 583 q->qm_v > GENMASK(3, 0)) 584 return -EINVAL; 585 586 return 0; 587 } 588 589 static int validate_av1_segmentation(struct v4l2_av1_segmentation *s) 590 { 591 u32 i; 592 u32 j; 593 594 if (s->flags > GENMASK(4, 0)) 595 return -EINVAL; 596 597 for (i = 0; i < ARRAY_SIZE(s->feature_data); i++) { 598 static const int segmentation_feature_signed[] = { 1, 1, 1, 1, 1, 0, 0, 0 }; 599 static const int segmentation_feature_max[] = { 255, 63, 63, 63, 63, 7, 0, 0}; 600 601 for (j = 0; j < ARRAY_SIZE(s->feature_data[j]); j++) { 602 s32 limit = segmentation_feature_max[j]; 603 604 if (segmentation_feature_signed[j]) { 605 if (s->feature_data[i][j] < -limit || 606 s->feature_data[i][j] > limit) 607 return -EINVAL; 608 } else { 609 if (s->feature_data[i][j] < 0 || s->feature_data[i][j] > limit) 610 return -EINVAL; 611 } 612 } 613 } 614 615 return 0; 616 } 617 618 static int validate_av1_loop_filter(struct v4l2_av1_loop_filter *lf) 619 { 620 u32 i; 621 622 if (lf->flags > GENMASK(3, 0)) 623 return -EINVAL; 624 625 for (i = 0; i < ARRAY_SIZE(lf->level); i++) { 626 if (lf->level[i] > GENMASK(5, 0)) 627 return -EINVAL; 628 } 629 630 if (lf->sharpness > GENMASK(2, 0)) 631 return -EINVAL; 632 633 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) { 634 if (lf->ref_deltas[i] < -64 || lf->ref_deltas[i] > 63) 635 return -EINVAL; 636 } 637 638 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) { 639 if (lf->mode_deltas[i] < -64 || lf->mode_deltas[i] > 63) 640 return -EINVAL; 641 } 642 643 return 0; 644 } 645 646 static int validate_av1_cdef(struct v4l2_av1_cdef *cdef) 647 { 648 u32 i; 649 650 if (cdef->damping_minus_3 > GENMASK(1, 0) || 651 cdef->bits > GENMASK(1, 0)) 652 return -EINVAL; 653 654 for (i = 0; i < 1 << cdef->bits; i++) { 655 if (cdef->y_pri_strength[i] > GENMASK(3, 0) || 656 cdef->y_sec_strength[i] > 4 || 657 cdef->uv_pri_strength[i] > GENMASK(3, 0) || 658 cdef->uv_sec_strength[i] > 4) 659 return -EINVAL; 660 } 661 662 return 0; 663 } 664 665 static int validate_av1_loop_restauration(struct v4l2_av1_loop_restoration *lr) 666 { 667 if (lr->lr_unit_shift > 3 || lr->lr_uv_shift > 1) 668 return -EINVAL; 669 670 return 0; 671 } 672 673 static int validate_av1_film_grain(struct v4l2_ctrl_av1_film_grain *fg) 674 { 675 u32 i; 676 677 if (fg->flags > GENMASK(4, 0)) 678 return -EINVAL; 679 680 if (fg->film_grain_params_ref_idx > GENMASK(2, 0) || 681 fg->num_y_points > 14 || 682 fg->num_cb_points > 10 || 683 fg->num_cr_points > GENMASK(3, 0) || 684 fg->grain_scaling_minus_8 > GENMASK(1, 0) || 685 fg->ar_coeff_lag > GENMASK(1, 0) || 686 fg->ar_coeff_shift_minus_6 > GENMASK(1, 0) || 687 fg->grain_scale_shift > GENMASK(1, 0)) 688 return -EINVAL; 689 690 if (!(fg->flags & V4L2_AV1_FILM_GRAIN_FLAG_APPLY_GRAIN)) 691 return 0; 692 693 for (i = 1; i < fg->num_y_points; i++) 694 if (fg->point_y_value[i] <= fg->point_y_value[i - 1]) 695 return -EINVAL; 696 697 for (i = 1; i < fg->num_cb_points; i++) 698 if (fg->point_cb_value[i] <= fg->point_cb_value[i - 1]) 699 return -EINVAL; 700 701 for (i = 1; i < fg->num_cr_points; i++) 702 if (fg->point_cr_value[i] <= fg->point_cr_value[i - 1]) 703 return -EINVAL; 704 705 return 0; 706 } 707 708 static int validate_av1_frame(struct v4l2_ctrl_av1_frame *f) 709 { 710 int ret = 0; 711 712 ret = validate_av1_quantization(&f->quantization); 713 if (ret) 714 return ret; 715 ret = validate_av1_segmentation(&f->segmentation); 716 if (ret) 717 return ret; 718 ret = validate_av1_loop_filter(&f->loop_filter); 719 if (ret) 720 return ret; 721 ret = validate_av1_cdef(&f->cdef); 722 if (ret) 723 return ret; 724 ret = validate_av1_loop_restauration(&f->loop_restoration); 725 if (ret) 726 return ret; 727 728 if (f->flags & 729 ~(V4L2_AV1_FRAME_FLAG_SHOW_FRAME | 730 V4L2_AV1_FRAME_FLAG_SHOWABLE_FRAME | 731 V4L2_AV1_FRAME_FLAG_ERROR_RESILIENT_MODE | 732 V4L2_AV1_FRAME_FLAG_DISABLE_CDF_UPDATE | 733 V4L2_AV1_FRAME_FLAG_ALLOW_SCREEN_CONTENT_TOOLS | 734 V4L2_AV1_FRAME_FLAG_FORCE_INTEGER_MV | 735 V4L2_AV1_FRAME_FLAG_ALLOW_INTRABC | 736 V4L2_AV1_FRAME_FLAG_USE_SUPERRES | 737 V4L2_AV1_FRAME_FLAG_ALLOW_HIGH_PRECISION_MV | 738 V4L2_AV1_FRAME_FLAG_IS_MOTION_MODE_SWITCHABLE | 739 V4L2_AV1_FRAME_FLAG_USE_REF_FRAME_MVS | 740 V4L2_AV1_FRAME_FLAG_DISABLE_FRAME_END_UPDATE_CDF | 741 V4L2_AV1_FRAME_FLAG_ALLOW_WARPED_MOTION | 742 V4L2_AV1_FRAME_FLAG_REFERENCE_SELECT | 743 V4L2_AV1_FRAME_FLAG_REDUCED_TX_SET | 744 V4L2_AV1_FRAME_FLAG_SKIP_MODE_ALLOWED | 745 V4L2_AV1_FRAME_FLAG_SKIP_MODE_PRESENT | 746 V4L2_AV1_FRAME_FLAG_FRAME_SIZE_OVERRIDE | 747 V4L2_AV1_FRAME_FLAG_BUFFER_REMOVAL_TIME_PRESENT | 748 V4L2_AV1_FRAME_FLAG_FRAME_REFS_SHORT_SIGNALING)) 749 return -EINVAL; 750 751 if (f->superres_denom > GENMASK(2, 0) + 9) 752 return -EINVAL; 753 754 return 0; 755 } 756 757 static int validate_av1_sequence(struct v4l2_ctrl_av1_sequence *s) 758 { 759 if (s->flags & 760 ~(V4L2_AV1_SEQUENCE_FLAG_STILL_PICTURE | 761 V4L2_AV1_SEQUENCE_FLAG_USE_128X128_SUPERBLOCK | 762 V4L2_AV1_SEQUENCE_FLAG_ENABLE_FILTER_INTRA | 763 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTRA_EDGE_FILTER | 764 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTERINTRA_COMPOUND | 765 V4L2_AV1_SEQUENCE_FLAG_ENABLE_MASKED_COMPOUND | 766 V4L2_AV1_SEQUENCE_FLAG_ENABLE_WARPED_MOTION | 767 V4L2_AV1_SEQUENCE_FLAG_ENABLE_DUAL_FILTER | 768 V4L2_AV1_SEQUENCE_FLAG_ENABLE_ORDER_HINT | 769 V4L2_AV1_SEQUENCE_FLAG_ENABLE_JNT_COMP | 770 V4L2_AV1_SEQUENCE_FLAG_ENABLE_REF_FRAME_MVS | 771 V4L2_AV1_SEQUENCE_FLAG_ENABLE_SUPERRES | 772 V4L2_AV1_SEQUENCE_FLAG_ENABLE_CDEF | 773 V4L2_AV1_SEQUENCE_FLAG_ENABLE_RESTORATION | 774 V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME | 775 V4L2_AV1_SEQUENCE_FLAG_COLOR_RANGE | 776 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_X | 777 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_Y | 778 V4L2_AV1_SEQUENCE_FLAG_FILM_GRAIN_PARAMS_PRESENT | 779 V4L2_AV1_SEQUENCE_FLAG_SEPARATE_UV_DELTA_Q)) 780 return -EINVAL; 781 782 if (s->seq_profile == 1 && s->flags & V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME) 783 return -EINVAL; 784 785 /* reserved */ 786 if (s->seq_profile > 2) 787 return -EINVAL; 788 789 /* TODO: PROFILES */ 790 return 0; 791 } 792 793 /* 794 * Compound controls validation requires setting unused fields/flags to zero 795 * in order to properly detect unchanged controls with v4l2_ctrl_type_op_equal's 796 * memcmp. 797 */ 798 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx, 799 union v4l2_ctrl_ptr ptr) 800 { 801 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 802 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 803 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 804 struct v4l2_ctrl_fwht_params *p_fwht_params; 805 struct v4l2_ctrl_h264_sps *p_h264_sps; 806 struct v4l2_ctrl_h264_pps *p_h264_pps; 807 struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights; 808 struct v4l2_ctrl_h264_slice_params *p_h264_slice_params; 809 struct v4l2_ctrl_h264_decode_params *p_h264_dec_params; 810 struct v4l2_ctrl_hevc_sps *p_hevc_sps; 811 struct v4l2_ctrl_hevc_pps *p_hevc_pps; 812 struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering; 813 struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params; 814 struct v4l2_area *area; 815 void *p = ptr.p + idx * ctrl->elem_size; 816 unsigned int i; 817 818 switch ((u32)ctrl->type) { 819 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 820 p_mpeg2_sequence = p; 821 822 switch (p_mpeg2_sequence->chroma_format) { 823 case 1: /* 4:2:0 */ 824 case 2: /* 4:2:2 */ 825 case 3: /* 4:4:4 */ 826 break; 827 default: 828 return -EINVAL; 829 } 830 break; 831 832 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 833 p_mpeg2_picture = p; 834 835 switch (p_mpeg2_picture->intra_dc_precision) { 836 case 0: /* 8 bits */ 837 case 1: /* 9 bits */ 838 case 2: /* 10 bits */ 839 case 3: /* 11 bits */ 840 break; 841 default: 842 return -EINVAL; 843 } 844 845 switch (p_mpeg2_picture->picture_structure) { 846 case V4L2_MPEG2_PIC_TOP_FIELD: 847 case V4L2_MPEG2_PIC_BOTTOM_FIELD: 848 case V4L2_MPEG2_PIC_FRAME: 849 break; 850 default: 851 return -EINVAL; 852 } 853 854 switch (p_mpeg2_picture->picture_coding_type) { 855 case V4L2_MPEG2_PIC_CODING_TYPE_I: 856 case V4L2_MPEG2_PIC_CODING_TYPE_P: 857 case V4L2_MPEG2_PIC_CODING_TYPE_B: 858 break; 859 default: 860 return -EINVAL; 861 } 862 zero_reserved(*p_mpeg2_picture); 863 break; 864 865 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 866 break; 867 868 case V4L2_CTRL_TYPE_FWHT_PARAMS: 869 p_fwht_params = p; 870 if (p_fwht_params->version < V4L2_FWHT_VERSION) 871 return -EINVAL; 872 if (!p_fwht_params->width || !p_fwht_params->height) 873 return -EINVAL; 874 break; 875 876 case V4L2_CTRL_TYPE_H264_SPS: 877 p_h264_sps = p; 878 879 /* Some syntax elements are only conditionally valid */ 880 if (p_h264_sps->pic_order_cnt_type != 0) { 881 p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0; 882 } else if (p_h264_sps->pic_order_cnt_type != 1) { 883 p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0; 884 p_h264_sps->offset_for_non_ref_pic = 0; 885 p_h264_sps->offset_for_top_to_bottom_field = 0; 886 memset(&p_h264_sps->offset_for_ref_frame, 0, 887 sizeof(p_h264_sps->offset_for_ref_frame)); 888 } 889 890 if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) { 891 p_h264_sps->chroma_format_idc = 1; 892 p_h264_sps->bit_depth_luma_minus8 = 0; 893 p_h264_sps->bit_depth_chroma_minus8 = 0; 894 895 p_h264_sps->flags &= 896 ~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS; 897 898 if (p_h264_sps->chroma_format_idc < 3) 899 p_h264_sps->flags &= 900 ~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE; 901 } 902 903 if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY) 904 p_h264_sps->flags &= 905 ~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD; 906 907 /* 908 * Chroma 4:2:2 format require at least High 4:2:2 profile. 909 * 910 * The H264 specification and well-known parser implementations 911 * use profile-idc values directly, as that is clearer and 912 * less ambiguous. We do the same here. 913 */ 914 if (p_h264_sps->profile_idc < 122 && 915 p_h264_sps->chroma_format_idc > 1) 916 return -EINVAL; 917 /* Chroma 4:4:4 format require at least High 4:2:2 profile */ 918 if (p_h264_sps->profile_idc < 244 && 919 p_h264_sps->chroma_format_idc > 2) 920 return -EINVAL; 921 if (p_h264_sps->chroma_format_idc > 3) 922 return -EINVAL; 923 924 if (p_h264_sps->bit_depth_luma_minus8 > 6) 925 return -EINVAL; 926 if (p_h264_sps->bit_depth_chroma_minus8 > 6) 927 return -EINVAL; 928 if (p_h264_sps->log2_max_frame_num_minus4 > 12) 929 return -EINVAL; 930 if (p_h264_sps->pic_order_cnt_type > 2) 931 return -EINVAL; 932 if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12) 933 return -EINVAL; 934 if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN) 935 return -EINVAL; 936 break; 937 938 case V4L2_CTRL_TYPE_H264_PPS: 939 p_h264_pps = p; 940 941 if (p_h264_pps->num_slice_groups_minus1 > 7) 942 return -EINVAL; 943 if (p_h264_pps->num_ref_idx_l0_default_active_minus1 > 944 (V4L2_H264_REF_LIST_LEN - 1)) 945 return -EINVAL; 946 if (p_h264_pps->num_ref_idx_l1_default_active_minus1 > 947 (V4L2_H264_REF_LIST_LEN - 1)) 948 return -EINVAL; 949 if (p_h264_pps->weighted_bipred_idc > 2) 950 return -EINVAL; 951 /* 952 * pic_init_qp_minus26 shall be in the range of 953 * -(26 + QpBdOffset_y) to +25, inclusive, 954 * where QpBdOffset_y is 6 * bit_depth_luma_minus8 955 */ 956 if (p_h264_pps->pic_init_qp_minus26 < -62 || 957 p_h264_pps->pic_init_qp_minus26 > 25) 958 return -EINVAL; 959 if (p_h264_pps->pic_init_qs_minus26 < -26 || 960 p_h264_pps->pic_init_qs_minus26 > 25) 961 return -EINVAL; 962 if (p_h264_pps->chroma_qp_index_offset < -12 || 963 p_h264_pps->chroma_qp_index_offset > 12) 964 return -EINVAL; 965 if (p_h264_pps->second_chroma_qp_index_offset < -12 || 966 p_h264_pps->second_chroma_qp_index_offset > 12) 967 return -EINVAL; 968 break; 969 970 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 971 break; 972 973 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 974 p_h264_pred_weights = p; 975 976 if (p_h264_pred_weights->luma_log2_weight_denom > 7) 977 return -EINVAL; 978 if (p_h264_pred_weights->chroma_log2_weight_denom > 7) 979 return -EINVAL; 980 break; 981 982 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 983 p_h264_slice_params = p; 984 985 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 986 p_h264_slice_params->flags &= 987 ~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED; 988 989 if (p_h264_slice_params->colour_plane_id > 2) 990 return -EINVAL; 991 if (p_h264_slice_params->cabac_init_idc > 2) 992 return -EINVAL; 993 if (p_h264_slice_params->disable_deblocking_filter_idc > 2) 994 return -EINVAL; 995 if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 || 996 p_h264_slice_params->slice_alpha_c0_offset_div2 > 6) 997 return -EINVAL; 998 if (p_h264_slice_params->slice_beta_offset_div2 < -6 || 999 p_h264_slice_params->slice_beta_offset_div2 > 6) 1000 return -EINVAL; 1001 1002 if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I || 1003 p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI) 1004 p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0; 1005 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 1006 p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0; 1007 1008 if (p_h264_slice_params->num_ref_idx_l0_active_minus1 > 1009 (V4L2_H264_REF_LIST_LEN - 1)) 1010 return -EINVAL; 1011 if (p_h264_slice_params->num_ref_idx_l1_active_minus1 > 1012 (V4L2_H264_REF_LIST_LEN - 1)) 1013 return -EINVAL; 1014 zero_reserved(*p_h264_slice_params); 1015 break; 1016 1017 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 1018 p_h264_dec_params = p; 1019 1020 if (p_h264_dec_params->nal_ref_idc > 3) 1021 return -EINVAL; 1022 for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) { 1023 struct v4l2_h264_dpb_entry *dpb_entry = 1024 &p_h264_dec_params->dpb[i]; 1025 1026 zero_reserved(*dpb_entry); 1027 } 1028 zero_reserved(*p_h264_dec_params); 1029 break; 1030 1031 case V4L2_CTRL_TYPE_VP8_FRAME: 1032 p_vp8_frame = p; 1033 1034 switch (p_vp8_frame->num_dct_parts) { 1035 case 1: 1036 case 2: 1037 case 4: 1038 case 8: 1039 break; 1040 default: 1041 return -EINVAL; 1042 } 1043 zero_padding(p_vp8_frame->segment); 1044 zero_padding(p_vp8_frame->lf); 1045 zero_padding(p_vp8_frame->quant); 1046 zero_padding(p_vp8_frame->entropy); 1047 zero_padding(p_vp8_frame->coder_state); 1048 break; 1049 1050 case V4L2_CTRL_TYPE_HEVC_SPS: 1051 p_hevc_sps = p; 1052 1053 if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) { 1054 p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0; 1055 p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0; 1056 p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0; 1057 p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0; 1058 } 1059 1060 if (!(p_hevc_sps->flags & 1061 V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT)) 1062 p_hevc_sps->num_long_term_ref_pics_sps = 0; 1063 break; 1064 1065 case V4L2_CTRL_TYPE_HEVC_PPS: 1066 p_hevc_pps = p; 1067 1068 if (!(p_hevc_pps->flags & 1069 V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED)) 1070 p_hevc_pps->diff_cu_qp_delta_depth = 0; 1071 1072 if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) { 1073 p_hevc_pps->num_tile_columns_minus1 = 0; 1074 p_hevc_pps->num_tile_rows_minus1 = 0; 1075 memset(&p_hevc_pps->column_width_minus1, 0, 1076 sizeof(p_hevc_pps->column_width_minus1)); 1077 memset(&p_hevc_pps->row_height_minus1, 0, 1078 sizeof(p_hevc_pps->row_height_minus1)); 1079 1080 p_hevc_pps->flags &= 1081 ~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED; 1082 } 1083 1084 if (p_hevc_pps->flags & 1085 V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) { 1086 p_hevc_pps->pps_beta_offset_div2 = 0; 1087 p_hevc_pps->pps_tc_offset_div2 = 0; 1088 } 1089 break; 1090 1091 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 1092 p_hevc_decode_params = p; 1093 1094 if (p_hevc_decode_params->num_active_dpb_entries > 1095 V4L2_HEVC_DPB_ENTRIES_NUM_MAX) 1096 return -EINVAL; 1097 break; 1098 1099 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 1100 break; 1101 1102 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 1103 break; 1104 1105 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 1106 p_hdr10_mastering = p; 1107 1108 for (i = 0; i < 3; ++i) { 1109 if (p_hdr10_mastering->display_primaries_x[i] < 1110 V4L2_HDR10_MASTERING_PRIMARIES_X_LOW || 1111 p_hdr10_mastering->display_primaries_x[i] > 1112 V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH || 1113 p_hdr10_mastering->display_primaries_y[i] < 1114 V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW || 1115 p_hdr10_mastering->display_primaries_y[i] > 1116 V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH) 1117 return -EINVAL; 1118 } 1119 1120 if (p_hdr10_mastering->white_point_x < 1121 V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW || 1122 p_hdr10_mastering->white_point_x > 1123 V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH || 1124 p_hdr10_mastering->white_point_y < 1125 V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW || 1126 p_hdr10_mastering->white_point_y > 1127 V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH) 1128 return -EINVAL; 1129 1130 if (p_hdr10_mastering->max_display_mastering_luminance < 1131 V4L2_HDR10_MASTERING_MAX_LUMA_LOW || 1132 p_hdr10_mastering->max_display_mastering_luminance > 1133 V4L2_HDR10_MASTERING_MAX_LUMA_HIGH || 1134 p_hdr10_mastering->min_display_mastering_luminance < 1135 V4L2_HDR10_MASTERING_MIN_LUMA_LOW || 1136 p_hdr10_mastering->min_display_mastering_luminance > 1137 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 1138 return -EINVAL; 1139 1140 /* The following restriction comes from ITU-T Rec. H.265 spec */ 1141 if (p_hdr10_mastering->max_display_mastering_luminance == 1142 V4L2_HDR10_MASTERING_MAX_LUMA_LOW && 1143 p_hdr10_mastering->min_display_mastering_luminance == 1144 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 1145 return -EINVAL; 1146 1147 break; 1148 1149 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 1150 break; 1151 1152 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 1153 return validate_vp9_compressed_hdr(p); 1154 1155 case V4L2_CTRL_TYPE_VP9_FRAME: 1156 return validate_vp9_frame(p); 1157 case V4L2_CTRL_TYPE_AV1_FRAME: 1158 return validate_av1_frame(p); 1159 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 1160 return validate_av1_sequence(p); 1161 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 1162 break; 1163 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 1164 return validate_av1_film_grain(p); 1165 1166 case V4L2_CTRL_TYPE_AREA: 1167 area = p; 1168 if (!area->width || !area->height) 1169 return -EINVAL; 1170 break; 1171 1172 default: 1173 return -EINVAL; 1174 } 1175 1176 return 0; 1177 } 1178 1179 static int std_validate_elem(const struct v4l2_ctrl *ctrl, u32 idx, 1180 union v4l2_ctrl_ptr ptr) 1181 { 1182 size_t len; 1183 u64 offset; 1184 s64 val; 1185 1186 switch ((u32)ctrl->type) { 1187 case V4L2_CTRL_TYPE_INTEGER: 1188 return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl); 1189 case V4L2_CTRL_TYPE_INTEGER64: 1190 /* 1191 * We can't use the ROUND_TO_RANGE define here due to 1192 * the u64 divide that needs special care. 1193 */ 1194 val = ptr.p_s64[idx]; 1195 if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2)) 1196 val = ctrl->maximum; 1197 else 1198 val += (s64)(ctrl->step / 2); 1199 val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum); 1200 offset = val - ctrl->minimum; 1201 do_div(offset, ctrl->step); 1202 ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step; 1203 return 0; 1204 case V4L2_CTRL_TYPE_U8: 1205 return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl); 1206 case V4L2_CTRL_TYPE_U16: 1207 return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl); 1208 case V4L2_CTRL_TYPE_U32: 1209 return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl); 1210 1211 case V4L2_CTRL_TYPE_BOOLEAN: 1212 ptr.p_s32[idx] = !!ptr.p_s32[idx]; 1213 return 0; 1214 1215 case V4L2_CTRL_TYPE_MENU: 1216 case V4L2_CTRL_TYPE_INTEGER_MENU: 1217 if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum) 1218 return -ERANGE; 1219 if (ptr.p_s32[idx] < BITS_PER_LONG_LONG && 1220 (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx]))) 1221 return -EINVAL; 1222 if (ctrl->type == V4L2_CTRL_TYPE_MENU && 1223 ctrl->qmenu[ptr.p_s32[idx]][0] == '\0') 1224 return -EINVAL; 1225 return 0; 1226 1227 case V4L2_CTRL_TYPE_BITMASK: 1228 ptr.p_s32[idx] &= ctrl->maximum; 1229 return 0; 1230 1231 case V4L2_CTRL_TYPE_BUTTON: 1232 case V4L2_CTRL_TYPE_CTRL_CLASS: 1233 ptr.p_s32[idx] = 0; 1234 return 0; 1235 1236 case V4L2_CTRL_TYPE_STRING: 1237 idx *= ctrl->elem_size; 1238 len = strlen(ptr.p_char + idx); 1239 if (len < ctrl->minimum) 1240 return -ERANGE; 1241 if ((len - (u32)ctrl->minimum) % (u32)ctrl->step) 1242 return -ERANGE; 1243 return 0; 1244 1245 default: 1246 return std_validate_compound(ctrl, idx, ptr); 1247 } 1248 } 1249 1250 int v4l2_ctrl_type_op_validate(const struct v4l2_ctrl *ctrl, 1251 union v4l2_ctrl_ptr ptr) 1252 { 1253 unsigned int i; 1254 int ret = 0; 1255 1256 switch ((u32)ctrl->type) { 1257 case V4L2_CTRL_TYPE_U8: 1258 if (ctrl->maximum == 0xff && ctrl->minimum == 0 && ctrl->step == 1) 1259 return 0; 1260 break; 1261 case V4L2_CTRL_TYPE_U16: 1262 if (ctrl->maximum == 0xffff && ctrl->minimum == 0 && ctrl->step == 1) 1263 return 0; 1264 break; 1265 case V4L2_CTRL_TYPE_U32: 1266 if (ctrl->maximum == 0xffffffff && ctrl->minimum == 0 && ctrl->step == 1) 1267 return 0; 1268 break; 1269 1270 case V4L2_CTRL_TYPE_BUTTON: 1271 case V4L2_CTRL_TYPE_CTRL_CLASS: 1272 memset(ptr.p_s32, 0, ctrl->new_elems * sizeof(s32)); 1273 return 0; 1274 } 1275 1276 for (i = 0; !ret && i < ctrl->new_elems; i++) 1277 ret = std_validate_elem(ctrl, i, ptr); 1278 return ret; 1279 } 1280 EXPORT_SYMBOL(v4l2_ctrl_type_op_validate); 1281 1282 static const struct v4l2_ctrl_type_ops std_type_ops = { 1283 .equal = v4l2_ctrl_type_op_equal, 1284 .init = v4l2_ctrl_type_op_init, 1285 .log = v4l2_ctrl_type_op_log, 1286 .validate = v4l2_ctrl_type_op_validate, 1287 }; 1288 1289 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv) 1290 { 1291 if (!ctrl) 1292 return; 1293 if (!notify) { 1294 ctrl->call_notify = 0; 1295 return; 1296 } 1297 if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify)) 1298 return; 1299 ctrl->handler->notify = notify; 1300 ctrl->handler->notify_priv = priv; 1301 ctrl->call_notify = 1; 1302 } 1303 EXPORT_SYMBOL(v4l2_ctrl_notify); 1304 1305 /* Copy the one value to another. */ 1306 static void ptr_to_ptr(struct v4l2_ctrl *ctrl, 1307 union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to, 1308 unsigned int elems) 1309 { 1310 if (ctrl == NULL) 1311 return; 1312 memcpy(to.p, from.p_const, elems * ctrl->elem_size); 1313 } 1314 1315 /* Copy the new value to the current value. */ 1316 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags) 1317 { 1318 bool changed; 1319 1320 if (ctrl == NULL) 1321 return; 1322 1323 /* has_changed is set by cluster_changed */ 1324 changed = ctrl->has_changed; 1325 if (changed) { 1326 if (ctrl->is_dyn_array) 1327 ctrl->elems = ctrl->new_elems; 1328 ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems); 1329 } 1330 1331 if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) { 1332 /* Note: CH_FLAGS is only set for auto clusters. */ 1333 ctrl->flags &= 1334 ~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE); 1335 if (!is_cur_manual(ctrl->cluster[0])) { 1336 ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE; 1337 if (ctrl->cluster[0]->has_volatiles) 1338 ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE; 1339 } 1340 fh = NULL; 1341 } 1342 if (changed || ch_flags) { 1343 /* If a control was changed that was not one of the controls 1344 modified by the application, then send the event to all. */ 1345 if (!ctrl->is_new) 1346 fh = NULL; 1347 send_event(fh, ctrl, 1348 (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags); 1349 if (ctrl->call_notify && changed && ctrl->handler->notify) 1350 ctrl->handler->notify(ctrl, ctrl->handler->notify_priv); 1351 } 1352 } 1353 1354 /* Copy the current value to the new value */ 1355 void cur_to_new(struct v4l2_ctrl *ctrl) 1356 { 1357 if (ctrl == NULL) 1358 return; 1359 if (ctrl->is_dyn_array) 1360 ctrl->new_elems = ctrl->elems; 1361 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems); 1362 } 1363 1364 static bool req_alloc_array(struct v4l2_ctrl_ref *ref, u32 elems) 1365 { 1366 void *tmp; 1367 1368 if (elems == ref->p_req_array_alloc_elems) 1369 return true; 1370 if (ref->ctrl->is_dyn_array && 1371 elems < ref->p_req_array_alloc_elems) 1372 return true; 1373 1374 tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL); 1375 1376 if (!tmp) { 1377 ref->p_req_array_enomem = true; 1378 return false; 1379 } 1380 ref->p_req_array_enomem = false; 1381 kvfree(ref->p_req.p); 1382 ref->p_req.p = tmp; 1383 ref->p_req_array_alloc_elems = elems; 1384 return true; 1385 } 1386 1387 /* Copy the new value to the request value */ 1388 void new_to_req(struct v4l2_ctrl_ref *ref) 1389 { 1390 struct v4l2_ctrl *ctrl; 1391 1392 if (!ref) 1393 return; 1394 1395 ctrl = ref->ctrl; 1396 if (ctrl->is_array && !req_alloc_array(ref, ctrl->new_elems)) 1397 return; 1398 1399 ref->p_req_elems = ctrl->new_elems; 1400 ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems); 1401 ref->p_req_valid = true; 1402 } 1403 1404 /* Copy the current value to the request value */ 1405 void cur_to_req(struct v4l2_ctrl_ref *ref) 1406 { 1407 struct v4l2_ctrl *ctrl; 1408 1409 if (!ref) 1410 return; 1411 1412 ctrl = ref->ctrl; 1413 if (ctrl->is_array && !req_alloc_array(ref, ctrl->elems)) 1414 return; 1415 1416 ref->p_req_elems = ctrl->elems; 1417 ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems); 1418 ref->p_req_valid = true; 1419 } 1420 1421 /* Copy the request value to the new value */ 1422 int req_to_new(struct v4l2_ctrl_ref *ref) 1423 { 1424 struct v4l2_ctrl *ctrl; 1425 1426 if (!ref) 1427 return 0; 1428 1429 ctrl = ref->ctrl; 1430 1431 /* 1432 * This control was never set in the request, so just use the current 1433 * value. 1434 */ 1435 if (!ref->p_req_valid) { 1436 if (ctrl->is_dyn_array) 1437 ctrl->new_elems = ctrl->elems; 1438 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems); 1439 return 0; 1440 } 1441 1442 /* Not an array, so just copy the request value */ 1443 if (!ctrl->is_array) { 1444 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1445 return 0; 1446 } 1447 1448 /* Sanity check, should never happen */ 1449 if (WARN_ON(!ref->p_req_array_alloc_elems)) 1450 return -ENOMEM; 1451 1452 if (!ctrl->is_dyn_array && 1453 ref->p_req_elems != ctrl->p_array_alloc_elems) 1454 return -ENOMEM; 1455 1456 /* 1457 * Check if the number of elements in the request is more than the 1458 * elements in ctrl->p_array. If so, attempt to realloc ctrl->p_array. 1459 * Note that p_array is allocated with twice the number of elements 1460 * in the dynamic array since it has to store both the current and 1461 * new value of such a control. 1462 */ 1463 if (ref->p_req_elems > ctrl->p_array_alloc_elems) { 1464 unsigned int sz = ref->p_req_elems * ctrl->elem_size; 1465 void *old = ctrl->p_array; 1466 void *tmp = kvzalloc(2 * sz, GFP_KERNEL); 1467 1468 if (!tmp) 1469 return -ENOMEM; 1470 memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size); 1471 memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size); 1472 ctrl->p_new.p = tmp; 1473 ctrl->p_cur.p = tmp + sz; 1474 ctrl->p_array = tmp; 1475 ctrl->p_array_alloc_elems = ref->p_req_elems; 1476 kvfree(old); 1477 } 1478 1479 ctrl->new_elems = ref->p_req_elems; 1480 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1481 return 0; 1482 } 1483 1484 /* Control range checking */ 1485 int check_range(enum v4l2_ctrl_type type, 1486 s64 min, s64 max, u64 step, s64 def) 1487 { 1488 switch (type) { 1489 case V4L2_CTRL_TYPE_BOOLEAN: 1490 if (step != 1 || max > 1 || min < 0) 1491 return -ERANGE; 1492 fallthrough; 1493 case V4L2_CTRL_TYPE_U8: 1494 case V4L2_CTRL_TYPE_U16: 1495 case V4L2_CTRL_TYPE_U32: 1496 case V4L2_CTRL_TYPE_INTEGER: 1497 case V4L2_CTRL_TYPE_INTEGER64: 1498 if (step == 0 || min > max || def < min || def > max) 1499 return -ERANGE; 1500 return 0; 1501 case V4L2_CTRL_TYPE_BITMASK: 1502 if (step || min || !max || (def & ~max)) 1503 return -ERANGE; 1504 return 0; 1505 case V4L2_CTRL_TYPE_MENU: 1506 case V4L2_CTRL_TYPE_INTEGER_MENU: 1507 if (min > max || def < min || def > max || 1508 min < 0 || (step && max >= BITS_PER_LONG_LONG)) 1509 return -ERANGE; 1510 /* Note: step == menu_skip_mask for menu controls. 1511 So here we check if the default value is masked out. */ 1512 if (def < BITS_PER_LONG_LONG && (step & BIT_ULL(def))) 1513 return -EINVAL; 1514 return 0; 1515 case V4L2_CTRL_TYPE_STRING: 1516 if (min > max || min < 0 || step < 1 || def) 1517 return -ERANGE; 1518 return 0; 1519 default: 1520 return 0; 1521 } 1522 } 1523 1524 /* Set the handler's error code if it wasn't set earlier already */ 1525 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err) 1526 { 1527 if (hdl->error == 0) 1528 hdl->error = err; 1529 return err; 1530 } 1531 1532 /* Initialize the handler */ 1533 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl, 1534 unsigned nr_of_controls_hint, 1535 struct lock_class_key *key, const char *name) 1536 { 1537 mutex_init(&hdl->_lock); 1538 hdl->lock = &hdl->_lock; 1539 lockdep_set_class_and_name(hdl->lock, key, name); 1540 INIT_LIST_HEAD(&hdl->ctrls); 1541 INIT_LIST_HEAD(&hdl->ctrl_refs); 1542 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8; 1543 hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]), 1544 GFP_KERNEL); 1545 hdl->error = hdl->buckets ? 0 : -ENOMEM; 1546 v4l2_ctrl_handler_init_request(hdl); 1547 return hdl->error; 1548 } 1549 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class); 1550 1551 /* Free all controls and control refs */ 1552 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl) 1553 { 1554 struct v4l2_ctrl_ref *ref, *next_ref; 1555 struct v4l2_ctrl *ctrl, *next_ctrl; 1556 struct v4l2_subscribed_event *sev, *next_sev; 1557 1558 if (hdl == NULL || hdl->buckets == NULL) 1559 return; 1560 1561 v4l2_ctrl_handler_free_request(hdl); 1562 1563 mutex_lock(hdl->lock); 1564 /* Free all nodes */ 1565 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) { 1566 list_del(&ref->node); 1567 if (ref->p_req_array_alloc_elems) 1568 kvfree(ref->p_req.p); 1569 kfree(ref); 1570 } 1571 /* Free all controls owned by the handler */ 1572 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) { 1573 list_del(&ctrl->node); 1574 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node) 1575 list_del(&sev->node); 1576 kvfree(ctrl->p_array); 1577 kvfree(ctrl); 1578 } 1579 kvfree(hdl->buckets); 1580 hdl->buckets = NULL; 1581 hdl->cached = NULL; 1582 hdl->error = 0; 1583 mutex_unlock(hdl->lock); 1584 mutex_destroy(&hdl->_lock); 1585 } 1586 EXPORT_SYMBOL(v4l2_ctrl_handler_free); 1587 1588 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer 1589 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing 1590 with applications that do not use the NEXT_CTRL flag. 1591 1592 We just find the n-th private user control. It's O(N), but that should not 1593 be an issue in this particular case. */ 1594 static struct v4l2_ctrl_ref *find_private_ref( 1595 struct v4l2_ctrl_handler *hdl, u32 id) 1596 { 1597 struct v4l2_ctrl_ref *ref; 1598 1599 id -= V4L2_CID_PRIVATE_BASE; 1600 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1601 /* Search for private user controls that are compatible with 1602 VIDIOC_G/S_CTRL. */ 1603 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER && 1604 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) { 1605 if (!ref->ctrl->is_int) 1606 continue; 1607 if (id == 0) 1608 return ref; 1609 id--; 1610 } 1611 } 1612 return NULL; 1613 } 1614 1615 /* Find a control with the given ID. */ 1616 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id) 1617 { 1618 struct v4l2_ctrl_ref *ref; 1619 int bucket; 1620 1621 id &= V4L2_CTRL_ID_MASK; 1622 1623 /* Old-style private controls need special handling */ 1624 if (id >= V4L2_CID_PRIVATE_BASE) 1625 return find_private_ref(hdl, id); 1626 bucket = id % hdl->nr_of_buckets; 1627 1628 /* Simple optimization: cache the last control found */ 1629 if (hdl->cached && hdl->cached->ctrl->id == id) 1630 return hdl->cached; 1631 1632 /* Not in cache, search the hash */ 1633 ref = hdl->buckets ? hdl->buckets[bucket] : NULL; 1634 while (ref && ref->ctrl->id != id) 1635 ref = ref->next; 1636 1637 if (ref) 1638 hdl->cached = ref; /* cache it! */ 1639 return ref; 1640 } 1641 1642 /* Find a control with the given ID. Take the handler's lock first. */ 1643 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id) 1644 { 1645 struct v4l2_ctrl_ref *ref = NULL; 1646 1647 if (hdl) { 1648 mutex_lock(hdl->lock); 1649 ref = find_ref(hdl, id); 1650 mutex_unlock(hdl->lock); 1651 } 1652 return ref; 1653 } 1654 1655 /* Find a control with the given ID. */ 1656 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id) 1657 { 1658 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id); 1659 1660 return ref ? ref->ctrl : NULL; 1661 } 1662 EXPORT_SYMBOL(v4l2_ctrl_find); 1663 1664 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */ 1665 int handler_new_ref(struct v4l2_ctrl_handler *hdl, 1666 struct v4l2_ctrl *ctrl, 1667 struct v4l2_ctrl_ref **ctrl_ref, 1668 bool from_other_dev, bool allocate_req) 1669 { 1670 struct v4l2_ctrl_ref *ref; 1671 struct v4l2_ctrl_ref *new_ref; 1672 u32 id = ctrl->id; 1673 u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1; 1674 int bucket = id % hdl->nr_of_buckets; /* which bucket to use */ 1675 unsigned int size_extra_req = 0; 1676 1677 if (ctrl_ref) 1678 *ctrl_ref = NULL; 1679 1680 /* 1681 * Automatically add the control class if it is not yet present and 1682 * the new control is not a compound control. 1683 */ 1684 if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES && 1685 id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL) 1686 if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0)) 1687 return hdl->error; 1688 1689 if (hdl->error) 1690 return hdl->error; 1691 1692 if (allocate_req && !ctrl->is_array) 1693 size_extra_req = ctrl->elems * ctrl->elem_size; 1694 new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL); 1695 if (!new_ref) 1696 return handler_set_err(hdl, -ENOMEM); 1697 new_ref->ctrl = ctrl; 1698 new_ref->from_other_dev = from_other_dev; 1699 if (size_extra_req) 1700 new_ref->p_req.p = &new_ref[1]; 1701 1702 INIT_LIST_HEAD(&new_ref->node); 1703 1704 mutex_lock(hdl->lock); 1705 1706 /* Add immediately at the end of the list if the list is empty, or if 1707 the last element in the list has a lower ID. 1708 This ensures that when elements are added in ascending order the 1709 insertion is an O(1) operation. */ 1710 if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) { 1711 list_add_tail(&new_ref->node, &hdl->ctrl_refs); 1712 goto insert_in_hash; 1713 } 1714 1715 /* Find insert position in sorted list */ 1716 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1717 if (ref->ctrl->id < id) 1718 continue; 1719 /* Don't add duplicates */ 1720 if (ref->ctrl->id == id) { 1721 kfree(new_ref); 1722 goto unlock; 1723 } 1724 list_add(&new_ref->node, ref->node.prev); 1725 break; 1726 } 1727 1728 insert_in_hash: 1729 /* Insert the control node in the hash */ 1730 new_ref->next = hdl->buckets[bucket]; 1731 hdl->buckets[bucket] = new_ref; 1732 if (ctrl_ref) 1733 *ctrl_ref = new_ref; 1734 if (ctrl->handler == hdl) { 1735 /* By default each control starts in a cluster of its own. 1736 * new_ref->ctrl is basically a cluster array with one 1737 * element, so that's perfect to use as the cluster pointer. 1738 * But only do this for the handler that owns the control. 1739 */ 1740 ctrl->cluster = &new_ref->ctrl; 1741 ctrl->ncontrols = 1; 1742 } 1743 1744 unlock: 1745 mutex_unlock(hdl->lock); 1746 return 0; 1747 } 1748 1749 /* Add a new control */ 1750 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl, 1751 const struct v4l2_ctrl_ops *ops, 1752 const struct v4l2_ctrl_type_ops *type_ops, 1753 u32 id, const char *name, enum v4l2_ctrl_type type, 1754 s64 min, s64 max, u64 step, s64 def, 1755 const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size, 1756 u32 flags, const char * const *qmenu, 1757 const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def, 1758 void *priv) 1759 { 1760 struct v4l2_ctrl *ctrl; 1761 unsigned sz_extra; 1762 unsigned nr_of_dims = 0; 1763 unsigned elems = 1; 1764 bool is_array; 1765 unsigned tot_ctrl_size; 1766 void *data; 1767 int err; 1768 1769 if (hdl->error) 1770 return NULL; 1771 1772 while (dims && dims[nr_of_dims]) { 1773 elems *= dims[nr_of_dims]; 1774 nr_of_dims++; 1775 if (nr_of_dims == V4L2_CTRL_MAX_DIMS) 1776 break; 1777 } 1778 is_array = nr_of_dims > 0; 1779 1780 /* Prefill elem_size for all types handled by std_type_ops */ 1781 switch ((u32)type) { 1782 case V4L2_CTRL_TYPE_INTEGER64: 1783 elem_size = sizeof(s64); 1784 break; 1785 case V4L2_CTRL_TYPE_STRING: 1786 elem_size = max + 1; 1787 break; 1788 case V4L2_CTRL_TYPE_U8: 1789 elem_size = sizeof(u8); 1790 break; 1791 case V4L2_CTRL_TYPE_U16: 1792 elem_size = sizeof(u16); 1793 break; 1794 case V4L2_CTRL_TYPE_U32: 1795 elem_size = sizeof(u32); 1796 break; 1797 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 1798 elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence); 1799 break; 1800 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 1801 elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture); 1802 break; 1803 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 1804 elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation); 1805 break; 1806 case V4L2_CTRL_TYPE_FWHT_PARAMS: 1807 elem_size = sizeof(struct v4l2_ctrl_fwht_params); 1808 break; 1809 case V4L2_CTRL_TYPE_H264_SPS: 1810 elem_size = sizeof(struct v4l2_ctrl_h264_sps); 1811 break; 1812 case V4L2_CTRL_TYPE_H264_PPS: 1813 elem_size = sizeof(struct v4l2_ctrl_h264_pps); 1814 break; 1815 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 1816 elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix); 1817 break; 1818 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 1819 elem_size = sizeof(struct v4l2_ctrl_h264_slice_params); 1820 break; 1821 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 1822 elem_size = sizeof(struct v4l2_ctrl_h264_decode_params); 1823 break; 1824 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 1825 elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights); 1826 break; 1827 case V4L2_CTRL_TYPE_VP8_FRAME: 1828 elem_size = sizeof(struct v4l2_ctrl_vp8_frame); 1829 break; 1830 case V4L2_CTRL_TYPE_HEVC_SPS: 1831 elem_size = sizeof(struct v4l2_ctrl_hevc_sps); 1832 break; 1833 case V4L2_CTRL_TYPE_HEVC_PPS: 1834 elem_size = sizeof(struct v4l2_ctrl_hevc_pps); 1835 break; 1836 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 1837 elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params); 1838 break; 1839 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 1840 elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix); 1841 break; 1842 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 1843 elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params); 1844 break; 1845 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 1846 elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info); 1847 break; 1848 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 1849 elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display); 1850 break; 1851 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 1852 elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr); 1853 break; 1854 case V4L2_CTRL_TYPE_VP9_FRAME: 1855 elem_size = sizeof(struct v4l2_ctrl_vp9_frame); 1856 break; 1857 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 1858 elem_size = sizeof(struct v4l2_ctrl_av1_sequence); 1859 break; 1860 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 1861 elem_size = sizeof(struct v4l2_ctrl_av1_tile_group_entry); 1862 break; 1863 case V4L2_CTRL_TYPE_AV1_FRAME: 1864 elem_size = sizeof(struct v4l2_ctrl_av1_frame); 1865 break; 1866 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 1867 elem_size = sizeof(struct v4l2_ctrl_av1_film_grain); 1868 break; 1869 case V4L2_CTRL_TYPE_AREA: 1870 elem_size = sizeof(struct v4l2_area); 1871 break; 1872 default: 1873 if (type < V4L2_CTRL_COMPOUND_TYPES) 1874 elem_size = sizeof(s32); 1875 break; 1876 } 1877 1878 /* Sanity checks */ 1879 if (id == 0 || name == NULL || !elem_size || 1880 id >= V4L2_CID_PRIVATE_BASE || 1881 (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) || 1882 (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) { 1883 handler_set_err(hdl, -ERANGE); 1884 return NULL; 1885 } 1886 err = check_range(type, min, max, step, def); 1887 if (err) { 1888 handler_set_err(hdl, err); 1889 return NULL; 1890 } 1891 if (is_array && 1892 (type == V4L2_CTRL_TYPE_BUTTON || 1893 type == V4L2_CTRL_TYPE_CTRL_CLASS)) { 1894 handler_set_err(hdl, -EINVAL); 1895 return NULL; 1896 } 1897 if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) { 1898 /* 1899 * For now only support this for one-dimensional arrays only. 1900 * 1901 * This can be relaxed in the future, but this will 1902 * require more effort. 1903 */ 1904 if (nr_of_dims != 1) { 1905 handler_set_err(hdl, -EINVAL); 1906 return NULL; 1907 } 1908 /* Start with just 1 element */ 1909 elems = 1; 1910 } 1911 1912 tot_ctrl_size = elem_size * elems; 1913 sz_extra = 0; 1914 if (type == V4L2_CTRL_TYPE_BUTTON) 1915 flags |= V4L2_CTRL_FLAG_WRITE_ONLY | 1916 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE; 1917 else if (type == V4L2_CTRL_TYPE_CTRL_CLASS) 1918 flags |= V4L2_CTRL_FLAG_READ_ONLY; 1919 else if (!is_array && 1920 (type == V4L2_CTRL_TYPE_INTEGER64 || 1921 type == V4L2_CTRL_TYPE_STRING || 1922 type >= V4L2_CTRL_COMPOUND_TYPES)) 1923 sz_extra += 2 * tot_ctrl_size; 1924 1925 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) 1926 sz_extra += elem_size; 1927 1928 ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL); 1929 if (ctrl == NULL) { 1930 handler_set_err(hdl, -ENOMEM); 1931 return NULL; 1932 } 1933 1934 INIT_LIST_HEAD(&ctrl->node); 1935 INIT_LIST_HEAD(&ctrl->ev_subs); 1936 ctrl->handler = hdl; 1937 ctrl->ops = ops; 1938 ctrl->type_ops = type_ops ? type_ops : &std_type_ops; 1939 ctrl->id = id; 1940 ctrl->name = name; 1941 ctrl->type = type; 1942 ctrl->flags = flags; 1943 ctrl->minimum = min; 1944 ctrl->maximum = max; 1945 ctrl->step = step; 1946 ctrl->default_value = def; 1947 ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING; 1948 ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string; 1949 ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64; 1950 ctrl->is_array = is_array; 1951 ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY); 1952 ctrl->elems = elems; 1953 ctrl->new_elems = elems; 1954 ctrl->nr_of_dims = nr_of_dims; 1955 if (nr_of_dims) 1956 memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0])); 1957 ctrl->elem_size = elem_size; 1958 if (type == V4L2_CTRL_TYPE_MENU) 1959 ctrl->qmenu = qmenu; 1960 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 1961 ctrl->qmenu_int = qmenu_int; 1962 ctrl->priv = priv; 1963 ctrl->cur.val = ctrl->val = def; 1964 data = &ctrl[1]; 1965 1966 if (ctrl->is_array) { 1967 ctrl->p_array_alloc_elems = elems; 1968 ctrl->p_array = kvzalloc(2 * elems * elem_size, GFP_KERNEL); 1969 if (!ctrl->p_array) { 1970 kvfree(ctrl); 1971 return NULL; 1972 } 1973 data = ctrl->p_array; 1974 } 1975 1976 if (!ctrl->is_int) { 1977 ctrl->p_new.p = data; 1978 ctrl->p_cur.p = data + tot_ctrl_size; 1979 } else { 1980 ctrl->p_new.p = &ctrl->val; 1981 ctrl->p_cur.p = &ctrl->cur.val; 1982 } 1983 1984 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) { 1985 if (ctrl->is_array) 1986 ctrl->p_def.p = &ctrl[1]; 1987 else 1988 ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size; 1989 memcpy(ctrl->p_def.p, p_def.p_const, elem_size); 1990 } 1991 1992 ctrl->type_ops->init(ctrl, 0, ctrl->p_cur); 1993 cur_to_new(ctrl); 1994 1995 if (handler_new_ref(hdl, ctrl, NULL, false, false)) { 1996 kvfree(ctrl->p_array); 1997 kvfree(ctrl); 1998 return NULL; 1999 } 2000 mutex_lock(hdl->lock); 2001 list_add_tail(&ctrl->node, &hdl->ctrls); 2002 mutex_unlock(hdl->lock); 2003 return ctrl; 2004 } 2005 2006 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl, 2007 const struct v4l2_ctrl_config *cfg, void *priv) 2008 { 2009 bool is_menu; 2010 struct v4l2_ctrl *ctrl; 2011 const char *name = cfg->name; 2012 const char * const *qmenu = cfg->qmenu; 2013 const s64 *qmenu_int = cfg->qmenu_int; 2014 enum v4l2_ctrl_type type = cfg->type; 2015 u32 flags = cfg->flags; 2016 s64 min = cfg->min; 2017 s64 max = cfg->max; 2018 u64 step = cfg->step; 2019 s64 def = cfg->def; 2020 2021 if (name == NULL) 2022 v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step, 2023 &def, &flags); 2024 2025 is_menu = (type == V4L2_CTRL_TYPE_MENU || 2026 type == V4L2_CTRL_TYPE_INTEGER_MENU); 2027 if (is_menu) 2028 WARN_ON(step); 2029 else 2030 WARN_ON(cfg->menu_skip_mask); 2031 if (type == V4L2_CTRL_TYPE_MENU && !qmenu) { 2032 qmenu = v4l2_ctrl_get_menu(cfg->id); 2033 } else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) { 2034 handler_set_err(hdl, -EINVAL); 2035 return NULL; 2036 } 2037 2038 ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name, 2039 type, min, max, 2040 is_menu ? cfg->menu_skip_mask : step, def, 2041 cfg->dims, cfg->elem_size, 2042 flags, qmenu, qmenu_int, cfg->p_def, priv); 2043 if (ctrl) 2044 ctrl->is_private = cfg->is_private; 2045 return ctrl; 2046 } 2047 EXPORT_SYMBOL(v4l2_ctrl_new_custom); 2048 2049 /* Helper function for standard non-menu controls */ 2050 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl, 2051 const struct v4l2_ctrl_ops *ops, 2052 u32 id, s64 min, s64 max, u64 step, s64 def) 2053 { 2054 const char *name; 2055 enum v4l2_ctrl_type type; 2056 u32 flags; 2057 2058 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2059 if (type == V4L2_CTRL_TYPE_MENU || 2060 type == V4L2_CTRL_TYPE_INTEGER_MENU || 2061 type >= V4L2_CTRL_COMPOUND_TYPES) { 2062 handler_set_err(hdl, -EINVAL); 2063 return NULL; 2064 } 2065 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2066 min, max, step, def, NULL, 0, 2067 flags, NULL, NULL, ptr_null, NULL); 2068 } 2069 EXPORT_SYMBOL(v4l2_ctrl_new_std); 2070 2071 /* Helper function for standard menu controls */ 2072 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl, 2073 const struct v4l2_ctrl_ops *ops, 2074 u32 id, u8 _max, u64 mask, u8 _def) 2075 { 2076 const char * const *qmenu = NULL; 2077 const s64 *qmenu_int = NULL; 2078 unsigned int qmenu_int_len = 0; 2079 const char *name; 2080 enum v4l2_ctrl_type type; 2081 s64 min; 2082 s64 max = _max; 2083 s64 def = _def; 2084 u64 step; 2085 u32 flags; 2086 2087 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2088 2089 if (type == V4L2_CTRL_TYPE_MENU) 2090 qmenu = v4l2_ctrl_get_menu(id); 2091 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 2092 qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len); 2093 2094 if ((!qmenu && !qmenu_int) || (qmenu_int && max >= qmenu_int_len)) { 2095 handler_set_err(hdl, -EINVAL); 2096 return NULL; 2097 } 2098 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2099 0, max, mask, def, NULL, 0, 2100 flags, qmenu, qmenu_int, ptr_null, NULL); 2101 } 2102 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu); 2103 2104 /* Helper function for standard menu controls with driver defined menu */ 2105 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl, 2106 const struct v4l2_ctrl_ops *ops, u32 id, u8 _max, 2107 u64 mask, u8 _def, const char * const *qmenu) 2108 { 2109 enum v4l2_ctrl_type type; 2110 const char *name; 2111 u32 flags; 2112 u64 step; 2113 s64 min; 2114 s64 max = _max; 2115 s64 def = _def; 2116 2117 /* v4l2_ctrl_new_std_menu_items() should only be called for 2118 * standard controls without a standard menu. 2119 */ 2120 if (v4l2_ctrl_get_menu(id)) { 2121 handler_set_err(hdl, -EINVAL); 2122 return NULL; 2123 } 2124 2125 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2126 if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) { 2127 handler_set_err(hdl, -EINVAL); 2128 return NULL; 2129 } 2130 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2131 0, max, mask, def, NULL, 0, 2132 flags, qmenu, NULL, ptr_null, NULL); 2133 2134 } 2135 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items); 2136 2137 /* Helper function for standard compound controls */ 2138 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl, 2139 const struct v4l2_ctrl_ops *ops, u32 id, 2140 const union v4l2_ctrl_ptr p_def) 2141 { 2142 const char *name; 2143 enum v4l2_ctrl_type type; 2144 u32 flags; 2145 s64 min, max, step, def; 2146 2147 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2148 if (type < V4L2_CTRL_COMPOUND_TYPES) { 2149 handler_set_err(hdl, -EINVAL); 2150 return NULL; 2151 } 2152 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2153 min, max, step, def, NULL, 0, 2154 flags, NULL, NULL, p_def, NULL); 2155 } 2156 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound); 2157 2158 /* Helper function for standard integer menu controls */ 2159 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl, 2160 const struct v4l2_ctrl_ops *ops, 2161 u32 id, u8 _max, u8 _def, const s64 *qmenu_int) 2162 { 2163 const char *name; 2164 enum v4l2_ctrl_type type; 2165 s64 min; 2166 u64 step; 2167 s64 max = _max; 2168 s64 def = _def; 2169 u32 flags; 2170 2171 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2172 if (type != V4L2_CTRL_TYPE_INTEGER_MENU) { 2173 handler_set_err(hdl, -EINVAL); 2174 return NULL; 2175 } 2176 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2177 0, max, 0, def, NULL, 0, 2178 flags, NULL, qmenu_int, ptr_null, NULL); 2179 } 2180 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu); 2181 2182 /* Add the controls from another handler to our own. */ 2183 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl, 2184 struct v4l2_ctrl_handler *add, 2185 bool (*filter)(const struct v4l2_ctrl *ctrl), 2186 bool from_other_dev) 2187 { 2188 struct v4l2_ctrl_ref *ref; 2189 int ret = 0; 2190 2191 /* Do nothing if either handler is NULL or if they are the same */ 2192 if (!hdl || !add || hdl == add) 2193 return 0; 2194 if (hdl->error) 2195 return hdl->error; 2196 mutex_lock(add->lock); 2197 list_for_each_entry(ref, &add->ctrl_refs, node) { 2198 struct v4l2_ctrl *ctrl = ref->ctrl; 2199 2200 /* Skip handler-private controls. */ 2201 if (ctrl->is_private) 2202 continue; 2203 /* And control classes */ 2204 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 2205 continue; 2206 /* Filter any unwanted controls */ 2207 if (filter && !filter(ctrl)) 2208 continue; 2209 ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false); 2210 if (ret) 2211 break; 2212 } 2213 mutex_unlock(add->lock); 2214 return ret; 2215 } 2216 EXPORT_SYMBOL(v4l2_ctrl_add_handler); 2217 2218 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl) 2219 { 2220 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX) 2221 return true; 2222 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX) 2223 return true; 2224 switch (ctrl->id) { 2225 case V4L2_CID_AUDIO_MUTE: 2226 case V4L2_CID_AUDIO_VOLUME: 2227 case V4L2_CID_AUDIO_BALANCE: 2228 case V4L2_CID_AUDIO_BASS: 2229 case V4L2_CID_AUDIO_TREBLE: 2230 case V4L2_CID_AUDIO_LOUDNESS: 2231 return true; 2232 default: 2233 break; 2234 } 2235 return false; 2236 } 2237 EXPORT_SYMBOL(v4l2_ctrl_radio_filter); 2238 2239 /* Cluster controls */ 2240 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls) 2241 { 2242 bool has_volatiles = false; 2243 int i; 2244 2245 /* The first control is the master control and it must not be NULL */ 2246 if (WARN_ON(ncontrols == 0 || controls[0] == NULL)) 2247 return; 2248 2249 for (i = 0; i < ncontrols; i++) { 2250 if (controls[i]) { 2251 controls[i]->cluster = controls; 2252 controls[i]->ncontrols = ncontrols; 2253 if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE) 2254 has_volatiles = true; 2255 } 2256 } 2257 controls[0]->has_volatiles = has_volatiles; 2258 } 2259 EXPORT_SYMBOL(v4l2_ctrl_cluster); 2260 2261 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls, 2262 u8 manual_val, bool set_volatile) 2263 { 2264 struct v4l2_ctrl *master = controls[0]; 2265 u32 flag = 0; 2266 int i; 2267 2268 v4l2_ctrl_cluster(ncontrols, controls); 2269 WARN_ON(ncontrols <= 1); 2270 WARN_ON(manual_val < master->minimum || manual_val > master->maximum); 2271 WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl)); 2272 master->is_auto = true; 2273 master->has_volatiles = set_volatile; 2274 master->manual_mode_value = manual_val; 2275 master->flags |= V4L2_CTRL_FLAG_UPDATE; 2276 2277 if (!is_cur_manual(master)) 2278 flag = V4L2_CTRL_FLAG_INACTIVE | 2279 (set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0); 2280 2281 for (i = 1; i < ncontrols; i++) 2282 if (controls[i]) 2283 controls[i]->flags |= flag; 2284 } 2285 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster); 2286 2287 /* 2288 * Obtain the current volatile values of an autocluster and mark them 2289 * as new. 2290 */ 2291 void update_from_auto_cluster(struct v4l2_ctrl *master) 2292 { 2293 int i; 2294 2295 for (i = 1; i < master->ncontrols; i++) 2296 cur_to_new(master->cluster[i]); 2297 if (!call_op(master, g_volatile_ctrl)) 2298 for (i = 1; i < master->ncontrols; i++) 2299 if (master->cluster[i]) 2300 master->cluster[i]->is_new = 1; 2301 } 2302 2303 /* 2304 * Return non-zero if one or more of the controls in the cluster has a new 2305 * value that differs from the current value. 2306 */ 2307 static int cluster_changed(struct v4l2_ctrl *master) 2308 { 2309 bool changed = false; 2310 int i; 2311 2312 for (i = 0; i < master->ncontrols; i++) { 2313 struct v4l2_ctrl *ctrl = master->cluster[i]; 2314 bool ctrl_changed = false; 2315 2316 if (!ctrl) 2317 continue; 2318 2319 if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) { 2320 changed = true; 2321 ctrl_changed = true; 2322 } 2323 2324 /* 2325 * Set has_changed to false to avoid generating 2326 * the event V4L2_EVENT_CTRL_CH_VALUE 2327 */ 2328 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) { 2329 ctrl->has_changed = false; 2330 continue; 2331 } 2332 2333 if (ctrl->elems != ctrl->new_elems) 2334 ctrl_changed = true; 2335 if (!ctrl_changed) 2336 ctrl_changed = !ctrl->type_ops->equal(ctrl, 2337 ctrl->p_cur, ctrl->p_new); 2338 ctrl->has_changed = ctrl_changed; 2339 changed |= ctrl->has_changed; 2340 } 2341 return changed; 2342 } 2343 2344 /* 2345 * Core function that calls try/s_ctrl and ensures that the new value is 2346 * copied to the current value on a set. 2347 * Must be called with ctrl->handler->lock held. 2348 */ 2349 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master, 2350 bool set, u32 ch_flags) 2351 { 2352 bool update_flag; 2353 int ret; 2354 int i; 2355 2356 /* 2357 * Go through the cluster and either validate the new value or 2358 * (if no new value was set), copy the current value to the new 2359 * value, ensuring a consistent view for the control ops when 2360 * called. 2361 */ 2362 for (i = 0; i < master->ncontrols; i++) { 2363 struct v4l2_ctrl *ctrl = master->cluster[i]; 2364 2365 if (!ctrl) 2366 continue; 2367 2368 if (!ctrl->is_new) { 2369 cur_to_new(ctrl); 2370 continue; 2371 } 2372 /* 2373 * Check again: it may have changed since the 2374 * previous check in try_or_set_ext_ctrls(). 2375 */ 2376 if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)) 2377 return -EBUSY; 2378 } 2379 2380 ret = call_op(master, try_ctrl); 2381 2382 /* Don't set if there is no change */ 2383 if (ret || !set || !cluster_changed(master)) 2384 return ret; 2385 ret = call_op(master, s_ctrl); 2386 if (ret) 2387 return ret; 2388 2389 /* If OK, then make the new values permanent. */ 2390 update_flag = is_cur_manual(master) != is_new_manual(master); 2391 2392 for (i = 0; i < master->ncontrols; i++) { 2393 /* 2394 * If we switch from auto to manual mode, and this cluster 2395 * contains volatile controls, then all non-master controls 2396 * have to be marked as changed. The 'new' value contains 2397 * the volatile value (obtained by update_from_auto_cluster), 2398 * which now has to become the current value. 2399 */ 2400 if (i && update_flag && is_new_manual(master) && 2401 master->has_volatiles && master->cluster[i]) 2402 master->cluster[i]->has_changed = true; 2403 2404 new_to_cur(fh, master->cluster[i], ch_flags | 2405 ((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0)); 2406 } 2407 return 0; 2408 } 2409 2410 /* Activate/deactivate a control. */ 2411 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active) 2412 { 2413 /* invert since the actual flag is called 'inactive' */ 2414 bool inactive = !active; 2415 bool old; 2416 2417 if (ctrl == NULL) 2418 return; 2419 2420 if (inactive) 2421 /* set V4L2_CTRL_FLAG_INACTIVE */ 2422 old = test_and_set_bit(4, &ctrl->flags); 2423 else 2424 /* clear V4L2_CTRL_FLAG_INACTIVE */ 2425 old = test_and_clear_bit(4, &ctrl->flags); 2426 if (old != inactive) 2427 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 2428 } 2429 EXPORT_SYMBOL(v4l2_ctrl_activate); 2430 2431 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed) 2432 { 2433 bool old; 2434 2435 if (ctrl == NULL) 2436 return; 2437 2438 lockdep_assert_held(ctrl->handler->lock); 2439 2440 if (grabbed) 2441 /* set V4L2_CTRL_FLAG_GRABBED */ 2442 old = test_and_set_bit(1, &ctrl->flags); 2443 else 2444 /* clear V4L2_CTRL_FLAG_GRABBED */ 2445 old = test_and_clear_bit(1, &ctrl->flags); 2446 if (old != grabbed) 2447 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 2448 } 2449 EXPORT_SYMBOL(__v4l2_ctrl_grab); 2450 2451 /* Call s_ctrl for all controls owned by the handler */ 2452 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2453 { 2454 struct v4l2_ctrl *ctrl; 2455 int ret = 0; 2456 2457 if (hdl == NULL) 2458 return 0; 2459 2460 lockdep_assert_held(hdl->lock); 2461 2462 list_for_each_entry(ctrl, &hdl->ctrls, node) 2463 ctrl->done = false; 2464 2465 list_for_each_entry(ctrl, &hdl->ctrls, node) { 2466 struct v4l2_ctrl *master = ctrl->cluster[0]; 2467 int i; 2468 2469 /* Skip if this control was already handled by a cluster. */ 2470 /* Skip button controls and read-only controls. */ 2471 if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON || 2472 (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY)) 2473 continue; 2474 2475 for (i = 0; i < master->ncontrols; i++) { 2476 if (master->cluster[i]) { 2477 cur_to_new(master->cluster[i]); 2478 master->cluster[i]->is_new = 1; 2479 master->cluster[i]->done = true; 2480 } 2481 } 2482 ret = call_op(master, s_ctrl); 2483 if (ret) 2484 break; 2485 } 2486 2487 return ret; 2488 } 2489 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup); 2490 2491 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2492 { 2493 int ret; 2494 2495 if (hdl == NULL) 2496 return 0; 2497 2498 mutex_lock(hdl->lock); 2499 ret = __v4l2_ctrl_handler_setup(hdl); 2500 mutex_unlock(hdl->lock); 2501 2502 return ret; 2503 } 2504 EXPORT_SYMBOL(v4l2_ctrl_handler_setup); 2505 2506 /* Log the control name and value */ 2507 static void log_ctrl(const struct v4l2_ctrl_handler *hdl, 2508 struct v4l2_ctrl *ctrl, 2509 const char *prefix, const char *colon) 2510 { 2511 if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY)) 2512 return; 2513 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 2514 return; 2515 2516 pr_info("%s%s%s: ", prefix, colon, ctrl->name); 2517 2518 if (ctrl->handler != hdl) 2519 v4l2_ctrl_lock(ctrl); 2520 ctrl->type_ops->log(ctrl); 2521 if (ctrl->handler != hdl) 2522 v4l2_ctrl_unlock(ctrl); 2523 2524 if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE | 2525 V4L2_CTRL_FLAG_GRABBED | 2526 V4L2_CTRL_FLAG_VOLATILE)) { 2527 if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE) 2528 pr_cont(" inactive"); 2529 if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED) 2530 pr_cont(" grabbed"); 2531 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) 2532 pr_cont(" volatile"); 2533 } 2534 pr_cont("\n"); 2535 } 2536 2537 /* Log all controls owned by the handler */ 2538 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl, 2539 const char *prefix) 2540 { 2541 struct v4l2_ctrl_ref *ref; 2542 const char *colon = ""; 2543 int len; 2544 2545 if (!hdl) 2546 return; 2547 if (!prefix) 2548 prefix = ""; 2549 len = strlen(prefix); 2550 if (len && prefix[len - 1] != ' ') 2551 colon = ": "; 2552 mutex_lock(hdl->lock); 2553 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 2554 if (ref->from_other_dev || 2555 (ref->ctrl->flags & V4L2_CTRL_FLAG_DISABLED)) 2556 continue; 2557 log_ctrl(hdl, ref->ctrl, prefix, colon); 2558 } 2559 mutex_unlock(hdl->lock); 2560 } 2561 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status); 2562 2563 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl, 2564 const struct v4l2_ctrl_ops *ctrl_ops, 2565 const struct v4l2_fwnode_device_properties *p) 2566 { 2567 if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) { 2568 u32 orientation_ctrl; 2569 2570 switch (p->orientation) { 2571 case V4L2_FWNODE_ORIENTATION_FRONT: 2572 orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT; 2573 break; 2574 case V4L2_FWNODE_ORIENTATION_BACK: 2575 orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK; 2576 break; 2577 case V4L2_FWNODE_ORIENTATION_EXTERNAL: 2578 orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL; 2579 break; 2580 default: 2581 return -EINVAL; 2582 } 2583 if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops, 2584 V4L2_CID_CAMERA_ORIENTATION, 2585 V4L2_CAMERA_ORIENTATION_EXTERNAL, 0, 2586 orientation_ctrl)) 2587 return hdl->error; 2588 } 2589 2590 if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) { 2591 if (!v4l2_ctrl_new_std(hdl, ctrl_ops, 2592 V4L2_CID_CAMERA_SENSOR_ROTATION, 2593 p->rotation, p->rotation, 1, 2594 p->rotation)) 2595 return hdl->error; 2596 } 2597 2598 return hdl->error; 2599 } 2600 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties); 2601