1 // SPDX-License-Identifier: GPL-2.0-or-later
/*
 * V4L2 controls framework core implementation.
 *
 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl>
 */
8 #include <linux/export.h>
10 #include <linux/slab.h>
11 #include <media/v4l2-ctrls.h>
12 #include <media/v4l2-event.h>
13 #include <media/v4l2-fwnode.h>
15 #include "v4l2-ctrls-priv.h"
/* A zero-filled control value pointer: every member of the union reads as NULL. */
static const union v4l2_ctrl_ptr ptr_null;
19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
22 memset(ev, 0, sizeof(*ev));
23 ev->type = V4L2_EVENT_CTRL;
25 ev->u.ctrl.changes = changes;
26 ev->u.ctrl.type = ctrl->type;
27 ev->u.ctrl.flags = user_flags(ctrl);
29 ev->u.ctrl.value64 = 0;
31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
32 ev->u.ctrl.minimum = ctrl->minimum;
33 ev->u.ctrl.maximum = ctrl->maximum;
34 if (ctrl->type == V4L2_CTRL_TYPE_MENU
35 || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
38 ev->u.ctrl.step = ctrl->step;
39 ev->u.ctrl.default_value = ctrl->default_value;
42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
45 u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;
47 if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
48 changes |= V4L2_EVENT_CTRL_CH_VALUE;
49 fill_event(&ev, ctrl, changes);
50 v4l2_event_queue_fh(fh, &ev);
53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
56 struct v4l2_subscribed_event *sev;
58 if (list_empty(&ctrl->ev_subs))
60 fill_event(&ev, ctrl, changes);
62 list_for_each_entry(sev, &ctrl->ev_subs, node)
64 (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
65 v4l2_event_queue_fh(sev->fh, &ev);
68 static bool std_equal(const struct v4l2_ctrl *ctrl, u32 idx,
69 union v4l2_ctrl_ptr ptr1,
70 union v4l2_ctrl_ptr ptr2)
73 case V4L2_CTRL_TYPE_BUTTON:
75 case V4L2_CTRL_TYPE_STRING:
76 idx *= ctrl->elem_size;
77 /* strings are always 0-terminated */
78 return !strcmp(ptr1.p_char + idx, ptr2.p_char + idx);
79 case V4L2_CTRL_TYPE_INTEGER64:
80 return ptr1.p_s64[idx] == ptr2.p_s64[idx];
81 case V4L2_CTRL_TYPE_U8:
82 return ptr1.p_u8[idx] == ptr2.p_u8[idx];
83 case V4L2_CTRL_TYPE_U16:
84 return ptr1.p_u16[idx] == ptr2.p_u16[idx];
85 case V4L2_CTRL_TYPE_U32:
86 return ptr1.p_u32[idx] == ptr2.p_u32[idx];
89 return ptr1.p_s32[idx] == ptr2.p_s32[idx];
90 idx *= ctrl->elem_size;
91 return !memcmp(ptr1.p_const + idx, ptr2.p_const + idx,
96 /* Default intra MPEG-2 quantisation coefficients, from the specification. */
97 static const u8 mpeg2_intra_quant_matrix[64] = {
98 8, 16, 16, 19, 16, 19, 22, 22,
99 22, 22, 22, 22, 26, 24, 26, 27,
100 27, 27, 26, 26, 26, 26, 27, 27,
101 27, 29, 29, 29, 34, 34, 34, 29,
102 29, 29, 27, 27, 29, 29, 32, 32,
103 34, 34, 37, 38, 37, 35, 35, 34,
104 35, 38, 38, 40, 40, 40, 48, 48,
105 46, 46, 56, 56, 58, 69, 69, 83
108 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
109 union v4l2_ctrl_ptr ptr)
111 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
112 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
113 struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
114 struct v4l2_ctrl_vp8_frame *p_vp8_frame;
115 struct v4l2_ctrl_fwht_params *p_fwht_params;
116 struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
117 void *p = ptr.p + idx * ctrl->elem_size;
119 if (ctrl->p_def.p_const)
120 memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
122 memset(p, 0, ctrl->elem_size);
124 switch ((u32)ctrl->type) {
125 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
126 p_mpeg2_sequence = p;
129 p_mpeg2_sequence->chroma_format = 1;
131 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
134 /* interlaced top field */
135 p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
136 p_mpeg2_picture->picture_coding_type =
137 V4L2_MPEG2_PIC_CODING_TYPE_I;
139 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
142 memcpy(p_mpeg2_quant->intra_quantiser_matrix,
143 mpeg2_intra_quant_matrix,
144 ARRAY_SIZE(mpeg2_intra_quant_matrix));
146 * The default non-intra MPEG-2 quantisation
147 * coefficients are all 16, as per the specification.
149 memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
150 sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
152 case V4L2_CTRL_TYPE_VP8_FRAME:
154 p_vp8_frame->num_dct_parts = 1;
156 case V4L2_CTRL_TYPE_FWHT_PARAMS:
158 p_fwht_params->version = V4L2_FWHT_VERSION;
159 p_fwht_params->width = 1280;
160 p_fwht_params->height = 720;
161 p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
162 (2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
164 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
165 p_h264_scaling_matrix = p;
167 * The default (flat) H.264 scaling matrix when none are
168 * specified in the bitstream, this is according to formulas
169 * (7-8) and (7-9) of the specification.
171 memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix));
176 static void std_init(const struct v4l2_ctrl *ctrl, u32 idx,
177 union v4l2_ctrl_ptr ptr)
179 switch (ctrl->type) {
180 case V4L2_CTRL_TYPE_STRING:
181 idx *= ctrl->elem_size;
182 memset(ptr.p_char + idx, ' ', ctrl->minimum);
183 ptr.p_char[idx + ctrl->minimum] = '\0';
185 case V4L2_CTRL_TYPE_INTEGER64:
186 ptr.p_s64[idx] = ctrl->default_value;
188 case V4L2_CTRL_TYPE_INTEGER:
189 case V4L2_CTRL_TYPE_INTEGER_MENU:
190 case V4L2_CTRL_TYPE_MENU:
191 case V4L2_CTRL_TYPE_BITMASK:
192 case V4L2_CTRL_TYPE_BOOLEAN:
193 ptr.p_s32[idx] = ctrl->default_value;
195 case V4L2_CTRL_TYPE_BUTTON:
196 case V4L2_CTRL_TYPE_CTRL_CLASS:
199 case V4L2_CTRL_TYPE_U8:
200 ptr.p_u8[idx] = ctrl->default_value;
202 case V4L2_CTRL_TYPE_U16:
203 ptr.p_u16[idx] = ctrl->default_value;
205 case V4L2_CTRL_TYPE_U32:
206 ptr.p_u32[idx] = ctrl->default_value;
209 std_init_compound(ctrl, idx, ptr);
214 static void std_log(const struct v4l2_ctrl *ctrl)
216 union v4l2_ctrl_ptr ptr = ctrl->p_cur;
218 if (ctrl->is_array) {
221 for (i = 0; i < ctrl->nr_of_dims; i++)
222 pr_cont("[%u]", ctrl->dims[i]);
226 switch (ctrl->type) {
227 case V4L2_CTRL_TYPE_INTEGER:
228 pr_cont("%d", *ptr.p_s32);
230 case V4L2_CTRL_TYPE_BOOLEAN:
231 pr_cont("%s", *ptr.p_s32 ? "true" : "false");
233 case V4L2_CTRL_TYPE_MENU:
234 pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
236 case V4L2_CTRL_TYPE_INTEGER_MENU:
237 pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
239 case V4L2_CTRL_TYPE_BITMASK:
240 pr_cont("0x%08x", *ptr.p_s32);
242 case V4L2_CTRL_TYPE_INTEGER64:
243 pr_cont("%lld", *ptr.p_s64);
245 case V4L2_CTRL_TYPE_STRING:
246 pr_cont("%s", ptr.p_char);
248 case V4L2_CTRL_TYPE_U8:
249 pr_cont("%u", (unsigned)*ptr.p_u8);
251 case V4L2_CTRL_TYPE_U16:
252 pr_cont("%u", (unsigned)*ptr.p_u16);
254 case V4L2_CTRL_TYPE_U32:
255 pr_cont("%u", (unsigned)*ptr.p_u32);
257 case V4L2_CTRL_TYPE_H264_SPS:
260 case V4L2_CTRL_TYPE_H264_PPS:
263 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
264 pr_cont("H264_SCALING_MATRIX");
266 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
267 pr_cont("H264_SLICE_PARAMS");
269 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
270 pr_cont("H264_DECODE_PARAMS");
272 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
273 pr_cont("H264_PRED_WEIGHTS");
275 case V4L2_CTRL_TYPE_FWHT_PARAMS:
276 pr_cont("FWHT_PARAMS");
278 case V4L2_CTRL_TYPE_VP8_FRAME:
279 pr_cont("VP8_FRAME");
281 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
282 pr_cont("HDR10_CLL_INFO");
284 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
285 pr_cont("HDR10_MASTERING_DISPLAY");
287 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
288 pr_cont("MPEG2_QUANTISATION");
290 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
291 pr_cont("MPEG2_SEQUENCE");
293 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
294 pr_cont("MPEG2_PICTURE");
297 pr_cont("unknown type %d", ctrl->type);
/*
 * Round towards the closest legal value. Be careful when we are
 * close to the maximum range of the control type to prevent
 * wrap-arounds.
 *
 * Statement-expression macro: rounds @val in place to the nearest
 * min/max/step-legal value of @ctrl and evaluates to 0 (success).
 * @offset_type must be an unsigned type wide enough for max - min.
 */
#define ROUND_TO_RANGE(val, offset_type, ctrl)			\
({								\
	offset_type offset;					\
	if ((ctrl)->maximum >= 0 &&				\
	    val >= (ctrl)->maximum - (s32)((ctrl)->step / 2))	\
		val = (ctrl)->maximum;				\
	else							\
		val += (s32)((ctrl)->step / 2);			\
	val = clamp_t(typeof(val), val,				\
		      (ctrl)->minimum, (ctrl)->maximum);	\
	offset = (val) - (ctrl)->minimum;			\
	offset = (ctrl)->step * (offset / (u32)(ctrl)->step);	\
	val = (ctrl)->minimum + offset;				\
	0;							\
})
/* Validate a new control */

/*
 * Zero out the 'padding'/'reserved' members of a uAPI compound struct so
 * that std_equal()'s memcmp sees a canonical representation.
 */
#define zero_padding(s) \
	memset(&(s).padding, 0, sizeof((s).padding))
#define zero_reserved(s) \
	memset(&(s).reserved, 0, sizeof((s).reserved))
331 * Compound controls validation requires setting unused fields/flags to zero
332 * in order to properly detect unchanged controls with std_equal's memcmp.
334 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
335 union v4l2_ctrl_ptr ptr)
337 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
338 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
339 struct v4l2_ctrl_vp8_frame *p_vp8_frame;
340 struct v4l2_ctrl_fwht_params *p_fwht_params;
341 struct v4l2_ctrl_h264_sps *p_h264_sps;
342 struct v4l2_ctrl_h264_pps *p_h264_pps;
343 struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
344 struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
345 struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
346 struct v4l2_ctrl_hevc_sps *p_hevc_sps;
347 struct v4l2_ctrl_hevc_pps *p_hevc_pps;
348 struct v4l2_ctrl_hevc_slice_params *p_hevc_slice_params;
349 struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
350 struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
351 struct v4l2_area *area;
352 void *p = ptr.p + idx * ctrl->elem_size;
355 switch ((u32)ctrl->type) {
356 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
357 p_mpeg2_sequence = p;
359 switch (p_mpeg2_sequence->chroma_format) {
369 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
372 switch (p_mpeg2_picture->intra_dc_precision) {
375 case 2: /* 10 bits */
376 case 3: /* 11 bits */
382 switch (p_mpeg2_picture->picture_structure) {
383 case V4L2_MPEG2_PIC_TOP_FIELD:
384 case V4L2_MPEG2_PIC_BOTTOM_FIELD:
385 case V4L2_MPEG2_PIC_FRAME:
391 switch (p_mpeg2_picture->picture_coding_type) {
392 case V4L2_MPEG2_PIC_CODING_TYPE_I:
393 case V4L2_MPEG2_PIC_CODING_TYPE_P:
394 case V4L2_MPEG2_PIC_CODING_TYPE_B:
399 zero_reserved(*p_mpeg2_picture);
402 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
405 case V4L2_CTRL_TYPE_FWHT_PARAMS:
407 if (p_fwht_params->version < V4L2_FWHT_VERSION)
409 if (!p_fwht_params->width || !p_fwht_params->height)
413 case V4L2_CTRL_TYPE_H264_SPS:
416 /* Some syntax elements are only conditionally valid */
417 if (p_h264_sps->pic_order_cnt_type != 0) {
418 p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
419 } else if (p_h264_sps->pic_order_cnt_type != 1) {
420 p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
421 p_h264_sps->offset_for_non_ref_pic = 0;
422 p_h264_sps->offset_for_top_to_bottom_field = 0;
423 memset(&p_h264_sps->offset_for_ref_frame, 0,
424 sizeof(p_h264_sps->offset_for_ref_frame));
427 if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
428 p_h264_sps->chroma_format_idc = 1;
429 p_h264_sps->bit_depth_luma_minus8 = 0;
430 p_h264_sps->bit_depth_chroma_minus8 = 0;
433 ~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;
435 if (p_h264_sps->chroma_format_idc < 3)
437 ~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
440 if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
442 ~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;
445 * Chroma 4:2:2 format require at least High 4:2:2 profile.
447 * The H264 specification and well-known parser implementations
448 * use profile-idc values directly, as that is clearer and
449 * less ambiguous. We do the same here.
451 if (p_h264_sps->profile_idc < 122 &&
452 p_h264_sps->chroma_format_idc > 1)
454 /* Chroma 4:4:4 format require at least High 4:2:2 profile */
455 if (p_h264_sps->profile_idc < 244 &&
456 p_h264_sps->chroma_format_idc > 2)
458 if (p_h264_sps->chroma_format_idc > 3)
461 if (p_h264_sps->bit_depth_luma_minus8 > 6)
463 if (p_h264_sps->bit_depth_chroma_minus8 > 6)
465 if (p_h264_sps->log2_max_frame_num_minus4 > 12)
467 if (p_h264_sps->pic_order_cnt_type > 2)
469 if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
471 if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
475 case V4L2_CTRL_TYPE_H264_PPS:
478 if (p_h264_pps->num_slice_groups_minus1 > 7)
480 if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
481 (V4L2_H264_REF_LIST_LEN - 1))
483 if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
484 (V4L2_H264_REF_LIST_LEN - 1))
486 if (p_h264_pps->weighted_bipred_idc > 2)
489 * pic_init_qp_minus26 shall be in the range of
490 * -(26 + QpBdOffset_y) to +25, inclusive,
491 * where QpBdOffset_y is 6 * bit_depth_luma_minus8
493 if (p_h264_pps->pic_init_qp_minus26 < -62 ||
494 p_h264_pps->pic_init_qp_minus26 > 25)
496 if (p_h264_pps->pic_init_qs_minus26 < -26 ||
497 p_h264_pps->pic_init_qs_minus26 > 25)
499 if (p_h264_pps->chroma_qp_index_offset < -12 ||
500 p_h264_pps->chroma_qp_index_offset > 12)
502 if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
503 p_h264_pps->second_chroma_qp_index_offset > 12)
507 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
510 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
511 p_h264_pred_weights = p;
513 if (p_h264_pred_weights->luma_log2_weight_denom > 7)
515 if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
519 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
520 p_h264_slice_params = p;
522 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
523 p_h264_slice_params->flags &=
524 ~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;
526 if (p_h264_slice_params->colour_plane_id > 2)
528 if (p_h264_slice_params->cabac_init_idc > 2)
530 if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
532 if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
533 p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
535 if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
536 p_h264_slice_params->slice_beta_offset_div2 > 6)
539 if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
540 p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
541 p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
542 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
543 p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;
545 if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
546 (V4L2_H264_REF_LIST_LEN - 1))
548 if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
549 (V4L2_H264_REF_LIST_LEN - 1))
551 zero_reserved(*p_h264_slice_params);
554 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
555 p_h264_dec_params = p;
557 if (p_h264_dec_params->nal_ref_idc > 3)
559 for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
560 struct v4l2_h264_dpb_entry *dpb_entry =
561 &p_h264_dec_params->dpb[i];
563 zero_reserved(*dpb_entry);
565 zero_reserved(*p_h264_dec_params);
568 case V4L2_CTRL_TYPE_VP8_FRAME:
571 switch (p_vp8_frame->num_dct_parts) {
580 zero_padding(p_vp8_frame->segment);
581 zero_padding(p_vp8_frame->lf);
582 zero_padding(p_vp8_frame->quant);
583 zero_padding(p_vp8_frame->entropy);
584 zero_padding(p_vp8_frame->coder_state);
587 case V4L2_CTRL_TYPE_HEVC_SPS:
590 if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
591 p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
592 p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
593 p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
594 p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
597 if (!(p_hevc_sps->flags &
598 V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
599 p_hevc_sps->num_long_term_ref_pics_sps = 0;
602 case V4L2_CTRL_TYPE_HEVC_PPS:
605 if (!(p_hevc_pps->flags &
606 V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
607 p_hevc_pps->diff_cu_qp_delta_depth = 0;
609 if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
610 p_hevc_pps->num_tile_columns_minus1 = 0;
611 p_hevc_pps->num_tile_rows_minus1 = 0;
612 memset(&p_hevc_pps->column_width_minus1, 0,
613 sizeof(p_hevc_pps->column_width_minus1));
614 memset(&p_hevc_pps->row_height_minus1, 0,
615 sizeof(p_hevc_pps->row_height_minus1));
618 ~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
621 if (p_hevc_pps->flags &
622 V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
623 p_hevc_pps->pps_beta_offset_div2 = 0;
624 p_hevc_pps->pps_tc_offset_div2 = 0;
627 zero_padding(*p_hevc_pps);
630 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
631 p_hevc_decode_params = p;
633 if (p_hevc_decode_params->num_active_dpb_entries >
634 V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
637 for (i = 0; i < p_hevc_decode_params->num_active_dpb_entries;
639 struct v4l2_hevc_dpb_entry *dpb_entry =
640 &p_hevc_decode_params->dpb[i];
642 zero_padding(*dpb_entry);
646 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
647 p_hevc_slice_params = p;
649 zero_padding(p_hevc_slice_params->pred_weight_table);
650 zero_padding(*p_hevc_slice_params);
653 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
656 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
657 p_hdr10_mastering = p;
659 for (i = 0; i < 3; ++i) {
660 if (p_hdr10_mastering->display_primaries_x[i] <
661 V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
662 p_hdr10_mastering->display_primaries_x[i] >
663 V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
664 p_hdr10_mastering->display_primaries_y[i] <
665 V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
666 p_hdr10_mastering->display_primaries_y[i] >
667 V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
671 if (p_hdr10_mastering->white_point_x <
672 V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
673 p_hdr10_mastering->white_point_x >
674 V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
675 p_hdr10_mastering->white_point_y <
676 V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
677 p_hdr10_mastering->white_point_y >
678 V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
681 if (p_hdr10_mastering->max_display_mastering_luminance <
682 V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
683 p_hdr10_mastering->max_display_mastering_luminance >
684 V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
685 p_hdr10_mastering->min_display_mastering_luminance <
686 V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
687 p_hdr10_mastering->min_display_mastering_luminance >
688 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
691 /* The following restriction comes from ITU-T Rec. H.265 spec */
692 if (p_hdr10_mastering->max_display_mastering_luminance ==
693 V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
694 p_hdr10_mastering->min_display_mastering_luminance ==
695 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
700 case V4L2_CTRL_TYPE_AREA:
702 if (!area->width || !area->height)
713 static int std_validate(const struct v4l2_ctrl *ctrl, u32 idx,
714 union v4l2_ctrl_ptr ptr)
720 switch ((u32)ctrl->type) {
721 case V4L2_CTRL_TYPE_INTEGER:
722 return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
723 case V4L2_CTRL_TYPE_INTEGER64:
725 * We can't use the ROUND_TO_RANGE define here due to
726 * the u64 divide that needs special care.
728 val = ptr.p_s64[idx];
729 if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
732 val += (s64)(ctrl->step / 2);
733 val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
734 offset = val - ctrl->minimum;
735 do_div(offset, ctrl->step);
736 ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
738 case V4L2_CTRL_TYPE_U8:
739 return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
740 case V4L2_CTRL_TYPE_U16:
741 return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
742 case V4L2_CTRL_TYPE_U32:
743 return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);
745 case V4L2_CTRL_TYPE_BOOLEAN:
746 ptr.p_s32[idx] = !!ptr.p_s32[idx];
749 case V4L2_CTRL_TYPE_MENU:
750 case V4L2_CTRL_TYPE_INTEGER_MENU:
751 if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
753 if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
754 (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
756 if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
757 ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
761 case V4L2_CTRL_TYPE_BITMASK:
762 ptr.p_s32[idx] &= ctrl->maximum;
765 case V4L2_CTRL_TYPE_BUTTON:
766 case V4L2_CTRL_TYPE_CTRL_CLASS:
770 case V4L2_CTRL_TYPE_STRING:
771 idx *= ctrl->elem_size;
772 len = strlen(ptr.p_char + idx);
773 if (len < ctrl->minimum)
775 if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
780 return std_validate_compound(ctrl, idx, ptr);
784 static const struct v4l2_ctrl_type_ops std_type_ops = {
788 .validate = std_validate,
791 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
796 ctrl->call_notify = 0;
799 if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
801 ctrl->handler->notify = notify;
802 ctrl->handler->notify_priv = priv;
803 ctrl->call_notify = 1;
805 EXPORT_SYMBOL(v4l2_ctrl_notify);
807 /* Copy the one value to another. */
808 static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
809 union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to)
813 memcpy(to.p, from.p_const, ctrl->elems * ctrl->elem_size);
816 /* Copy the new value to the current value. */
817 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
824 /* has_changed is set by cluster_changed */
825 changed = ctrl->has_changed;
827 ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur);
829 if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
830 /* Note: CH_FLAGS is only set for auto clusters. */
832 ~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
833 if (!is_cur_manual(ctrl->cluster[0])) {
834 ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
835 if (ctrl->cluster[0]->has_volatiles)
836 ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
840 if (changed || ch_flags) {
841 /* If a control was changed that was not one of the controls
842 modified by the application, then send the event to all. */
846 (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
847 if (ctrl->call_notify && changed && ctrl->handler->notify)
848 ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
852 /* Copy the current value to the new value */
853 void cur_to_new(struct v4l2_ctrl *ctrl)
857 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new);
860 /* Copy the new value to the request value */
861 void new_to_req(struct v4l2_ctrl_ref *ref)
865 ptr_to_ptr(ref->ctrl, ref->ctrl->p_new, ref->p_req);
866 ref->valid_p_req = true;
869 /* Copy the current value to the request value */
870 void cur_to_req(struct v4l2_ctrl_ref *ref)
874 ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->p_req);
875 ref->valid_p_req = true;
878 /* Copy the request value to the new value */
879 void req_to_new(struct v4l2_ctrl_ref *ref)
883 if (ref->valid_p_req)
884 ptr_to_ptr(ref->ctrl, ref->p_req, ref->ctrl->p_new);
886 ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->ctrl->p_new);
889 /* Control range checking */
890 int check_range(enum v4l2_ctrl_type type,
891 s64 min, s64 max, u64 step, s64 def)
894 case V4L2_CTRL_TYPE_BOOLEAN:
895 if (step != 1 || max > 1 || min < 0)
898 case V4L2_CTRL_TYPE_U8:
899 case V4L2_CTRL_TYPE_U16:
900 case V4L2_CTRL_TYPE_U32:
901 case V4L2_CTRL_TYPE_INTEGER:
902 case V4L2_CTRL_TYPE_INTEGER64:
903 if (step == 0 || min > max || def < min || def > max)
906 case V4L2_CTRL_TYPE_BITMASK:
907 if (step || min || !max || (def & ~max))
910 case V4L2_CTRL_TYPE_MENU:
911 case V4L2_CTRL_TYPE_INTEGER_MENU:
912 if (min > max || def < min || def > max)
914 /* Note: step == menu_skip_mask for menu controls.
915 So here we check if the default value is masked out. */
916 if (step && ((1 << def) & step))
919 case V4L2_CTRL_TYPE_STRING:
920 if (min > max || min < 0 || step < 1 || def)
928 /* Validate a new control */
929 int validate_new(const struct v4l2_ctrl *ctrl, union v4l2_ctrl_ptr p_new)
934 for (idx = 0; !err && idx < ctrl->elems; idx++)
935 err = ctrl->type_ops->validate(ctrl, idx, p_new);
939 /* Set the handler's error code if it wasn't set earlier already */
940 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err)
947 /* Initialize the handler */
948 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl,
949 unsigned nr_of_controls_hint,
950 struct lock_class_key *key, const char *name)
952 mutex_init(&hdl->_lock);
953 hdl->lock = &hdl->_lock;
954 lockdep_set_class_and_name(hdl->lock, key, name);
955 INIT_LIST_HEAD(&hdl->ctrls);
956 INIT_LIST_HEAD(&hdl->ctrl_refs);
957 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8;
958 hdl->buckets = kvmalloc_array(hdl->nr_of_buckets,
959 sizeof(hdl->buckets[0]),
960 GFP_KERNEL | __GFP_ZERO);
961 hdl->error = hdl->buckets ? 0 : -ENOMEM;
962 v4l2_ctrl_handler_init_request(hdl);
965 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class);
967 /* Free all controls and control refs */
968 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl)
970 struct v4l2_ctrl_ref *ref, *next_ref;
971 struct v4l2_ctrl *ctrl, *next_ctrl;
972 struct v4l2_subscribed_event *sev, *next_sev;
974 if (hdl == NULL || hdl->buckets == NULL)
977 v4l2_ctrl_handler_free_request(hdl);
979 mutex_lock(hdl->lock);
981 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) {
982 list_del(&ref->node);
985 /* Free all controls owned by the handler */
986 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) {
987 list_del(&ctrl->node);
988 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node)
989 list_del(&sev->node);
992 kvfree(hdl->buckets);
996 mutex_unlock(hdl->lock);
997 mutex_destroy(&hdl->_lock);
999 EXPORT_SYMBOL(v4l2_ctrl_handler_free);
1001 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer
1002 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing
1003 with applications that do not use the NEXT_CTRL flag.
1005 We just find the n-th private user control. It's O(N), but that should not
1006 be an issue in this particular case. */
1007 static struct v4l2_ctrl_ref *find_private_ref(
1008 struct v4l2_ctrl_handler *hdl, u32 id)
1010 struct v4l2_ctrl_ref *ref;
1012 id -= V4L2_CID_PRIVATE_BASE;
1013 list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1014 /* Search for private user controls that are compatible with
1016 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER &&
1017 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) {
1018 if (!ref->ctrl->is_int)
1028 /* Find a control with the given ID. */
1029 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
1031 struct v4l2_ctrl_ref *ref;
1034 id &= V4L2_CTRL_ID_MASK;
1036 /* Old-style private controls need special handling */
1037 if (id >= V4L2_CID_PRIVATE_BASE)
1038 return find_private_ref(hdl, id);
1039 bucket = id % hdl->nr_of_buckets;
1041 /* Simple optimization: cache the last control found */
1042 if (hdl->cached && hdl->cached->ctrl->id == id)
1045 /* Not in cache, search the hash */
1046 ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
1047 while (ref && ref->ctrl->id != id)
1051 hdl->cached = ref; /* cache it! */
1055 /* Find a control with the given ID. Take the handler's lock first. */
1056 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id)
1058 struct v4l2_ctrl_ref *ref = NULL;
1061 mutex_lock(hdl->lock);
1062 ref = find_ref(hdl, id);
1063 mutex_unlock(hdl->lock);
1068 /* Find a control with the given ID. */
1069 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id)
1071 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id);
1073 return ref ? ref->ctrl : NULL;
1075 EXPORT_SYMBOL(v4l2_ctrl_find);
1077 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */
1078 int handler_new_ref(struct v4l2_ctrl_handler *hdl,
1079 struct v4l2_ctrl *ctrl,
1080 struct v4l2_ctrl_ref **ctrl_ref,
1081 bool from_other_dev, bool allocate_req)
1083 struct v4l2_ctrl_ref *ref;
1084 struct v4l2_ctrl_ref *new_ref;
1086 u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
1087 int bucket = id % hdl->nr_of_buckets; /* which bucket to use */
1088 unsigned int size_extra_req = 0;
1094 * Automatically add the control class if it is not yet present and
1095 * the new control is not a compound control.
1097 if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
1098 id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
1099 if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
1106 size_extra_req = ctrl->elems * ctrl->elem_size;
1107 new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
1109 return handler_set_err(hdl, -ENOMEM);
1110 new_ref->ctrl = ctrl;
1111 new_ref->from_other_dev = from_other_dev;
1113 new_ref->p_req.p = &new_ref[1];
1115 INIT_LIST_HEAD(&new_ref->node);
1117 mutex_lock(hdl->lock);
1119 /* Add immediately at the end of the list if the list is empty, or if
1120 the last element in the list has a lower ID.
1121 This ensures that when elements are added in ascending order the
1122 insertion is an O(1) operation. */
1123 if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
1124 list_add_tail(&new_ref->node, &hdl->ctrl_refs);
1125 goto insert_in_hash;
1128 /* Find insert position in sorted list */
1129 list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1130 if (ref->ctrl->id < id)
1132 /* Don't add duplicates */
1133 if (ref->ctrl->id == id) {
1137 list_add(&new_ref->node, ref->node.prev);
1142 /* Insert the control node in the hash */
1143 new_ref->next = hdl->buckets[bucket];
1144 hdl->buckets[bucket] = new_ref;
1146 *ctrl_ref = new_ref;
1147 if (ctrl->handler == hdl) {
1148 /* By default each control starts in a cluster of its own.
1149 * new_ref->ctrl is basically a cluster array with one
1150 * element, so that's perfect to use as the cluster pointer.
1151 * But only do this for the handler that owns the control.
1153 ctrl->cluster = &new_ref->ctrl;
1154 ctrl->ncontrols = 1;
1158 mutex_unlock(hdl->lock);
1162 /* Add a new control */
1163 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
1164 const struct v4l2_ctrl_ops *ops,
1165 const struct v4l2_ctrl_type_ops *type_ops,
1166 u32 id, const char *name, enum v4l2_ctrl_type type,
1167 s64 min, s64 max, u64 step, s64 def,
1168 const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
1169 u32 flags, const char * const *qmenu,
1170 const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
1173 struct v4l2_ctrl *ctrl;
1175 unsigned nr_of_dims = 0;
1178 unsigned tot_ctrl_size;
1186 while (dims && dims[nr_of_dims]) {
1187 elems *= dims[nr_of_dims];
1189 if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
1192 is_array = nr_of_dims > 0;
1194 /* Prefill elem_size for all types handled by std_type_ops */
1195 switch ((u32)type) {
1196 case V4L2_CTRL_TYPE_INTEGER64:
1197 elem_size = sizeof(s64);
1199 case V4L2_CTRL_TYPE_STRING:
1200 elem_size = max + 1;
1202 case V4L2_CTRL_TYPE_U8:
1203 elem_size = sizeof(u8);
1205 case V4L2_CTRL_TYPE_U16:
1206 elem_size = sizeof(u16);
1208 case V4L2_CTRL_TYPE_U32:
1209 elem_size = sizeof(u32);
1211 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
1212 elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
1214 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
1215 elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
1217 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
1218 elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
1220 case V4L2_CTRL_TYPE_FWHT_PARAMS:
1221 elem_size = sizeof(struct v4l2_ctrl_fwht_params);
1223 case V4L2_CTRL_TYPE_H264_SPS:
1224 elem_size = sizeof(struct v4l2_ctrl_h264_sps);
1226 case V4L2_CTRL_TYPE_H264_PPS:
1227 elem_size = sizeof(struct v4l2_ctrl_h264_pps);
1229 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
1230 elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
1232 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
1233 elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
1235 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
1236 elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
1238 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
1239 elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
1241 case V4L2_CTRL_TYPE_VP8_FRAME:
1242 elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
1244 case V4L2_CTRL_TYPE_HEVC_SPS:
1245 elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
1247 case V4L2_CTRL_TYPE_HEVC_PPS:
1248 elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
1250 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
1251 elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
1253 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
1254 elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
1256 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
1257 elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
1259 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
1260 elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
1262 case V4L2_CTRL_TYPE_AREA:
1263 elem_size = sizeof(struct v4l2_area);
1266 if (type < V4L2_CTRL_COMPOUND_TYPES)
1267 elem_size = sizeof(s32);
1270 tot_ctrl_size = elem_size * elems;
1273 if (id == 0 || name == NULL || !elem_size ||
1274 id >= V4L2_CID_PRIVATE_BASE ||
1275 (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
1276 (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
1277 handler_set_err(hdl, -ERANGE);
1280 err = check_range(type, min, max, step, def);
1282 handler_set_err(hdl, err);
1286 (type == V4L2_CTRL_TYPE_BUTTON ||
1287 type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
1288 handler_set_err(hdl, -EINVAL);
1293 if (type == V4L2_CTRL_TYPE_BUTTON)
1294 flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
1295 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
1296 else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
1297 flags |= V4L2_CTRL_FLAG_READ_ONLY;
1298 else if (type == V4L2_CTRL_TYPE_INTEGER64 ||
1299 type == V4L2_CTRL_TYPE_STRING ||
1300 type >= V4L2_CTRL_COMPOUND_TYPES ||
1302 sz_extra += 2 * tot_ctrl_size;
1304 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
1305 sz_extra += elem_size;
1307 ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
1309 handler_set_err(hdl, -ENOMEM);
1313 INIT_LIST_HEAD(&ctrl->node);
1314 INIT_LIST_HEAD(&ctrl->ev_subs);
1315 ctrl->handler = hdl;
1317 ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
1321 ctrl->flags = flags;
1322 ctrl->minimum = min;
1323 ctrl->maximum = max;
1325 ctrl->default_value = def;
1326 ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
1327 ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
1328 ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
1329 ctrl->is_array = is_array;
1330 ctrl->elems = elems;
1331 ctrl->nr_of_dims = nr_of_dims;
1333 memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
1334 ctrl->elem_size = elem_size;
1335 if (type == V4L2_CTRL_TYPE_MENU)
1336 ctrl->qmenu = qmenu;
1337 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
1338 ctrl->qmenu_int = qmenu_int;
1340 ctrl->cur.val = ctrl->val = def;
1343 if (!ctrl->is_int) {
1344 ctrl->p_new.p = data;
1345 ctrl->p_cur.p = data + tot_ctrl_size;
1347 ctrl->p_new.p = &ctrl->val;
1348 ctrl->p_cur.p = &ctrl->cur.val;
1351 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
1352 ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
1353 memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
1356 for (idx = 0; idx < elems; idx++) {
1357 ctrl->type_ops->init(ctrl, idx, ctrl->p_cur);
1358 ctrl->type_ops->init(ctrl, idx, ctrl->p_new);
1361 if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
1365 mutex_lock(hdl->lock);
1366 list_add_tail(&ctrl->node, &hdl->ctrls);
1367 mutex_unlock(hdl->lock);
1371 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
1372 const struct v4l2_ctrl_config *cfg, void *priv)
1375 struct v4l2_ctrl *ctrl;
1376 const char *name = cfg->name;
1377 const char * const *qmenu = cfg->qmenu;
1378 const s64 *qmenu_int = cfg->qmenu_int;
1379 enum v4l2_ctrl_type type = cfg->type;
1380 u32 flags = cfg->flags;
1383 u64 step = cfg->step;
1387 v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
1390 is_menu = (type == V4L2_CTRL_TYPE_MENU ||
1391 type == V4L2_CTRL_TYPE_INTEGER_MENU);
1395 WARN_ON(cfg->menu_skip_mask);
1396 if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
1397 qmenu = v4l2_ctrl_get_menu(cfg->id);
1398 } else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
1399 handler_set_err(hdl, -EINVAL);
1403 ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
1405 is_menu ? cfg->menu_skip_mask : step, def,
1406 cfg->dims, cfg->elem_size,
1407 flags, qmenu, qmenu_int, cfg->p_def, priv);
1409 ctrl->is_private = cfg->is_private;
1412 EXPORT_SYMBOL(v4l2_ctrl_new_custom);
1414 /* Helper function for standard non-menu controls */
1415 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
1416 const struct v4l2_ctrl_ops *ops,
1417 u32 id, s64 min, s64 max, u64 step, s64 def)
1420 enum v4l2_ctrl_type type;
1423 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1424 if (type == V4L2_CTRL_TYPE_MENU ||
1425 type == V4L2_CTRL_TYPE_INTEGER_MENU ||
1426 type >= V4L2_CTRL_COMPOUND_TYPES) {
1427 handler_set_err(hdl, -EINVAL);
1430 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1431 min, max, step, def, NULL, 0,
1432 flags, NULL, NULL, ptr_null, NULL);
1434 EXPORT_SYMBOL(v4l2_ctrl_new_std);
1436 /* Helper function for standard menu controls */
1437 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
1438 const struct v4l2_ctrl_ops *ops,
1439 u32 id, u8 _max, u64 mask, u8 _def)
1441 const char * const *qmenu = NULL;
1442 const s64 *qmenu_int = NULL;
1443 unsigned int qmenu_int_len = 0;
1445 enum v4l2_ctrl_type type;
1452 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1454 if (type == V4L2_CTRL_TYPE_MENU)
1455 qmenu = v4l2_ctrl_get_menu(id);
1456 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
1457 qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);
1459 if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) {
1460 handler_set_err(hdl, -EINVAL);
1463 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1464 0, max, mask, def, NULL, 0,
1465 flags, qmenu, qmenu_int, ptr_null, NULL);
1467 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);
1469 /* Helper function for standard menu controls with driver defined menu */
1470 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
1471 const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
1472 u64 mask, u8 _def, const char * const *qmenu)
1474 enum v4l2_ctrl_type type;
1482 /* v4l2_ctrl_new_std_menu_items() should only be called for
1483 * standard controls without a standard menu.
1485 if (v4l2_ctrl_get_menu(id)) {
1486 handler_set_err(hdl, -EINVAL);
1490 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1491 if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
1492 handler_set_err(hdl, -EINVAL);
1495 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1496 0, max, mask, def, NULL, 0,
1497 flags, qmenu, NULL, ptr_null, NULL);
1500 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);
1502 /* Helper function for standard compound controls */
1503 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
1504 const struct v4l2_ctrl_ops *ops, u32 id,
1505 const union v4l2_ctrl_ptr p_def)
1508 enum v4l2_ctrl_type type;
1510 s64 min, max, step, def;
1512 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1513 if (type < V4L2_CTRL_COMPOUND_TYPES) {
1514 handler_set_err(hdl, -EINVAL);
1517 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1518 min, max, step, def, NULL, 0,
1519 flags, NULL, NULL, p_def, NULL);
1521 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);
1523 /* Helper function for standard integer menu controls */
1524 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
1525 const struct v4l2_ctrl_ops *ops,
1526 u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
1529 enum v4l2_ctrl_type type;
1536 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1537 if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
1538 handler_set_err(hdl, -EINVAL);
1541 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1542 0, max, 0, def, NULL, 0,
1543 flags, NULL, qmenu_int, ptr_null, NULL);
1545 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);
1547 /* Add the controls from another handler to our own. */
1548 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
1549 struct v4l2_ctrl_handler *add,
1550 bool (*filter)(const struct v4l2_ctrl *ctrl),
1551 bool from_other_dev)
1553 struct v4l2_ctrl_ref *ref;
1556 /* Do nothing if either handler is NULL or if they are the same */
1557 if (!hdl || !add || hdl == add)
1561 mutex_lock(add->lock);
1562 list_for_each_entry(ref, &add->ctrl_refs, node) {
1563 struct v4l2_ctrl *ctrl = ref->ctrl;
1565 /* Skip handler-private controls. */
1566 if (ctrl->is_private)
1568 /* And control classes */
1569 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
1571 /* Filter any unwanted controls */
1572 if (filter && !filter(ctrl))
1574 ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
1578 mutex_unlock(add->lock);
1581 EXPORT_SYMBOL(v4l2_ctrl_add_handler);
1583 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
1585 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
1587 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
1590 case V4L2_CID_AUDIO_MUTE:
1591 case V4L2_CID_AUDIO_VOLUME:
1592 case V4L2_CID_AUDIO_BALANCE:
1593 case V4L2_CID_AUDIO_BASS:
1594 case V4L2_CID_AUDIO_TREBLE:
1595 case V4L2_CID_AUDIO_LOUDNESS:
1602 EXPORT_SYMBOL(v4l2_ctrl_radio_filter);
1604 /* Cluster controls */
1605 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
1607 bool has_volatiles = false;
1610 /* The first control is the master control and it must not be NULL */
1611 if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
1614 for (i = 0; i < ncontrols; i++) {
1616 controls[i]->cluster = controls;
1617 controls[i]->ncontrols = ncontrols;
1618 if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
1619 has_volatiles = true;
1622 controls[0]->has_volatiles = has_volatiles;
1624 EXPORT_SYMBOL(v4l2_ctrl_cluster);
1626 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
1627 u8 manual_val, bool set_volatile)
1629 struct v4l2_ctrl *master = controls[0];
1633 v4l2_ctrl_cluster(ncontrols, controls);
1634 WARN_ON(ncontrols <= 1);
1635 WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
1636 WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
1637 master->is_auto = true;
1638 master->has_volatiles = set_volatile;
1639 master->manual_mode_value = manual_val;
1640 master->flags |= V4L2_CTRL_FLAG_UPDATE;
1642 if (!is_cur_manual(master))
1643 flag = V4L2_CTRL_FLAG_INACTIVE |
1644 (set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);
1646 for (i = 1; i < ncontrols; i++)
1648 controls[i]->flags |= flag;
1650 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);
1653 * Obtain the current volatile values of an autocluster and mark them
1656 void update_from_auto_cluster(struct v4l2_ctrl *master)
1660 for (i = 1; i < master->ncontrols; i++)
1661 cur_to_new(master->cluster[i]);
1662 if (!call_op(master, g_volatile_ctrl))
1663 for (i = 1; i < master->ncontrols; i++)
1664 if (master->cluster[i])
1665 master->cluster[i]->is_new = 1;
1669 * Return non-zero if one or more of the controls in the cluster has a new
1670 * value that differs from the current value.
1672 static int cluster_changed(struct v4l2_ctrl *master)
1674 bool changed = false;
1678 for (i = 0; i < master->ncontrols; i++) {
1679 struct v4l2_ctrl *ctrl = master->cluster[i];
1680 bool ctrl_changed = false;
1685 if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
1687 ctrl_changed = true;
1691 * Set has_changed to false to avoid generating
1692 * the event V4L2_EVENT_CTRL_CH_VALUE
1694 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
1695 ctrl->has_changed = false;
1699 for (idx = 0; !ctrl_changed && idx < ctrl->elems; idx++)
1700 ctrl_changed = !ctrl->type_ops->equal(ctrl, idx,
1701 ctrl->p_cur, ctrl->p_new);
1702 ctrl->has_changed = ctrl_changed;
1703 changed |= ctrl->has_changed;
1709 * Core function that calls try/s_ctrl and ensures that the new value is
1710 * copied to the current value on a set.
1711 * Must be called with ctrl->handler->lock held.
1713 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
1714 bool set, u32 ch_flags)
1721 * Go through the cluster and either validate the new value or
1722 * (if no new value was set), copy the current value to the new
1723 * value, ensuring a consistent view for the control ops when
1726 for (i = 0; i < master->ncontrols; i++) {
1727 struct v4l2_ctrl *ctrl = master->cluster[i];
1732 if (!ctrl->is_new) {
1737 * Check again: it may have changed since the
1738 * previous check in try_or_set_ext_ctrls().
1740 if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
1744 ret = call_op(master, try_ctrl);
1746 /* Don't set if there is no change */
1747 if (ret || !set || !cluster_changed(master))
1749 ret = call_op(master, s_ctrl);
1753 /* If OK, then make the new values permanent. */
1754 update_flag = is_cur_manual(master) != is_new_manual(master);
1756 for (i = 0; i < master->ncontrols; i++) {
1758 * If we switch from auto to manual mode, and this cluster
1759 * contains volatile controls, then all non-master controls
1760 * have to be marked as changed. The 'new' value contains
1761 * the volatile value (obtained by update_from_auto_cluster),
1762 * which now has to become the current value.
1764 if (i && update_flag && is_new_manual(master) &&
1765 master->has_volatiles && master->cluster[i])
1766 master->cluster[i]->has_changed = true;
1768 new_to_cur(fh, master->cluster[i], ch_flags |
1769 ((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
1774 /* Activate/deactivate a control. */
1775 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
1777 /* invert since the actual flag is called 'inactive' */
1778 bool inactive = !active;
1785 /* set V4L2_CTRL_FLAG_INACTIVE */
1786 old = test_and_set_bit(4, &ctrl->flags);
1788 /* clear V4L2_CTRL_FLAG_INACTIVE */
1789 old = test_and_clear_bit(4, &ctrl->flags);
1790 if (old != inactive)
1791 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
1793 EXPORT_SYMBOL(v4l2_ctrl_activate);
1795 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
1802 lockdep_assert_held(ctrl->handler->lock);
1805 /* set V4L2_CTRL_FLAG_GRABBED */
1806 old = test_and_set_bit(1, &ctrl->flags);
1808 /* clear V4L2_CTRL_FLAG_GRABBED */
1809 old = test_and_clear_bit(1, &ctrl->flags);
1811 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
1813 EXPORT_SYMBOL(__v4l2_ctrl_grab);
1815 /* Call s_ctrl for all controls owned by the handler */
1816 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
1818 struct v4l2_ctrl *ctrl;
1824 lockdep_assert_held(hdl->lock);
1826 list_for_each_entry(ctrl, &hdl->ctrls, node)
1829 list_for_each_entry(ctrl, &hdl->ctrls, node) {
1830 struct v4l2_ctrl *master = ctrl->cluster[0];
1833 /* Skip if this control was already handled by a cluster. */
1834 /* Skip button controls and read-only controls. */
1835 if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
1836 (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
1839 for (i = 0; i < master->ncontrols; i++) {
1840 if (master->cluster[i]) {
1841 cur_to_new(master->cluster[i]);
1842 master->cluster[i]->is_new = 1;
1843 master->cluster[i]->done = true;
1846 ret = call_op(master, s_ctrl);
1853 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);
1855 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
1862 mutex_lock(hdl->lock);
1863 ret = __v4l2_ctrl_handler_setup(hdl);
1864 mutex_unlock(hdl->lock);
1868 EXPORT_SYMBOL(v4l2_ctrl_handler_setup);
1870 /* Log the control name and value */
1871 static void log_ctrl(const struct v4l2_ctrl *ctrl,
1872 const char *prefix, const char *colon)
1874 if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
1876 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
1879 pr_info("%s%s%s: ", prefix, colon, ctrl->name);
1881 ctrl->type_ops->log(ctrl);
1883 if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
1884 V4L2_CTRL_FLAG_GRABBED |
1885 V4L2_CTRL_FLAG_VOLATILE)) {
1886 if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
1887 pr_cont(" inactive");
1888 if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
1889 pr_cont(" grabbed");
1890 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
1891 pr_cont(" volatile");
1896 /* Log all controls owned by the handler */
1897 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
1900 struct v4l2_ctrl *ctrl;
1901 const char *colon = "";
1908 len = strlen(prefix);
1909 if (len && prefix[len - 1] != ' ')
1911 mutex_lock(hdl->lock);
1912 list_for_each_entry(ctrl, &hdl->ctrls, node)
1913 if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
1914 log_ctrl(ctrl, prefix, colon);
1915 mutex_unlock(hdl->lock);
1917 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);
1919 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
1920 const struct v4l2_ctrl_ops *ctrl_ops,
1921 const struct v4l2_fwnode_device_properties *p)
1923 if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
1924 u32 orientation_ctrl;
1926 switch (p->orientation) {
1927 case V4L2_FWNODE_ORIENTATION_FRONT:
1928 orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
1930 case V4L2_FWNODE_ORIENTATION_BACK:
1931 orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
1933 case V4L2_FWNODE_ORIENTATION_EXTERNAL:
1934 orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
1939 if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
1940 V4L2_CID_CAMERA_ORIENTATION,
1941 V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
1946 if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
1947 if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
1948 V4L2_CID_CAMERA_SENSOR_ROTATION,
1949 p->rotation, p->rotation, 1,
1956 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);