1 // SPDX-License-Identifier: GPL-2.0-only
3 * BCM283x / BCM271x Unicam Capture Driver
5 * Copyright (C) 2017-2020 - Raspberry Pi (Trading) Ltd.
7 * Dave Stevenson <dave.stevenson@raspberrypi.com>
9 * Based on TI am437x driver by
10 * Benoit Parrot <bparrot@ti.com>
11 * Lad, Prabhakar <prabhakar.csengg@gmail.com>
13 * and TI CAL camera interface driver by
14 * Benoit Parrot <bparrot@ti.com>
17 * There are two camera drivers in the kernel for BCM283x - this one
18 * and bcm2835-camera (currently in staging).
20 * This driver directly controls the Unicam peripheral - there is no
21 * involvement with the VideoCore firmware. Unicam receives CSI-2 or
22 * CCP2 data and writes it into SDRAM.
23 * The only potential processing options are to repack Bayer data into an
24 * alternate format, and applying windowing.
25 * The repacking does not shift the data, so can repack V4L2_PIX_FMT_Sxxxx10P
26 * to V4L2_PIX_FMT_Sxxxx10, or V4L2_PIX_FMT_Sxxxx12P to V4L2_PIX_FMT_Sxxxx12,
27 * but not generically up to V4L2_PIX_FMT_Sxxxx16. The driver will add both
28 * formats where the relevant formats are defined, and will automatically
29 * configure the repacking as required.
30 * Support for windowing may be added later.
32 * It should be possible to connect this driver to any sensor with a
33 * suitable output interface and V4L2 subdevice driver.
35 * bcm2835-camera uses the VideoCore firmware to control the sensor,
36 * Unicam, ISP, and all tuner control loops. Fully processed frames are
37 * delivered to the driver by the firmware. It only has sensor drivers
38 * for Omnivision OV5647, and Sony IMX219 sensors.
40 * The two drivers are mutually exclusive for the same Unicam instance.
41 * The VideoCore firmware checks the device tree configuration during boot.
42 * If it finds device tree nodes called csi0 or csi1 it will block the
43 * firmware from accessing the peripheral, and bcm2835-camera will
44 * not be able to stream data.
47 #include <linux/clk.h>
48 #include <linux/delay.h>
49 #include <linux/device.h>
50 #include <linux/dma-mapping.h>
51 #include <linux/err.h>
52 #include <linux/init.h>
53 #include <linux/interrupt.h>
55 #include <linux/module.h>
56 #include <linux/of_device.h>
57 #include <linux/of_graph.h>
58 #include <linux/pinctrl/consumer.h>
59 #include <linux/platform_device.h>
60 #include <linux/pm_runtime.h>
61 #include <linux/slab.h>
62 #include <linux/uaccess.h>
63 #include <linux/videodev2.h>
65 #include <media/mipi-csi2.h>
66 #include <media/v4l2-common.h>
67 #include <media/v4l2-ctrls.h>
68 #include <media/v4l2-dev.h>
69 #include <media/v4l2-device.h>
70 #include <media/v4l2-dv-timings.h>
71 #include <media/v4l2-event.h>
72 #include <media/v4l2-ioctl.h>
73 #include <media/v4l2-fwnode.h>
74 #include <media/videobuf2-dma-contig.h>
76 #include <media/v4l2-async.h>
78 #include "vc4-regs-unicam.h"
80 #define UNICAM_MODULE_NAME "unicam"
81 #define UNICAM_VERSION "0.1.0"
84 module_param(debug, int, 0644);
85 MODULE_PARM_DESC(debug, "Debug level 0-3");
87 static int media_controller;
88 module_param(media_controller, int, 0644);
89 MODULE_PARM_DESC(media_controller, "Use media controller API");
91 #define unicam_dbg(level, dev, fmt, arg...) \
92 v4l2_dbg(level, debug, &(dev)->v4l2_dev, fmt, ##arg)
93 #define unicam_info(dev, fmt, arg...) \
94 v4l2_info(&(dev)->v4l2_dev, fmt, ##arg)
95 #define unicam_err(dev, fmt, arg...) \
96 v4l2_err(&(dev)->v4l2_dev, fmt, ##arg)
99 * Unicam must request a minimum of 250Mhz from the VPU clock.
100 * Otherwise the input FIFOs overrun and cause image corruption.
102 #define MIN_VPU_CLOCK_RATE (250 * 1000 * 1000)
104 * To protect against a dodgy sensor driver never returning an error from
105 * enum_mbus_code, set a maximum index value to be used.
107 #define MAX_ENUM_MBUS_CODE 128
110 * Stride is a 16 bit register, but also has to be a multiple of 32.
112 #define BPL_ALIGNMENT 32
113 #define MAX_BYTESPERLINE ((1 << 16) - BPL_ALIGNMENT)
115 * Max width is therefore determined by the max stride divided by
116 * the number of bits per pixel. Take 32bpp as a
118 * No imposed limit on the height, so adopt a square image for want
119 * of anything better.
121 #define MAX_WIDTH (MAX_BYTESPERLINE / 4)
122 #define MAX_HEIGHT MAX_WIDTH
123 /* Define a nominal minimum image size */
125 #define MIN_HEIGHT 16
126 /* Default size of the embedded buffer */
127 #define UNICAM_EMBEDDED_SIZE 16384
130 * Size of the dummy buffer allocation.
132 * Due to a HW bug causing buffer overruns in circular buffer mode under certain
133 * (not yet fully known) conditions, the dummy buffer allocation is set to a
134 * a single page size, but the hardware gets programmed with a buffer size of 0.
136 #define DUMMY_BUF_SIZE (PAGE_SIZE)
144 #define MASK_CS_DEFAULT BIT(V4L2_COLORSPACE_DEFAULT)
145 #define MASK_CS_SMPTE170M BIT(V4L2_COLORSPACE_SMPTE170M)
146 #define MASK_CS_SMPTE240M BIT(V4L2_COLORSPACE_SMPTE240M)
147 #define MASK_CS_REC709 BIT(V4L2_COLORSPACE_REC709)
148 #define MASK_CS_BT878 BIT(V4L2_COLORSPACE_BT878)
149 #define MASK_CS_470_M BIT(V4L2_COLORSPACE_470_SYSTEM_M)
150 #define MASK_CS_470_BG BIT(V4L2_COLORSPACE_470_SYSTEM_BG)
151 #define MASK_CS_JPEG BIT(V4L2_COLORSPACE_JPEG)
152 #define MASK_CS_SRGB BIT(V4L2_COLORSPACE_SRGB)
153 #define MASK_CS_OPRGB BIT(V4L2_COLORSPACE_OPRGB)
154 #define MASK_CS_BT2020 BIT(V4L2_COLORSPACE_BT2020)
155 #define MASK_CS_RAW BIT(V4L2_COLORSPACE_RAW)
156 #define MASK_CS_DCI_P3 BIT(V4L2_COLORSPACE_DCI_P3)
158 #define MAX_COLORSPACE 32
161 * struct unicam_fmt - Unicam media bus format information
162 * @pixelformat: V4L2 pixel format FCC identifier. 0 if n/a.
163 * @repacked_fourcc: V4L2 pixel format FCC identifier if the data is expanded
164 * out to 16bpp. 0 if n/a.
165 * @code: V4L2 media bus format code.
166 * @depth: Bits per pixel as delivered from the source.
167 * @csi_dt: CSI data type.
168 * @valid_colorspaces: Bitmask of valid colorspaces so that the Media Controller
169 * centric try_fmt can validate the colorspace and pass
171 * @check_variants: Flag to denote that there are multiple mediabus formats
172 * still in the list that could match this V4L2 format.
173 * @mc_skip: Media Controller shouldn't list this format via ENUM_FMT as it is
174 * a duplicate of an earlier format.
175 * @metadata_fmt: This format only applies to the metadata pad.
183 u32 valid_colorspaces;
/*
 * Table of all mediabus/V4L2 format pairs that Unicam can receive, with the
 * CSI-2 data type and the colorspaces each format may legally carry.
 * Entries with a .repacked_fourcc can also be delivered unpacked to 16bpp.
 * NOTE(review): the extraction appears to have dropped the per-entry braces,
 * the .depth members and some continuation lines (e.g. the second half of the
 * MASK_CS_* expressions) — do not treat this listing as the complete table.
 */
189 static const struct unicam_fmt formats[] = {
/* YUV422 8-bit formats - both 2X8 and 1X16 bus-width variants */
192 .fourcc = V4L2_PIX_FMT_YUYV,
193 .code = MEDIA_BUS_FMT_YUYV8_2X8,
195 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
197 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
200 .fourcc = V4L2_PIX_FMT_UYVY,
201 .code = MEDIA_BUS_FMT_UYVY8_2X8,
203 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
205 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
208 .fourcc = V4L2_PIX_FMT_YVYU,
209 .code = MEDIA_BUS_FMT_YVYU8_2X8,
211 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
213 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
216 .fourcc = V4L2_PIX_FMT_VYUY,
217 .code = MEDIA_BUS_FMT_VYUY8_2X8,
219 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
221 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
224 .fourcc = V4L2_PIX_FMT_YUYV,
225 .code = MEDIA_BUS_FMT_YUYV8_1X16,
227 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
229 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
232 .fourcc = V4L2_PIX_FMT_UYVY,
233 .code = MEDIA_BUS_FMT_UYVY8_1X16,
235 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
237 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
240 .fourcc = V4L2_PIX_FMT_YVYU,
241 .code = MEDIA_BUS_FMT_YVYU8_1X16,
243 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
245 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
248 .fourcc = V4L2_PIX_FMT_VYUY,
249 .code = MEDIA_BUS_FMT_VYUY8_1X16,
251 .csi_dt = MIPI_CSI2_DT_YUV422_8B,
253 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
/* RGB formats */
257 .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
258 .code = MEDIA_BUS_FMT_RGB565_2X8_LE,
260 .csi_dt = MIPI_CSI2_DT_RGB565,
261 .valid_colorspaces = MASK_CS_SRGB,
263 .fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
264 .code = MEDIA_BUS_FMT_RGB565_2X8_BE,
266 .csi_dt = MIPI_CSI2_DT_RGB565,
267 .valid_colorspaces = MASK_CS_SRGB,
269 .fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb arrrrrgg */
270 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE,
272 .csi_dt = MIPI_CSI2_DT_RGB555,
273 .valid_colorspaces = MASK_CS_SRGB,
275 .fourcc = V4L2_PIX_FMT_RGB555X, /* arrrrrgg gggbbbbb */
276 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE,
278 .csi_dt = MIPI_CSI2_DT_RGB555,
279 .valid_colorspaces = MASK_CS_SRGB,
281 .fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
282 .code = MEDIA_BUS_FMT_RGB888_1X24,
284 .csi_dt = MIPI_CSI2_DT_RGB888,
285 .valid_colorspaces = MASK_CS_SRGB,
287 .fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
288 .code = MEDIA_BUS_FMT_BGR888_1X24,
290 .csi_dt = MIPI_CSI2_DT_RGB888,
291 .valid_colorspaces = MASK_CS_SRGB,
293 .fourcc = V4L2_PIX_FMT_RGB32, /* argb */
294 .code = MEDIA_BUS_FMT_ARGB8888_1X32,
297 .valid_colorspaces = MASK_CS_SRGB,
/* Bayer raw formats - 8/10/12/14/16 bits per sample */
300 .fourcc = V4L2_PIX_FMT_SBGGR8,
301 .code = MEDIA_BUS_FMT_SBGGR8_1X8,
303 .csi_dt = MIPI_CSI2_DT_RAW8,
304 .valid_colorspaces = MASK_CS_RAW,
306 .fourcc = V4L2_PIX_FMT_SGBRG8,
307 .code = MEDIA_BUS_FMT_SGBRG8_1X8,
309 .csi_dt = MIPI_CSI2_DT_RAW8,
310 .valid_colorspaces = MASK_CS_RAW,
312 .fourcc = V4L2_PIX_FMT_SGRBG8,
313 .code = MEDIA_BUS_FMT_SGRBG8_1X8,
315 .csi_dt = MIPI_CSI2_DT_RAW8,
316 .valid_colorspaces = MASK_CS_RAW,
318 .fourcc = V4L2_PIX_FMT_SRGGB8,
319 .code = MEDIA_BUS_FMT_SRGGB8_1X8,
321 .csi_dt = MIPI_CSI2_DT_RAW8,
322 .valid_colorspaces = MASK_CS_RAW,
324 .fourcc = V4L2_PIX_FMT_SBGGR10P,
325 .repacked_fourcc = V4L2_PIX_FMT_SBGGR10,
326 .code = MEDIA_BUS_FMT_SBGGR10_1X10,
328 .csi_dt = MIPI_CSI2_DT_RAW10,
329 .valid_colorspaces = MASK_CS_RAW,
331 .fourcc = V4L2_PIX_FMT_SGBRG10P,
332 .repacked_fourcc = V4L2_PIX_FMT_SGBRG10,
333 .code = MEDIA_BUS_FMT_SGBRG10_1X10,
335 .csi_dt = MIPI_CSI2_DT_RAW10,
336 .valid_colorspaces = MASK_CS_RAW,
338 .fourcc = V4L2_PIX_FMT_SGRBG10P,
339 .repacked_fourcc = V4L2_PIX_FMT_SGRBG10,
340 .code = MEDIA_BUS_FMT_SGRBG10_1X10,
342 .csi_dt = MIPI_CSI2_DT_RAW10,
343 .valid_colorspaces = MASK_CS_RAW,
345 .fourcc = V4L2_PIX_FMT_SRGGB10P,
346 .repacked_fourcc = V4L2_PIX_FMT_SRGGB10,
347 .code = MEDIA_BUS_FMT_SRGGB10_1X10,
349 .csi_dt = MIPI_CSI2_DT_RAW10,
350 .valid_colorspaces = MASK_CS_RAW,
352 .fourcc = V4L2_PIX_FMT_SBGGR12P,
353 .repacked_fourcc = V4L2_PIX_FMT_SBGGR12,
354 .code = MEDIA_BUS_FMT_SBGGR12_1X12,
356 .csi_dt = MIPI_CSI2_DT_RAW12,
357 .valid_colorspaces = MASK_CS_RAW,
359 .fourcc = V4L2_PIX_FMT_SGBRG12P,
360 .repacked_fourcc = V4L2_PIX_FMT_SGBRG12,
361 .code = MEDIA_BUS_FMT_SGBRG12_1X12,
363 .csi_dt = MIPI_CSI2_DT_RAW12,
364 .valid_colorspaces = MASK_CS_RAW,
366 .fourcc = V4L2_PIX_FMT_SGRBG12P,
367 .repacked_fourcc = V4L2_PIX_FMT_SGRBG12,
368 .code = MEDIA_BUS_FMT_SGRBG12_1X12,
370 .csi_dt = MIPI_CSI2_DT_RAW12,
371 .valid_colorspaces = MASK_CS_RAW,
373 .fourcc = V4L2_PIX_FMT_SRGGB12P,
374 .repacked_fourcc = V4L2_PIX_FMT_SRGGB12,
375 .code = MEDIA_BUS_FMT_SRGGB12_1X12,
377 .csi_dt = MIPI_CSI2_DT_RAW12,
378 .valid_colorspaces = MASK_CS_RAW,
380 .fourcc = V4L2_PIX_FMT_SBGGR14P,
381 .repacked_fourcc = V4L2_PIX_FMT_SBGGR14,
382 .code = MEDIA_BUS_FMT_SBGGR14_1X14,
384 .csi_dt = MIPI_CSI2_DT_RAW14,
385 .valid_colorspaces = MASK_CS_RAW,
387 .fourcc = V4L2_PIX_FMT_SGBRG14P,
388 .repacked_fourcc = V4L2_PIX_FMT_SGBRG14,
389 .code = MEDIA_BUS_FMT_SGBRG14_1X14,
391 .csi_dt = MIPI_CSI2_DT_RAW14,
392 .valid_colorspaces = MASK_CS_RAW,
394 .fourcc = V4L2_PIX_FMT_SGRBG14P,
395 .repacked_fourcc = V4L2_PIX_FMT_SGRBG14,
396 .code = MEDIA_BUS_FMT_SGRBG14_1X14,
398 .csi_dt = MIPI_CSI2_DT_RAW14,
399 .valid_colorspaces = MASK_CS_RAW,
401 .fourcc = V4L2_PIX_FMT_SRGGB14P,
402 .repacked_fourcc = V4L2_PIX_FMT_SRGGB14,
403 .code = MEDIA_BUS_FMT_SRGGB14_1X14,
405 .csi_dt = MIPI_CSI2_DT_RAW14,
406 .valid_colorspaces = MASK_CS_RAW,
408 .fourcc = V4L2_PIX_FMT_SBGGR16,
409 .code = MEDIA_BUS_FMT_SBGGR16_1X16,
411 .csi_dt = MIPI_CSI2_DT_RAW16,
412 .valid_colorspaces = MASK_CS_RAW,
414 .fourcc = V4L2_PIX_FMT_SGBRG16,
415 .code = MEDIA_BUS_FMT_SGBRG16_1X16,
417 .csi_dt = MIPI_CSI2_DT_RAW16,
418 .valid_colorspaces = MASK_CS_RAW,
420 .fourcc = V4L2_PIX_FMT_SGRBG16,
421 .code = MEDIA_BUS_FMT_SGRBG16_1X16,
423 .csi_dt = MIPI_CSI2_DT_RAW16,
424 .valid_colorspaces = MASK_CS_RAW,
426 .fourcc = V4L2_PIX_FMT_SRGGB16,
427 .code = MEDIA_BUS_FMT_SRGGB16_1X16,
429 .csi_dt = MIPI_CSI2_DT_RAW16,
430 .valid_colorspaces = MASK_CS_RAW,
433 /* Greyscale formats */
434 .fourcc = V4L2_PIX_FMT_GREY,
435 .code = MEDIA_BUS_FMT_Y8_1X8,
437 .csi_dt = MIPI_CSI2_DT_RAW8,
438 .valid_colorspaces = MASK_CS_RAW,
440 .fourcc = V4L2_PIX_FMT_Y10P,
441 .repacked_fourcc = V4L2_PIX_FMT_Y10,
442 .code = MEDIA_BUS_FMT_Y10_1X10,
444 .csi_dt = MIPI_CSI2_DT_RAW10,
445 .valid_colorspaces = MASK_CS_RAW,
447 .fourcc = V4L2_PIX_FMT_Y12P,
448 .repacked_fourcc = V4L2_PIX_FMT_Y12,
449 .code = MEDIA_BUS_FMT_Y12_1X12,
451 .csi_dt = MIPI_CSI2_DT_RAW12,
452 .valid_colorspaces = MASK_CS_RAW,
454 .fourcc = V4L2_PIX_FMT_Y14P,
455 .repacked_fourcc = V4L2_PIX_FMT_Y14,
456 .code = MEDIA_BUS_FMT_Y14_1X14,
458 .csi_dt = MIPI_CSI2_DT_RAW14,
459 .valid_colorspaces = MASK_CS_RAW,
461 .fourcc = V4L2_PIX_FMT_Y16,
462 .code = MEDIA_BUS_FMT_Y16_1X16,
464 .csi_dt = MIPI_CSI2_DT_RAW16,
465 .valid_colorspaces = MASK_CS_RAW,
467 /* Embedded data format */
469 .fourcc = V4L2_META_FMT_SENSOR_DATA,
470 .code = MEDIA_BUS_FMT_SENSOR_DATA,
/*
 * Per-capture-buffer state: the vb2 buffer itself plus the list head used to
 * link the buffer into a node's dma_queue while it waits for DMA.
 */
476 struct unicam_buffer {
477 struct vb2_v4l2_buffer vb;
478 struct list_head list;
/* Map a vb2_buffer back to its enclosing unicam_buffer wrapper. */
481 static inline struct unicam_buffer *to_unicam_buffer(struct vb2_buffer *vb)
483 return container_of(vb, struct unicam_buffer, vb.vb2_buf);
/*
 * Per-video-node state - one node per Unicam output (image or embedded data).
 * NOTE(review): the "struct unicam_node {" opener and several members (e.g. a
 * pad_id/streaming flag referenced elsewhere in this file) appear to have been
 * dropped by the extraction; this listing is not the complete definition.
 */
491 /* Source pad id on the sensor for this node */
492 unsigned int src_pad_id;
493 /* Pointer pointing to current v4l2_buffer */
494 struct unicam_buffer *cur_frm;
495 /* Pointer pointing to next v4l2_buffer */
496 struct unicam_buffer *next_frm;
/* Format currently negotiated for this node (entry in formats[]) */
498 const struct unicam_fmt *fmt;
499 /* Used to store current pixel format */
500 struct v4l2_format v_fmt;
501 /* Used to store current mbus frame format */
502 struct v4l2_mbus_framefmt m_fmt;
503 /* Buffer queue used in video-buf */
504 struct vb2_queue buffer_queue;
505 /* Queue of filled frames */
506 struct list_head dma_queue;
507 /* IRQ lock for DMA queue */
508 spinlock_t dma_queue_lock;
509 /* lock used to access this structure */
511 /* Identifies video device for this channel */
512 struct video_device video_dev;
513 /* Pointer to the parent handle */
514 struct unicam_device *dev;
/* Media controller pad for this node's video device */
515 struct media_pad pad;
/* Lines of embedded data per frame (set in unicam_reset_format()) */
516 unsigned int embedded_lines;
517 struct media_pipeline pipe;
519 * Dummy buffer intended to be used by unicam
520 * if we have no other queued buffers to swap to.
522 void *dummy_buf_cpu_addr;
523 dma_addr_t dummy_buf_dma_addr;
/*
 * Top-level driver state for one Unicam instance.
 * NOTE(review): several members referenced elsewhere in this file (register
 * base, lp clock handle, clocks_enabled/frame_started flags) appear to have
 * been dropped by the extraction; this listing is not the complete definition.
 */
526 struct unicam_device {
529 /* V4l2 specific parameters */
530 struct v4l2_async_subdev asd;
532 /* peripheral base address */
534 /* clock gating base address */
535 void __iomem *clk_gate_base;
536 /* lp clock handle */
538 /* vpu clock handle */
539 struct clk *vpu_clock;
540 /* clock status for error handling */
/* Top-level V4L2 device and media controller device */
543 struct v4l2_device v4l2_dev;
544 struct media_device mdev;
547 struct platform_device *pdev;
548 /* subdevice async Notifier */
549 struct v4l2_async_notifier notifier;
/* Frame sequence counter, reported via V4L2_EVENT_FRAME_SYNC */
550 unsigned int sequence;
553 /* ptr to sub device */
554 struct v4l2_subdev *sensor;
555 /* Pad config for the sensor */
556 struct v4l2_subdev_state *sensor_state;
/* CSI-2 vs CCP2 bus selection from the fwnode endpoint */
558 enum v4l2_mbus_type bus_type;
560 * Stores bus.mipi_csi2.flags for CSI2 sensors, or
561 * bus.mipi_csi1.strobe for CCP2.
563 unsigned int bus_flags;
564 unsigned int max_data_lanes;
565 unsigned int active_data_lanes;
/* True if the sensor delivers embedded data on a separate stream */
566 bool sensor_embedded_data;
/* One node per output: image capture and embedded/metadata capture */
568 struct unicam_node node[MAX_NODES];
569 struct v4l2_ctrl_handler ctrl_handler;
/* Map the embedded v4l2_device back to the unicam_device that contains it. */
574 static inline struct unicam_device *
575 to_unicam_device(struct v4l2_device *v4l2_dev)
577 return container_of(v4l2_dev, struct unicam_device, v4l2_dev);
580 /* Hardware access */
/*
 * Write the Unicam clock-gate register. The value is OR'd with 0x5a000000 -
 * presumably the BCM283x clock-manager password bits; confirm against the
 * BCM2835 peripheral documentation.
 */
581 static inline void clk_write(struct unicam_device *dev, u32 val)
583 writel(val | 0x5a000000, dev->clk_gate_base);
/* Read a 32-bit Unicam register at the given byte offset. */
586 static inline u32 reg_read(struct unicam_device *dev, u32 offset)
588 return readl(dev->base + offset);
/* Write a 32-bit Unicam register at the given byte offset. */
591 static inline void reg_write(struct unicam_device *dev, u32 offset, u32 val)
593 writel(val, dev->base + offset);
/* Extract the bitfield selected by @mask from @value, shifted down to bit 0. */
596 static inline int get_field(u32 value, u32 mask)
598 return (value & mask) >> __ffs(mask);
/*
 * Insert @field into *valp under @mask.
 * NOTE(review): the lines that load *valp into val, clear the masked bits and
 * store the result back appear to have been dropped by the extraction; only
 * the OR-in step is visible here.
 */
601 static inline void set_field(u32 *valp, u32 field, u32 mask)
606 val |= (field << __ffs(mask)) & mask;
/* Read a register and extract the bitfield selected by @mask. */
610 static inline u32 reg_read_field(struct unicam_device *dev, u32 offset,
613 return get_field(reg_read(dev, offset), mask);
/* Read-modify-write a register, replacing only the bitfield under @mask. */
616 static inline void reg_write_field(struct unicam_device *dev, u32 offset,
619 u32 val = reg_read(dev, offset);
621 set_field(&val, field, mask);
622 reg_write(dev, offset, val);
625 /* Power management functions */
626 static inline int unicam_runtime_get(struct unicam_device *dev)
628 return pm_runtime_get_sync(&dev->pdev->dev);
/* Drop the runtime-PM reference taken by unicam_runtime_get(). */
631 static inline void unicam_runtime_put(struct unicam_device *dev)
633 pm_runtime_put_sync(&dev->pdev->dev);
636 /* Format setup functions */
/*
 * Look up the formats[] entry matching a media bus @code.
 * NOTE(review): the return statements appear to have been dropped by the
 * extraction; presumably returns &formats[i] on a match, NULL otherwise.
 */
637 static const struct unicam_fmt *find_format_by_code(u32 code)
641 for (i = 0; i < ARRAY_SIZE(formats); i++) {
642 if (formats[i].code == code)
/*
 * Ask the sensor (via enum_mbus_code, bounded by MAX_ENUM_MBUS_CODE to guard
 * against subdevs that never return an error) whether it can produce the
 * media bus code of @format. Presumably returns non-zero when the code was
 * found - the return path was dropped by the extraction; confirm.
 */
649 static int check_mbus_format(struct unicam_device *dev,
650 const struct unicam_fmt *format)
655 for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
656 struct v4l2_subdev_mbus_code_enum mbus_code = {
659 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
662 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
665 if (!ret && mbus_code.code == format->code)
/*
 * Look up the formats[] entry whose native or repacked fourcc matches
 * @pixelformat. For entries flagged check_variants (several mediabus codes
 * mapping to the same fourcc), skip candidates the sensor cannot actually
 * produce by querying check_mbus_format().
 */
672 static const struct unicam_fmt *find_format_by_pix(struct unicam_device *dev,
677 for (i = 0; i < ARRAY_SIZE(formats); i++) {
678 if (formats[i].fourcc == pixelformat ||
679 formats[i].repacked_fourcc == pixelformat) {
680 if (formats[i].check_variants &&
681 !check_mbus_format(dev, &formats[i]))
690 static unsigned int bytes_per_line(u32 width, const struct unicam_fmt *fmt,
693 if (v4l2_fourcc == fmt->repacked_fourcc)
694 /* Repacking always goes to 16bpp */
695 return ALIGN(width << 1, BPL_ALIGNMENT);
697 return ALIGN((width * fmt->depth) >> 3, BPL_ALIGNMENT);
/*
 * Fetch the active format from the sensor pad feeding node @pad_id and copy
 * it into @fmt. Returns the v4l2_subdev_call() result (error handling lines
 * appear to have been dropped by the extraction).
 */
700 static int __subdev_get_format(struct unicam_device *dev,
701 struct v4l2_mbus_framefmt *fmt, int pad_id)
703 struct v4l2_subdev_format sd_fmt = {
704 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
705 .pad = dev->node[pad_id].src_pad_id,
709 ret = v4l2_subdev_call(dev->sensor, pad, get_fmt, dev->sensor_state,
714 *fmt = sd_fmt.format;
716 unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__,
717 fmt->width, fmt->height, fmt->code);
/*
 * Push @fmt to the sensor pad feeding node @pad_id as the ACTIVE format and
 * copy back whatever the sensor actually accepted into @fmt.
 */
722 static int __subdev_set_format(struct unicam_device *dev,
723 struct v4l2_mbus_framefmt *fmt, int pad_id)
725 struct v4l2_subdev_format sd_fmt = {
726 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
727 .pad = dev->node[pad_id].src_pad_id,
731 sd_fmt.format = *fmt;
733 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
738 *fmt = sd_fmt.format;
740 if (pad_id == IMAGE_PAD)
741 unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__, fmt->width,
742 fmt->height, fmt->code);
744 unicam_dbg(1, dev, "%s Embedded data code:%04x\n", __func__,
/*
 * Clamp and align the requested image dimensions, then derive bytesperline
 * and sizeimage. A user-supplied bytesperline larger than the minimum is
 * honoured (aligned) as long as it fits the 16-bit stride register
 * (MAX_BYTESPERLINE); otherwise the minimum stride is used.
 */
750 static int unicam_calc_format_size_bpl(struct unicam_device *dev,
751 const struct unicam_fmt *fmt,
752 struct v4l2_format *f)
754 unsigned int min_bytesperline;
756 v4l_bound_align_image(&f->fmt.pix.width, MIN_WIDTH, MAX_WIDTH, 2,
757 &f->fmt.pix.height, MIN_HEIGHT, MAX_HEIGHT, 0,
760 min_bytesperline = bytes_per_line(f->fmt.pix.width, fmt,
761 f->fmt.pix.pixelformat);
763 if (f->fmt.pix.bytesperline > min_bytesperline &&
764 f->fmt.pix.bytesperline <= MAX_BYTESPERLINE)
765 f->fmt.pix.bytesperline = ALIGN(f->fmt.pix.bytesperline,
768 f->fmt.pix.bytesperline = min_bytesperline;
770 f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
772 unicam_dbg(3, dev, "%s: fourcc: %08X size: %dx%d bpl:%d img_size:%d\n",
774 f->fmt.pix.pixelformat,
775 f->fmt.pix.width, f->fmt.pix.height,
776 f->fmt.pix.bytesperline, f->fmt.pix.sizeimage);
/*
 * Re-sync the node's cached V4L2 and mediabus formats from the sensor.
 * For the image pad this recomputes stride/sizeimage; for the metadata pad
 * the buffer size is either width*height of the sensor's embedded-data
 * format, or the fixed UNICAM_EMBEDDED_SIZE fallback when the sensor does
 * not expose an embedded-data stream.
 */
781 static int unicam_reset_format(struct unicam_node *node)
783 struct unicam_device *dev = node->dev;
784 struct v4l2_mbus_framefmt mbus_fmt;
787 if (dev->sensor_embedded_data || node->pad_id != METADATA_PAD) {
788 ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
790 unicam_err(dev, "Failed to get_format - ret %d\n", ret);
794 if (mbus_fmt.code != node->fmt->code) {
795 unicam_err(dev, "code mismatch - fmt->code %08x, mbus_fmt.code %08x\n",
796 node->fmt->code, mbus_fmt.code);
801 if (node->pad_id == IMAGE_PAD) {
802 v4l2_fill_pix_format(&node->v_fmt.fmt.pix, &mbus_fmt);
803 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
804 unicam_calc_format_size_bpl(dev, node->fmt, &node->v_fmt);
806 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
807 node->v_fmt.fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
808 if (dev->sensor_embedded_data) {
809 node->v_fmt.fmt.meta.buffersize =
810 mbus_fmt.width * mbus_fmt.height;
811 node->embedded_lines = mbus_fmt.height;
813 node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
814 node->embedded_lines = 1;
818 node->m_fmt = mbus_fmt;
/*
 * Program the DMA start/end addresses for the next buffer: image data goes
 * to IBSA0/IBEA0, embedded data to DBSA0/DBEA0. A buffer_size of 0 makes
 * start == end (used for the dummy buffer, see DUMMY_BUF_SIZE above).
 */
822 static void unicam_wr_dma_addr(struct unicam_device *dev, dma_addr_t dmaaddr,
823 unsigned int buffer_size, int pad_id)
825 dma_addr_t endaddr = dmaaddr + buffer_size;
827 if (pad_id == IMAGE_PAD) {
828 reg_write(dev, UNICAM_IBSA0, dmaaddr);
829 reg_write(dev, UNICAM_IBEA0, endaddr);
831 reg_write(dev, UNICAM_DBSA0, dmaaddr);
832 reg_write(dev, UNICAM_DBEA0, endaddr);
/*
 * Number of image lines written into the current frame so far, computed from
 * the hardware write pointer (IBWP) relative to the buffer's DMA start
 * address, divided by the line stride. NOTE(review): a guard for a NULL
 * cur_frm appears to have been dropped by the extraction - confirm before
 * relying on this listing.
 */
836 static unsigned int unicam_get_lines_done(struct unicam_device *dev)
838 dma_addr_t start_addr, cur_addr;
839 unsigned int stride = dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline;
840 struct unicam_buffer *frm = dev->node[IMAGE_PAD].cur_frm;
845 start_addr = vb2_dma_contig_plane_dma_addr(&frm->vb.vb2_buf, 0);
846 cur_addr = reg_read(dev, UNICAM_IBWP);
847 return (unsigned int)(cur_addr - start_addr) / stride;
/*
 * Pop the head of the node's dma_queue, make it the pending next_frm, and
 * program its DMA window. Callers hold dma_queue_lock (see the ISR's
 * spin_lock around this call).
 */
850 static void unicam_schedule_next_buffer(struct unicam_node *node)
852 struct unicam_device *dev = node->dev;
853 struct unicam_buffer *buf;
857 buf = list_first_entry(&node->dma_queue, struct unicam_buffer, list);
858 node->next_frm = buf;
859 list_del(&buf->list);
861 addr = vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
862 size = (node->pad_id == IMAGE_PAD) ?
863 node->v_fmt.fmt.pix.sizeimage :
864 node->v_fmt.fmt.meta.buffersize;
866 unicam_wr_dma_addr(dev, addr, size, node->pad_id);
/*
 * Point the hardware at the dummy buffer (with a programmed size of 0 - see
 * the DUMMY_BUF_SIZE comment above for the HW-bug rationale) so frames are
 * dumped until a real buffer is queued; clears next_frm to mark that no real
 * buffer is pending.
 */
869 static void unicam_schedule_dummy_buffer(struct unicam_node *node)
871 struct unicam_device *dev = node->dev;
873 unicam_dbg(3, dev, "Scheduling dummy buffer for node %d\n",
876 unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr, 0, node->pad_id);
877 node->next_frm = NULL;
/*
 * Fill in the completed frame's field/sequence metadata and hand cur_frm
 * back to vb2 as DONE.
 */
880 static void unicam_process_buffer_complete(struct unicam_node *node,
881 unsigned int sequence)
883 node->cur_frm->vb.field = node->m_fmt.field;
884 node->cur_frm->vb.sequence = sequence;
886 vb2_buffer_done(&node->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE);
/*
 * Queue a V4L2_EVENT_FRAME_SYNC (start-of-frame) event carrying the current
 * sequence number on the image node's video device.
 */
889 static void unicam_queue_event_sof(struct unicam_device *unicam)
891 struct v4l2_event event = {
892 .type = V4L2_EVENT_FRAME_SYNC,
893 .u.frame_sync.frame_sequence = unicam->sequence,
896 v4l2_event_queue(&unicam->node[IMAGE_PAD].video_dev, &event);
900 * unicam_isr : ISR handler for unicam capture
901 * @irq: interrupt number (unused)
902 * @dev: pointer to the struct unicam_device, registered as the dev_id cookie
904 * It changes the status of the captured buffer, takes the next buffer from the
905 * queue and sets its address in the unicam registers
/*
 * NOTE(review): numerous lines of this handler (local declarations for
 * sta/ista/fe/i/ts, several braces and the return statements) appear to have
 * been dropped by the extraction; do not treat this listing as complete.
 * The visible structure is: clear STA/ISTA by write-back, handle frame-end
 * (complete/recycle buffers, bump sequence), then frame-start (timestamp,
 * SOF event, fall back to the dummy buffer), then swap in queued buffers -
 * deliberately not at frame end, to avoid a HW swap race.
 */
907 static irqreturn_t unicam_isr(int irq, void *dev)
909 struct unicam_device *unicam = dev;
910 unsigned int lines_done = unicam_get_lines_done(dev);
911 unsigned int sequence = unicam->sequence;
917 sta = reg_read(unicam, UNICAM_STA);
918 /* Write value back to clear the interrupts */
919 reg_write(unicam, UNICAM_STA, sta);
921 ista = reg_read(unicam, UNICAM_ISTA);
922 /* Write value back to clear the interrupts */
923 reg_write(unicam, UNICAM_ISTA, ista);
925 unicam_dbg(3, unicam, "ISR: ISTA: 0x%X, STA: 0x%X, sequence %d, lines done %d",
926 ista, sta, sequence, lines_done);
928 if (!(sta & (UNICAM_IS | UNICAM_PI0)))
932 * Look for either the Frame End interrupt or the Packet Capture status
933 * to signal a frame end.
935 fe = (ista & UNICAM_FEI || sta & UNICAM_PI0);
938 * We must run the frame end handler first. If we have a valid next_frm
939 * and we get a simultaneout FE + FS interrupt, running the FS handler
940 * first would null out the next_frm ptr and we would have lost the
944 bool inc_seq = unicam->frame_started;
947 * Ensure we have swapped buffers already as we can't
948 * stop the peripheral. If no buffer is available, use a
949 * dummy buffer to dump out frames until we get a new buffer
952 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
953 struct unicam_node *node = &unicam->node[i];
955 if (!node->streaming)
959 * If cur_frm == next_frm, it means we have not had
960 * a chance to swap buffers, likely due to having
961 * multiple interrupts occurring simultaneously (like FE
962 * + FS + LS). In this case, we cannot signal the buffer
963 * as complete, as the HW will reuse that buffer.
965 if (node->cur_frm && node->cur_frm != node->next_frm) {
967 * This condition checks if FE + FS for the same
968 * frame has occurred. In such cases, we cannot
969 * return out the frame, as no buffer handling
970 * or timestamping has yet been done as part of
973 if (!node->cur_frm->vb.vb2_buf.timestamp) {
974 unicam_dbg(2, unicam, "ISR: FE without FS, dropping frame\n");
978 unicam_process_buffer_complete(node, sequence);
979 node->cur_frm = node->next_frm;
980 node->next_frm = NULL;
983 node->cur_frm = node->next_frm;
988 * Increment the sequence number conditionally on either a FS
989 * having already occurred, or in the FE + FS condition as
990 * caught in the FE handler above. This ensures the sequence
991 * number corresponds to the frames generated by the sensor, not
992 * the frames dequeued to userland.
996 unicam->frame_started = false;
1000 if (ista & UNICAM_FSI) {
1002 * Timestamp is to be when the first data byte was captured,
1005 ts = ktime_get_ns();
1006 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
1007 if (!unicam->node[i].streaming)
1010 if (unicam->node[i].cur_frm)
1011 unicam->node[i].cur_frm->vb.vb2_buf.timestamp =
1014 unicam_dbg(2, unicam, "ISR: [%d] Dropping frame, buffer not available at FS\n",
1017 * Set the next frame output to go to a dummy frame
1018 * if no buffer currently queued.
1020 if (!unicam->node[i].next_frm ||
1021 unicam->node[i].next_frm == unicam->node[i].cur_frm) {
1022 unicam_schedule_dummy_buffer(&unicam->node[i]);
1023 } else if (unicam->node[i].cur_frm) {
1025 * Repeated FS without FE. Hardware will have
1026 * swapped buffers, but the cur_frm doesn't
1027 * contain valid data. Return cur_frm to the
1030 spin_lock(&unicam->node[i].dma_queue_lock);
1031 list_add_tail(&unicam->node[i].cur_frm->list,
1032 &unicam->node[i].dma_queue);
1033 spin_unlock(&unicam->node[i].dma_queue_lock);
1034 unicam->node[i].cur_frm = unicam->node[i].next_frm;
1035 unicam->node[i].next_frm = NULL;
1039 unicam_queue_event_sof(unicam);
1040 unicam->frame_started = true;
1044 * Cannot swap buffer at frame end, there may be a race condition
1045 * where the HW does not actually swap it if the new frame has
1048 if (ista & (UNICAM_FSI | UNICAM_LCI) && !fe) {
1049 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
1050 if (!unicam->node[i].streaming)
1053 spin_lock(&unicam->node[i].dma_queue_lock);
1054 if (!list_empty(&unicam->node[i].dma_queue) &&
1055 !unicam->node[i].next_frm)
1056 unicam_schedule_next_buffer(&unicam->node[i]);
1057 spin_unlock(&unicam->node[i].dma_queue_lock);
1064 /* V4L2 Common IOCTLs */
/*
 * VIDIOC_QUERYCAP: report driver/card/bus identity and device capabilities.
 * NOTE(review): the device_caps assignment and return presumably follow -
 * those lines appear to have been dropped by the extraction.
 */
1065 static int unicam_querycap(struct file *file, void *priv,
1066 struct v4l2_capability *cap)
1068 struct unicam_node *node = video_drvdata(file);
1069 struct unicam_device *dev = node->dev;
1071 strscpy(cap->driver, UNICAM_MODULE_NAME, sizeof(cap->driver));
1072 strscpy(cap->card, UNICAM_MODULE_NAME, sizeof(cap->card));
1074 snprintf(cap->bus_info, sizeof(cap->bus_info),
1075 "platform:%s", dev_name(&dev->pdev->dev));
1077 cap->capabilities |= V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_META_CAPTURE;
/*
 * VIDIOC_LOG_STATUS: dump subdevice status plus a snapshot of the receiver's
 * configured format and the live hardware state (stride, detected resolution
 * and write pointer) for debugging.
 */
1082 static int unicam_log_status(struct file *file, void *fh)
1084 struct unicam_node *node = video_drvdata(file);
1085 struct unicam_device *dev = node->dev;
1088 /* status for sub devices */
1089 v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
1091 unicam_info(dev, "-----Receiver status-----\n");
1092 unicam_info(dev, "V4L2 width/height: %ux%u\n",
1093 node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
1094 unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
1095 unicam_info(dev, "V4L2 format: %08x\n",
1096 node->v_fmt.fmt.pix.pixelformat);
1097 reg = reg_read(dev, UNICAM_IPIPE);
1098 unicam_info(dev, "Unpacking/packing: %u / %u\n",
1099 get_field(reg, UNICAM_PUM_MASK),
1100 get_field(reg, UNICAM_PPM_MASK));
1101 unicam_info(dev, "----Live data----\n");
1102 unicam_info(dev, "Programmed stride: %4u\n",
1103 reg_read(dev, UNICAM_IBLS));
1104 unicam_info(dev, "Detected resolution: %ux%u\n",
1105 reg_read(dev, UNICAM_IHSTA),
1106 reg_read(dev, UNICAM_IVSTA));
1107 unicam_info(dev, "Write pointer: %08x\n",
1108 reg_read(dev, UNICAM_IBWP));
1113 /* V4L2 Video Centric IOCTLs */
/*
 * VIDIOC_ENUM_FMT: enumerate pixel formats by walking the sensor's mediabus
 * codes (bounded by MAX_ENUM_MBUS_CODE) and mapping each supported code to
 * its V4L2 fourcc. A code with a repacked_fourcc contributes a second
 * enumeration index for the unpacked 16bpp variant. Image pad only.
 */
1114 static int unicam_enum_fmt_vid_cap(struct file *file, void *priv,
1115 struct v4l2_fmtdesc *f)
1117 struct unicam_node *node = video_drvdata(file);
1118 struct unicam_device *dev = node->dev;
1119 unsigned int index = 0;
1123 if (node->pad_id != IMAGE_PAD)
1126 for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
1127 struct v4l2_subdev_mbus_code_enum mbus_code = {
1130 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1132 const struct unicam_fmt *fmt;
1134 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
1138 "subdev->enum_mbus_code idx %d returned %d - index invalid\n",
1143 fmt = find_format_by_code(mbus_code.code);
1146 if (index == f->index) {
1147 f->pixelformat = fmt->fourcc;
1152 if (fmt->repacked_fourcc) {
1153 if (index == f->index) {
1154 f->pixelformat = fmt->repacked_fourcc;
/*
 * VIDIOC_G_FMT: return the current capture format. The sensor is re-queried
 * because an H/V flip can change the Bayer order (and hence the mbus code)
 * behind our back; if the code changed, the cached pixelformat is updated,
 * preserving the user's packed-vs-repacked choice where possible.
 */
1165 static int unicam_g_fmt_vid_cap(struct file *file, void *priv,
1166 struct v4l2_format *f)
1168 struct v4l2_mbus_framefmt mbus_fmt = {0};
1169 struct unicam_node *node = video_drvdata(file);
1170 struct unicam_device *dev = node->dev;
1171 const struct unicam_fmt *fmt = NULL;
1174 if (node->pad_id != IMAGE_PAD)
1178 * If a flip has occurred in the sensor, the fmt code might have
1179 * changed. So we will need to re-fetch the format from the subdevice.
1181 ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
1185 /* Find the V4L2 format from mbus code. We must match a known format. */
1186 fmt = find_format_by_code(mbus_fmt.code);
1190 if (node->fmt != fmt) {
1192 * The sensor format has changed so the pixelformat needs to
1193 * be updated. Try and retain the packed/unpacked choice if
1196 if (node->fmt->repacked_fourcc ==
1197 node->v_fmt.fmt.pix.pixelformat)
1198 /* Using the repacked format */
1199 node->v_fmt.fmt.pix.pixelformat = fmt->repacked_fourcc;
1201 /* Using the native format */
1202 node->v_fmt.fmt.pix.pixelformat = fmt->fourcc;
/*
 * Walk the sensor's mediabus codes on the image pad and return the first one
 * that Unicam also supports (i.e. that has a formats[] entry). Used as a
 * fallback when the requested pixelformat cannot be satisfied. Presumably
 * returns NULL when nothing matches - the return path was dropped by the
 * extraction.
 */
1212 static const struct unicam_fmt *
1213 get_first_supported_format(struct unicam_device *dev)
1215 struct v4l2_subdev_mbus_code_enum mbus_code;
1216 const struct unicam_fmt *fmt = NULL;
1220 for (i = 0; ret != -EINVAL && ret != -ENOIOCTLCMD; ++i) {
1221 memset(&mbus_code, 0, sizeof(mbus_code));
1222 mbus_code.index = i;
1223 mbus_code.pad = IMAGE_PAD;
1224 mbus_code.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1226 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1230 "subdev->enum_mbus_code idx %u returned %d - continue\n",
1235 unicam_dbg(2, dev, "subdev %s: code: 0x%08x idx: %u\n",
1236 dev->sensor->name, mbus_code.code, i);
1238 fmt = find_format_by_code(mbus_code.code);
1239 unicam_dbg(2, dev, "fmt 0x%08x returned as %p, V4L2 FOURCC 0x%08x, csi_dt 0x%02x\n",
1240 mbus_code.code, fmt, fmt ? fmt->fourcc : 0,
1241 fmt ? fmt->csi_dt : 0);
/*
 * VIDIOC_TRY_FMT (video capture, legacy/non-MC mode).
 * Negotiates with the sensor via a TRY set_fmt: if the requested fourcc is
 * unknown, or the sensor answers with a mbus code Unicam cannot receive,
 * fall back to the first format supported by both sides and try again.
 * Interlaced video is never requested (field forced to NONE).
 * Finishes by computing bytesperline/sizeimage via
 * unicam_calc_format_size_bpl().
 */
1249 static int unicam_try_fmt_vid_cap(struct file *file, void *priv,
1250 struct v4l2_format *f)
1252 struct unicam_node *node = video_drvdata(file);
1253 struct unicam_device *dev = node->dev;
1254 struct v4l2_subdev_format sd_fmt = {
1255 .which = V4L2_SUBDEV_FORMAT_TRY,
1258 struct v4l2_mbus_framefmt *mbus_fmt = &sd_fmt.format;
1259 const struct unicam_fmt *fmt;
/* Only the image node supports video-capture formats. */
1262 if (node->pad_id != IMAGE_PAD)
1265 fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1268 * Pixel format not supported by unicam. Choose the first
1269 * supported format, and let the sensor choose something else.
1271 unicam_dbg(3, dev, "Fourcc format (0x%08x) not found. Use first format.\n",
1272 f->fmt.pix.pixelformat);
1275 f->fmt.pix.pixelformat = fmt->fourcc;
1278 v4l2_fill_mbus_format(mbus_fmt, &f->fmt.pix, fmt->code);
1280 * No support for receiving interlaced video, so never
1281 * request it from the sensor subdev.
1283 mbus_fmt->field = V4L2_FIELD_NONE;
1285 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
1287 if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1290 if (mbus_fmt->field != V4L2_FIELD_NONE)
1291 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1293 v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1294 if (mbus_fmt->code != fmt->code) {
1295 /* Sensor has returned an alternate format */
1296 fmt = find_format_by_code(mbus_fmt->code);
1299 * The alternate format is one unicam can't support.
1300 * Find the first format that is supported by both, and
1303 fmt = get_first_supported_format(dev);
1304 mbus_fmt->code = fmt->code;
1306 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt,
1307 dev->sensor_state, &sd_fmt);
1308 if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1311 if (mbus_fmt->field != V4L2_FIELD_NONE)
1312 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1314 v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1316 if (mbus_fmt->code != fmt->code) {
1318 * We've set a format that the sensor reports
1319 * as being supported, but it refuses to set it.
1320 * Not much else we can do.
1321 * Assume that the sensor driver may accept the
1322 * format when it is set (rather than tried).
1324 unicam_err(dev, "Sensor won't accept default format, and Unicam can't support sensor default\n");
/* Report the native or repacked fourcc matching the agreed mbus code. */
1329 f->fmt.pix.pixelformat = fmt->fourcc;
1331 f->fmt.pix.pixelformat = fmt->repacked_fourcc;
1334 return unicam_calc_format_size_bpl(dev, fmt, f);
/*
 * VIDIOC_S_FMT (video capture, legacy/non-MC mode).
 * Runs try_fmt first to resolve a supported format, programs the sensor
 * with the ACTIVE format via __subdev_set_format(), then commits the
 * result into node->v_fmt and recomputes derived fields with
 * unicam_reset_format().
 */
1337 static int unicam_s_fmt_vid_cap(struct file *file, void *priv,
1338 struct v4l2_format *f)
1340 struct unicam_node *node = video_drvdata(file);
1341 struct unicam_device *dev = node->dev;
1342 struct vb2_queue *q = &node->buffer_queue;
1343 struct v4l2_mbus_framefmt mbus_fmt = {0};
1344 const struct unicam_fmt *fmt;
1350 ret = unicam_try_fmt_vid_cap(file, priv, f);
1354 fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1357 * Unknown pixel format - adopt a default.
1358 * This shouldn't happen as try_fmt should have resolved any
1361 fmt = get_first_supported_format(dev);
1364 * It shouldn't be possible to get here with no
1368 f->fmt.pix.pixelformat = fmt->fourcc;
1372 v4l2_fill_mbus_format(&mbus_fmt, &f->fmt.pix, fmt->code);
1374 ret = __subdev_set_format(dev, &mbus_fmt, node->pad_id);
1376 unicam_dbg(3, dev, "%s __subdev_set_format failed %d\n",
/* Sanity check: the sensor should not change the code in ACTIVE set. */
1381 /* Just double check nothing has gone wrong */
1382 if (mbus_fmt.code != fmt->code) {
1384 "%s subdev changed format on us, this should not happen\n",
1390 node->v_fmt.fmt.pix.pixelformat = f->fmt.pix.pixelformat;
1391 node->v_fmt.fmt.pix.bytesperline = f->fmt.pix.bytesperline;
1392 unicam_reset_format(node);
1395 "%s %dx%d, mbus_fmt 0x%08X, V4L2 pix 0x%08X.\n",
1396 __func__, node->v_fmt.fmt.pix.width,
1397 node->v_fmt.fmt.pix.height, mbus_fmt.code,
1398 node->v_fmt.fmt.pix.pixelformat);
/*
 * VIDIOC_ENUM_FMT (metadata capture). Only index 0 on the metadata node is
 * valid. If the sensor exposes an embedded-data pad, report the sensor's
 * first mbus code on METADATA_PAD; otherwise fall back to the generic
 * MEDIA_BUS_FMT_SENSOR_DATA code.
 */
1405 static int unicam_enum_fmt_meta_cap(struct file *file, void *priv,
1406 struct v4l2_fmtdesc *f)
1408 struct unicam_node *node = video_drvdata(file);
1409 struct unicam_device *dev = node->dev;
1410 const struct unicam_fmt *fmt;
1414 if (node->pad_id != METADATA_PAD || f->index != 0)
1417 if (dev->sensor_embedded_data) {
1418 struct v4l2_subdev_mbus_code_enum mbus_code = {
1420 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1421 .pad = METADATA_PAD,
1424 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1428 "subdev->enum_mbus_code idx 0 returned %d - index invalid\n",
1433 code = mbus_code.code;
1435 code = MEDIA_BUS_FMT_SENSOR_DATA;
1438 fmt = find_format_by_code(code);
1440 f->pixelformat = fmt->fourcc;
/*
 * VIDIOC_G_FMT (metadata capture): valid only on the metadata node.
 * Returns the current metadata format stored in the node
 * (extraction dropped the copy-out lines here; code kept verbatim).
 */
1445 static int unicam_g_fmt_meta_cap(struct file *file, void *priv,
1446 struct v4l2_format *f)
1448 struct unicam_node *node = video_drvdata(file);
1450 if (node->pad_id != METADATA_PAD)
/*
 * VIDIOC_ENUM_INPUT: a single camera input ("Camera 0").
 * Capabilities are derived from which ops the sensor subdev implements:
 * DV timings if it has s_dv_timings, analogue standards if it has s_std,
 * none otherwise. Input status is queried via g_input_status when present.
 */
1458 static int unicam_enum_input(struct file *file, void *priv,
1459 struct v4l2_input *inp)
1461 struct unicam_node *node = video_drvdata(file);
1462 struct unicam_device *dev = node->dev;
1465 if (inp->index != 0)
1468 inp->type = V4L2_INPUT_TYPE_CAMERA;
1469 if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
1470 inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
1472 } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
1473 inp->capabilities = V4L2_IN_CAP_STD;
/* If the sensor can't report its supported norms, advertise them all. */
1474 if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
1475 inp->std = V4L2_STD_ALL;
1477 inp->capabilities = 0;
1481 if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
1482 ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
1488 snprintf(inp->name, sizeof(inp->name), "Camera 0");
1492 static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
1499 static int unicam_s_input(struct file *file, void *priv, unsigned int i)
1502 * FIXME: Ideally we would like to be able to query the source
1503 * subdevice for information over the input connectors it supports,
1504 * and map that through in to a call to video_ops->s_routing.
1505 * There is no infrastructure support for defining that within
1506 * devicetree at present. Until that is implemented we can't
1507 * map a user physical connector number to s_routing input number.
1515 static int unicam_querystd(struct file *file, void *priv,
1518 struct unicam_node *node = video_drvdata(file);
1519 struct unicam_device *dev = node->dev;
1521 return v4l2_subdev_call(dev->sensor, video, querystd, std);
1524 static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
1526 struct unicam_node *node = video_drvdata(file);
1527 struct unicam_device *dev = node->dev;
1529 return v4l2_subdev_call(dev->sensor, video, g_std, std);
1532 static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
1534 struct unicam_node *node = video_drvdata(file);
1535 struct unicam_device *dev = node->dev;
1537 v4l2_std_id current_std;
1539 ret = v4l2_subdev_call(dev->sensor, video, g_std, ¤t_std);
1543 if (std == current_std)
1546 if (vb2_is_busy(&node->buffer_queue))
1549 ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
1551 /* Force recomputation of bytesperline */
1552 node->v_fmt.fmt.pix.bytesperline = 0;
1554 unicam_reset_format(node);
1559 static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1561 struct unicam_node *node = video_drvdata(file);
1562 struct unicam_device *dev = node->dev;
1564 return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
1567 static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1569 struct unicam_node *node = video_drvdata(file);
1570 struct unicam_device *dev = node->dev;
1572 return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
/*
 * VIDIOC_S_SELECTION: translate the V4L2 selection into a subdev
 * selection (ACTIVE) and apply it on the sensor. Only video-capture
 * buffer types are accepted.
 */
1575 static int unicam_s_selection(struct file *file, void *priv,
1576 struct v4l2_selection *sel)
1578 struct unicam_node *node = video_drvdata(file);
1579 struct unicam_device *dev = node->dev;
1580 struct v4l2_subdev_selection sdsel = {
1581 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1582 .target = sel->target,
1583 .flags = sel->flags,
1587 if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1590 return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
/*
 * VIDIOC_G_SELECTION: query the requested selection target from the
 * sensor (ACTIVE) and copy the resulting rectangle back to userspace.
 * Only video-capture buffer types are accepted.
 */
1593 static int unicam_g_selection(struct file *file, void *priv,
1594 struct v4l2_selection *sel)
1596 struct unicam_node *node = video_drvdata(file);
1597 struct unicam_device *dev = node->dev;
1598 struct v4l2_subdev_selection sdsel = {
1599 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1600 .target = sel->target,
1604 if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1607 ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
/*
 * VIDIOC_ENUM_FRAMESIZES (legacy mode): validate the fourcc, then proxy
 * the enumeration to the sensor's enum_frame_size on its source pad,
 * reporting each entry as a DISCRETE size (max_width x max_height).
 */
1614 static int unicam_enum_framesizes(struct file *file, void *priv,
1615 struct v4l2_frmsizeenum *fsize)
1617 struct unicam_node *node = video_drvdata(file);
1618 struct unicam_device *dev = node->dev;
1619 const struct unicam_fmt *fmt;
1620 struct v4l2_subdev_frame_size_enum fse;
1623 /* check for valid format */
1624 fmt = find_format_by_pix(dev, fsize->pixel_format);
1626 unicam_dbg(3, dev, "Invalid pixel code: %x\n",
1627 fsize->pixel_format);
1630 fse.code = fmt->code;
1632 fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1633 fse.index = fsize->index;
1634 fse.pad = node->src_pad_id;
1636 ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
1640 unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
1641 __func__, fse.index, fse.code, fse.min_width, fse.max_width,
1642 fse.min_height, fse.max_height);
1644 fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1645 fsize->discrete.width = fse.max_width;
1646 fsize->discrete.height = fse.max_height;
/*
 * VIDIOC_ENUM_FRAMEINTERVALS: validate the fourcc, then proxy the request
 * to the sensor's enum_frame_interval on its source pad and return the
 * result as a DISCRETE interval.
 */
1651 static int unicam_enum_frameintervals(struct file *file, void *priv,
1652 struct v4l2_frmivalenum *fival)
1654 struct unicam_node *node = video_drvdata(file);
1655 struct unicam_device *dev = node->dev;
1656 const struct unicam_fmt *fmt;
1657 struct v4l2_subdev_frame_interval_enum fie = {
1658 .index = fival->index,
1659 .pad = node->src_pad_id,
1660 .width = fival->width,
1661 .height = fival->height,
1662 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1666 fmt = find_format_by_pix(dev, fival->pixel_format);
1670 fie.code = fmt->code;
1671 ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
1676 fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1677 fival->discrete = fie.interval;
1682 static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1684 struct unicam_node *node = video_drvdata(file);
1685 struct unicam_device *dev = node->dev;
1687 return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
1690 static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1692 struct unicam_node *node = video_drvdata(file);
1693 struct unicam_device *dev = node->dev;
1695 return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
1698 static int unicam_g_dv_timings(struct file *file, void *priv,
1699 struct v4l2_dv_timings *timings)
1701 struct unicam_node *node = video_drvdata(file);
1702 struct unicam_device *dev = node->dev;
1704 return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
1707 static int unicam_s_dv_timings(struct file *file, void *priv,
1708 struct v4l2_dv_timings *timings)
1710 struct unicam_node *node = video_drvdata(file);
1711 struct unicam_device *dev = node->dev;
1712 struct v4l2_dv_timings current_timings;
1715 ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
1721 if (v4l2_match_dv_timings(timings, ¤t_timings, 0, false))
1724 if (vb2_is_busy(&node->buffer_queue))
1727 ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
1729 /* Force recomputation of bytesperline */
1730 node->v_fmt.fmt.pix.bytesperline = 0;
1732 unicam_reset_format(node);
1737 static int unicam_query_dv_timings(struct file *file, void *priv,
1738 struct v4l2_dv_timings *timings)
1740 struct unicam_node *node = video_drvdata(file);
1741 struct unicam_device *dev = node->dev;
1743 return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
1746 static int unicam_enum_dv_timings(struct file *file, void *priv,
1747 struct v4l2_enum_dv_timings *timings)
1749 struct unicam_node *node = video_drvdata(file);
1750 struct unicam_device *dev = node->dev;
1753 timings->pad = node->src_pad_id;
1754 ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
1755 timings->pad = node->pad_id;
1760 static int unicam_dv_timings_cap(struct file *file, void *priv,
1761 struct v4l2_dv_timings_cap *cap)
1763 struct unicam_node *node = video_drvdata(file);
1764 struct unicam_device *dev = node->dev;
1767 cap->pad = node->src_pad_id;
1768 ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
1769 cap->pad = node->pad_id;
1774 static int unicam_subscribe_event(struct v4l2_fh *fh,
1775 const struct v4l2_event_subscription *sub)
1777 switch (sub->type) {
1778 case V4L2_EVENT_FRAME_SYNC:
1779 return v4l2_event_subscribe(fh, sub, 2, NULL);
1780 case V4L2_EVENT_SOURCE_CHANGE:
1781 return v4l2_event_subscribe(fh, sub, 4, NULL);
1784 return v4l2_ctrl_subscribe_event(fh, sub);
1787 static void unicam_notify(struct v4l2_subdev *sd,
1788 unsigned int notification, void *arg)
1790 struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
1792 switch (notification) {
1793 case V4L2_DEVICE_NOTIFY_EVENT:
1794 v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
1801 /* unicam capture ioctl operations */
/* Legacy (non-Media-Controller) ioctl table: format negotiation is routed
 * through the sensor subdev. Note that s_fmt/try_fmt for metadata are
 * deliberately aliased to unicam_g_fmt_meta_cap - the metadata format is
 * fixed, so "set" and "try" simply return the current format. */
1802 static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
1803 .vidioc_querycap = unicam_querycap,
1804 .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
1805 .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
1806 .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
1807 .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
1809 .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
1810 .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
1811 .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
1812 .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
1814 .vidioc_enum_input = unicam_enum_input,
1815 .vidioc_g_input = unicam_g_input,
1816 .vidioc_s_input = unicam_s_input,
1818 .vidioc_querystd = unicam_querystd,
1819 .vidioc_s_std = unicam_s_std,
1820 .vidioc_g_std = unicam_g_std,
1822 .vidioc_g_edid = unicam_g_edid,
1823 .vidioc_s_edid = unicam_s_edid,
1825 .vidioc_enum_framesizes = unicam_enum_framesizes,
1826 .vidioc_enum_frameintervals = unicam_enum_frameintervals,
1828 .vidioc_g_selection = unicam_g_selection,
1829 .vidioc_s_selection = unicam_s_selection,
1831 .vidioc_g_parm = unicam_g_parm,
1832 .vidioc_s_parm = unicam_s_parm,
1834 .vidioc_s_dv_timings = unicam_s_dv_timings,
1835 .vidioc_g_dv_timings = unicam_g_dv_timings,
1836 .vidioc_query_dv_timings = unicam_query_dv_timings,
1837 .vidioc_enum_dv_timings = unicam_enum_dv_timings,
1838 .vidioc_dv_timings_cap = unicam_dv_timings_cap,
/* Buffer management is fully delegated to videobuf2. */
1840 .vidioc_reqbufs = vb2_ioctl_reqbufs,
1841 .vidioc_create_bufs = vb2_ioctl_create_bufs,
1842 .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1843 .vidioc_querybuf = vb2_ioctl_querybuf,
1844 .vidioc_qbuf = vb2_ioctl_qbuf,
1845 .vidioc_dqbuf = vb2_ioctl_dqbuf,
1846 .vidioc_expbuf = vb2_ioctl_expbuf,
1847 .vidioc_streamon = vb2_ioctl_streamon,
1848 .vidioc_streamoff = vb2_ioctl_streamoff,
1850 .vidioc_log_status = unicam_log_status,
1851 .vidioc_subscribe_event = unicam_subscribe_event,
1852 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1855 /* V4L2 Media Controller Centric IOCTLs */
/*
 * MC-mode VIDIOC_ENUM_FMT (video): walk the global formats[] table,
 * skipping MC-excluded and metadata entries, and count both the native
 * fourcc and the repacked fourcc of each entry as separate indices.
 * An optional mbus_code filter restricts the enumeration to one code.
 */
1857 static int unicam_mc_enum_fmt_vid_cap(struct file *file, void *priv,
1858 struct v4l2_fmtdesc *f)
1862 for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
1863 if (f->mbus_code && formats[i].code != f->mbus_code)
1865 if (formats[i].mc_skip || formats[i].metadata_fmt)
1868 if (formats[i].fourcc) {
1869 if (j == f->index) {
1870 f->pixelformat = formats[i].fourcc;
1871 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1876 if (formats[i].repacked_fourcc) {
1877 if (j == f->index) {
1878 f->pixelformat = formats[i].repacked_fourcc;
1879 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/*
 * MC-mode VIDIOC_G_FMT (video): valid only on the image node; returns the
 * node's stored format (extraction dropped the copy-out lines here; code
 * kept verbatim).
 */
1889 static int unicam_mc_g_fmt_vid_cap(struct file *file, void *priv,
1890 struct v4l2_format *f)
1892 struct unicam_node *node = video_drvdata(file);
1894 if (node->pad_id != IMAGE_PAD)
/*
 * Common MC-mode try-format helper: clamp the requested pix format to
 * something Unicam can produce (falling back to the first table entry),
 * compute bytesperline/sizeimage, normalise the field, and force a valid
 * colorspace with matching default xfer_func/ycbcr_enc/quantization.
 * If ret_fmt is non-NULL, the chosen table entry is reported back.
 */
1902 static void unicam_mc_try_fmt(struct unicam_node *node, struct v4l2_format *f,
1903 const struct unicam_fmt **ret_fmt)
1905 struct v4l2_pix_format *v4l2_format = &f->fmt.pix;
1906 struct unicam_device *dev = node->dev;
1907 const struct unicam_fmt *fmt;
1911 * Default to the first format if the requested pixel format code isn't
1914 fmt = find_format_by_pix(dev, v4l2_format->pixelformat);
1917 v4l2_format->pixelformat = fmt->fourcc;
1920 unicam_calc_format_size_bpl(dev, fmt, f);
1922 if (v4l2_format->field == V4L2_FIELD_ANY)
1923 v4l2_format->field = V4L2_FIELD_NONE;
/* Reject colorspaces the chosen format cannot carry; pick the lowest
 * valid one and derive the default transfer/encoding/quantization. */
1928 if (v4l2_format->colorspace >= MAX_COLORSPACE ||
1929 !(fmt->valid_colorspaces & (1 << v4l2_format->colorspace))) {
1930 v4l2_format->colorspace = __ffs(fmt->valid_colorspaces);
1932 v4l2_format->xfer_func =
1933 V4L2_MAP_XFER_FUNC_DEFAULT(v4l2_format->colorspace);
1934 v4l2_format->ycbcr_enc =
1935 V4L2_MAP_YCBCR_ENC_DEFAULT(v4l2_format->colorspace);
1936 is_rgb = v4l2_format->colorspace == V4L2_COLORSPACE_SRGB;
1937 v4l2_format->quantization =
1938 V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,
1939 v4l2_format->colorspace,
1940 v4l2_format->ycbcr_enc);
1943 unicam_dbg(3, dev, "%s: %08x %ux%u (bytesperline %u sizeimage %u)\n",
1944 __func__, v4l2_format->pixelformat,
1945 v4l2_format->width, v4l2_format->height,
1946 v4l2_format->bytesperline, v4l2_format->sizeimage);
1949 static int unicam_mc_try_fmt_vid_cap(struct file *file, void *priv,
1950 struct v4l2_format *f)
1952 struct unicam_node *node = video_drvdata(file);
1954 unicam_mc_try_fmt(node, f, NULL);
/*
 * MC-mode VIDIOC_S_FMT (video): refuse while the queue is busy, run the
 * common try-fmt to sanitise the request, then commit it to the node
 * (extraction dropped the commit lines here; code kept verbatim).
 */
1958 static int unicam_mc_s_fmt_vid_cap(struct file *file, void *priv,
1959 struct v4l2_format *f)
1961 struct unicam_node *node = video_drvdata(file);
1962 struct unicam_device *dev = node->dev;
1963 const struct unicam_fmt *fmt;
1965 if (vb2_is_busy(&node->buffer_queue)) {
1966 unicam_dbg(3, dev, "%s device busy\n", __func__);
1970 unicam_mc_try_fmt(node, f, &fmt);
1978 static int unicam_mc_enum_framesizes(struct file *file, void *fh,
1979 struct v4l2_frmsizeenum *fsize)
1981 struct unicam_node *node = video_drvdata(file);
1982 struct unicam_device *dev = node->dev;
1984 if (fsize->index > 0)
1987 if (!find_format_by_pix(dev, fsize->pixel_format)) {
1988 unicam_dbg(3, dev, "Invalid pixel format 0x%08x\n",
1989 fsize->pixel_format);
1993 fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
1994 fsize->stepwise.min_width = MIN_WIDTH;
1995 fsize->stepwise.max_width = MAX_WIDTH;
1996 fsize->stepwise.step_width = 1;
1997 fsize->stepwise.min_height = MIN_HEIGHT;
1998 fsize->stepwise.max_height = MAX_HEIGHT;
1999 fsize->stepwise.step_height = 1;
/*
 * MC-mode VIDIOC_ENUM_FMT (metadata): enumerate only table entries that
 * are flagged as metadata formats, optionally filtered by mbus code.
 */
2004 static int unicam_mc_enum_fmt_meta_cap(struct file *file, void *priv,
2005 struct v4l2_fmtdesc *f)
2009 for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
2010 if (f->mbus_code && formats[i].code != f->mbus_code)
2012 if (!formats[i].metadata_fmt)
2015 if (formats[i].fourcc) {
2016 if (j == f->index) {
2017 f->pixelformat = formats[i].fourcc;
2018 f->type = V4L2_BUF_TYPE_META_CAPTURE;
/*
 * MC-mode VIDIOC_G_FMT (metadata): valid only on the metadata node;
 * returns the node's stored format (extraction dropped the copy-out
 * lines here; code kept verbatim).
 */
2028 static int unicam_mc_g_fmt_meta_cap(struct file *file, void *priv,
2029 struct v4l2_format *f)
2031 struct unicam_node *node = video_drvdata(file);
2033 if (node->pad_id != METADATA_PAD)
2041 static int unicam_mc_try_fmt_meta_cap(struct file *file, void *priv,
2042 struct v4l2_format *f)
2044 struct unicam_node *node = video_drvdata(file);
2046 if (node->pad_id != METADATA_PAD)
2049 f->fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
/*
 * MC-mode VIDIOC_S_FMT (metadata): sanitise via try_fmt and commit to the
 * node (extraction dropped the commit lines here; code kept verbatim).
 */
2054 static int unicam_mc_s_fmt_meta_cap(struct file *file, void *priv,
2055 struct v4l2_format *f)
2057 struct unicam_node *node = video_drvdata(file);
2059 if (node->pad_id != METADATA_PAD)
2062 unicam_mc_try_fmt_meta_cap(file, priv, f);
/* Media-Controller-centric ioctl table: format negotiation happens on the
 * media graph, so no std/dv-timings/input/selection ops appear here and
 * frame sizes are reported as a stepwise hardware range. */
2069 static const struct v4l2_ioctl_ops unicam_mc_ioctl_ops = {
2070 .vidioc_querycap = unicam_querycap,
2071 .vidioc_enum_fmt_vid_cap = unicam_mc_enum_fmt_vid_cap,
2072 .vidioc_g_fmt_vid_cap = unicam_mc_g_fmt_vid_cap,
2073 .vidioc_try_fmt_vid_cap = unicam_mc_try_fmt_vid_cap,
2074 .vidioc_s_fmt_vid_cap = unicam_mc_s_fmt_vid_cap,
2076 .vidioc_enum_fmt_meta_cap = unicam_mc_enum_fmt_meta_cap,
2077 .vidioc_g_fmt_meta_cap = unicam_mc_g_fmt_meta_cap,
2078 .vidioc_try_fmt_meta_cap = unicam_mc_try_fmt_meta_cap,
2079 .vidioc_s_fmt_meta_cap = unicam_mc_s_fmt_meta_cap,
2081 .vidioc_enum_framesizes = unicam_mc_enum_framesizes,
/* Buffer management is fully delegated to videobuf2. */
2082 .vidioc_reqbufs = vb2_ioctl_reqbufs,
2083 .vidioc_create_bufs = vb2_ioctl_create_bufs,
2084 .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
2085 .vidioc_querybuf = vb2_ioctl_querybuf,
2086 .vidioc_qbuf = vb2_ioctl_qbuf,
2087 .vidioc_dqbuf = vb2_ioctl_dqbuf,
2088 .vidioc_expbuf = vb2_ioctl_expbuf,
2089 .vidioc_streamon = vb2_ioctl_streamon,
2090 .vidioc_streamoff = vb2_ioctl_streamoff,
2092 .vidioc_log_status = unicam_log_status,
2093 .vidioc_subscribe_event = unicam_subscribe_event,
2094 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
2098 unicam_mc_subdev_link_validate_get_format(struct media_pad *pad,
2099 struct v4l2_subdev_format *fmt)
2101 if (is_media_entity_v4l2_subdev(pad->entity)) {
2102 struct v4l2_subdev *sd =
2103 media_entity_to_v4l2_subdev(pad->entity);
2105 fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
2106 fmt->pad = pad->index;
2107 return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
/*
 * media_entity link_validate for the video nodes: checks that the format
 * configured on the connected source subdev pad matches the node's own
 * format. For the image node the width/height and (native or repacked)
 * fourcc must agree; for the metadata node the remote pad must carry
 * MEDIA_BUS_FMT_SENSOR_DATA with width == buffersize and height == 1.
 */
2113 static int unicam_mc_video_link_validate(struct media_link *link)
2115 struct video_device *vd = container_of(link->sink->entity,
2116 struct video_device, entity);
2117 struct unicam_node *node = container_of(vd, struct unicam_node,
2119 struct unicam_device *unicam = node->dev;
2120 struct v4l2_subdev_format source_fmt;
/* The sink must have exactly one connected remote source pad. */
2123 if (!media_entity_remote_source_pad_unique(link->sink->entity)) {
2124 unicam_dbg(1, unicam,
2125 "video node %s pad not connected\n", vd->name);
2129 ret = unicam_mc_subdev_link_validate_get_format(link->source,
2134 if (node->pad_id == IMAGE_PAD) {
2135 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2136 const struct unicam_fmt *fmt;
2138 if (source_fmt.format.width != pix_fmt->width ||
2139 source_fmt.format.height != pix_fmt->height) {
2141 "Wrong width or height %ux%u (remote pad set to %ux%u)\n",
2142 pix_fmt->width, pix_fmt->height,
2143 source_fmt.format.width,
2144 source_fmt.format.height);
2148 fmt = find_format_by_code(source_fmt.format.code);
2150 if (!fmt || (fmt->fourcc != pix_fmt->pixelformat &&
2151 fmt->repacked_fourcc != pix_fmt->pixelformat))
2154 struct v4l2_meta_format *meta_fmt = &node->v_fmt.fmt.meta;
2156 if (source_fmt.format.width != meta_fmt->buffersize ||
2157 source_fmt.format.height != 1 ||
2158 source_fmt.format.code != MEDIA_BUS_FMT_SENSOR_DATA) {
2160 "Wrong metadata width/height/code %ux%u %08x (remote pad set to %ux%u %08x)\n",
2161 meta_fmt->buffersize, 1,
2162 MEDIA_BUS_FMT_SENSOR_DATA,
2163 source_fmt.format.width,
2164 source_fmt.format.height,
2165 source_fmt.format.code);
/* Media entity ops for the video nodes: link validation only. */
2173 static const struct media_entity_operations unicam_mc_entity_ops = {
2174 .link_validate = unicam_mc_video_link_validate,
2177 /* videobuf2 Operations */
/*
 * vb2 queue_setup: size each plane from the node's current format (image
 * sizeimage or metadata buffersize) and require at least 3 buffers in
 * total so capture can ping-pong while userspace holds one.
 */
2179 static int unicam_queue_setup(struct vb2_queue *vq,
2180 unsigned int *nbuffers,
2181 unsigned int *nplanes,
2182 unsigned int sizes[],
2183 struct device *alloc_devs[])
2185 struct unicam_node *node = vb2_get_drv_priv(vq);
2186 struct unicam_device *dev = node->dev;
2187 unsigned int size = node->pad_id == IMAGE_PAD ?
2188 node->v_fmt.fmt.pix.sizeimage :
2189 node->v_fmt.fmt.meta.buffersize;
2191 if (vq->num_buffers + *nbuffers < 3)
2192 *nbuffers = 3 - vq->num_buffers;
/* Reject pre-sized planes that are too small for the current format. */
2195 if (sizes[0] < size) {
2196 unicam_err(dev, "sizes[0] %i < size %u\n", sizes[0],
/*
 * vb2 buf_prepare: verify a format has been negotiated, check the plane
 * is large enough for the current image/metadata size, and set the
 * payload accordingly.
 */
2209 static int unicam_buffer_prepare(struct vb2_buffer *vb)
2211 struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2212 struct unicam_device *dev = node->dev;
2213 struct unicam_buffer *buf = to_unicam_buffer(vb);
2216 if (WARN_ON(!node->fmt))
2219 size = node->pad_id == IMAGE_PAD ? node->v_fmt.fmt.pix.sizeimage :
2220 node->v_fmt.fmt.meta.buffersize;
2221 if (vb2_plane_size(vb, 0) < size) {
2222 unicam_err(dev, "data will not fit into plane (%lu < %lu)\n",
2223 vb2_plane_size(vb, 0), size);
2227 vb2_set_plane_payload(&buf->vb.vb2_buf, 0, size);
2231 static void unicam_buffer_queue(struct vb2_buffer *vb)
2233 struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2234 struct unicam_buffer *buf = to_unicam_buffer(vb);
2235 unsigned long flags;
2237 spin_lock_irqsave(&node->dma_queue_lock, flags);
2238 list_add_tail(&buf->list, &node->dma_queue);
2239 spin_unlock_irqrestore(&node->dma_queue_lock, flags);
/*
 * Program the pixel-pipeline (un)packing. If the node's pixelformat is
 * the format's native fourcc no repacking is needed; otherwise unpack
 * according to the sample depth and always repack to 16bpp, then write
 * the result to UNICAM_IPIPE.
 */
2242 static void unicam_set_packing_config(struct unicam_device *dev)
2247 if (dev->node[IMAGE_PAD].v_fmt.fmt.pix.pixelformat ==
2248 dev->node[IMAGE_PAD].fmt->fourcc) {
2249 unpack = UNICAM_PUM_NONE;
2250 pack = UNICAM_PPM_NONE;
2252 switch (dev->node[IMAGE_PAD].fmt->depth) {
2254 unpack = UNICAM_PUM_UNPACK8;
2257 unpack = UNICAM_PUM_UNPACK10;
2260 unpack = UNICAM_PUM_UNPACK12;
2263 unpack = UNICAM_PUM_UNPACK14;
2266 unpack = UNICAM_PUM_UNPACK16;
2269 unpack = UNICAM_PUM_NONE;
2273 /* Repacking is always to 16bpp */
2274 pack = UNICAM_PPM_PACK16;
2278 set_field(&val, unpack, UNICAM_PUM_MASK);
2279 set_field(&val, pack, UNICAM_PPM_MASK);
2280 reg_write(dev, UNICAM_IPIPE, val);
2283 static void unicam_cfg_image_id(struct unicam_device *dev)
2285 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2286 /* CSI2 mode, hardcode VC 0 for now. */
2287 reg_write(dev, UNICAM_IDI0,
2288 (0 << 6) | dev->node[IMAGE_PAD].fmt->csi_dt);
2291 reg_write(dev, UNICAM_IDI0,
2292 0x80 | dev->node[IMAGE_PAD].fmt->csi_dt);
2296 static void unicam_enable_ed(struct unicam_device *dev)
2298 u32 val = reg_read(dev, UNICAM_DCS);
2300 set_field(&val, 2, UNICAM_EDL_MASK);
2301 /* Do not wrap at the end of the embedded data buffer */
2302 set_field(&val, 0, UNICAM_DBOB);
2304 reg_write(dev, UNICAM_DCS, val);
/*
 * Bring the Unicam receiver out of reset and start capture into the
 * buffers at *addr (indexed by pad). Sequence: enable lane clocks, reset
 * the analogue front end and peripheral, configure CSI-2/CCP2 Rx mode,
 * AXI QoS, interrupts (frame start/end + a line interrupt every quarter
 * frame), lane terminations, DMA addresses, packing and image ID, then
 * enable the peripheral and trigger the image (and optionally embedded
 * data) pointer loads.
 */
2307 static void unicam_start_rx(struct unicam_device *dev, dma_addr_t *addr)
/* Line interrupt every quarter frame, but no more often than 128 lines. */
2309 int line_int_freq = dev->node[IMAGE_PAD].v_fmt.fmt.pix.height >> 2;
2310 unsigned int size, i;
2313 if (line_int_freq < 128)
2314 line_int_freq = 128;
2316 /* Enable lane clocks */
2318 for (i = 0; i < dev->active_data_lanes; i++)
2320 clk_write(dev, val);
2323 reg_write(dev, UNICAM_CTRL, UNICAM_MEM);
2325 /* Enable analogue control, and leave in reset. */
2327 set_field(&val, 7, UNICAM_CTATADJ_MASK);
2328 set_field(&val, 7, UNICAM_PTATADJ_MASK);
2329 reg_write(dev, UNICAM_ANA, val);
2330 usleep_range(1000, 2000);
2332 /* Come out of reset */
2333 reg_write_field(dev, UNICAM_ANA, 0, UNICAM_AR);
2335 /* Peripheral reset */
2336 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2337 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2339 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2341 /* Enable Rx control. */
2342 val = reg_read(dev, UNICAM_CTRL);
2343 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2344 set_field(&val, UNICAM_CPM_CSI2, UNICAM_CPM_MASK);
2345 set_field(&val, UNICAM_DCM_STROBE, UNICAM_DCM_MASK);
2347 set_field(&val, UNICAM_CPM_CCP2, UNICAM_CPM_MASK);
2348 set_field(&val, dev->bus_flags, UNICAM_DCM_MASK);
2350 /* Packet framer timeout */
2351 set_field(&val, 0xf, UNICAM_PFT_MASK);
2352 set_field(&val, 128, UNICAM_OET_MASK);
2353 reg_write(dev, UNICAM_CTRL, val);
/* No horizontal/vertical windowing. */
2355 reg_write(dev, UNICAM_IHWIN, 0);
2356 reg_write(dev, UNICAM_IVWIN, 0);
2358 /* AXI bus access QoS setup */
2359 val = reg_read(dev, UNICAM_PRI);
2360 set_field(&val, 0, UNICAM_BL_MASK);
2361 set_field(&val, 0, UNICAM_BS_MASK);
2362 set_field(&val, 0xe, UNICAM_PP_MASK);
2363 set_field(&val, 8, UNICAM_NP_MASK);
2364 set_field(&val, 2, UNICAM_PT_MASK);
2365 set_field(&val, 1, UNICAM_PE);
2366 reg_write(dev, UNICAM_PRI, val);
2368 reg_write_field(dev, UNICAM_ANA, 0, UNICAM_DDL);
/* Interrupts: frame start, frame end, output-buffer overrun, plus the
 * periodic line interrupt; clear any stale status first. */
2370 val = UNICAM_FSIE | UNICAM_FEIE | UNICAM_IBOB;
2371 set_field(&val, line_int_freq, UNICAM_LCIE_MASK);
2372 reg_write(dev, UNICAM_ICTL, val);
2373 reg_write(dev, UNICAM_STA, UNICAM_STA_MASK_ALL);
2374 reg_write(dev, UNICAM_ISTA, UNICAM_ISTA_MASK_ALL);
/* Clock and data lane timing parameters. */
2377 reg_write_field(dev, UNICAM_CLT, 2, UNICAM_CLT1_MASK);
2379 reg_write_field(dev, UNICAM_CLT, 6, UNICAM_CLT2_MASK);
2381 reg_write_field(dev, UNICAM_DLT, 2, UNICAM_DLT1_MASK);
2383 reg_write_field(dev, UNICAM_DLT, 6, UNICAM_DLT2_MASK);
2385 reg_write_field(dev, UNICAM_DLT, 0, UNICAM_DLT3_MASK);
2387 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_SOE);
2389 /* Packet compare setup - required to avoid missing frame ends */
2391 set_field(&val, 1, UNICAM_PCE);
2392 set_field(&val, 1, UNICAM_GI);
2393 set_field(&val, 1, UNICAM_CPH);
2394 set_field(&val, 0, UNICAM_PCVC_MASK);
2395 set_field(&val, 1, UNICAM_PCDT_MASK);
2396 reg_write(dev, UNICAM_CMP0, val);
2398 /* Enable clock lane and set up terminations */
2400 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2402 set_field(&val, 1, UNICAM_CLE);
2403 set_field(&val, 1, UNICAM_CLLPE);
2404 if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
2405 set_field(&val, 1, UNICAM_CLTRE);
2406 set_field(&val, 1, UNICAM_CLHSE);
2410 set_field(&val, 1, UNICAM_CLE);
2411 set_field(&val, 1, UNICAM_CLHSE);
2412 set_field(&val, 1, UNICAM_CLTRE);
2414 reg_write(dev, UNICAM_CLK, val);
2417 * Enable required data lanes with appropriate terminations.
2418 * The same value needs to be written to UNICAM_DATn registers for
2419 * the active lanes, and 0 for inactive ones.
2422 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2424 set_field(&val, 1, UNICAM_DLE);
2425 set_field(&val, 1, UNICAM_DLLPE);
2426 if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
2427 set_field(&val, 1, UNICAM_DLTRE);
2428 set_field(&val, 1, UNICAM_DLHSE);
2432 set_field(&val, 1, UNICAM_DLE);
2433 set_field(&val, 1, UNICAM_DLHSE);
2434 set_field(&val, 1, UNICAM_DLTRE);
2436 reg_write(dev, UNICAM_DAT0, val);
2438 if (dev->active_data_lanes == 1)
2440 reg_write(dev, UNICAM_DAT1, val);
2442 if (dev->max_data_lanes > 2) {
2444 * Registers UNICAM_DAT2 and UNICAM_DAT3 only valid if the
2445 * instance supports more than 2 data lanes.
2447 if (dev->active_data_lanes == 2)
2449 reg_write(dev, UNICAM_DAT2, val);
2451 if (dev->active_data_lanes == 3)
2453 reg_write(dev, UNICAM_DAT3, val);
/* Program stride, image DMA destination, packing and the image ID. */
2456 reg_write(dev, UNICAM_IBLS,
2457 dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline);
2458 size = dev->node[IMAGE_PAD].v_fmt.fmt.pix.sizeimage;
2459 unicam_wr_dma_addr(dev, addr[IMAGE_PAD], size, IMAGE_PAD);
2460 unicam_set_packing_config(dev);
2461 unicam_cfg_image_id(dev);
2463 val = reg_read(dev, UNICAM_MISC);
2464 set_field(&val, 1, UNICAM_FL0);
2465 set_field(&val, 1, UNICAM_FL1);
2466 reg_write(dev, UNICAM_MISC, val);
/* Embedded-data buffer only when the metadata node is streaming. */
2468 if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data) {
2469 size = dev->node[METADATA_PAD].v_fmt.fmt.meta.buffersize;
2470 unicam_enable_ed(dev);
2471 unicam_wr_dma_addr(dev, addr[METADATA_PAD], size, METADATA_PAD);
2474 /* Enable peripheral */
2475 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPE);
2477 /* Load image pointers */
2478 reg_write_field(dev, UNICAM_ICTL, 1, UNICAM_LIP_MASK);
2480 /* Load embedded data buffer pointers if needed */
2481 if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data)
2482 reg_write_field(dev, UNICAM_DCS, 1, UNICAM_LDP);
/*
 * Stop the receiver: disable the analogue lanes and output engine, zero
 * the data-lane enables, pulse the peripheral reset, disable the
 * peripheral, and clear the embedded-data setup and lane clocks.
 */
2485 static void unicam_disable(struct unicam_device *dev)
2487 /* Analogue lane control disable */
2488 reg_write_field(dev, UNICAM_ANA, 1, UNICAM_DDL);
2490 /* Stop the output engine */
2491 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_SOE);
2493 /* Disable the data lanes. */
2494 reg_write(dev, UNICAM_DAT0, 0);
2495 reg_write(dev, UNICAM_DAT1, 0);
/* DAT2/DAT3 exist only on instances with more than 2 lanes. */
2497 if (dev->max_data_lanes > 2) {
2498 reg_write(dev, UNICAM_DAT2, 0);
2499 reg_write(dev, UNICAM_DAT3, 0);
2502 /* Peripheral reset */
2503 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2504 usleep_range(50, 100);
2505 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2507 /* Disable peripheral */
2508 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2510 /* Clear ED setup */
2511 reg_write(dev, UNICAM_DCS, 0);
2513 /* Disable all lane clocks */
/*
 * Return every queued buffer to vb2 in the given state (used on stream
 * stop/error). Also completes the in-flight cur_frm/next_frm buffers
 * (next_frm only if distinct from cur_frm) and clears both pointers,
 * all under the DMA queue lock.
 */
2517 static void unicam_return_buffers(struct unicam_node *node,
2518 enum vb2_buffer_state state)
2520 struct unicam_buffer *buf, *tmp;
2521 unsigned long flags;
2523 spin_lock_irqsave(&node->dma_queue_lock, flags);
2524 list_for_each_entry_safe(buf, tmp, &node->dma_queue, list) {
2525 list_del(&buf->list);
2526 vb2_buffer_done(&buf->vb.vb2_buf, state);
2530 vb2_buffer_done(&node->cur_frm->vb.vb2_buf,
2532 if (node->next_frm && node->cur_frm != node->next_frm)
2533 vb2_buffer_done(&node->next_frm->vb.vb2_buf,
2536 node->cur_frm = NULL;
2537 node->next_frm = NULL;
2538 spin_unlock_irqrestore(&node->dma_queue_lock, flags);
/*
 * unicam_start_streaming() - vb2 start_streaming callback.
 *
 * Marks this node streaming, but only brings the hardware up once all
 * required nodes are ready (image pad streaming, and metadata pad streaming
 * if it is open). It then: takes a runtime-PM reference, starts the media
 * pipeline on the image node, queries the sensor's active lane count via
 * get_mbus_config (CSI-2 D-PHY only, falling back to the DT maximum),
 * raises the VPU clock floor and enables the VPU and CSI clocks, dequeues
 * the first buffer on each streaming node as cur_frm/next_frm, programs the
 * receiver (unicam_start_rx), and finally starts the sensor with s_stream.
 * On any failure the error path below unwinds in reverse order and returns
 * buffers to vb2 in the QUEUED state.
 *
 * NOTE(review): this excerpt is missing lines - the declarations of ret/i,
 * the error labels (error_pipeline:, err_disable_unicam:, etc.), several
 * closing braces, and the assignment of buffer_addr[i] around line 2641.
 * Confirm the exact unwind ordering against the full source.
 */
2541 static int unicam_start_streaming(struct vb2_queue *vq, unsigned int count)
2543 struct unicam_node *node = vb2_get_drv_priv(vq);
2544 struct unicam_device *dev = node->dev;
2545 dma_addr_t buffer_addr[MAX_NODES] = { 0 };
2546 unsigned long flags;
2550 node->streaming = true;
/*
 * Defer hardware start until every node that must stream is streaming:
 * image pad always, metadata pad only if it has been opened.
 */
2551 if (!(dev->node[IMAGE_PAD].open && dev->node[IMAGE_PAD].streaming &&
2552 (!dev->node[METADATA_PAD].open ||
2553 dev->node[METADATA_PAD].streaming))) {
2555 * Metadata pad must be enabled before image pad if it is
2558 unicam_dbg(3, dev, "Not all nodes are streaming yet.");
2563 ret = unicam_runtime_get(dev);
2565 unicam_dbg(3, dev, "unicam_runtime_get failed\n");
2569 ret = media_pipeline_start(dev->node[IMAGE_PAD].video_dev.entity.pads,
2570 &dev->node[IMAGE_PAD].pipe);
2572 unicam_err(dev, "Failed to start media pipeline: %d\n", ret);
/* Default to the DT-configured lane count; the sensor may narrow it. */
2576 dev->active_data_lanes = dev->max_data_lanes;
2578 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2579 struct v4l2_mbus_config mbus_config = { 0 };
2581 ret = v4l2_subdev_call(dev->sensor, pad, get_mbus_config,
/* -ENOIOCTLCMD means the sensor doesn't implement the op - not fatal. */
2583 if (ret < 0 && ret != -ENOIOCTLCMD) {
2584 unicam_dbg(3, dev, "g_mbus_config failed\n");
2585 goto error_pipeline;
2588 dev->active_data_lanes = mbus_config.bus.mipi_csi2.num_data_lanes;
2589 if (!dev->active_data_lanes)
2590 dev->active_data_lanes = dev->max_data_lanes;
2591 if (dev->active_data_lanes > dev->max_data_lanes) {
2592 unicam_err(dev, "Device has requested %u data lanes, which is >%u configured in DT\n",
2593 dev->active_data_lanes,
2594 dev->max_data_lanes);
2596 goto error_pipeline;
2600 unicam_dbg(1, dev, "Running with %u data lanes\n",
2601 dev->active_data_lanes);
/* Raise the VPU clock floor so the peripheral can keep up with the data. */
2603 ret = clk_set_min_rate(dev->vpu_clock, MIN_VPU_CLOCK_RATE);
2605 unicam_err(dev, "failed to set up VPU clock\n");
2606 goto error_pipeline;
2609 ret = clk_prepare_enable(dev->vpu_clock);
2611 unicam_err(dev, "Failed to enable VPU clock: %d\n", ret);
2612 goto error_pipeline;
/* 100 MHz CSI clock - fixed rate requested here. */
2615 ret = clk_set_rate(dev->clock, 100 * 1000 * 1000);
2617 unicam_err(dev, "failed to set up CSI clock\n");
2621 ret = clk_prepare_enable(dev->clock);
2623 unicam_err(dev, "Failed to enable CSI clock: %d\n", ret);
/* Prime each streaming node with its first queued buffer. */
2627 for (i = 0; i < ARRAY_SIZE(dev->node); i++) {
2628 struct unicam_buffer *buf;
2630 if (!dev->node[i].streaming)
2633 spin_lock_irqsave(&dev->node[i].dma_queue_lock, flags);
2634 buf = list_first_entry(&dev->node[i].dma_queue,
2635 struct unicam_buffer, list);
2636 dev->node[i].cur_frm = buf;
2637 dev->node[i].next_frm = buf;
2638 list_del(&buf->list);
2639 spin_unlock_irqrestore(&dev->node[i].dma_queue_lock, flags);
2642 vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
2645 dev->frame_started = false;
2646 unicam_start_rx(dev, buffer_addr);
/* Receiver is armed; now start the sensor. */
2648 ret = v4l2_subdev_call(dev->sensor, video, s_stream, 1);
2650 unicam_err(dev, "stream on failed in subdev\n");
2651 goto err_disable_unicam;
2654 dev->clocks_enabled = true;
/* Error unwind: reverse order of the setup above. */
2658 unicam_disable(dev);
2659 clk_disable_unprepare(dev->clock);
2661 if (clk_set_min_rate(dev->vpu_clock, 0))
2662 unicam_err(dev, "failed to reset the VPU clock\n");
2663 clk_disable_unprepare(dev->vpu_clock);
2665 if (node->pad_id == IMAGE_PAD)
2666 media_pipeline_stop(dev->node[IMAGE_PAD].video_dev.entity.pads);
2668 unicam_runtime_put(dev);
/* Streaming never started, so buffers go back as QUEUED, not ERROR. */
2670 unicam_return_buffers(node, VB2_BUF_STATE_QUEUED);
2671 node->streaming = false;
/*
 * unicam_stop_streaming() - vb2 stop_streaming callback.
 *
 * For the image pad: stops the sensor (s_stream 0), disables the Unicam
 * peripheral, stops the media pipeline, drops the clock floor and disables
 * both clocks (guarded by clocks_enabled so a failed start is not unwound
 * twice), and releases the runtime-PM reference. For the metadata pad:
 * redirects embedded-data DMA into the node's dummy buffer so the hardware
 * can keep writing harmlessly while the image pad continues. Finally all
 * of this node's buffers are returned to vb2 in the ERROR state.
 *
 * NOTE(review): excerpt is missing closing braces and the pad-id argument
 * of the unicam_wr_dma_addr() call (around line 2713) - confirm against
 * the full source.
 */
2676 static void unicam_stop_streaming(struct vb2_queue *vq)
2678 struct unicam_node *node = vb2_get_drv_priv(vq);
2679 struct unicam_device *dev = node->dev;
2681 node->streaming = false;
2683 if (node->pad_id == IMAGE_PAD) {
2685 * Stop streaming the sensor and disable the peripheral.
2686 * We cannot continue streaming embedded data with the
2687 * image pad disabled.
/* Sensor stop failure is logged but does not abort the teardown. */
2689 if (v4l2_subdev_call(dev->sensor, video, s_stream, 0) < 0)
2690 unicam_err(dev, "stream off failed in subdev\n");
2692 unicam_disable(dev);
2694 media_pipeline_stop(node->video_dev.entity.pads);
2696 if (dev->clocks_enabled) {
2697 if (clk_set_min_rate(dev->vpu_clock, 0))
2698 unicam_err(dev, "failed to reset the min VPU clock\n");
2700 clk_disable_unprepare(dev->vpu_clock);
2701 clk_disable_unprepare(dev->clock);
2702 dev->clocks_enabled = false;
2704 unicam_runtime_put(dev);
2706 } else if (node->pad_id == METADATA_PAD) {
2708 * Allow the hardware to spin in the dummy buffer.
2709 * This is only really needed if the embedded data pad is
2710 * disabled before the image pad.
2712 unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr, 0,
2716 /* Clear all queued buffers for the node */
2717 unicam_return_buffers(node, VB2_BUF_STATE_ERROR);
/*
 * videobuf2 queue operations for both the image and metadata nodes.
 * Standard vb2 wait helpers are used for queue-lock handling.
 * NOTE(review): the closing "};" of this initializer is missing from the
 * excerpt - confirm against the full source.
 */
2721 static const struct vb2_ops unicam_video_qops = {
2722 .wait_prepare = vb2_ops_wait_prepare,
2723 .wait_finish = vb2_ops_wait_finish,
2724 .queue_setup = unicam_queue_setup,
2725 .buf_prepare = unicam_buffer_prepare,
2726 .buf_queue = unicam_buffer_queue,
2727 .start_streaming = unicam_start_streaming,
2728 .stop_streaming = unicam_stop_streaming,
2732 * unicam_v4l2_open : This function is based on the v4l2_fh_open helper
2733 * function. It has been augmented to handle sensor subdevice power management,
/*
 * Under the node lock: opens a v4l2 file handle, and only for the first
 * open of this node (v4l2_fh_is_singular_file) powers up the sensor via
 * the s_power core op. -ENOIOCTLCMD from s_power is tolerated (sensor has
 * no power op); any other failure releases the just-opened handle.
 * NOTE(review): excerpt is missing lines (ret declaration, error labels,
 * return) - confirm the unlock/return path against the full source.
 */
2735 static int unicam_v4l2_open(struct file *file)
2737 struct unicam_node *node = video_drvdata(file);
2738 struct unicam_device *dev = node->dev;
2741 mutex_lock(&node->lock);
2743 ret = v4l2_fh_open(file);
2745 unicam_err(dev, "v4l2_fh_open failed\n");
/* Not the first handle on this node: sensor is already powered. */
2751 if (!v4l2_fh_is_singular_file(file))
2754 ret = v4l2_subdev_call(dev->sensor, core, s_power, 1);
2755 if (ret < 0 && ret != -ENOIOCTLCMD) {
2756 v4l2_fh_release(file);
2764 mutex_unlock(&node->lock);
/*
 * unicam_v4l2_release() - file release; mirror of unicam_v4l2_open.
 *
 * Records whether this is the last handle (fh_singular) BEFORE releasing
 * it, performs the vb2 release, then powers the sensor down with s_power(0)
 * - the excerpt shows the call but the singular-handle guard around it is
 * among the missing lines (2781-2782); confirm against the full source.
 */
2768 static int unicam_v4l2_release(struct file *file)
2770 struct unicam_node *node = video_drvdata(file);
2771 struct unicam_device *dev = node->dev;
2772 struct v4l2_subdev *sd = dev->sensor;
2776 mutex_lock(&node->lock);
/* Must be sampled before _vb2_fop_release() destroys the handle. */
2778 fh_singular = v4l2_fh_is_singular_file(file);
2780 ret = _vb2_fop_release(file, NULL);
2783 v4l2_subdev_call(sd, core, s_power, 0);
2786 mutex_unlock(&node->lock);
2791 /* unicam capture driver file operations */
/*
 * Standard vb2-backed V4L2 file ops; only open/release are custom (they
 * add sensor s_power handling). NOTE(review): the closing "};" is missing
 * from this excerpt.
 */
2792 static const struct v4l2_file_operations unicam_fops = {
2793 .owner = THIS_MODULE,
2794 .open = unicam_v4l2_open,
2795 .release = unicam_v4l2_release,
2796 .read = vb2_fop_read,
2797 .poll = vb2_fop_poll,
2798 .unlocked_ioctl = video_ioctl2,
2799 .mmap = vb2_fop_mmap,
/*
 * unicam_async_bound() - async notifier .bound callback.
 *
 * Accepts exactly one sensor subdevice: if one is already bound the new
 * subdev is rejected, otherwise it is stored as unicam->sensor.
 * NOTE(review): the return statements and the function's return-type line
 * are missing from this excerpt - confirm against the full source.
 */
2803 unicam_async_bound(struct v4l2_async_notifier *notifier,
2804 struct v4l2_subdev *subdev,
2805 struct v4l2_async_subdev *asd)
2807 struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
/* Only a single sensor per Unicam instance is supported. */
2809 if (unicam->sensor) {
2810 unicam_info(unicam, "Rejecting subdev %s (Already set!!)",
2815 unicam->sensor = subdev;
2816 unicam_dbg(1, unicam, "Using sensor %s for capture\n", subdev->name);
/*
 * unicam_release() - kref release callback; final teardown of the device.
 *
 * Frees the control handler, cleans up the media device, and frees the
 * sensor subdev state if one was allocated. NOTE(review): the trailing
 * lines (likely kfree of the unicam struct) are missing from this excerpt.
 */
2821 static void unicam_release(struct kref *kref)
2823 struct unicam_device *unicam =
2824 container_of(kref, struct unicam_device, kref);
2826 v4l2_ctrl_handler_free(&unicam->ctrl_handler);
2827 media_device_cleanup(&unicam->mdev);
2829 if (unicam->sensor_state)
2830 __v4l2_subdev_state_free(unicam->sensor_state);
/* Drop a reference; unicam_release() runs when the count hits zero. */
2835 static void unicam_put(struct unicam_device *unicam)
2837 kref_put(&unicam->kref, unicam_release);
/* Take a reference on the device; paired with unicam_put(). */
2840 static void unicam_get(struct unicam_device *unicam)
2842 kref_get(&unicam->kref);
/*
 * video_device release callback: drops the device reference taken when the
 * node was registered, so the unicam struct lives until the last node dies.
 */
2845 static void unicam_node_release(struct video_device *vdev)
2847 struct unicam_node *node = video_get_drvdata(vdev);
2849 unicam_put(node->dev);
/*
 * unicam_set_default_format() - pick the node's initial format (video-dev
 * centric, non-media-controller API path).
 *
 * Image pad: reads the sensor's current mbus format; if Unicam cannot
 * handle that code, switches the sensor to the first mutually supported
 * format, and forces V4L2_FIELD_NONE since interlaced capture is not
 * supported. The node's pixelformat prefers the native fourcc, falling
 * back to the repacked fourcc. Metadata pad: fixed to the
 * MEDIA_BUS_FMT_SENSOR_DATA embedded-data format.
 *
 * NOTE(review): the excerpt is missing the pad_id parameter line, several
 * braces/else branches and returns - confirm control flow against the
 * full source. @ret_fmt presumably returns the chosen format to the
 * caller (assignment not visible here).
 */
2852 static int unicam_set_default_format(struct unicam_device *unicam,
2853 struct unicam_node *node,
2855 const struct unicam_fmt **ret_fmt)
2857 struct v4l2_mbus_framefmt mbus_fmt = {0};
2858 const struct unicam_fmt *fmt;
2861 if (pad_id == IMAGE_PAD) {
2862 ret = __subdev_get_format(unicam, &mbus_fmt, pad_id);
2864 unicam_err(unicam, "Failed to get_format - ret %d\n",
2869 fmt = find_format_by_code(mbus_fmt.code);
2872 * Find the first format that the sensor and unicam both
2875 fmt = get_first_supported_format(unicam);
/* Push the mutually-supported code back into the sensor. */
2878 mbus_fmt.code = fmt->code;
2879 ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
2884 if (mbus_fmt.field != V4L2_FIELD_NONE) {
2885 /* Interlaced not supported - disable it now. */
2886 mbus_fmt.field = V4L2_FIELD_NONE;
2887 ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
2893 node->v_fmt.fmt.pix.pixelformat = fmt->fourcc ? fmt->fourcc
2894 : fmt->repacked_fourcc;
2896 /* Fix this node format as embedded data. */
2897 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2898 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
/*
 * unicam_mc_set_default_format() - initial node format for the
 * media-controller API path.
 *
 * Image pad: hard-coded 640x480, progressive, sRGB defaults using the
 * first entry of the driver's formats[] table, with bytesperline/sizeimage
 * computed by unicam_calc_format_size_bpl(). Metadata pad: fixed
 * embedded-data format with a UNICAM_EMBEDDED_SIZE buffer and one
 * embedded line.
 *
 * NOTE(review): braces and one argument line of the calc call are missing
 * from this excerpt.
 */
2906 static void unicam_mc_set_default_format(struct unicam_node *node, int pad_id)
2908 if (pad_id == IMAGE_PAD) {
2909 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2911 pix_fmt->width = 640;
2912 pix_fmt->height = 480;
2913 pix_fmt->field = V4L2_FIELD_NONE;
2914 pix_fmt->colorspace = V4L2_COLORSPACE_SRGB;
2915 pix_fmt->ycbcr_enc = V4L2_YCBCR_ENC_601;
2916 pix_fmt->quantization = V4L2_QUANTIZATION_LIM_RANGE;
2917 pix_fmt->xfer_func = V4L2_XFER_FUNC_SRGB;
2918 pix_fmt->pixelformat = formats[0].fourcc;
2919 unicam_calc_format_size_bpl(node->dev, &formats[0],
2921 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2923 node->fmt = &formats[0];
2925 const struct unicam_fmt *fmt;
2927 /* Fix this node format as embedded data. */
2928 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2929 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
2932 node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
2933 node->embedded_lines = 1;
2934 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
/*
 * register_node() - create and register one video device node (image or
 * metadata pad) for the Unicam instance.
 *
 * Steps visible in this excerpt:
 *  1. Set the default format - legacy (video-centric) path via
 *     unicam_set_default_format()/unicam_reset_format(), or
 *     unicam_mc_set_default_format() for the media-controller API.
 *  2. Legacy path only: inherit tvnorms from a sensor that implements
 *     s_std, and (image pad) merge the sensor's controls into the
 *     device-level control handler.
 *  3. Initialise the vb2 queue (MMAP/DMABUF/READ, dma-contig, min one
 *     buffer) and the video_device (RX direction, capture caps, IO_MC
 *     when using the media-controller API).
 *  4. Allocate the per-node dummy DMA buffer used to park the hardware.
 *  5. Legacy path: disable every ioctl the sensor cannot back
 *     (STD/EDID/DV timings/frame intervals/parm/frame sizes/selection).
 *  6. Register the video device, take a unicam reference for its
 *     lifetime, and create an immutable enabled media link from the
 *     sensor source pad to this node's sink pad.
 *
 * NOTE(review): many lines are missing from this excerpt (ret/i
 * declarations, q->type and q->gfp_flags setup, several braces, error
 * labels, the unicam_get() call implied by the comment at 3122, and the
 * source-pad argument of media_create_pad_link). Confirm against the
 * full source before relying on exact control flow.
 */
2938 static int register_node(struct unicam_device *unicam, struct unicam_node *node,
2939 enum v4l2_buf_type type, int pad_id)
2941 struct video_device *vdev;
2942 struct vb2_queue *q;
2946 node->pad_id = pad_id;
2948 if (!unicam->mc_api) {
2949 const struct unicam_fmt *fmt;
2951 ret = unicam_set_default_format(unicam, node, pad_id, &fmt);
2955 /* Read current subdev format */
2957 unicam_reset_format(node);
2959 unicam_mc_set_default_format(node, pad_id);
/* Legacy API: pick up analogue TV standards from the sensor, if any. */
2962 if (!unicam->mc_api &&
2963 v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
2964 v4l2_std_id tvnorms;
2966 if (WARN_ON(!v4l2_subdev_has_op(unicam->sensor, video,
2969 * Subdevice should not advertise s_std but not
2974 ret = v4l2_subdev_call(unicam->sensor, video,
2975 g_tvnorms, &tvnorms);
2978 node->video_dev.tvnorms |= tvnorms;
2981 spin_lock_init(&node->dma_queue_lock);
2982 mutex_init(&node->lock);
2984 vdev = &node->video_dev;
2985 if (pad_id == IMAGE_PAD) {
2986 if (!unicam->mc_api) {
2987 /* Add controls from the subdevice */
2988 ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
2989 unicam->sensor->ctrl_handler,
2997 * If the sensor subdevice has any controls, associate the node
2998 * with the ctrl handler to allow access from userland.
3000 if (!list_empty(&unicam->ctrl_handler.ctrls))
3001 vdev->ctrl_handler = &unicam->ctrl_handler;
/* vb2 queue setup - dma-contig buffers, monotonic timestamps. */
3004 q = &node->buffer_queue;
3006 q->io_modes = VB2_MMAP | VB2_DMABUF | VB2_READ;
3008 q->ops = &unicam_video_qops;
3009 q->mem_ops = &vb2_dma_contig_memops;
3010 q->buf_struct_size = sizeof(struct unicam_buffer);
3011 q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
3012 q->lock = &node->lock;
3013 q->min_buffers_needed = 1;
3014 q->dev = &unicam->pdev->dev;
3016 ret = vb2_queue_init(q);
3018 unicam_err(unicam, "vb2_queue_init() failed\n");
3022 INIT_LIST_HEAD(&node->dma_queue);
3024 vdev->release = unicam_node_release;
3025 vdev->fops = &unicam_fops;
3026 vdev->ioctl_ops = unicam->mc_api ? &unicam_mc_ioctl_ops :
3028 vdev->v4l2_dev = &unicam->v4l2_dev;
3029 vdev->vfl_dir = VFL_DIR_RX;
3031 vdev->lock = &node->lock;
3032 vdev->device_caps = (pad_id == IMAGE_PAD) ?
3033 V4L2_CAP_VIDEO_CAPTURE : V4L2_CAP_META_CAPTURE;
3034 vdev->device_caps |= V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
3035 if (unicam->mc_api) {
3036 vdev->device_caps |= V4L2_CAP_IO_MC;
3037 vdev->entity.ops = &unicam_mc_entity_ops;
3040 /* Define the device names */
3041 snprintf(vdev->name, sizeof(vdev->name), "%s-%s", UNICAM_MODULE_NAME,
3042 pad_id == IMAGE_PAD ? "image" : "embedded");
3044 video_set_drvdata(vdev, node);
3045 if (pad_id == IMAGE_PAD)
3046 vdev->entity.flags |= MEDIA_ENT_FL_DEFAULT;
/* Each node exposes a single sink pad fed by the sensor. */
3047 node->pad.flags = MEDIA_PAD_FL_SINK;
3048 media_entity_pads_init(&vdev->entity, 1, &node->pad);
3050 node->dummy_buf_cpu_addr = dma_alloc_coherent(&unicam->pdev->dev,
3052 &node->dummy_buf_dma_addr,
3054 if (!node->dummy_buf_cpu_addr) {
3055 unicam_err(unicam, "Unable to allocate dummy buffer.\n");
/*
 * Legacy API only: hide every ioctl the bound sensor cannot service, and
 * everything standards/timings-related on the metadata pad.
 */
3058 if (!unicam->mc_api) {
3059 if (pad_id == METADATA_PAD ||
3060 !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
3061 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
3062 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
3063 v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
3065 if (pad_id == METADATA_PAD ||
3066 !v4l2_subdev_has_op(unicam->sensor, video, querystd))
3067 v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
3068 if (pad_id == METADATA_PAD ||
3069 !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
3070 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
3071 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
3072 v4l2_disable_ioctl(&node->video_dev,
3073 VIDIOC_DV_TIMINGS_CAP);
3074 v4l2_disable_ioctl(&node->video_dev,
3075 VIDIOC_G_DV_TIMINGS);
3076 v4l2_disable_ioctl(&node->video_dev,
3077 VIDIOC_S_DV_TIMINGS);
3078 v4l2_disable_ioctl(&node->video_dev,
3079 VIDIOC_ENUM_DV_TIMINGS);
3080 v4l2_disable_ioctl(&node->video_dev,
3081 VIDIOC_QUERY_DV_TIMINGS);
3083 if (pad_id == METADATA_PAD ||
3084 !v4l2_subdev_has_op(unicam->sensor, pad,
3085 enum_frame_interval))
3086 v4l2_disable_ioctl(&node->video_dev,
3087 VIDIOC_ENUM_FRAMEINTERVALS);
3088 if (pad_id == METADATA_PAD ||
3089 !v4l2_subdev_has_op(unicam->sensor, video,
3091 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
3092 if (pad_id == METADATA_PAD ||
3093 !v4l2_subdev_has_op(unicam->sensor, video,
3095 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
3097 if (pad_id == METADATA_PAD ||
3098 !v4l2_subdev_has_op(unicam->sensor, pad,
3100 v4l2_disable_ioctl(&node->video_dev,
3101 VIDIOC_ENUM_FRAMESIZES);
3103 if (node->pad_id == METADATA_PAD ||
3104 !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
3105 v4l2_disable_ioctl(&node->video_dev,
3106 VIDIOC_S_SELECTION);
3108 if (node->pad_id == METADATA_PAD ||
3109 !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
3110 v4l2_disable_ioctl(&node->video_dev,
3111 VIDIOC_G_SELECTION);
3114 ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
3116 unicam_err(unicam, "Unable to register video device %s\n",
3122 * Acquire a reference to unicam, which will be released when the video
3123 * device will be unregistered and userspace will have closed all open
3127 node->registered = true;
/* Metadata node only links to the sensor if it emits embedded data. */
3129 if (pad_id != METADATA_PAD || unicam->sensor_embedded_data) {
3130 ret = media_create_pad_link(&unicam->sensor->entity,
3132 &node->video_dev.entity, 0,
3133 MEDIA_LNK_FL_ENABLED |
3134 MEDIA_LNK_FL_IMMUTABLE);
3136 unicam_err(unicam, "Unable to create pad link for %s\n",
/*
 * unregister_nodes() - tear down every node: free each node's dummy DMA
 * buffer (if allocated) and unregister its video device (if registered).
 * Safe to call on partially-initialised nodes because both actions are
 * individually guarded.
 * NOTE(review): the loop-variable declaration and closing braces are
 * missing from this excerpt.
 */
3143 static void unregister_nodes(struct unicam_device *unicam)
3147 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
3148 struct unicam_node *node = &unicam->node[i];
3150 if (node->dummy_buf_cpu_addr) {
3151 dma_free_coherent(&unicam->pdev->dev, DUMMY_BUF_SIZE,
3152 node->dummy_buf_cpu_addr,
3153 node->dummy_buf_dma_addr);
/* Clear the flag first so a re-entry cannot double-unregister. */
3156 if (node->registered) {
3157 node->registered = false;
3158 video_unregister_device(&node->video_dev);
/*
 * unicam_async_complete() - async notifier .complete callback; all
 * subdevices are bound, so build the capture topology.
 *
 * Allocates the sensor subdev state (with a static lockdep key), counts
 * the sensor's source pads (recording up to MAX_NODES of them as
 * src_pad_id per node), registers the image node always, and the metadata
 * node only when the sensor exposes a second source pad
 * (sensor_embedded_data). Subdev device nodes are registered read-only
 * for the media-controller API, read-write otherwise. On success the
 * initial kref is dropped (ownership passes to the video nodes); on
 * failure the nodes are unregistered.
 *
 * NOTE(review): several lines are missing from this excerpt (ret
 * declaration, braces, the else/error branches around 3182-3192 and
 * 3212-3215, and the final unicam_put()/return) - confirm against the
 * full source.
 */
3163 static int unicam_async_complete(struct v4l2_async_notifier *notifier)
3165 static struct lock_class_key key;
3166 struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
3167 unsigned int i, source_pads = 0;
3170 unicam->v4l2_dev.notify = unicam_notify;
3172 unicam->sensor_state = __v4l2_subdev_state_alloc(unicam->sensor,
3173 "unicam:async->lock", &key);
3174 if (!unicam->sensor_state)
/* Map each sensor source pad onto a capture node slot. */
3177 for (i = 0; i < unicam->sensor->entity.num_pads; i++) {
3178 if (unicam->sensor->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) {
3179 if (source_pads < MAX_NODES) {
3180 unicam->node[source_pads].src_pad_id = i;
3181 unicam_dbg(3, unicam, "source pad %u is index %u\n",
3188 unicam_err(unicam, "No source pads on sensor.\n");
3193 ret = register_node(unicam, &unicam->node[IMAGE_PAD],
3194 V4L2_BUF_TYPE_VIDEO_CAPTURE, IMAGE_PAD);
3196 unicam_err(unicam, "Unable to register image video device.\n");
/* A second source pad means the sensor emits embedded (metadata) data. */
3200 if (source_pads >= 2) {
3201 unicam->sensor_embedded_data = true;
3203 ret = register_node(unicam, &unicam->node[METADATA_PAD],
3204 V4L2_BUF_TYPE_META_CAPTURE, METADATA_PAD);
3206 unicam_err(unicam, "Unable to register metadata video device.\n");
3212 ret = v4l2_device_register_subdev_nodes(&unicam->v4l2_dev);
3214 ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
3216 unicam_err(unicam, "Unable to register subdev nodes.\n");
3221 * Release the initial reference, all references are now owned by the
3228 unregister_nodes(unicam);
/*
 * Async notifier callbacks: .bound records the single sensor subdev,
 * .complete builds the video nodes once binding is done.
 * NOTE(review): the closing "};" is missing from this excerpt.
 */
3234 static const struct v4l2_async_notifier_operations unicam_async_ops = {
3235 .bound = unicam_async_bound,
3236 .complete = unicam_async_complete,
/*
 * of_unicam_connect_subdevs() - parse the DT endpoint and arm the async
 * notifier for the remote sensor.
 *
 * Reads brcm,num-data-lanes, walks the OF graph to the local endpoint and
 * its remote parent (the sensor node), and parses the endpoint with
 * v4l2_fwnode_endpoint_parse(). CSI-2 D-PHY endpoints must use lanes
 * 1..N in order (no reordering) and no more lanes than the DT allows -
 * max_data_lanes is then narrowed to the endpoint's lane count. CCP2
 * endpoints must use clock lane 0 / data lane 1, and the strobe value is
 * stored in bus_flags. Finally the async notifier is initialised with a
 * fwnode match on the remote endpoint and registered.
 *
 * NOTE(review): this excerpt is missing lines - the ret/lane
 * declarations, the accepted lane-count cases of the inner switch
 * (around 3282-3287), several braces, the error labels and the return -
 * confirm against the full source. The final of_node_put() pair suggests
 * a shared cleanup path for both success and error.
 */
3239 static int of_unicam_connect_subdevs(struct unicam_device *dev)
3241 struct platform_device *pdev = dev->pdev;
3242 struct v4l2_fwnode_endpoint ep = { };
3243 struct device_node *ep_node;
3244 struct device_node *sensor_node;
3248 if (of_property_read_u32(pdev->dev.of_node, "brcm,num-data-lanes",
3249 &dev->max_data_lanes) < 0) {
3250 unicam_err(dev, "number of data lanes not set\n");
3254 /* Get the local endpoint and remote device. */
3255 ep_node = of_graph_get_next_endpoint(pdev->dev.of_node, NULL);
3257 unicam_dbg(3, dev, "can't get next endpoint\n");
3261 unicam_dbg(3, dev, "ep_node is %pOF\n", ep_node);
3263 sensor_node = of_graph_get_remote_port_parent(ep_node);
3265 unicam_dbg(3, dev, "can't get remote parent\n");
3269 unicam_dbg(1, dev, "found subdevice %pOF\n", sensor_node);
3271 /* Parse the local endpoint and validate its configuration. */
3272 v4l2_fwnode_endpoint_parse(of_fwnode_handle(ep_node), &ep);
3274 unicam_dbg(3, dev, "parsed local endpoint, bus_type %u\n",
3277 dev->bus_type = ep.bus_type;
3279 switch (ep.bus_type) {
3280 case V4L2_MBUS_CSI2_DPHY:
3281 switch (ep.bus.mipi_csi2.num_data_lanes) {
3288 unicam_err(dev, "subdevice %pOF: %u data lanes not supported\n",
3290 ep.bus.mipi_csi2.num_data_lanes);
/* Lanes must be 1..N in order - the PHY cannot reroute them. */
3294 for (lane = 0; lane < ep.bus.mipi_csi2.num_data_lanes; lane++) {
3295 if (ep.bus.mipi_csi2.data_lanes[lane] != lane + 1) {
3296 unicam_err(dev, "subdevice %pOF: data lanes reordering not supported\n",
3302 if (ep.bus.mipi_csi2.num_data_lanes > dev->max_data_lanes) {
3303 unicam_err(dev, "subdevice requires %u data lanes when %u are supported\n",
3304 ep.bus.mipi_csi2.num_data_lanes,
3305 dev->max_data_lanes);
3308 dev->max_data_lanes = ep.bus.mipi_csi2.num_data_lanes;
3309 dev->bus_flags = ep.bus.mipi_csi2.flags;
3313 case V4L2_MBUS_CCP2:
3314 if (ep.bus.mipi_csi1.clock_lane != 0 ||
3315 ep.bus.mipi_csi1.data_lane != 1) {
3316 unicam_err(dev, "subdevice %pOF: unsupported lanes configuration\n",
/* CCP2 is a single-lane bus; strobe mode is carried in bus_flags. */
3321 dev->max_data_lanes = 1;
3322 dev->bus_flags = ep.bus.mipi_csi1.strobe;
3326 /* Unsupported bus type */
3327 unicam_err(dev, "subdevice %pOF: unsupported bus type %u\n",
3328 sensor_node, ep.bus_type);
3332 unicam_dbg(3, dev, "subdevice %pOF: %s bus, %u data lanes, flags=0x%08x\n",
3334 dev->bus_type == V4L2_MBUS_CSI2_DPHY ? "CSI-2" : "CCP2",
3335 dev->max_data_lanes, dev->bus_flags);
3337 /* Initialize and register the async notifier. */
3338 v4l2_async_nf_init(&dev->notifier);
3339 dev->notifier.ops = &unicam_async_ops;
3341 dev->asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
3342 dev->asd.match.fwnode = fwnode_graph_get_remote_endpoint(of_fwnode_handle(ep_node));
3343 ret = __v4l2_async_nf_add_subdev(&dev->notifier, &dev->asd);
3345 unicam_err(dev, "Error adding subdevice: %d\n", ret);
3349 ret = v4l2_async_nf_register(&dev->v4l2_dev, &dev->notifier);
3351 unicam_err(dev, "Error registering async notifier: %d\n", ret);
/* Graph references are dropped on both success and error paths. */
3356 of_node_put(sensor_node);
3357 of_node_put(ep_node);
/*
 * unicam_probe() - platform driver probe.
 *
 * Allocates the refcounted unicam_device, decides between the legacy
 * video-centric API and the media-controller API (module parameter,
 * overridable by the brcm,media-controller DT property), maps the two
 * register regions (main block and clock-gate block), acquires the "lp"
 * and "vpu" clocks, requests the IRQ, initialises and registers the media
 * device and v4l2 device, reserves the control handler, then kicks off
 * sensor discovery via of_unicam_connect_subdevs() and enables runtime PM.
 *
 * Cleanup on failure unwinds media/v4l2 registration; the final resource
 * release happens through the kref (unicam_release).
 *
 * NOTE(review): lines are missing from this excerpt - the ret
 * declaration, the "if (!unicam) return -ENOMEM;" allocation check, the
 * "if (ret) {" lines before several error prints, "return 0" / "return
 * ret", and the err_unicam_put label body. Confirm against the full
 * source.
 */
3362 static int unicam_probe(struct platform_device *pdev)
3364 struct unicam_device *unicam;
3367 unicam = kzalloc(sizeof(*unicam), GFP_KERNEL);
3371 kref_init(&unicam->kref);
3372 unicam->pdev = pdev;
3375 * Adopt the current setting of the module parameter, and check if
3376 * device tree requests it.
3378 unicam->mc_api = media_controller;
3379 if (of_property_read_bool(pdev->dev.of_node, "brcm,media-controller"))
3380 unicam->mc_api = true;
3382 unicam->base = devm_platform_ioremap_resource(pdev, 0);
3383 if (IS_ERR(unicam->base)) {
3384 unicam_err(unicam, "Failed to get main io block\n");
3385 ret = PTR_ERR(unicam->base);
3386 goto err_unicam_put;
3389 unicam->clk_gate_base = devm_platform_ioremap_resource(pdev, 1);
3390 if (IS_ERR(unicam->clk_gate_base)) {
3391 unicam_err(unicam, "Failed to get 2nd io block\n");
3392 ret = PTR_ERR(unicam->clk_gate_base);
3393 goto err_unicam_put;
3396 unicam->clock = devm_clk_get(&pdev->dev, "lp");
3397 if (IS_ERR(unicam->clock)) {
3398 unicam_err(unicam, "Failed to get lp clock\n");
3399 ret = PTR_ERR(unicam->clock);
3400 goto err_unicam_put;
3403 unicam->vpu_clock = devm_clk_get(&pdev->dev, "vpu");
3404 if (IS_ERR(unicam->vpu_clock)) {
3405 unicam_err(unicam, "Failed to get vpu clock\n");
3406 ret = PTR_ERR(unicam->vpu_clock);
3407 goto err_unicam_put;
/* platform_get_irq() returns the IRQ number, reused directly below. */
3410 ret = platform_get_irq(pdev, 0);
3412 dev_err(&pdev->dev, "No IRQ resource\n");
3414 goto err_unicam_put;
3417 ret = devm_request_irq(&pdev->dev, ret, unicam_isr, 0,
3418 "unicam_capture0", unicam);
3420 dev_err(&pdev->dev, "Unable to request interrupt\n");
3422 goto err_unicam_put;
3425 unicam->mdev.dev = &pdev->dev;
3426 strscpy(unicam->mdev.model, UNICAM_MODULE_NAME,
3427 sizeof(unicam->mdev.model));
3428 strscpy(unicam->mdev.serial, "", sizeof(unicam->mdev.serial));
3429 snprintf(unicam->mdev.bus_info, sizeof(unicam->mdev.bus_info),
3430 "platform:%s", dev_name(&pdev->dev));
3431 unicam->mdev.hw_revision = 0;
3433 media_device_init(&unicam->mdev);
3435 unicam->v4l2_dev.mdev = &unicam->mdev;
3437 ret = v4l2_device_register(&pdev->dev, &unicam->v4l2_dev);
3440 "Unable to register v4l2 device.\n");
3441 goto err_unicam_put;
3444 ret = media_device_register(&unicam->mdev);
3447 "Unable to register media-controller device.\n");
3448 goto err_v4l2_unregister;
3451 /* Reserve space for the controls */
3452 ret = v4l2_ctrl_handler_init(&unicam->ctrl_handler, 16);
3454 goto err_media_unregister;
3456 /* set the driver data in platform device */
3457 platform_set_drvdata(pdev, unicam);
3459 ret = of_unicam_connect_subdevs(unicam);
3461 dev_err(&pdev->dev, "Failed to connect subdevs\n");
3462 goto err_media_unregister;
3465 /* Enable the block power domain */
3466 pm_runtime_enable(&pdev->dev);
/* Error unwind in reverse registration order. */
3470 err_media_unregister:
3471 media_device_unregister(&unicam->mdev);
3472 err_v4l2_unregister:
3473 v4l2_device_unregister(&unicam->v4l2_dev);
/*
 * unicam_remove() - platform driver remove; reverse of probe.
 *
 * Unregisters the async notifier, v4l2 device, media device and video
 * nodes, then disables runtime PM. The final kfree of the unicam struct
 * happens via the kref (NOTE(review): the unicam_put() call and return
 * are among the lines missing from this excerpt - confirm).
 */
3480 static int unicam_remove(struct platform_device *pdev)
3482 struct unicam_device *unicam = platform_get_drvdata(pdev);
3484 unicam_dbg(2, unicam, "%s\n", __func__);
3486 v4l2_async_nf_unregister(&unicam->notifier);
3487 v4l2_device_unregister(&unicam->v4l2_dev);
3488 media_device_unregister(&unicam->mdev);
3489 unregister_nodes(unicam);
3491 pm_runtime_disable(&pdev->dev);
/*
 * Device-tree match table. NOTE(review): the sentinel entry and closing
 * "};" are missing from this excerpt.
 */
3496 static const struct of_device_id unicam_of_match[] = {
3497 { .compatible = "brcm,bcm2835-unicam", },
3500 MODULE_DEVICE_TABLE(of, unicam_of_match);
/*
 * Platform driver definition; bound by compatible string via
 * unicam_of_match. NOTE(review): the ".driver = {" line, its closing
 * brace and the struct's closing "};" are missing from this excerpt.
 */
3502 static struct platform_driver unicam_driver = {
3503 .probe = unicam_probe,
3504 .remove = unicam_remove,
3506 .name = UNICAM_MODULE_NAME,
3507 .of_match_table = of_match_ptr(unicam_of_match),
3513 MODULE_AUTHOR("Dave Stevenson <dave.stevenson@raspberrypi.com>");
3514 MODULE_DESCRIPTION("BCM2835 Unicam driver");
3515 MODULE_LICENSE("GPL");
3516 MODULE_VERSION(UNICAM_VERSION);