// SPDX-License-Identifier: GPL-2.0-only
/*
 * BCM283x / BCM271x Unicam Capture Driver
 *
 * Copyright (C) 2017-2020 - Raspberry Pi (Trading) Ltd.
 *
 * Dave Stevenson <dave.stevenson@raspberrypi.com>
 *
 * Based on TI am437x driver by
 *   Benoit Parrot <bparrot@ti.com>
 *   Lad, Prabhakar <prabhakar.csengg@gmail.com>
 *
 * and TI CAL camera interface driver by
 *   Benoit Parrot <bparrot@ti.com>
 *
 * There are two camera drivers in the kernel for BCM283x - this one
 * and bcm2835-camera (currently in staging).
 *
 * This driver directly controls the Unicam peripheral - there is no
 * involvement with the VideoCore firmware. Unicam receives CSI-2 or
 * CCP2 data and writes it into SDRAM.
 * The only potential processing options are to repack Bayer data into an
 * alternate format, and applying windowing.
 * The repacking does not shift the data, so can repack V4L2_PIX_FMT_Sxxxx10P
 * to V4L2_PIX_FMT_Sxxxx10, or V4L2_PIX_FMT_Sxxxx12P to V4L2_PIX_FMT_Sxxxx12,
 * but not generically up to V4L2_PIX_FMT_Sxxxx16. The driver will add both
 * formats where the relevant formats are defined, and will automatically
 * configure the repacking as required.
 * Support for windowing may be added later.
 *
 * It should be possible to connect this driver to any sensor with a
 * suitable output interface and V4L2 subdevice driver.
 *
 * bcm2835-camera uses the VideoCore firmware to control the sensor,
 * Unicam, ISP, and all tuner control loops. Fully processed frames are
 * delivered to the driver by the firmware. It only has sensor drivers
 * for Omnivision OV5647, and Sony IMX219 sensors.
 *
 * The two drivers are mutually exclusive for the same Unicam instance.
 * The VideoCore firmware checks the device tree configuration during boot.
 * If it finds device tree nodes called csi0 or csi1 it will block the
 * firmware from accessing the peripheral, and bcm2835-camera will
 * not be able to stream data.
 */
47 #include <linux/clk.h>
48 #include <linux/delay.h>
49 #include <linux/device.h>
50 #include <linux/dma-mapping.h>
51 #include <linux/err.h>
52 #include <linux/init.h>
53 #include <linux/interrupt.h>
55 #include <linux/module.h>
56 #include <linux/of_device.h>
57 #include <linux/of_graph.h>
58 #include <linux/pinctrl/consumer.h>
59 #include <linux/platform_device.h>
60 #include <linux/pm_runtime.h>
61 #include <linux/slab.h>
62 #include <linux/uaccess.h>
63 #include <linux/videodev2.h>
65 #include <media/v4l2-common.h>
66 #include <media/v4l2-ctrls.h>
67 #include <media/v4l2-dev.h>
68 #include <media/v4l2-device.h>
69 #include <media/v4l2-dv-timings.h>
70 #include <media/v4l2-event.h>
71 #include <media/v4l2-ioctl.h>
72 #include <media/v4l2-fwnode.h>
73 #include <media/videobuf2-dma-contig.h>
75 #include <media/v4l2-async.h>
76 #define v4l2_async_notifier_add_subdev __v4l2_async_notifier_add_subdev
78 #include "vc4-regs-unicam.h"
80 #define UNICAM_MODULE_NAME "unicam"
81 #define UNICAM_VERSION "0.1.0"
84 module_param(debug, int, 0644);
85 MODULE_PARM_DESC(debug, "Debug level 0-3");
87 static int media_controller;
88 module_param(media_controller, int, 0644);
89 MODULE_PARM_DESC(media_controller, "Use media controller API");
91 #define unicam_dbg(level, dev, fmt, arg...) \
92 v4l2_dbg(level, debug, &(dev)->v4l2_dev, fmt, ##arg)
93 #define unicam_info(dev, fmt, arg...) \
94 v4l2_info(&(dev)->v4l2_dev, fmt, ##arg)
95 #define unicam_err(dev, fmt, arg...) \
96 v4l2_err(&(dev)->v4l2_dev, fmt, ##arg)
99 * Unicam must request a minimum of 250Mhz from the VPU clock.
100 * Otherwise the input FIFOs overrun and cause image corruption.
102 #define MIN_VPU_CLOCK_RATE (250 * 1000 * 1000)
104 * To protect against a dodgy sensor driver never returning an error from
105 * enum_mbus_code, set a maximum index value to be used.
107 #define MAX_ENUM_MBUS_CODE 128
110 * Stride is a 16 bit register, but also has to be a multiple of 32.
112 #define BPL_ALIGNMENT 32
113 #define MAX_BYTESPERLINE ((1 << 16) - BPL_ALIGNMENT)
115 * Max width is therefore determined by the max stride divided by
116 * the number of bits per pixel. Take 32bpp as a
118 * No imposed limit on the height, so adopt a square image for want
119 * of anything better.
121 #define MAX_WIDTH (MAX_BYTESPERLINE / 4)
122 #define MAX_HEIGHT MAX_WIDTH
123 /* Define a nominal minimum image size */
125 #define MIN_HEIGHT 16
126 /* Default size of the embedded buffer */
127 #define UNICAM_EMBEDDED_SIZE 16384
130 * Size of the dummy buffer. Can be any size really, but the DMA
131 * allocation works in units of page sizes.
133 #define DUMMY_BUF_SIZE (PAGE_SIZE)
141 #define MASK_CS_DEFAULT BIT(V4L2_COLORSPACE_DEFAULT)
142 #define MASK_CS_SMPTE170M BIT(V4L2_COLORSPACE_SMPTE170M)
143 #define MASK_CS_SMPTE240M BIT(V4L2_COLORSPACE_SMPTE240M)
144 #define MASK_CS_REC709 BIT(V4L2_COLORSPACE_REC709)
145 #define MASK_CS_BT878 BIT(V4L2_COLORSPACE_BT878)
146 #define MASK_CS_470_M BIT(V4L2_COLORSPACE_470_SYSTEM_M)
147 #define MASK_CS_470_BG BIT(V4L2_COLORSPACE_470_SYSTEM_BG)
148 #define MASK_CS_JPEG BIT(V4L2_COLORSPACE_JPEG)
149 #define MASK_CS_SRGB BIT(V4L2_COLORSPACE_SRGB)
150 #define MASK_CS_OPRGB BIT(V4L2_COLORSPACE_OPRGB)
151 #define MASK_CS_BT2020 BIT(V4L2_COLORSPACE_BT2020)
152 #define MASK_CS_RAW BIT(V4L2_COLORSPACE_RAW)
153 #define MASK_CS_DCI_P3 BIT(V4L2_COLORSPACE_DCI_P3)
155 #define MAX_COLORSPACE 32
158 * struct unicam_fmt - Unicam media bus format information
159 * @pixelformat: V4L2 pixel format FCC identifier. 0 if n/a.
160 * @repacked_fourcc: V4L2 pixel format FCC identifier if the data is expanded
161 * out to 16bpp. 0 if n/a.
162 * @code: V4L2 media bus format code.
163 * @depth: Bits per pixel as delivered from the source.
164 * @csi_dt: CSI data type.
165 * @valid_colorspaces: Bitmask of valid colorspaces so that the Media Controller
166 * centric try_fmt can validate the colorspace and pass
168 * @check_variants: Flag to denote that there are multiple mediabus formats
169 * still in the list that could match this V4L2 format.
170 * @mc_skip: Media Controller shouldn't list this format via ENUM_FMT as it is
171 * a duplicate of an earlier format.
172 * @metadata_fmt: This format only applies to the metadata pad.
180 u32 valid_colorspaces;
186 static const struct unicam_fmt formats[] = {
189 .fourcc = V4L2_PIX_FMT_YUYV,
190 .code = MEDIA_BUS_FMT_YUYV8_2X8,
194 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
197 .fourcc = V4L2_PIX_FMT_UYVY,
198 .code = MEDIA_BUS_FMT_UYVY8_2X8,
202 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
205 .fourcc = V4L2_PIX_FMT_YVYU,
206 .code = MEDIA_BUS_FMT_YVYU8_2X8,
210 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
213 .fourcc = V4L2_PIX_FMT_VYUY,
214 .code = MEDIA_BUS_FMT_VYUY8_2X8,
218 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
221 .fourcc = V4L2_PIX_FMT_YUYV,
222 .code = MEDIA_BUS_FMT_YUYV8_1X16,
226 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
229 .fourcc = V4L2_PIX_FMT_UYVY,
230 .code = MEDIA_BUS_FMT_UYVY8_1X16,
234 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
237 .fourcc = V4L2_PIX_FMT_YVYU,
238 .code = MEDIA_BUS_FMT_YVYU8_1X16,
242 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
245 .fourcc = V4L2_PIX_FMT_VYUY,
246 .code = MEDIA_BUS_FMT_VYUY8_1X16,
250 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
254 .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
255 .code = MEDIA_BUS_FMT_RGB565_2X8_LE,
258 .valid_colorspaces = MASK_CS_SRGB,
260 .fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
261 .code = MEDIA_BUS_FMT_RGB565_2X8_BE,
264 .valid_colorspaces = MASK_CS_SRGB,
266 .fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb arrrrrgg */
267 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE,
270 .valid_colorspaces = MASK_CS_SRGB,
272 .fourcc = V4L2_PIX_FMT_RGB555X, /* arrrrrgg gggbbbbb */
273 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE,
276 .valid_colorspaces = MASK_CS_SRGB,
278 .fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
279 .code = MEDIA_BUS_FMT_RGB888_1X24,
282 .valid_colorspaces = MASK_CS_SRGB,
284 .fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
285 .code = MEDIA_BUS_FMT_BGR888_1X24,
288 .valid_colorspaces = MASK_CS_SRGB,
290 .fourcc = V4L2_PIX_FMT_RGB32, /* argb */
291 .code = MEDIA_BUS_FMT_ARGB8888_1X32,
294 .valid_colorspaces = MASK_CS_SRGB,
297 .fourcc = V4L2_PIX_FMT_SBGGR8,
298 .code = MEDIA_BUS_FMT_SBGGR8_1X8,
301 .valid_colorspaces = MASK_CS_RAW,
303 .fourcc = V4L2_PIX_FMT_SGBRG8,
304 .code = MEDIA_BUS_FMT_SGBRG8_1X8,
307 .valid_colorspaces = MASK_CS_RAW,
309 .fourcc = V4L2_PIX_FMT_SGRBG8,
310 .code = MEDIA_BUS_FMT_SGRBG8_1X8,
313 .valid_colorspaces = MASK_CS_RAW,
315 .fourcc = V4L2_PIX_FMT_SRGGB8,
316 .code = MEDIA_BUS_FMT_SRGGB8_1X8,
319 .valid_colorspaces = MASK_CS_RAW,
321 .fourcc = V4L2_PIX_FMT_SBGGR10P,
322 .repacked_fourcc = V4L2_PIX_FMT_SBGGR10,
323 .code = MEDIA_BUS_FMT_SBGGR10_1X10,
326 .valid_colorspaces = MASK_CS_RAW,
328 .fourcc = V4L2_PIX_FMT_SGBRG10P,
329 .repacked_fourcc = V4L2_PIX_FMT_SGBRG10,
330 .code = MEDIA_BUS_FMT_SGBRG10_1X10,
333 .valid_colorspaces = MASK_CS_RAW,
335 .fourcc = V4L2_PIX_FMT_SGRBG10P,
336 .repacked_fourcc = V4L2_PIX_FMT_SGRBG10,
337 .code = MEDIA_BUS_FMT_SGRBG10_1X10,
340 .valid_colorspaces = MASK_CS_RAW,
342 .fourcc = V4L2_PIX_FMT_SRGGB10P,
343 .repacked_fourcc = V4L2_PIX_FMT_SRGGB10,
344 .code = MEDIA_BUS_FMT_SRGGB10_1X10,
347 .valid_colorspaces = MASK_CS_RAW,
349 .fourcc = V4L2_PIX_FMT_SBGGR12P,
350 .repacked_fourcc = V4L2_PIX_FMT_SBGGR12,
351 .code = MEDIA_BUS_FMT_SBGGR12_1X12,
354 .valid_colorspaces = MASK_CS_RAW,
356 .fourcc = V4L2_PIX_FMT_SGBRG12P,
357 .repacked_fourcc = V4L2_PIX_FMT_SGBRG12,
358 .code = MEDIA_BUS_FMT_SGBRG12_1X12,
361 .valid_colorspaces = MASK_CS_RAW,
363 .fourcc = V4L2_PIX_FMT_SGRBG12P,
364 .repacked_fourcc = V4L2_PIX_FMT_SGRBG12,
365 .code = MEDIA_BUS_FMT_SGRBG12_1X12,
368 .valid_colorspaces = MASK_CS_RAW,
370 .fourcc = V4L2_PIX_FMT_SRGGB12P,
371 .repacked_fourcc = V4L2_PIX_FMT_SRGGB12,
372 .code = MEDIA_BUS_FMT_SRGGB12_1X12,
375 .valid_colorspaces = MASK_CS_RAW,
377 .fourcc = V4L2_PIX_FMT_SBGGR14P,
378 .repacked_fourcc = V4L2_PIX_FMT_SBGGR14,
379 .code = MEDIA_BUS_FMT_SBGGR14_1X14,
382 .valid_colorspaces = MASK_CS_RAW,
384 .fourcc = V4L2_PIX_FMT_SGBRG14P,
385 .repacked_fourcc = V4L2_PIX_FMT_SGBRG14,
386 .code = MEDIA_BUS_FMT_SGBRG14_1X14,
389 .valid_colorspaces = MASK_CS_RAW,
391 .fourcc = V4L2_PIX_FMT_SGRBG14P,
392 .repacked_fourcc = V4L2_PIX_FMT_SGRBG14,
393 .code = MEDIA_BUS_FMT_SGRBG14_1X14,
396 .valid_colorspaces = MASK_CS_RAW,
398 .fourcc = V4L2_PIX_FMT_SRGGB14P,
399 .repacked_fourcc = V4L2_PIX_FMT_SRGGB14,
400 .code = MEDIA_BUS_FMT_SRGGB14_1X14,
403 .valid_colorspaces = MASK_CS_RAW,
406 * 16 bit Bayer formats could be supported, but there is no CSI2
407 * data_type defined for raw 16, and no sensors that produce it at
411 /* Greyscale formats */
412 .fourcc = V4L2_PIX_FMT_GREY,
413 .code = MEDIA_BUS_FMT_Y8_1X8,
416 .valid_colorspaces = MASK_CS_RAW,
418 .fourcc = V4L2_PIX_FMT_Y10P,
419 .repacked_fourcc = V4L2_PIX_FMT_Y10,
420 .code = MEDIA_BUS_FMT_Y10_1X10,
423 .valid_colorspaces = MASK_CS_RAW,
425 .fourcc = V4L2_PIX_FMT_Y12P,
426 .repacked_fourcc = V4L2_PIX_FMT_Y12,
427 .code = MEDIA_BUS_FMT_Y12_1X12,
430 .valid_colorspaces = MASK_CS_RAW,
432 .fourcc = V4L2_PIX_FMT_Y14P,
433 .repacked_fourcc = V4L2_PIX_FMT_Y14,
434 .code = MEDIA_BUS_FMT_Y14_1X14,
437 .valid_colorspaces = MASK_CS_RAW,
439 /* Embedded data format */
441 .fourcc = V4L2_META_FMT_SENSOR_DATA,
442 .code = MEDIA_BUS_FMT_SENSOR_DATA,
448 struct unicam_buffer {
449 struct vb2_v4l2_buffer vb;
450 struct list_head list;
453 static inline struct unicam_buffer *to_unicam_buffer(struct vb2_buffer *vb)
455 return container_of(vb, struct unicam_buffer, vb.vb2_buf);
463 /* Source pad id on the sensor for this node */
464 unsigned int src_pad_id;
465 /* Pointer pointing to current v4l2_buffer */
466 struct unicam_buffer *cur_frm;
467 /* Pointer pointing to next v4l2_buffer */
468 struct unicam_buffer *next_frm;
470 const struct unicam_fmt *fmt;
471 /* Used to store current pixel format */
472 struct v4l2_format v_fmt;
473 /* Used to store current mbus frame format */
474 struct v4l2_mbus_framefmt m_fmt;
475 /* Buffer queue used in video-buf */
476 struct vb2_queue buffer_queue;
477 /* Queue of filled frames */
478 struct list_head dma_queue;
479 /* IRQ lock for DMA queue */
480 spinlock_t dma_queue_lock;
481 /* lock used to access this structure */
483 /* Identifies video device for this channel */
484 struct video_device video_dev;
485 /* Pointer to the parent handle */
486 struct unicam_device *dev;
487 struct media_pad pad;
488 unsigned int embedded_lines;
489 struct media_pipeline pipe;
491 * Dummy buffer intended to be used by unicam
492 * if we have no other queued buffers to swap to.
494 void *dummy_buf_cpu_addr;
495 dma_addr_t dummy_buf_dma_addr;
498 struct unicam_device {
501 /* V4l2 specific parameters */
502 struct v4l2_async_subdev asd;
504 /* peripheral base address */
506 /* clock gating base address */
507 void __iomem *clk_gate_base;
508 /* lp clock handle */
510 /* vpu clock handle */
511 struct clk *vpu_clock;
512 /* clock status for error handling */
515 struct v4l2_device v4l2_dev;
516 struct media_device mdev;
519 struct platform_device *pdev;
520 /* subdevice async Notifier */
521 struct v4l2_async_notifier notifier;
522 unsigned int sequence;
524 /* ptr to sub device */
525 struct v4l2_subdev *sensor;
526 /* Pad config for the sensor */
527 struct v4l2_subdev_state *sensor_state;
529 enum v4l2_mbus_type bus_type;
531 * Stores bus.mipi_csi2.flags for CSI2 sensors, or
532 * bus.mipi_csi1.strobe for CCP2.
534 unsigned int bus_flags;
535 unsigned int max_data_lanes;
536 unsigned int active_data_lanes;
537 bool sensor_embedded_data;
539 struct unicam_node node[MAX_NODES];
540 struct v4l2_ctrl_handler ctrl_handler;
545 static inline struct unicam_device *
546 to_unicam_device(struct v4l2_device *v4l2_dev)
548 return container_of(v4l2_dev, struct unicam_device, v4l2_dev);
551 /* Hardware access */
552 static inline void clk_write(struct unicam_device *dev, u32 val)
554 writel(val | 0x5a000000, dev->clk_gate_base);
557 static inline u32 reg_read(struct unicam_device *dev, u32 offset)
559 return readl(dev->base + offset);
562 static inline void reg_write(struct unicam_device *dev, u32 offset, u32 val)
564 writel(val, dev->base + offset);
567 static inline int get_field(u32 value, u32 mask)
569 return (value & mask) >> __ffs(mask);
572 static inline void set_field(u32 *valp, u32 field, u32 mask)
577 val |= (field << __ffs(mask)) & mask;
581 static inline u32 reg_read_field(struct unicam_device *dev, u32 offset,
584 return get_field(reg_read(dev, offset), mask);
587 static inline void reg_write_field(struct unicam_device *dev, u32 offset,
590 u32 val = reg_read(dev, offset);
592 set_field(&val, field, mask);
593 reg_write(dev, offset, val);
596 /* Power management functions */
597 static inline int unicam_runtime_get(struct unicam_device *dev)
599 return pm_runtime_get_sync(&dev->pdev->dev);
602 static inline void unicam_runtime_put(struct unicam_device *dev)
604 pm_runtime_put_sync(&dev->pdev->dev);
607 /* Format setup functions */
608 static const struct unicam_fmt *find_format_by_code(u32 code)
612 for (i = 0; i < ARRAY_SIZE(formats); i++) {
613 if (formats[i].code == code)
620 static int check_mbus_format(struct unicam_device *dev,
621 const struct unicam_fmt *format)
626 for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
627 struct v4l2_subdev_mbus_code_enum mbus_code = {
630 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
633 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
636 if (!ret && mbus_code.code == format->code)
643 static const struct unicam_fmt *find_format_by_pix(struct unicam_device *dev,
648 for (i = 0; i < ARRAY_SIZE(formats); i++) {
649 if (formats[i].fourcc == pixelformat ||
650 formats[i].repacked_fourcc == pixelformat) {
651 if (formats[i].check_variants &&
652 !check_mbus_format(dev, &formats[i]))
661 static unsigned int bytes_per_line(u32 width, const struct unicam_fmt *fmt,
664 if (v4l2_fourcc == fmt->repacked_fourcc)
665 /* Repacking always goes to 16bpp */
666 return ALIGN(width << 1, BPL_ALIGNMENT);
668 return ALIGN((width * fmt->depth) >> 3, BPL_ALIGNMENT);
671 static int __subdev_get_format(struct unicam_device *dev,
672 struct v4l2_mbus_framefmt *fmt, int pad_id)
674 struct v4l2_subdev_format sd_fmt = {
675 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
676 .pad = dev->node[pad_id].src_pad_id,
680 ret = v4l2_subdev_call(dev->sensor, pad, get_fmt, dev->sensor_state,
685 *fmt = sd_fmt.format;
687 unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__,
688 fmt->width, fmt->height, fmt->code);
693 static int __subdev_set_format(struct unicam_device *dev,
694 struct v4l2_mbus_framefmt *fmt, int pad_id)
696 struct v4l2_subdev_format sd_fmt = {
697 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
698 .pad = dev->node[pad_id].src_pad_id,
702 sd_fmt.format = *fmt;
704 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
709 *fmt = sd_fmt.format;
711 if (pad_id == IMAGE_PAD)
712 unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__, fmt->width,
713 fmt->height, fmt->code);
715 unicam_dbg(1, dev, "%s Embedded data code:%04x\n", __func__,
721 static int unicam_calc_format_size_bpl(struct unicam_device *dev,
722 const struct unicam_fmt *fmt,
723 struct v4l2_format *f)
725 unsigned int min_bytesperline;
727 v4l_bound_align_image(&f->fmt.pix.width, MIN_WIDTH, MAX_WIDTH, 2,
728 &f->fmt.pix.height, MIN_HEIGHT, MAX_HEIGHT, 0,
731 min_bytesperline = bytes_per_line(f->fmt.pix.width, fmt,
732 f->fmt.pix.pixelformat);
734 if (f->fmt.pix.bytesperline > min_bytesperline &&
735 f->fmt.pix.bytesperline <= MAX_BYTESPERLINE)
736 f->fmt.pix.bytesperline = ALIGN(f->fmt.pix.bytesperline,
739 f->fmt.pix.bytesperline = min_bytesperline;
741 f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
743 unicam_dbg(3, dev, "%s: fourcc: %08X size: %dx%d bpl:%d img_size:%d\n",
745 f->fmt.pix.pixelformat,
746 f->fmt.pix.width, f->fmt.pix.height,
747 f->fmt.pix.bytesperline, f->fmt.pix.sizeimage);
752 static int unicam_reset_format(struct unicam_node *node)
754 struct unicam_device *dev = node->dev;
755 struct v4l2_mbus_framefmt mbus_fmt;
758 if (dev->sensor_embedded_data || node->pad_id != METADATA_PAD) {
759 ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
761 unicam_err(dev, "Failed to get_format - ret %d\n", ret);
765 if (mbus_fmt.code != node->fmt->code) {
766 unicam_err(dev, "code mismatch - fmt->code %08x, mbus_fmt.code %08x\n",
767 node->fmt->code, mbus_fmt.code);
772 if (node->pad_id == IMAGE_PAD) {
773 v4l2_fill_pix_format(&node->v_fmt.fmt.pix, &mbus_fmt);
774 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
775 unicam_calc_format_size_bpl(dev, node->fmt, &node->v_fmt);
777 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
778 node->v_fmt.fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
779 if (dev->sensor_embedded_data) {
780 node->v_fmt.fmt.meta.buffersize =
781 mbus_fmt.width * mbus_fmt.height;
782 node->embedded_lines = mbus_fmt.height;
784 node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
785 node->embedded_lines = 1;
789 node->m_fmt = mbus_fmt;
793 static void unicam_wr_dma_addr(struct unicam_device *dev, dma_addr_t dmaaddr,
794 unsigned int buffer_size, int pad_id)
796 dma_addr_t endaddr = dmaaddr + buffer_size;
798 if (pad_id == IMAGE_PAD) {
799 reg_write(dev, UNICAM_IBSA0, dmaaddr);
800 reg_write(dev, UNICAM_IBEA0, endaddr);
802 reg_write(dev, UNICAM_DBSA0, dmaaddr);
803 reg_write(dev, UNICAM_DBEA0, endaddr);
807 static unsigned int unicam_get_lines_done(struct unicam_device *dev)
809 dma_addr_t start_addr, cur_addr;
810 unsigned int stride = dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline;
811 struct unicam_buffer *frm = dev->node[IMAGE_PAD].cur_frm;
816 start_addr = vb2_dma_contig_plane_dma_addr(&frm->vb.vb2_buf, 0);
817 cur_addr = reg_read(dev, UNICAM_IBWP);
818 return (unsigned int)(cur_addr - start_addr) / stride;
821 static void unicam_schedule_next_buffer(struct unicam_node *node)
823 struct unicam_device *dev = node->dev;
824 struct unicam_buffer *buf;
828 buf = list_first_entry(&node->dma_queue, struct unicam_buffer, list);
829 node->next_frm = buf;
830 list_del(&buf->list);
832 addr = vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
833 size = (node->pad_id == IMAGE_PAD) ?
834 node->v_fmt.fmt.pix.sizeimage :
835 node->v_fmt.fmt.meta.buffersize;
837 unicam_wr_dma_addr(dev, addr, size, node->pad_id);
840 static void unicam_schedule_dummy_buffer(struct unicam_node *node)
842 struct unicam_device *dev = node->dev;
844 unicam_dbg(3, dev, "Scheduling dummy buffer for node %d\n",
847 unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr, DUMMY_BUF_SIZE,
849 node->next_frm = NULL;
852 static void unicam_process_buffer_complete(struct unicam_node *node,
853 unsigned int sequence)
855 node->cur_frm->vb.field = node->m_fmt.field;
856 node->cur_frm->vb.sequence = sequence;
858 vb2_buffer_done(&node->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE);
861 static void unicam_queue_event_sof(struct unicam_device *unicam)
863 struct v4l2_event event = {
864 .type = V4L2_EVENT_FRAME_SYNC,
865 .u.frame_sync.frame_sequence = unicam->sequence,
868 v4l2_event_queue(&unicam->node[IMAGE_PAD].video_dev, &event);
872 * unicam_isr : ISR handler for unicam capture
874 * @dev_id: dev_id ptr
876 * It changes status of the captured buffer, takes next buffer from the queue
877 * and sets its address in unicam registers
879 static irqreturn_t unicam_isr(int irq, void *dev)
881 struct unicam_device *unicam = dev;
882 unsigned int lines_done = unicam_get_lines_done(dev);
883 unsigned int sequence = unicam->sequence;
889 sta = reg_read(unicam, UNICAM_STA);
890 /* Write value back to clear the interrupts */
891 reg_write(unicam, UNICAM_STA, sta);
893 ista = reg_read(unicam, UNICAM_ISTA);
894 /* Write value back to clear the interrupts */
895 reg_write(unicam, UNICAM_ISTA, ista);
897 unicam_dbg(3, unicam, "ISR: ISTA: 0x%X, STA: 0x%X, sequence %d, lines done %d",
898 ista, sta, sequence, lines_done);
900 if (!(sta & (UNICAM_IS | UNICAM_PI0)))
904 * Look for either the Frame End interrupt or the Packet Capture status
905 * to signal a frame end.
907 fe = (ista & UNICAM_FEI || sta & UNICAM_PI0);
910 * We must run the frame end handler first. If we have a valid next_frm
911 * and we get a simultaneout FE + FS interrupt, running the FS handler
912 * first would null out the next_frm ptr and we would have lost the
917 * Ensure we have swapped buffers already as we can't
918 * stop the peripheral. If no buffer is available, use a
919 * dummy buffer to dump out frames until we get a new buffer
922 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
923 struct unicam_node *node = &unicam->node[i];
925 if (!node->streaming)
929 * If cur_frm == next_frm, it means we have not had
930 * a chance to swap buffers, likely due to having
931 * multiple interrupts occurring simultaneously (like FE
932 * + FS + LS). In this case, we cannot signal the buffer
933 * as complete, as the HW will reuse that buffer.
935 if (node->cur_frm && node->cur_frm != node->next_frm) {
937 * This condition checks if FE + FS for the same
938 * frame has occurred. In such cases, we cannot
939 * return out the frame, as no buffer handling
940 * or timestamping has yet been done as part of
943 if (!node->cur_frm->vb.vb2_buf.timestamp) {
944 unicam_dbg(2, unicam, "ISR: FE without FS, dropping frame\n");
948 unicam_process_buffer_complete(node, sequence);
949 node->cur_frm = node->next_frm;
950 node->next_frm = NULL;
952 node->cur_frm = node->next_frm;
958 if (ista & UNICAM_FSI) {
960 * Timestamp is to be when the first data byte was captured,
964 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
965 if (!unicam->node[i].streaming)
968 if (unicam->node[i].cur_frm)
969 unicam->node[i].cur_frm->vb.vb2_buf.timestamp =
972 unicam_dbg(2, unicam, "ISR: [%d] Dropping frame, buffer not available at FS\n",
975 * Set the next frame output to go to a dummy frame
976 * if no buffer currently queued.
978 if (!unicam->node[i].next_frm ||
979 unicam->node[i].next_frm == unicam->node[i].cur_frm) {
980 unicam_schedule_dummy_buffer(&unicam->node[i]);
981 } else if (unicam->node[i].cur_frm) {
983 * Repeated FS without FE. Hardware will have
984 * swapped buffers, but the cur_frm doesn't
985 * contain valid data. Return cur_frm to the
988 spin_lock(&unicam->node[i].dma_queue_lock);
989 list_add_tail(&unicam->node[i].cur_frm->list,
990 &unicam->node[i].dma_queue);
991 spin_unlock(&unicam->node[i].dma_queue_lock);
992 unicam->node[i].cur_frm = unicam->node[i].next_frm;
993 unicam->node[i].next_frm = NULL;
997 unicam_queue_event_sof(unicam);
1001 * Cannot swap buffer at frame end, there may be a race condition
1002 * where the HW does not actually swap it if the new frame has
1005 if (ista & (UNICAM_FSI | UNICAM_LCI) && !fe) {
1006 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
1007 if (!unicam->node[i].streaming)
1010 spin_lock(&unicam->node[i].dma_queue_lock);
1011 if (!list_empty(&unicam->node[i].dma_queue) &&
1012 !unicam->node[i].next_frm)
1013 unicam_schedule_next_buffer(&unicam->node[i]);
1014 spin_unlock(&unicam->node[i].dma_queue_lock);
1021 /* V4L2 Common IOCTLs */
1022 static int unicam_querycap(struct file *file, void *priv,
1023 struct v4l2_capability *cap)
1025 struct unicam_node *node = video_drvdata(file);
1026 struct unicam_device *dev = node->dev;
1028 strscpy(cap->driver, UNICAM_MODULE_NAME, sizeof(cap->driver));
1029 strscpy(cap->card, UNICAM_MODULE_NAME, sizeof(cap->card));
1031 snprintf(cap->bus_info, sizeof(cap->bus_info),
1032 "platform:%s", dev_name(&dev->pdev->dev));
1034 cap->capabilities |= V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_META_CAPTURE;
1039 static int unicam_log_status(struct file *file, void *fh)
1041 struct unicam_node *node = video_drvdata(file);
1042 struct unicam_device *dev = node->dev;
1045 /* status for sub devices */
1046 v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
1048 unicam_info(dev, "-----Receiver status-----\n");
1049 unicam_info(dev, "V4L2 width/height: %ux%u\n",
1050 node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
1051 unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
1052 unicam_info(dev, "V4L2 format: %08x\n",
1053 node->v_fmt.fmt.pix.pixelformat);
1054 reg = reg_read(dev, UNICAM_IPIPE);
1055 unicam_info(dev, "Unpacking/packing: %u / %u\n",
1056 get_field(reg, UNICAM_PUM_MASK),
1057 get_field(reg, UNICAM_PPM_MASK));
1058 unicam_info(dev, "----Live data----\n");
1059 unicam_info(dev, "Programmed stride: %4u\n",
1060 reg_read(dev, UNICAM_IBLS));
1061 unicam_info(dev, "Detected resolution: %ux%u\n",
1062 reg_read(dev, UNICAM_IHSTA),
1063 reg_read(dev, UNICAM_IVSTA));
1064 unicam_info(dev, "Write pointer: %08x\n",
1065 reg_read(dev, UNICAM_IBWP));
1070 /* V4L2 Video Centric IOCTLs */
1071 static int unicam_enum_fmt_vid_cap(struct file *file, void *priv,
1072 struct v4l2_fmtdesc *f)
1074 struct unicam_node *node = video_drvdata(file);
1075 struct unicam_device *dev = node->dev;
1076 unsigned int index = 0;
1080 if (node->pad_id != IMAGE_PAD)
1083 for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
1084 struct v4l2_subdev_mbus_code_enum mbus_code = {
1087 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1089 const struct unicam_fmt *fmt;
1091 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
1095 "subdev->enum_mbus_code idx %d returned %d - index invalid\n",
1100 fmt = find_format_by_code(mbus_code.code);
1103 if (index == f->index) {
1104 f->pixelformat = fmt->fourcc;
1109 if (fmt->repacked_fourcc) {
1110 if (index == f->index) {
1111 f->pixelformat = fmt->repacked_fourcc;
1122 static int unicam_g_fmt_vid_cap(struct file *file, void *priv,
1123 struct v4l2_format *f)
1125 struct v4l2_mbus_framefmt mbus_fmt = {0};
1126 struct unicam_node *node = video_drvdata(file);
1127 struct unicam_device *dev = node->dev;
1128 const struct unicam_fmt *fmt = NULL;
1131 if (node->pad_id != IMAGE_PAD)
1135 * If a flip has occurred in the sensor, the fmt code might have
1136 * changed. So we will need to re-fetch the format from the subdevice.
1138 ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
1142 /* Find the V4L2 format from mbus code. We must match a known format. */
1143 fmt = find_format_by_code(mbus_fmt.code);
1147 if (node->fmt != fmt) {
1149 * The sensor format has changed so the pixelformat needs to
1150 * be updated. Try and retain the packed/unpacked choice if
1153 if (node->fmt->repacked_fourcc ==
1154 node->v_fmt.fmt.pix.pixelformat)
1155 /* Using the repacked format */
1156 node->v_fmt.fmt.pix.pixelformat = fmt->repacked_fourcc;
1158 /* Using the native format */
1159 node->v_fmt.fmt.pix.pixelformat = fmt->fourcc;
1169 static const struct unicam_fmt *
1170 get_first_supported_format(struct unicam_device *dev)
1172 struct v4l2_subdev_mbus_code_enum mbus_code;
1173 const struct unicam_fmt *fmt = NULL;
1177 for (i = 0; ret != -EINVAL && ret != -ENOIOCTLCMD; ++i) {
1178 memset(&mbus_code, 0, sizeof(mbus_code));
1179 mbus_code.index = i;
1180 mbus_code.pad = IMAGE_PAD;
1181 mbus_code.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1183 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1187 "subdev->enum_mbus_code idx %u returned %d - continue\n",
1192 unicam_dbg(2, dev, "subdev %s: code: 0x%08x idx: %u\n",
1193 dev->sensor->name, mbus_code.code, i);
1195 fmt = find_format_by_code(mbus_code.code);
1196 unicam_dbg(2, dev, "fmt 0x%08x returned as %p, V4L2 FOURCC 0x%08x, csi_dt 0x%02x\n",
1197 mbus_code.code, fmt, fmt ? fmt->fourcc : 0,
1198 fmt ? fmt->csi_dt : 0);
1206 static int unicam_try_fmt_vid_cap(struct file *file, void *priv,
1207 struct v4l2_format *f)
1209 struct unicam_node *node = video_drvdata(file);
1210 struct unicam_device *dev = node->dev;
1211 struct v4l2_subdev_format sd_fmt = {
1212 .which = V4L2_SUBDEV_FORMAT_TRY,
1215 struct v4l2_mbus_framefmt *mbus_fmt = &sd_fmt.format;
1216 const struct unicam_fmt *fmt;
1219 if (node->pad_id != IMAGE_PAD)
1222 fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1225 * Pixel format not supported by unicam. Choose the first
1226 * supported format, and let the sensor choose something else.
1228 unicam_dbg(3, dev, "Fourcc format (0x%08x) not found. Use first format.\n",
1229 f->fmt.pix.pixelformat);
1232 f->fmt.pix.pixelformat = fmt->fourcc;
1235 v4l2_fill_mbus_format(mbus_fmt, &f->fmt.pix, fmt->code);
1237 * No support for receiving interlaced video, so never
1238 * request it from the sensor subdev.
1240 mbus_fmt->field = V4L2_FIELD_NONE;
1242 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
1244 if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1247 if (mbus_fmt->field != V4L2_FIELD_NONE)
1248 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1250 v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1251 if (mbus_fmt->code != fmt->code) {
1252 /* Sensor has returned an alternate format */
1253 fmt = find_format_by_code(mbus_fmt->code);
1256 * The alternate format is one unicam can't support.
1257 * Find the first format that is supported by both, and
1260 fmt = get_first_supported_format(dev);
1261 mbus_fmt->code = fmt->code;
1263 ret = v4l2_subdev_call(dev->sensor, pad, set_fmt,
1264 dev->sensor_state, &sd_fmt);
1265 if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1268 if (mbus_fmt->field != V4L2_FIELD_NONE)
1269 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1271 v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1273 if (mbus_fmt->code != fmt->code) {
1275 * We've set a format that the sensor reports
1276 * as being supported, but it refuses to set it.
1277 * Not much else we can do.
1278 * Assume that the sensor driver may accept the
1279 * format when it is set (rather than tried).
1281 unicam_err(dev, "Sensor won't accept default format, and Unicam can't support sensor default\n");
1286 f->fmt.pix.pixelformat = fmt->fourcc;
1288 f->fmt.pix.pixelformat = fmt->repacked_fourcc;
1291 return unicam_calc_format_size_bpl(dev, fmt, f);
/*
 * VIDIOC_S_FMT handler for the image node (non-Media-Controller mode).
 * Runs try_fmt first to negotiate with the sensor, looks up the resolved
 * pixelformat, programs the sensor pad via __subdev_set_format(), then
 * records the negotiated format in the node.
 * NOTE(review): several interior lines (braces, error returns) appear to
 * have been lost in extraction — restore against the upstream driver.
 */
1294 static int unicam_s_fmt_vid_cap(struct file *file, void *priv,
1295 struct v4l2_format *f)
1297 struct unicam_node *node = video_drvdata(file);
1298 struct unicam_device *dev = node->dev;
1299 struct vb2_queue *q = &node->buffer_queue;
1300 struct v4l2_mbus_framefmt mbus_fmt = {0};
1301 const struct unicam_fmt *fmt;
/* Negotiate the requested format with the sensor first. */
1307 ret = unicam_try_fmt_vid_cap(file, priv, f);
1311 fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1314 * Unknown pixel format - adopt a default.
1315 * This shouldn't happen as try_fmt should have resolved any
1318 fmt = get_first_supported_format(dev);
1321 * It shouldn't be possible to get here with no
1325 f->fmt.pix.pixelformat = fmt->fourcc;
/* Program the sensor pad with the matching media-bus code. */
1329 v4l2_fill_mbus_format(&mbus_fmt, &f->fmt.pix, fmt->code);
1331 ret = __subdev_set_format(dev, &mbus_fmt, node->pad_id);
1333 unicam_dbg(3, dev, "%s __subdev_set_format failed %d\n",
1338 /* Just double check nothing has gone wrong */
1339 if (mbus_fmt.code != fmt->code) {
1341 "%s subdev changed format on us, this should not happen\n",
/* Commit the negotiated format to the node's state. */
1347 node->v_fmt.fmt.pix.pixelformat = f->fmt.pix.pixelformat;
1348 node->v_fmt.fmt.pix.bytesperline = f->fmt.pix.bytesperline;
1349 unicam_reset_format(node);
1352 "%s %dx%d, mbus_fmt 0x%08X, V4L2 pix 0x%08X.\n",
1353 __func__, node->v_fmt.fmt.pix.width,
1354 node->v_fmt.fmt.pix.height, mbus_fmt.code,
1355 node->v_fmt.fmt.pix.pixelformat);
/*
 * VIDIOC_ENUM_FMT for the metadata node. Only one metadata format exists
 * (index 0): either the sensor's own embedded-data bus code, or the
 * generic MEDIA_BUS_FMT_SENSOR_DATA fallback when the sensor does not
 * provide embedded data.
 */
1362 static int unicam_enum_fmt_meta_cap(struct file *file, void *priv,
1363 struct v4l2_fmtdesc *f)
1365 struct unicam_node *node = video_drvdata(file);
1366 struct unicam_device *dev = node->dev;
1367 const struct unicam_fmt *fmt;
/* Metadata node exposes exactly one format. */
1371 if (node->pad_id != METADATA_PAD || f->index != 0)
1374 if (dev->sensor_embedded_data) {
1375 struct v4l2_subdev_mbus_code_enum mbus_code = {
1377 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1378 .pad = METADATA_PAD,
/* Ask the sensor which bus code its metadata pad produces. */
1381 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1385 "subdev->enum_mbus_code idx 0 returned %d - index invalid\n",
1390 code = mbus_code.code;
1392 code = MEDIA_BUS_FMT_SENSOR_DATA;
1395 fmt = find_format_by_code(code);
1397 f->pixelformat = fmt->fourcc;
/*
 * VIDIOC_G_FMT for the metadata node: returns the node's stored meta
 * format. Only valid on the metadata pad.
 * NOTE(review): the body (format copy and return) appears truncated by
 * extraction — confirm against the upstream driver.
 */
1402 static int unicam_g_fmt_meta_cap(struct file *file, void *priv,
1403 struct v4l2_format *f)
1405 struct unicam_node *node = video_drvdata(file);
1407 if (node->pad_id != METADATA_PAD)
/*
 * VIDIOC_ENUMINPUT: a single camera input (index 0). Capabilities are
 * derived from what the sensor subdev supports: DV timings take priority
 * over analogue standards; otherwise no extra capabilities.
 */
1415 static int unicam_enum_input(struct file *file, void *priv,
1416 struct v4l2_input *inp)
1418 struct unicam_node *node = video_drvdata(file);
1419 struct unicam_device *dev = node->dev;
1422 if (inp->index != 0)
1425 inp->type = V4L2_INPUT_TYPE_CAMERA;
1426 if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
1427 inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
1429 } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
1430 inp->capabilities = V4L2_IN_CAP_STD;
/* Report the sensor's supported norms; fall back to "all". */
1431 if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
1432 inp->std = V4L2_STD_ALL;
1434 inp->capabilities = 0;
/* Pass through the sensor's signal status when it can report one. */
1438 if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
1439 ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
1445 snprintf(inp->name, sizeof(inp->name), "Camera 0");
/*
 * VIDIOC_G_INPUT: only one input exists, so this presumably reports
 * index 0 — body lost in extraction, TODO confirm against upstream.
 */
1449 static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
/*
 * VIDIOC_S_INPUT: only input 0 is valid; there is no routing support yet
 * (see the FIXME below).
 */
1456 static int unicam_s_input(struct file *file, void *priv, unsigned int i)
1459 * FIXME: Ideally we would like to be able to query the source
1460 * subdevice for information over the input connectors it supports,
1461 * and map that through in to a call to video_ops->s_routing.
1462 * There is no infrastructure support for defining that within
1463 * devicetree at present. Until that is implemented we can't
1464 * map a user physical connector number to s_routing input number.
1472 static int unicam_querystd(struct file *file, void *priv,
1475 struct unicam_node *node = video_drvdata(file);
1476 struct unicam_device *dev = node->dev;
1478 return v4l2_subdev_call(dev->sensor, video, querystd, std);
1481 static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
1483 struct unicam_node *node = video_drvdata(file);
1484 struct unicam_device *dev = node->dev;
1486 return v4l2_subdev_call(dev->sensor, video, g_std, std);
1489 static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
1491 struct unicam_node *node = video_drvdata(file);
1492 struct unicam_device *dev = node->dev;
1494 v4l2_std_id current_std;
1496 ret = v4l2_subdev_call(dev->sensor, video, g_std, ¤t_std);
1500 if (std == current_std)
1503 if (vb2_is_busy(&node->buffer_queue))
1506 ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
1508 /* Force recomputation of bytesperline */
1509 node->v_fmt.fmt.pix.bytesperline = 0;
1511 unicam_reset_format(node);
1516 static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1518 struct unicam_node *node = video_drvdata(file);
1519 struct unicam_device *dev = node->dev;
1521 return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
1524 static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1526 struct unicam_node *node = video_drvdata(file);
1527 struct unicam_device *dev = node->dev;
1529 return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
1532 static int unicam_s_selection(struct file *file, void *priv,
1533 struct v4l2_selection *sel)
1535 struct unicam_node *node = video_drvdata(file);
1536 struct unicam_device *dev = node->dev;
1537 struct v4l2_subdev_selection sdsel = {
1538 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1539 .target = sel->target,
1540 .flags = sel->flags,
1544 if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1547 return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
1550 static int unicam_g_selection(struct file *file, void *priv,
1551 struct v4l2_selection *sel)
1553 struct unicam_node *node = video_drvdata(file);
1554 struct unicam_device *dev = node->dev;
1555 struct v4l2_subdev_selection sdsel = {
1556 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1557 .target = sel->target,
1561 if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1564 ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
/*
 * VIDIOC_ENUM_FRAMESIZES (non-MC mode): forward the enumeration to the
 * sensor's source pad and report each result as a discrete size.
 */
1571 static int unicam_enum_framesizes(struct file *file, void *priv,
1572 struct v4l2_frmsizeenum *fsize)
1574 struct unicam_node *node = video_drvdata(file);
1575 struct unicam_device *dev = node->dev;
1576 const struct unicam_fmt *fmt;
1577 struct v4l2_subdev_frame_size_enum fse;
1580 /* check for valid format */
1581 fmt = find_format_by_pix(dev, fsize->pixel_format);
1583 unicam_dbg(3, dev, "Invalid pixel code: %x\n",
1584 fsize->pixel_format);
/* Query the sensor with the matching media-bus code. */
1587 fse.code = fmt->code;
1589 fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1590 fse.index = fsize->index;
1591 fse.pad = node->src_pad_id;
1593 ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
1597 unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
1598 __func__, fse.index, fse.code, fse.min_width, fse.max_width,
1599 fse.min_height, fse.max_height);
/* Report the max of the returned range as a discrete size. */
1601 fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1602 fsize->discrete.width = fse.max_width;
1603 fsize->discrete.height = fse.max_height;
/*
 * VIDIOC_ENUM_FRAMEINTERVALS: forward to the sensor's source pad for the
 * given size/format; results are reported as discrete intervals.
 */
1608 static int unicam_enum_frameintervals(struct file *file, void *priv,
1609 struct v4l2_frmivalenum *fival)
1611 struct unicam_node *node = video_drvdata(file);
1612 struct unicam_device *dev = node->dev;
1613 const struct unicam_fmt *fmt;
1614 struct v4l2_subdev_frame_interval_enum fie = {
1615 .index = fival->index,
1616 .pad = node->src_pad_id,
1617 .width = fival->width,
1618 .height = fival->height,
1619 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
/* Reject pixel formats unicam does not support. */
1623 fmt = find_format_by_pix(dev, fival->pixel_format);
1627 fie.code = fmt->code;
1628 ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
1633 fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1634 fival->discrete = fie.interval;
1639 static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1641 struct unicam_node *node = video_drvdata(file);
1642 struct unicam_device *dev = node->dev;
1644 return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
1647 static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1649 struct unicam_node *node = video_drvdata(file);
1650 struct unicam_device *dev = node->dev;
1652 return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
1655 static int unicam_g_dv_timings(struct file *file, void *priv,
1656 struct v4l2_dv_timings *timings)
1658 struct unicam_node *node = video_drvdata(file);
1659 struct unicam_device *dev = node->dev;
1661 return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
1664 static int unicam_s_dv_timings(struct file *file, void *priv,
1665 struct v4l2_dv_timings *timings)
1667 struct unicam_node *node = video_drvdata(file);
1668 struct unicam_device *dev = node->dev;
1669 struct v4l2_dv_timings current_timings;
1672 ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
1678 if (v4l2_match_dv_timings(timings, ¤t_timings, 0, false))
1681 if (vb2_is_busy(&node->buffer_queue))
1684 ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
1686 /* Force recomputation of bytesperline */
1687 node->v_fmt.fmt.pix.bytesperline = 0;
1689 unicam_reset_format(node);
1694 static int unicam_query_dv_timings(struct file *file, void *priv,
1695 struct v4l2_dv_timings *timings)
1697 struct unicam_node *node = video_drvdata(file);
1698 struct unicam_device *dev = node->dev;
1700 return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
1703 static int unicam_enum_dv_timings(struct file *file, void *priv,
1704 struct v4l2_enum_dv_timings *timings)
1706 struct unicam_node *node = video_drvdata(file);
1707 struct unicam_device *dev = node->dev;
1710 timings->pad = node->src_pad_id;
1711 ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
1712 timings->pad = node->pad_id;
1717 static int unicam_dv_timings_cap(struct file *file, void *priv,
1718 struct v4l2_dv_timings_cap *cap)
1720 struct unicam_node *node = video_drvdata(file);
1721 struct unicam_device *dev = node->dev;
1724 cap->pad = node->src_pad_id;
1725 ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
1726 cap->pad = node->pad_id;
1731 static int unicam_subscribe_event(struct v4l2_fh *fh,
1732 const struct v4l2_event_subscription *sub)
1734 switch (sub->type) {
1735 case V4L2_EVENT_FRAME_SYNC:
1736 return v4l2_event_subscribe(fh, sub, 2, NULL);
1737 case V4L2_EVENT_SOURCE_CHANGE:
1738 return v4l2_event_subscribe(fh, sub, 4, NULL);
1741 return v4l2_ctrl_subscribe_event(fh, sub);
/*
 * v4l2_device notify callback: forwards V4L2_DEVICE_NOTIFY_EVENT
 * notifications from the sensor subdev to the image node's video device
 * event queue. Other notification types are ignored.
 */
1744 static void unicam_notify(struct v4l2_subdev *sd,
1745 unsigned int notification, void *arg)
1747 struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
1749 switch (notification) {
1750 case V4L2_DEVICE_NOTIFY_EVENT:
1751 v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
1758 /* unicam capture ioctl operations */
/*
 * ioctl table for the non-Media-Controller (legacy) operating mode.
 * Note that s_fmt/try_fmt on the metadata node deliberately map to the
 * g_fmt handler — the metadata format is fixed and cannot be changed.
 */
1759 static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
1760 .vidioc_querycap = unicam_querycap,
1761 .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
1762 .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
1763 .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
1764 .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
1766 .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
1767 .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
1768 .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
1769 .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
1771 .vidioc_enum_input = unicam_enum_input,
1772 .vidioc_g_input = unicam_g_input,
1773 .vidioc_s_input = unicam_s_input,
1775 .vidioc_querystd = unicam_querystd,
1776 .vidioc_s_std = unicam_s_std,
1777 .vidioc_g_std = unicam_g_std,
1779 .vidioc_g_edid = unicam_g_edid,
1780 .vidioc_s_edid = unicam_s_edid,
1782 .vidioc_enum_framesizes = unicam_enum_framesizes,
1783 .vidioc_enum_frameintervals = unicam_enum_frameintervals,
1785 .vidioc_g_selection = unicam_g_selection,
1786 .vidioc_s_selection = unicam_s_selection,
1788 .vidioc_g_parm = unicam_g_parm,
1789 .vidioc_s_parm = unicam_s_parm,
1791 .vidioc_s_dv_timings = unicam_s_dv_timings,
1792 .vidioc_g_dv_timings = unicam_g_dv_timings,
1793 .vidioc_query_dv_timings = unicam_query_dv_timings,
1794 .vidioc_enum_dv_timings = unicam_enum_dv_timings,
1795 .vidioc_dv_timings_cap = unicam_dv_timings_cap,
/* Buffer handling is delegated entirely to videobuf2. */
1797 .vidioc_reqbufs = vb2_ioctl_reqbufs,
1798 .vidioc_create_bufs = vb2_ioctl_create_bufs,
1799 .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1800 .vidioc_querybuf = vb2_ioctl_querybuf,
1801 .vidioc_qbuf = vb2_ioctl_qbuf,
1802 .vidioc_dqbuf = vb2_ioctl_dqbuf,
1803 .vidioc_expbuf = vb2_ioctl_expbuf,
1804 .vidioc_streamon = vb2_ioctl_streamon,
1805 .vidioc_streamoff = vb2_ioctl_streamoff,
1807 .vidioc_log_status = unicam_log_status,
1808 .vidioc_subscribe_event = unicam_subscribe_event,
1809 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1812 /* V4L2 Media Controller Centric IOCTLs */
/*
 * VIDIOC_ENUM_FMT (MC mode): walk the global formats[] table, skipping
 * metadata and mc_skip entries, counting both the native fourcc and the
 * repacked fourcc of each entry as separate enumeration indices.
 */
1814 static int unicam_mc_enum_fmt_vid_cap(struct file *file, void *priv,
1815 struct v4l2_fmtdesc *f)
1819 for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
/* Optional filter by media-bus code. */
1820 if (f->mbus_code && formats[i].code != f->mbus_code)
1822 if (formats[i].mc_skip || formats[i].metadata_fmt)
1825 if (formats[i].fourcc) {
1826 if (j == f->index) {
1827 f->pixelformat = formats[i].fourcc;
1828 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/* The repacked variant counts as its own index. */
1833 if (formats[i].repacked_fourcc) {
1834 if (j == f->index) {
1835 f->pixelformat = formats[i].repacked_fourcc;
1836 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/*
 * VIDIOC_G_FMT (MC mode): only valid on the image node; presumably
 * returns the node's stored format — the tail of the body appears lost
 * in extraction, TODO confirm against upstream.
 */
1846 static int unicam_mc_g_fmt_vid_cap(struct file *file, void *priv,
1847 struct v4l2_format *f)
1849 struct unicam_node *node = video_drvdata(file);
1851 if (node->pad_id != IMAGE_PAD)
/*
 * Common try-format logic for MC mode: resolve the pixelformat (falling
 * back to a default when unknown), compute bytesperline/sizeimage, force
 * progressive scan, and normalise the colorspace-related fields to a
 * combination the format supports. Optionally returns the resolved
 * format entry through ret_fmt.
 */
1859 static void unicam_mc_try_fmt(struct unicam_node *node, struct v4l2_format *f,
1860 const struct unicam_fmt **ret_fmt)
1862 struct v4l2_pix_format *v4l2_format = &f->fmt.pix;
1863 struct unicam_device *dev = node->dev;
1864 const struct unicam_fmt *fmt;
1868 * Default to the first format if the requested pixel format code isn't
1871 fmt = find_format_by_pix(dev, v4l2_format->pixelformat);
1874 v4l2_format->pixelformat = fmt->fourcc;
1877 unicam_calc_format_size_bpl(dev, fmt, f);
/* Unicam has no interlaced support. */
1879 if (v4l2_format->field == V4L2_FIELD_ANY)
1880 v4l2_format->field = V4L2_FIELD_NONE;
/* Clamp the colorspace to one this format supports, then derive the
 * transfer function, encoding, and quantization defaults from it. */
1885 if (v4l2_format->colorspace >= MAX_COLORSPACE ||
1886 !(fmt->valid_colorspaces & (1 << v4l2_format->colorspace))) {
1887 v4l2_format->colorspace = __ffs(fmt->valid_colorspaces);
1889 v4l2_format->xfer_func =
1890 V4L2_MAP_XFER_FUNC_DEFAULT(v4l2_format->colorspace);
1891 v4l2_format->ycbcr_enc =
1892 V4L2_MAP_YCBCR_ENC_DEFAULT(v4l2_format->colorspace);
1893 is_rgb = v4l2_format->colorspace == V4L2_COLORSPACE_SRGB;
1894 v4l2_format->quantization =
1895 V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,
1896 v4l2_format->colorspace,
1897 v4l2_format->ycbcr_enc);
1900 unicam_dbg(3, dev, "%s: %08x %ux%u (bytesperline %u sizeimage %u)\n",
1901 __func__, v4l2_format->pixelformat,
1902 v4l2_format->width, v4l2_format->height,
1903 v4l2_format->bytesperline, v4l2_format->sizeimage);
/*
 * VIDIOC_TRY_FMT (MC mode): thin wrapper over unicam_mc_try_fmt() that
 * discards the resolved format entry.
 */
1906 static int unicam_mc_try_fmt_vid_cap(struct file *file, void *priv,
1907 struct v4l2_format *f)
1909 struct unicam_node *node = video_drvdata(file);
1911 unicam_mc_try_fmt(node, f, NULL);
/*
 * VIDIOC_S_FMT (MC mode): refuse while the queue is busy, then run the
 * common try-format logic and commit the result. NOTE(review): the
 * commit/return tail appears lost in extraction — confirm upstream.
 */
1915 static int unicam_mc_s_fmt_vid_cap(struct file *file, void *priv,
1916 struct v4l2_format *f)
1918 struct unicam_node *node = video_drvdata(file);
1919 struct unicam_device *dev = node->dev;
1920 const struct unicam_fmt *fmt;
1922 if (vb2_is_busy(&node->buffer_queue)) {
1923 unicam_dbg(3, dev, "%s device busy\n", __func__);
1927 unicam_mc_try_fmt(node, f, &fmt);
/*
 * VIDIOC_ENUM_FRAMESIZES (MC mode): a single stepwise entry (index 0)
 * covering the full range unicam can capture, independent of the sensor.
 */
1935 static int unicam_mc_enum_framesizes(struct file *file, void *fh,
1936 struct v4l2_frmsizeenum *fsize)
1938 struct unicam_node *node = video_drvdata(file);
1939 struct unicam_device *dev = node->dev;
1941 if (fsize->index > 0)
1944 if (!find_format_by_pix(dev, fsize->pixel_format)) {
1945 unicam_dbg(3, dev, "Invalid pixel format 0x%08x\n",
1946 fsize->pixel_format);
/* Whole supported range, 1-pixel steps. */
1950 fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
1951 fsize->stepwise.min_width = MIN_WIDTH;
1952 fsize->stepwise.max_width = MAX_WIDTH;
1953 fsize->stepwise.step_width = 1;
1954 fsize->stepwise.min_height = MIN_HEIGHT;
1955 fsize->stepwise.max_height = MAX_HEIGHT;
1956 fsize->stepwise.step_height = 1;
/*
 * VIDIOC_ENUM_FMT on the metadata node (MC mode): enumerate only the
 * entries in formats[] flagged as metadata formats.
 */
1961 static int unicam_mc_enum_fmt_meta_cap(struct file *file, void *priv,
1962 struct v4l2_fmtdesc *f)
1966 for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
1967 if (f->mbus_code && formats[i].code != f->mbus_code)
1969 if (!formats[i].metadata_fmt)
1972 if (formats[i].fourcc) {
1973 if (j == f->index) {
1974 f->pixelformat = formats[i].fourcc;
1975 f->type = V4L2_BUF_TYPE_META_CAPTURE;
/*
 * VIDIOC_G_FMT on the metadata node (MC mode); only valid on the
 * metadata pad. Body tail appears lost in extraction — confirm upstream.
 */
1985 static int unicam_mc_g_fmt_meta_cap(struct file *file, void *priv,
1986 struct v4l2_format *f)
1988 struct unicam_node *node = video_drvdata(file);
1990 if (node->pad_id != METADATA_PAD)
/*
 * VIDIOC_TRY_FMT on the metadata node (MC mode): the format is fixed —
 * always report V4L2_META_FMT_SENSOR_DATA.
 */
1998 static int unicam_mc_try_fmt_meta_cap(struct file *file, void *priv,
1999 struct v4l2_format *f)
2001 struct unicam_node *node = video_drvdata(file);
2003 if (node->pad_id != METADATA_PAD)
2006 f->fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
/*
 * VIDIOC_S_FMT on the metadata node (MC mode): delegates to try_fmt since
 * the metadata format is fixed and cannot actually be changed.
 */
2011 static int unicam_mc_s_fmt_meta_cap(struct file *file, void *priv,
2012 struct v4l2_format *f)
2014 struct unicam_node *node = video_drvdata(file);
2016 if (node->pad_id != METADATA_PAD)
2019 unicam_mc_try_fmt_meta_cap(file, priv, f);
/*
 * ioctl table for Media-Controller mode: no input/std/EDID/DV-timings
 * handling here — in MC mode that negotiation happens on the subdev
 * graph, not through the video node.
 */
2026 static const struct v4l2_ioctl_ops unicam_mc_ioctl_ops = {
2027 .vidioc_querycap = unicam_querycap,
2028 .vidioc_enum_fmt_vid_cap = unicam_mc_enum_fmt_vid_cap,
2029 .vidioc_g_fmt_vid_cap = unicam_mc_g_fmt_vid_cap,
2030 .vidioc_try_fmt_vid_cap = unicam_mc_try_fmt_vid_cap,
2031 .vidioc_s_fmt_vid_cap = unicam_mc_s_fmt_vid_cap,
2033 .vidioc_enum_fmt_meta_cap = unicam_mc_enum_fmt_meta_cap,
2034 .vidioc_g_fmt_meta_cap = unicam_mc_g_fmt_meta_cap,
2035 .vidioc_try_fmt_meta_cap = unicam_mc_try_fmt_meta_cap,
2036 .vidioc_s_fmt_meta_cap = unicam_mc_s_fmt_meta_cap,
2038 .vidioc_enum_framesizes = unicam_mc_enum_framesizes,
/* Buffer handling is delegated entirely to videobuf2. */
2039 .vidioc_reqbufs = vb2_ioctl_reqbufs,
2040 .vidioc_create_bufs = vb2_ioctl_create_bufs,
2041 .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
2042 .vidioc_querybuf = vb2_ioctl_querybuf,
2043 .vidioc_qbuf = vb2_ioctl_qbuf,
2044 .vidioc_dqbuf = vb2_ioctl_dqbuf,
2045 .vidioc_expbuf = vb2_ioctl_expbuf,
2046 .vidioc_streamon = vb2_ioctl_streamon,
2047 .vidioc_streamoff = vb2_ioctl_streamoff,
2049 .vidioc_log_status = unicam_log_status,
2050 .vidioc_subscribe_event = unicam_subscribe_event,
2051 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
/*
 * Fetch the active format of a link's remote pad, but only when that pad
 * belongs to a V4L2 subdevice entity (return path for the non-subdev
 * case appears lost in extraction).
 */
2055 unicam_mc_subdev_link_validate_get_format(struct media_pad *pad,
2056 struct v4l2_subdev_format *fmt)
2058 if (is_media_entity_v4l2_subdev(pad->entity)) {
2059 struct v4l2_subdev *sd =
2060 media_entity_to_v4l2_subdev(pad->entity);
2062 fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
2063 fmt->pad = pad->index;
2064 return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
/*
 * Media link validation for the video nodes: the remote subdev pad's
 * active format must agree with the node's configured V4L2 format.
 * Image node: width/height must match and the bus code must map to the
 * configured pixelformat (native or repacked). Metadata node: the remote
 * format must be MEDIA_BUS_FMT_SENSOR_DATA with width == buffersize and
 * height == 1 (metadata is carried as a single "line" of bytes).
 */
2070 static int unicam_mc_video_link_validate(struct media_link *link)
2072 struct video_device *vd = container_of(link->sink->entity,
2073 struct video_device, entity);
2074 struct unicam_node *node = container_of(vd, struct unicam_node,
2076 struct unicam_device *unicam = node->dev;
2077 struct v4l2_subdev_format source_fmt;
/* An unconnected sink pad cannot be validated. */
2080 if (!media_entity_remote_pad(link->sink->entity->pads)) {
2081 unicam_dbg(1, unicam,
2082 "video node %s pad not connected\n", vd->name);
2086 ret = unicam_mc_subdev_link_validate_get_format(link->source,
2091 if (node->pad_id == IMAGE_PAD) {
2092 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2093 const struct unicam_fmt *fmt;
2095 if (source_fmt.format.width != pix_fmt->width ||
2096 source_fmt.format.height != pix_fmt->height) {
2098 "Wrong width or height %ux%u (remote pad set to %ux%u)\n",
2099 pix_fmt->width, pix_fmt->height,
2100 source_fmt.format.width,
2101 source_fmt.format.height);
/* Bus code must correspond to the configured fourcc. */
2105 fmt = find_format_by_code(source_fmt.format.code);
2107 if (!fmt || (fmt->fourcc != pix_fmt->pixelformat &&
2108 fmt->repacked_fourcc != pix_fmt->pixelformat))
2111 struct v4l2_meta_format *meta_fmt = &node->v_fmt.fmt.meta;
2113 if (source_fmt.format.width != meta_fmt->buffersize ||
2114 source_fmt.format.height != 1 ||
2115 source_fmt.format.code != MEDIA_BUS_FMT_SENSOR_DATA) {
2117 "Wrong metadata width/height/code %ux%u %08x (remote pad set to %ux%u %08x)\n",
2118 meta_fmt->buffersize, 1,
2119 MEDIA_BUS_FMT_SENSOR_DATA,
2120 source_fmt.format.width,
2121 source_fmt.format.height,
2122 source_fmt.format.code);
/* Media entity ops for the video nodes: link validation only. */
2130 static const struct media_entity_operations unicam_mc_entity_ops = {
2131 .link_validate = unicam_mc_video_link_validate,
2134 /* videobuf2 Operations */
/*
 * vb2 queue_setup: enforce a minimum of 3 buffers total and validate
 * that any caller-provided plane size is large enough for the node's
 * current format (image sizeimage or metadata buffersize).
 */
2136 static int unicam_queue_setup(struct vb2_queue *vq,
2137 unsigned int *nbuffers,
2138 unsigned int *nplanes,
2139 unsigned int sizes[],
2140 struct device *alloc_devs[])
2142 struct unicam_node *node = vb2_get_drv_priv(vq);
2143 struct unicam_device *dev = node->dev;
2144 unsigned int size = node->pad_id == IMAGE_PAD ?
2145 node->v_fmt.fmt.pix.sizeimage :
2146 node->v_fmt.fmt.meta.buffersize;
/* Hardware ping-pongs between buffers; keep at least 3 around. */
2148 if (vq->num_buffers + *nbuffers < 3)
2149 *nbuffers = 3 - vq->num_buffers;
2152 if (sizes[0] < size) {
2153 unicam_err(dev, "sizes[0] %i < size %u\n", sizes[0],
/*
 * vb2 buf_prepare: verify a format has been negotiated and that the
 * plane is large enough for it, then set the payload size.
 */
2166 static int unicam_buffer_prepare(struct vb2_buffer *vb)
2168 struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2169 struct unicam_device *dev = node->dev;
2170 struct unicam_buffer *buf = to_unicam_buffer(vb);
/* A buffer without a negotiated format is a driver bug. */
2173 if (WARN_ON(!node->fmt))
2176 size = node->pad_id == IMAGE_PAD ? node->v_fmt.fmt.pix.sizeimage :
2177 node->v_fmt.fmt.meta.buffersize;
2178 if (vb2_plane_size(vb, 0) < size) {
2179 unicam_err(dev, "data will not fit into plane (%lu < %lu)\n",
2180 vb2_plane_size(vb, 0), size);
2184 vb2_set_plane_payload(&buf->vb.vb2_buf, 0, size);
2188 static void unicam_buffer_queue(struct vb2_buffer *vb)
2190 struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2191 struct unicam_buffer *buf = to_unicam_buffer(vb);
2192 unsigned long flags;
2194 spin_lock_irqsave(&node->dma_queue_lock, flags);
2195 list_add_tail(&buf->list, &node->dma_queue);
2196 spin_unlock_irqrestore(&node->dma_queue_lock, flags);
/*
 * Program the Unicam pixel pipeline (UNICAM_IPIPE) pack/unpack stages.
 * When the chosen pixelformat is the format's native fourcc no repacking
 * is needed; otherwise unpack according to bit depth and repack to 16bpp
 * (the only repacked output the hardware supports).
 * NOTE(review): the switch's "case N:" label lines appear lost in
 * extraction — depth values presumably 8/10/12/14/16, confirm upstream.
 */
2199 static void unicam_set_packing_config(struct unicam_device *dev)
2204 if (dev->node[IMAGE_PAD].v_fmt.fmt.pix.pixelformat ==
2205 dev->node[IMAGE_PAD].fmt->fourcc) {
2206 unpack = UNICAM_PUM_NONE;
2207 pack = UNICAM_PPM_NONE;
2209 switch (dev->node[IMAGE_PAD].fmt->depth) {
2211 unpack = UNICAM_PUM_UNPACK8;
2214 unpack = UNICAM_PUM_UNPACK10;
2217 unpack = UNICAM_PUM_UNPACK12;
2220 unpack = UNICAM_PUM_UNPACK14;
2223 unpack = UNICAM_PUM_UNPACK16;
2226 unpack = UNICAM_PUM_NONE;
2230 /* Repacking is always to 16bpp */
2231 pack = UNICAM_PPM_PACK16;
2235 set_field(&val, unpack, UNICAM_PUM_MASK);
2236 set_field(&val, pack, UNICAM_PPM_MASK);
2237 reg_write(dev, UNICAM_IPIPE, val);
/*
 * Program the image identifier register (UNICAM_IDI0) that tells the
 * peripheral which incoming data to treat as image data: CSI-2 uses
 * virtual channel 0 plus the format's data type; the other branch is
 * presumably the CCP2 encoding (0x80 | dt) — confirm upstream.
 */
2240 static void unicam_cfg_image_id(struct unicam_device *dev)
2242 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2243 /* CSI2 mode, hardcode VC 0 for now. */
2244 reg_write(dev, UNICAM_IDI0,
2245 (0 << 6) | dev->node[IMAGE_PAD].fmt->csi_dt);
2248 reg_write(dev, UNICAM_IDI0,
2249 0x80 | dev->node[IMAGE_PAD].fmt->csi_dt);
2253 static void unicam_enable_ed(struct unicam_device *dev)
2255 u32 val = reg_read(dev, UNICAM_DCS);
2257 set_field(&val, 2, UNICAM_EDL_MASK);
2258 /* Do not wrap at the end of the embedded data buffer */
2259 set_field(&val, 0, UNICAM_DBOB);
2261 reg_write(dev, UNICAM_DCS, val);
/*
 * Bring the Unicam receiver out of reset and start capture.
 * Register write order is hardware-mandated: lane clocks, analogue
 * power-up, peripheral reset, Rx mode (CSI-2 vs CCP2), QoS, interrupts,
 * timing, packet compare, clock/data lane terminations, DMA addresses,
 * and finally peripheral enable + buffer pointer load.
 * NOTE(review): many structural lines (braces, else branches) appear
 * lost in extraction; this listing is documentation only.
 */
2264 static void unicam_start_rx(struct unicam_device *dev, dma_addr_t *addr)
/* Line-count interrupt every quarter frame, minimum every 128 lines. */
2266 int line_int_freq = dev->node[IMAGE_PAD].v_fmt.fmt.pix.height >> 2;
2267 unsigned int size, i;
2270 if (line_int_freq < 128)
2271 line_int_freq = 128;
2273 /* Enable lane clocks */
2275 for (i = 0; i < dev->active_data_lanes; i++)
2277 clk_write(dev, val);
2280 reg_write(dev, UNICAM_CTRL, UNICAM_MEM);
2282 /* Enable analogue control, and leave in reset. */
2284 set_field(&val, 7, UNICAM_CTATADJ_MASK);
2285 set_field(&val, 7, UNICAM_PTATADJ_MASK);
2286 reg_write(dev, UNICAM_ANA, val);
2287 usleep_range(1000, 2000);
2289 /* Come out of reset */
2290 reg_write_field(dev, UNICAM_ANA, 0, UNICAM_AR);
2292 /* Peripheral reset */
2293 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2294 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2296 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2298 /* Enable Rx control. */
2299 val = reg_read(dev, UNICAM_CTRL);
2300 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2301 set_field(&val, UNICAM_CPM_CSI2, UNICAM_CPM_MASK);
2302 set_field(&val, UNICAM_DCM_STROBE, UNICAM_DCM_MASK);
2304 set_field(&val, UNICAM_CPM_CCP2, UNICAM_CPM_MASK);
2305 set_field(&val, dev->bus_flags, UNICAM_DCM_MASK);
2307 /* Packet framer timeout */
2308 set_field(&val, 0xf, UNICAM_PFT_MASK);
2309 set_field(&val, 128, UNICAM_OET_MASK);
2310 reg_write(dev, UNICAM_CTRL, val);
/* No horizontal/vertical windowing. */
2312 reg_write(dev, UNICAM_IHWIN, 0);
2313 reg_write(dev, UNICAM_IVWIN, 0);
2315 /* AXI bus access QoS setup */
2316 val = reg_read(dev, UNICAM_PRI);
2317 set_field(&val, 0, UNICAM_BL_MASK);
2318 set_field(&val, 0, UNICAM_BS_MASK);
2319 set_field(&val, 0xe, UNICAM_PP_MASK);
2320 set_field(&val, 8, UNICAM_NP_MASK);
2321 set_field(&val, 2, UNICAM_PT_MASK);
2322 set_field(&val, 1, UNICAM_PE);
2323 reg_write(dev, UNICAM_PRI, val);
2325 reg_write_field(dev, UNICAM_ANA, 0, UNICAM_DDL);
/* Frame start/end interrupts plus periodic line-count interrupt. */
2327 val = UNICAM_FSIE | UNICAM_FEIE | UNICAM_IBOB;
2328 set_field(&val, line_int_freq, UNICAM_LCIE_MASK);
2329 reg_write(dev, UNICAM_ICTL, val);
2330 reg_write(dev, UNICAM_STA, UNICAM_STA_MASK_ALL);
2331 reg_write(dev, UNICAM_ISTA, UNICAM_ISTA_MASK_ALL);
2334 reg_write_field(dev, UNICAM_CLT, 2, UNICAM_CLT1_MASK);
2336 reg_write_field(dev, UNICAM_CLT, 6, UNICAM_CLT2_MASK);
2338 reg_write_field(dev, UNICAM_DLT, 2, UNICAM_DLT1_MASK);
2340 reg_write_field(dev, UNICAM_DLT, 6, UNICAM_DLT2_MASK);
2342 reg_write_field(dev, UNICAM_DLT, 0, UNICAM_DLT3_MASK);
2344 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_SOE);
2346 /* Packet compare setup - required to avoid missing frame ends */
2348 set_field(&val, 1, UNICAM_PCE);
2349 set_field(&val, 1, UNICAM_GI);
2350 set_field(&val, 1, UNICAM_CPH);
2351 set_field(&val, 0, UNICAM_PCVC_MASK);
2352 set_field(&val, 1, UNICAM_PCDT_MASK);
2353 reg_write(dev, UNICAM_CMP0, val);
2355 /* Enable clock lane and set up terminations */
2357 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2359 set_field(&val, 1, UNICAM_CLE);
2360 set_field(&val, 1, UNICAM_CLLPE);
2361 if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
2362 set_field(&val, 1, UNICAM_CLTRE);
2363 set_field(&val, 1, UNICAM_CLHSE);
2367 set_field(&val, 1, UNICAM_CLE);
2368 set_field(&val, 1, UNICAM_CLHSE);
2369 set_field(&val, 1, UNICAM_CLTRE);
2371 reg_write(dev, UNICAM_CLK, val);
2374 * Enable required data lanes with appropriate terminations.
2375 * The same value needs to be written to UNICAM_DATn registers for
2376 * the active lanes, and 0 for inactive ones.
2379 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2381 set_field(&val, 1, UNICAM_DLE);
2382 set_field(&val, 1, UNICAM_DLLPE);
2383 if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
2384 set_field(&val, 1, UNICAM_DLTRE);
2385 set_field(&val, 1, UNICAM_DLHSE);
2389 set_field(&val, 1, UNICAM_DLE);
2390 set_field(&val, 1, UNICAM_DLHSE);
2391 set_field(&val, 1, UNICAM_DLTRE);
2393 reg_write(dev, UNICAM_DAT0, val);
2395 if (dev->active_data_lanes == 1)
2397 reg_write(dev, UNICAM_DAT1, val);
2399 if (dev->max_data_lanes > 2) {
2401 * Registers UNICAM_DAT2 and UNICAM_DAT3 only valid if the
2402 * instance supports more than 2 data lanes.
2404 if (dev->active_data_lanes == 2)
2406 reg_write(dev, UNICAM_DAT2, val);
2408 if (dev->active_data_lanes == 3)
2410 reg_write(dev, UNICAM_DAT3, val);
/* Image stride and first DMA buffer, then pixel pipeline and image id. */
2413 reg_write(dev, UNICAM_IBLS,
2414 dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline);
2415 size = dev->node[IMAGE_PAD].v_fmt.fmt.pix.sizeimage;
2416 unicam_wr_dma_addr(dev, addr[IMAGE_PAD], size, IMAGE_PAD);
2417 unicam_set_packing_config(dev);
2418 unicam_cfg_image_id(dev);
2420 val = reg_read(dev, UNICAM_MISC);
2421 set_field(&val, 1, UNICAM_FL0);
2422 set_field(&val, 1, UNICAM_FL1);
2423 reg_write(dev, UNICAM_MISC, val);
/* Embedded data capture only when the metadata node is streaming. */
2425 if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data) {
2426 size = dev->node[METADATA_PAD].v_fmt.fmt.meta.buffersize;
2427 unicam_enable_ed(dev);
2428 unicam_wr_dma_addr(dev, addr[METADATA_PAD], size, METADATA_PAD);
2431 /* Enable peripheral */
2432 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPE);
2434 /* Load image pointers */
2435 reg_write_field(dev, UNICAM_ICTL, 1, UNICAM_LIP_MASK);
2437 /* Load embedded data buffer pointers if needed */
2438 if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data)
2439 reg_write_field(dev, UNICAM_DCS, 1, UNICAM_LDP);
/*
 * Shut the receiver down: disable analogue lane control and the output
 * engine, drop the data lanes, pulse a peripheral reset, then disable
 * the peripheral, clear embedded-data setup, and (per the trailing
 * comment) gate all lane clocks.
 */
2442 static void unicam_disable(struct unicam_device *dev)
2444 /* Analogue lane control disable */
2445 reg_write_field(dev, UNICAM_ANA, 1, UNICAM_DDL);
2447 /* Stop the output engine */
2448 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_SOE);
2450 /* Disable the data lanes. */
2451 reg_write(dev, UNICAM_DAT0, 0);
2452 reg_write(dev, UNICAM_DAT1, 0);
/* DAT2/DAT3 only exist on instances with more than 2 lanes. */
2454 if (dev->max_data_lanes > 2) {
2455 reg_write(dev, UNICAM_DAT2, 0);
2456 reg_write(dev, UNICAM_DAT3, 0);
2459 /* Peripheral reset */
2460 reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2461 usleep_range(50, 100);
2462 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2464 /* Disable peripheral */
2465 reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2467 /* Clear ED setup */
2468 reg_write(dev, UNICAM_DCS, 0);
2470 /* Disable all lane clocks */
/*
 * Return every buffer the driver holds to vb2 in the given state:
 * everything still on the DMA queue, plus the current and (if distinct)
 * next in-flight frames. Runs under the DMA queue spinlock.
 */
2474 static void unicam_return_buffers(struct unicam_node *node,
2475 enum vb2_buffer_state state)
2477 struct unicam_buffer *buf, *tmp;
2478 unsigned long flags;
2480 spin_lock_irqsave(&node->dma_queue_lock, flags);
2481 list_for_each_entry_safe(buf, tmp, &node->dma_queue, list) {
2482 list_del(&buf->list);
2483 vb2_buffer_done(&buf->vb.vb2_buf, state);
2487 vb2_buffer_done(&node->cur_frm->vb.vb2_buf,
/* Avoid completing the same buffer twice when cur == next. */
2489 if (node->next_frm && node->cur_frm != node->next_frm)
2490 vb2_buffer_done(&node->next_frm->vb.vb2_buf,
2493 node->cur_frm = NULL;
2494 node->next_frm = NULL;
2495 spin_unlock_irqrestore(&node->dma_queue_lock, flags);
/*
 * vb2 start_streaming: defers actual hardware start until all opened
 * nodes are streaming (the metadata node, if open, must be started
 * before capture begins). Then: take a runtime PM reference, start the
 * media pipeline, discover the active CSI-2 lane count from the sensor,
 * enable VPU and CSI clocks, pop the first buffer from each streaming
 * node's DMA queue, program the hardware, and finally start the sensor.
 * Error paths unwind via goto labels (labels themselves lost in
 * extraction).
 */
2498 static int unicam_start_streaming(struct vb2_queue *vq, unsigned int count)
2500 struct unicam_node *node = vb2_get_drv_priv(vq);
2501 struct unicam_device *dev = node->dev;
2502 dma_addr_t buffer_addr[MAX_NODES] = { 0 };
2503 unsigned long flags;
2507 node->streaming = true;
/* Wait until every opened node has reached start_streaming. */
2508 if (!(dev->node[IMAGE_PAD].open && dev->node[IMAGE_PAD].streaming &&
2509 (!dev->node[METADATA_PAD].open ||
2510 dev->node[METADATA_PAD].streaming))) {
2512 * Metadata pad must be enabled before image pad if it is
2515 unicam_dbg(3, dev, "Not all nodes are streaming yet.");
2520 ret = unicam_runtime_get(dev);
2522 unicam_dbg(3, dev, "unicam_runtime_get failed\n");
2526 ret = media_pipeline_start(&node->video_dev.entity, &node->pipe);
2528 unicam_err(dev, "Failed to start media pipeline: %d\n", ret);
/* Default to all DT-configured lanes; the sensor may request fewer. */
2532 dev->active_data_lanes = dev->max_data_lanes;
2534 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2535 struct v4l2_mbus_config mbus_config = { 0 };
2537 ret = v4l2_subdev_call(dev->sensor, pad, get_mbus_config,
2539 if (ret < 0 && ret != -ENOIOCTLCMD) {
2540 unicam_dbg(3, dev, "g_mbus_config failed\n");
2541 goto error_pipeline;
2544 dev->active_data_lanes =
2545 (mbus_config.flags & V4L2_MBUS_CSI2_LANE_MASK) >>
2546 __ffs(V4L2_MBUS_CSI2_LANE_MASK);
2547 if (!dev->active_data_lanes)
2548 dev->active_data_lanes = dev->max_data_lanes;
2549 if (dev->active_data_lanes > dev->max_data_lanes) {
2550 unicam_err(dev, "Device has requested %u data lanes, which is >%u configured in DT\n",
2551 dev->active_data_lanes,
2552 dev->max_data_lanes);
2554 goto error_pipeline;
2558 unicam_dbg(1, dev, "Running with %u data lanes\n",
2559 dev->active_data_lanes);
/* VPU clock must be up before the CSI clock. */
2561 ret = clk_set_min_rate(dev->vpu_clock, MIN_VPU_CLOCK_RATE);
2563 unicam_err(dev, "failed to set up VPU clock\n");
2564 goto error_pipeline;
2567 ret = clk_prepare_enable(dev->vpu_clock);
2569 unicam_err(dev, "Failed to enable VPU clock: %d\n", ret);
2570 goto error_pipeline;
2573 ret = clk_set_rate(dev->clock, 100 * 1000 * 1000);
2575 unicam_err(dev, "failed to set up CSI clock\n");
2579 ret = clk_prepare_enable(dev->clock);
2581 unicam_err(dev, "Failed to enable CSI clock: %d\n", ret);
/* Pop the first queued buffer of every streaming node for DMA. */
2585 for (i = 0; i < ARRAY_SIZE(dev->node); i++) {
2586 struct unicam_buffer *buf;
2588 if (!dev->node[i].streaming)
2591 spin_lock_irqsave(&dev->node[i].dma_queue_lock, flags);
2592 buf = list_first_entry(&dev->node[i].dma_queue,
2593 struct unicam_buffer, list);
2594 dev->node[i].cur_frm = buf;
2595 dev->node[i].next_frm = buf;
2596 list_del(&buf->list);
2597 spin_unlock_irqrestore(&dev->node[i].dma_queue_lock, flags);
2600 vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
2603 unicam_start_rx(dev, buffer_addr);
/* Hardware is ready; start the sensor last. */
2605 ret = v4l2_subdev_call(dev->sensor, video, s_stream, 1);
2607 unicam_err(dev, "stream on failed in subdev\n");
2608 goto err_disable_unicam;
2611 dev->clocks_enabled = true;
/* Error unwind: disable hw, clocks, pipeline, PM, return buffers. */
2615 unicam_disable(dev);
2616 clk_disable_unprepare(dev->clock);
2618 if (clk_set_min_rate(dev->vpu_clock, 0))
2619 unicam_err(dev, "failed to reset the VPU clock\n");
2620 clk_disable_unprepare(dev->vpu_clock);
2622 media_pipeline_stop(&node->video_dev.entity);
2624 unicam_runtime_put(dev);
2626 unicam_return_buffers(node, VB2_BUF_STATE_QUEUED);
2627 node->streaming = false;
/*
 * vb2 stop_streaming callback: quiesce the capture path for one node.
 * NOTE(review): this chunk has elided lines (embedded numbering skips
 * values), so some braces/statements from the original are not visible.
 */
2632 static void unicam_stop_streaming(struct vb2_queue *vq)
2634 struct unicam_node *node = vb2_get_drv_priv(vq);
2635 struct unicam_device *dev = node->dev;
/* Mark the node stopped first so the ISR stops programming new buffers. */
2637 node->streaming = false;
2639 if (node->pad_id == IMAGE_PAD) {
2641 * Stop streaming the sensor and disable the peripheral.
2642 * We cannot continue streaming embedded data with the
2643 * image pad disabled.
2645 if (v4l2_subdev_call(dev->sensor, video, s_stream, 0) < 0)
2646 unicam_err(dev, "stream off failed in subdev\n");
2648 unicam_disable(dev);
2650 media_pipeline_stop(&node->video_dev.entity);
/* Undo the clock setup done at start_streaming time, in reverse order. */
2652 if (dev->clocks_enabled) {
2653 if (clk_set_min_rate(dev->vpu_clock, 0))
2654 unicam_err(dev, "failed to reset the min VPU clock\n");
2656 clk_disable_unprepare(dev->vpu_clock);
2657 clk_disable_unprepare(dev->clock);
2658 dev->clocks_enabled = false;
2660 unicam_runtime_put(dev);
2662 } else if (node->pad_id == METADATA_PAD) {
2664 * Allow the hardware to spin in the dummy buffer.
2665 * This is only really needed if the embedded data pad is
2666 * disabled before the image pad.
2668 unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr,
2669 DUMMY_BUF_SIZE, METADATA_PAD);
2672 /* Clear all queued buffers for the node */
2673 unicam_return_buffers(node, VB2_BUF_STATE_ERROR);
/* videobuf2 queue operations shared by the image and metadata nodes. */
2677 static const struct vb2_ops unicam_video_qops = {
2678 .wait_prepare = vb2_ops_wait_prepare,
2679 .wait_finish = vb2_ops_wait_finish,
2680 .queue_setup = unicam_queue_setup,
2681 .buf_prepare = unicam_buffer_prepare,
2682 .buf_queue = unicam_buffer_queue,
2683 .start_streaming = unicam_start_streaming,
2684 .stop_streaming = unicam_stop_streaming,
2688 * unicam_v4l2_open : This function is based on the v4l2_fh_open helper
2689 * function. It has been augmented to handle sensor subdevice power management,
2691 static int unicam_v4l2_open(struct file *file)
2693 struct unicam_node *node = video_drvdata(file);
2694 struct unicam_device *dev = node->dev;
/* node->lock serializes opens against release on the same node. */
2697 mutex_lock(&node->lock);
2699 ret = v4l2_fh_open(file);
2701 unicam_err(dev, "v4l2_fh_open failed\n");
/* Only the first opener powers the sensor up; later ones skip ahead. */
2707 if (!v4l2_fh_is_singular_file(file))
2710 ret = v4l2_subdev_call(dev->sensor, core, s_power, 1);
/* -ENOIOCTLCMD means the subdev has no s_power op; treat as success. */
2711 if (ret < 0 && ret != -ENOIOCTLCMD) {
/* Roll back the fh on power-up failure so the open fails cleanly. */
2712 v4l2_fh_release(file);
2720 mutex_unlock(&node->lock);
/*
 * File release: mirror of unicam_v4l2_open. Powers the sensor down when
 * the last file handle on the node goes away.
 */
2724 static int unicam_v4l2_release(struct file *file)
2726 struct unicam_node *node = video_drvdata(file);
2727 struct unicam_device *dev = node->dev;
2728 struct v4l2_subdev *sd = dev->sensor;
2732 mutex_lock(&node->lock);
/* Test singularity before _vb2_fop_release() destroys the fh. */
2734 fh_singular = v4l2_fh_is_singular_file(file);
2736 ret = _vb2_fop_release(file, NULL);
/* Return value deliberately ignored; nothing useful to do on failure. */
2739 v4l2_subdev_call(sd, core, s_power, 0);
2742 mutex_unlock(&node->lock);
2747 /* unicam capture driver file operations */
2748 static const struct v4l2_file_operations unicam_fops = {
2749 .owner = THIS_MODULE,
2750 .open = unicam_v4l2_open,
2751 .release = unicam_v4l2_release,
2752 .read = vb2_fop_read,
2753 .poll = vb2_fop_poll,
2754 .unlocked_ioctl = video_ioctl2,
2755 .mmap = vb2_fop_mmap,
/*
 * Async notifier .bound callback: record the single sensor subdev.
 * Only one sensor per Unicam instance is supported; a second bind is
 * rejected with a message.
 */
2759 unicam_async_bound(struct v4l2_async_notifier *notifier,
2760 struct v4l2_subdev *subdev,
2761 struct v4l2_async_subdev *asd)
2763 struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
2765 if (unicam->sensor) {
2766 unicam_info(unicam, "Rejecting subdev %s (Already set!!)",
2771 unicam->sensor = subdev;
2772 unicam_dbg(1, unicam, "Using sensor %s for capture\n", subdev->name);
/*
 * kref release: frees the resources tied to the unicam_device once the
 * last reference (device nodes, probe path) is dropped.
 */
2777 static void unicam_release(struct kref *kref)
2779 struct unicam_device *unicam =
2780 container_of(kref, struct unicam_device, kref);
2782 v4l2_ctrl_handler_free(&unicam->ctrl_handler);
2783 media_device_cleanup(&unicam->mdev);
2785 if (unicam->sensor_state)
2786 v4l2_subdev_free_state(unicam->sensor_state);
/* Drop a reference on the device; unicam_release() runs on the last put. */
2791 static void unicam_put(struct unicam_device *unicam)
2793 kref_put(&unicam->kref, unicam_release);
/* Take an additional reference on the device. */
2796 static void unicam_get(struct unicam_device *unicam)
2798 kref_get(&unicam->kref);
/* video_device release callback: drops the node's reference on the device. */
2801 static void unicam_node_release(struct video_device *vdev)
2803 struct unicam_node *node = video_get_drvdata(vdev);
2805 unicam_put(node->dev);
/*
 * Pick an initial format for a node (legacy video-centric API path).
 * Image pad: query the sensor's current format, falling back to the
 * first mutually supported code; force progressive. Metadata pad:
 * fixed to the embedded-data format. *ret_fmt returns the chosen entry.
 */
2808 static int unicam_set_default_format(struct unicam_device *unicam,
2809 struct unicam_node *node,
2811 const struct unicam_fmt **ret_fmt)
2813 struct v4l2_mbus_framefmt mbus_fmt = {0};
2814 const struct unicam_fmt *fmt;
2817 if (pad_id == IMAGE_PAD) {
2818 ret = __subdev_get_format(unicam, &mbus_fmt, pad_id);
2820 unicam_err(unicam, "Failed to get_format - ret %d\n",
2825 fmt = find_format_by_code(mbus_fmt.code);
2828 * Find the first format that the sensor and unicam both
2831 fmt = get_first_supported_format(unicam);
/* Push the fallback code back to the sensor so both sides agree. */
2834 mbus_fmt.code = fmt->code;
2835 ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
2840 if (mbus_fmt.field != V4L2_FIELD_NONE) {
2841 /* Interlaced not supported - disable it now. */
2842 mbus_fmt.field = V4L2_FIELD_NONE;
2843 ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
/* Prefer the native fourcc; use the repacked variant when absent. */
2849 node->v_fmt.fmt.pix.pixelformat = fmt->fourcc ? fmt->fourcc
2850 : fmt->repacked_fourcc;
2852 /* Fix this node format as embedded data. */
2853 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2854 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
/*
 * Pick an initial format for a node (media-controller API path).
 * Unlike the legacy path this does not consult the sensor: the image
 * pad gets a fixed 640x480 default using the first table format, and
 * the metadata pad is fixed to embedded data.
 */
2862 static void unicam_mc_set_default_format(struct unicam_node *node, int pad_id)
2864 if (pad_id == IMAGE_PAD) {
2865 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2867 pix_fmt->width = 640;
2868 pix_fmt->height = 480;
2869 pix_fmt->field = V4L2_FIELD_NONE;
2870 pix_fmt->colorspace = V4L2_COLORSPACE_SRGB;
2871 pix_fmt->ycbcr_enc = V4L2_YCBCR_ENC_601;
2872 pix_fmt->quantization = V4L2_QUANTIZATION_LIM_RANGE;
2873 pix_fmt->xfer_func = V4L2_XFER_FUNC_SRGB;
2874 pix_fmt->pixelformat = formats[0].fourcc;
/* Derive bytesperline/sizeimage for the chosen default. */
2875 unicam_calc_format_size_bpl(node->dev, &formats[0],
2877 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2879 node->fmt = &formats[0];
2881 const struct unicam_fmt *fmt;
2883 /* Fix this node format as embedded data. */
2884 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2885 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
2888 node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
2889 node->embedded_lines = 1;
2890 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
/*
 * Create and register one video device node (image or metadata pad):
 * choose the default format, set up the vb2 queue, configure the
 * video_device, allocate the dummy DMA buffer, disable ioctls the
 * sensor cannot back, register the node and link it in the media graph.
 * NOTE(review): lines are elided in this chunk (embedded numbering has
 * gaps), so several error paths/braces are not visible here.
 */
2894 static int register_node(struct unicam_device *unicam, struct unicam_node *node,
2895 enum v4l2_buf_type type, int pad_id)
2897 struct video_device *vdev;
2898 struct vb2_queue *q;
2902 node->pad_id = pad_id;
/* Default-format selection differs between legacy and MC API modes. */
2904 if (!unicam->mc_api) {
2905 const struct unicam_fmt *fmt;
2907 ret = unicam_set_default_format(unicam, node, pad_id, &fmt);
2911 /* Read current subdev format */
2913 unicam_reset_format(node);
2915 unicam_mc_set_default_format(node, pad_id);
/* Inherit analogue TV standards from the sensor (legacy API only). */
2918 if (!unicam->mc_api &&
2919 v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
2920 v4l2_std_id tvnorms;
2922 if (WARN_ON(!v4l2_subdev_has_op(unicam->sensor, video,
2925 * Subdevice should not advertise s_std but not
2930 ret = v4l2_subdev_call(unicam->sensor, video,
2931 g_tvnorms, &tvnorms);
2934 node->video_dev.tvnorms |= tvnorms;
2937 spin_lock_init(&node->dma_queue_lock);
2938 mutex_init(&node->lock);
2940 vdev = &node->video_dev;
2941 if (pad_id == IMAGE_PAD) {
2942 if (!unicam->mc_api) {
2943 /* Add controls from the subdevice */
2944 ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
2945 unicam->sensor->ctrl_handler,
2953 * If the sensor subdevice has any controls, associate the node
2954 * with the ctrl handler to allow access from userland.
2956 if (!list_empty(&unicam->ctrl_handler.ctrls))
2957 vdev->ctrl_handler = &unicam->ctrl_handler;
/* vb2 queue setup: contiguous DMA buffers, shared node lock. */
2960 q = &node->buffer_queue;
2962 q->io_modes = VB2_MMAP | VB2_DMABUF | VB2_READ;
2964 q->ops = &unicam_video_qops;
2965 q->mem_ops = &vb2_dma_contig_memops;
2966 q->buf_struct_size = sizeof(struct unicam_buffer);
2967 q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
2968 q->lock = &node->lock;
2969 q->min_buffers_needed = 1;
2970 q->dev = &unicam->pdev->dev;
2972 ret = vb2_queue_init(q);
2974 unicam_err(unicam, "vb2_queue_init() failed\n");
2978 INIT_LIST_HEAD(&node->dma_queue);
/* video_device configuration: capture-only, ioctl set depends on API. */
2980 vdev->release = unicam_node_release;
2981 vdev->fops = &unicam_fops;
2982 vdev->ioctl_ops = unicam->mc_api ? &unicam_mc_ioctl_ops :
2984 vdev->v4l2_dev = &unicam->v4l2_dev;
2985 vdev->vfl_dir = VFL_DIR_RX;
2987 vdev->lock = &node->lock;
2988 vdev->device_caps = (pad_id == IMAGE_PAD) ?
2989 V4L2_CAP_VIDEO_CAPTURE : V4L2_CAP_META_CAPTURE;
2990 vdev->device_caps |= V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
2991 if (unicam->mc_api) {
2992 vdev->device_caps |= V4L2_CAP_IO_MC;
2993 vdev->entity.ops = &unicam_mc_entity_ops;
2996 /* Define the device names */
2997 snprintf(vdev->name, sizeof(vdev->name), "%s-%s", UNICAM_MODULE_NAME,
2998 pad_id == IMAGE_PAD ? "image" : "embedded");
3000 video_set_drvdata(vdev, node);
3001 if (pad_id == IMAGE_PAD)
3002 vdev->entity.flags |= MEDIA_ENT_FL_DEFAULT;
3003 node->pad.flags = MEDIA_PAD_FL_SINK;
3004 media_entity_pads_init(&vdev->entity, 1, &node->pad);
/* Dummy buffer the hardware can spin in when no real buffer is queued. */
3006 node->dummy_buf_cpu_addr = dma_alloc_coherent(&unicam->pdev->dev,
3008 &node->dummy_buf_dma_addr,
3010 if (!node->dummy_buf_cpu_addr) {
3011 unicam_err(unicam, "Unable to allocate dummy buffer.\n")
3014 if (!unicam->mc_api) {
/* Disable every ioctl the metadata pad or this sensor cannot support. */
3015 if (pad_id == METADATA_PAD ||
3016 !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
3017 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
3018 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
3019 v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
3021 if (pad_id == METADATA_PAD ||
3022 !v4l2_subdev_has_op(unicam->sensor, video, querystd))
3023 v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
3024 if (pad_id == METADATA_PAD ||
3025 !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
3026 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
3027 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
3028 v4l2_disable_ioctl(&node->video_dev,
3029 VIDIOC_DV_TIMINGS_CAP);
3030 v4l2_disable_ioctl(&node->video_dev,
3031 VIDIOC_G_DV_TIMINGS);
3032 v4l2_disable_ioctl(&node->video_dev,
3033 VIDIOC_S_DV_TIMINGS);
3034 v4l2_disable_ioctl(&node->video_dev,
3035 VIDIOC_ENUM_DV_TIMINGS);
3036 v4l2_disable_ioctl(&node->video_dev,
3037 VIDIOC_QUERY_DV_TIMINGS);
3039 if (pad_id == METADATA_PAD ||
3040 !v4l2_subdev_has_op(unicam->sensor, pad,
3041 enum_frame_interval))
3042 v4l2_disable_ioctl(&node->video_dev,
3043 VIDIOC_ENUM_FRAMEINTERVALS);
3044 if (pad_id == METADATA_PAD ||
3045 !v4l2_subdev_has_op(unicam->sensor, video,
3047 v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
3048 if (pad_id == METADATA_PAD ||
3049 !v4l2_subdev_has_op(unicam->sensor, video,
3051 v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
3053 if (pad_id == METADATA_PAD ||
3054 !v4l2_subdev_has_op(unicam->sensor, pad,
3056 v4l2_disable_ioctl(&node->video_dev,
3057 VIDIOC_ENUM_FRAMESIZES);
3059 if (node->pad_id == METADATA_PAD ||
3060 !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
3061 v4l2_disable_ioctl(&node->video_dev,
3062 VIDIOC_S_SELECTION);
3064 if (node->pad_id == METADATA_PAD ||
3065 !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
3066 v4l2_disable_ioctl(&node->video_dev,
3067 VIDIOC_G_SELECTION);
3070 ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
3072 unicam_err(unicam, "Unable to register video device %s\n",
3078 * Acquire a reference to unicam, which will be released when the video
3079 * device will be unregistered and userspace will have closed all open
3083 node->registered = true;
/* Link sensor source pad -> node sink pad in the media graph. */
3085 if (pad_id != METADATA_PAD || unicam->sensor_embedded_data) {
3086 ret = media_create_pad_link(&unicam->sensor->entity,
3088 &node->video_dev.entity, 0,
3089 MEDIA_LNK_FL_ENABLED |
3090 MEDIA_LNK_FL_IMMUTABLE);
3092 unicam_err(unicam, "Unable to create pad link for %s\n",
/*
 * Tear down every node: free its dummy DMA buffer and unregister its
 * video device if it was registered. Safe to call on partially
 * initialized nodes (both steps are guarded).
 */
3099 static void unregister_nodes(struct unicam_device *unicam)
3103 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
3104 struct unicam_node *node = &unicam->node[i];
3106 if (node->dummy_buf_cpu_addr) {
3107 dma_free_coherent(&unicam->pdev->dev, DUMMY_BUF_SIZE,
3108 node->dummy_buf_cpu_addr,
3109 node->dummy_buf_dma_addr);
3112 if (node->registered) {
3113 node->registered = false;
3114 video_unregister_device(&node->video_dev);
/*
 * Async notifier .complete callback: all subdevs are bound. Enumerate
 * the sensor's source pads, register the image node (and the metadata
 * node when the sensor exposes a second source pad for embedded data),
 * then register the subdev device nodes.
 */
3119 static int unicam_async_complete(struct v4l2_async_notifier *notifier)
3121 struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
3122 unsigned int i, source_pads = 0;
3125 unicam->v4l2_dev.notify = unicam_notify;
3127 unicam->sensor_state = v4l2_subdev_alloc_state(unicam->sensor);
3128 if (!unicam->sensor_state)
/* Map each sensor source pad to a capture node, up to MAX_NODES. */
3131 for (i = 0; i < unicam->sensor->entity.num_pads; i++) {
3132 if (unicam->sensor->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) {
3133 if (source_pads < MAX_NODES) {
3134 unicam->node[source_pads].src_pad_id = i;
3135 unicam_dbg(3, unicam, "source pad %u is index %u\n",
3142 unicam_err(unicam, "No source pads on sensor.\n");
3147 ret = register_node(unicam, &unicam->node[IMAGE_PAD],
3148 V4L2_BUF_TYPE_VIDEO_CAPTURE, IMAGE_PAD);
3150 unicam_err(unicam, "Unable to register image video device.\n");
/* A second source pad means the sensor also emits embedded data. */
3154 if (source_pads >= 2) {
3155 unicam->sensor_embedded_data = true;
3157 ret = register_node(unicam, &unicam->node[METADATA_PAD],
3158 V4L2_BUF_TYPE_META_CAPTURE, METADATA_PAD);
3160 unicam_err(unicam, "Unable to register metadata video device.\n");
/* MC mode exposes writable subdev nodes; legacy mode read-only ones. */
3166 ret = v4l2_device_register_subdev_nodes(&unicam->v4l2_dev);
3168 ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
3170 unicam_err(unicam, "Unable to register subdev nodes.\n");
3175 * Release the initial reference, all references are now owned by the
3182 unregister_nodes(unicam);
/* v4l2-async notifier callbacks: sensor bind and graph completion. */
3188 static const struct v4l2_async_notifier_operations unicam_async_ops = {
3189 .bound = unicam_async_bound,
3190 .complete = unicam_async_complete,
/*
 * Parse the device-tree endpoint describing the sensor connection,
 * validate the bus configuration (CSI-2 D-PHY or CCP2), and register
 * an async notifier to bind the remote sensor subdevice.
 * NOTE(review): error-path lines are elided in this chunk.
 */
3193 static int of_unicam_connect_subdevs(struct unicam_device *dev)
3195 struct platform_device *pdev = dev->pdev;
3196 struct v4l2_fwnode_endpoint ep = { };
3197 struct device_node *ep_node;
3198 struct device_node *sensor_node;
/* The DT must declare how many data lanes are physically wired. */
3202 if (of_property_read_u32(pdev->dev.of_node, "brcm,num-data-lanes",
3203 &dev->max_data_lanes) < 0) {
3204 unicam_err(dev, "number of data lanes not set\n");
3208 /* Get the local endpoint and remote device. */
3209 ep_node = of_graph_get_next_endpoint(pdev->dev.of_node, NULL);
3211 unicam_dbg(3, dev, "can't get next endpoint\n");
3215 unicam_dbg(3, dev, "ep_node is %pOF\n", ep_node);
3217 sensor_node = of_graph_get_remote_port_parent(ep_node);
3219 unicam_dbg(3, dev, "can't get remote parent\n");
3223 unicam_dbg(1, dev, "found subdevice %pOF\n", sensor_node);
3225 /* Parse the local endpoint and validate its configuration. */
3226 v4l2_fwnode_endpoint_parse(of_fwnode_handle(ep_node), &ep);
3228 unicam_dbg(3, dev, "parsed local endpoint, bus_type %u\n",
3231 dev->bus_type = ep.bus_type;
3233 switch (ep.bus_type) {
3234 case V4L2_MBUS_CSI2_DPHY:
3235 switch (ep.bus.mipi_csi2.num_data_lanes) {
3242 unicam_err(dev, "subdevice %pOF: %u data lanes not supported\n",
3244 ep.bus.mipi_csi2.num_data_lanes);
/* Lane reordering is not supported: lanes must be 1..N in order. */
3248 for (lane = 0; lane < ep.bus.mipi_csi2.num_data_lanes; lane++) {
3249 if (ep.bus.mipi_csi2.data_lanes[lane] != lane + 1) {
3250 unicam_err(dev, "subdevice %pOF: data lanes reordering not supported\n",
/* The endpoint may use fewer lanes than the DT maximum, never more. */
3256 if (ep.bus.mipi_csi2.num_data_lanes > dev->max_data_lanes) {
3257 unicam_err(dev, "subdevice requires %u data lanes when %u are supported\n",
3258 ep.bus.mipi_csi2.num_data_lanes,
3259 dev->max_data_lanes);
3262 dev->max_data_lanes = ep.bus.mipi_csi2.num_data_lanes;
3263 dev->bus_flags = ep.bus.mipi_csi2.flags;
3267 case V4L2_MBUS_CCP2:
/* CCP2 is fixed: clock on lane 0, single data lane 1. */
3268 if (ep.bus.mipi_csi1.clock_lane != 0 ||
3269 ep.bus.mipi_csi1.data_lane != 1) {
3270 unicam_err(dev, "subdevice %pOF: unsupported lanes configuration\n",
3275 dev->max_data_lanes = 1;
3276 dev->bus_flags = ep.bus.mipi_csi1.strobe;
3280 /* Unsupported bus type */
3281 unicam_err(dev, "subdevice %pOF: unsupported bus type %u\n",
3282 sensor_node, ep.bus_type);
3286 unicam_dbg(3, dev, "subdevice %pOF: %s bus, %u data lanes, flags=0x%08x\n",
3288 dev->bus_type == V4L2_MBUS_CSI2_DPHY ? "CSI-2" : "CCP2",
3289 dev->max_data_lanes, dev->bus_flags);
3291 /* Initialize and register the async notifier. */
3292 v4l2_async_notifier_init(&dev->notifier);
3293 dev->notifier.ops = &unicam_async_ops;
3295 dev->asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
3296 dev->asd.match.fwnode = fwnode_graph_get_remote_endpoint(of_fwnode_handle(ep_node));
3297 ret = v4l2_async_notifier_add_subdev(&dev->notifier, &dev->asd);
3299 unicam_err(dev, "Error adding subdevice: %d\n", ret);
3303 ret = v4l2_async_notifier_register(&dev->v4l2_dev, &dev->notifier);
3305 unicam_err(dev, "Error registering async notifier: %d\n", ret);
/* Drop the of_node references taken by the graph walk above. */
3310 of_node_put(sensor_node);
3311 of_node_put(ep_node);
/*
 * Platform probe: allocate the device state, map both register blocks,
 * acquire clocks and the IRQ, register the media/V4L2 devices, then
 * parse DT and register the async notifier for the sensor.
 * NOTE(review): some error-path lines are elided in this chunk.
 */
3316 static int unicam_probe(struct platform_device *pdev)
3318 struct unicam_device *unicam;
3321 unicam = kzalloc(sizeof(*unicam), GFP_KERNEL);
3325 kref_init(&unicam->kref);
3326 unicam->pdev = pdev;
3329 * Adopt the current setting of the module parameter, and check if
3330 * device tree requests it.
3332 unicam->mc_api = media_controller;
3333 if (of_property_read_bool(pdev->dev.of_node, "brcm,media-controller"))
3334 unicam->mc_api = true;
3336 unicam->base = devm_platform_ioremap_resource(pdev, 0);
3337 if (IS_ERR(unicam->base)) {
3338 unicam_err(unicam, "Failed to get main io block\n");
3339 ret = PTR_ERR(unicam->base);
3340 goto err_unicam_put;
3343 unicam->clk_gate_base = devm_platform_ioremap_resource(pdev, 1);
3344 if (IS_ERR(unicam->clk_gate_base)) {
3345 unicam_err(unicam, "Failed to get 2nd io block\n");
3346 ret = PTR_ERR(unicam->clk_gate_base);
3347 goto err_unicam_put;
/* "lp" is the CSI/peripheral clock, "vpu" the VPU core clock. */
3350 unicam->clock = devm_clk_get(&pdev->dev, "lp");
3351 if (IS_ERR(unicam->clock)) {
3352 unicam_err(unicam, "Failed to get lp clock\n");
3353 ret = PTR_ERR(unicam->clock);
3354 goto err_unicam_put;
3357 unicam->vpu_clock = devm_clk_get(&pdev->dev, "vpu");
3358 if (IS_ERR(unicam->vpu_clock)) {
3359 unicam_err(unicam, "Failed to get vpu clock\n");
3360 ret = PTR_ERR(unicam->vpu_clock);
3361 goto err_unicam_put;
3364 ret = platform_get_irq(pdev, 0);
3366 dev_err(&pdev->dev, "No IRQ resource\n");
3368 goto err_unicam_put;
/* ret holds the IRQ number here (positive) from platform_get_irq(). */
3371 ret = devm_request_irq(&pdev->dev, ret, unicam_isr, 0,
3372 "unicam_capture0", unicam);
3374 dev_err(&pdev->dev, "Unable to request interrupt\n");
3376 goto err_unicam_put;
3379 unicam->mdev.dev = &pdev->dev;
3380 strscpy(unicam->mdev.model, UNICAM_MODULE_NAME,
3381 sizeof(unicam->mdev.model));
3382 strscpy(unicam->mdev.serial, "", sizeof(unicam->mdev.serial));
3383 snprintf(unicam->mdev.bus_info, sizeof(unicam->mdev.bus_info),
3384 "platform:%s", dev_name(&pdev->dev));
3385 unicam->mdev.hw_revision = 0;
3387 media_device_init(&unicam->mdev);
3389 unicam->v4l2_dev.mdev = &unicam->mdev;
3391 ret = v4l2_device_register(&pdev->dev, &unicam->v4l2_dev);
3394 "Unable to register v4l2 device.\n");
3395 goto err_unicam_put;
3398 ret = media_device_register(&unicam->mdev);
3401 "Unable to register media-controller device.\n");
3402 goto err_v4l2_unregister;
3405 /* Reserve space for the controls */
3406 ret = v4l2_ctrl_handler_init(&unicam->ctrl_handler, 16);
3408 goto err_media_unregister;
3410 /* set the driver data in platform device */
3411 platform_set_drvdata(pdev, unicam);
3413 ret = of_unicam_connect_subdevs(unicam);
3415 dev_err(&pdev->dev, "Failed to connect subdevs\n");
3416 goto err_media_unregister;
3419 /* Enable the block power domain */
3420 pm_runtime_enable(&pdev->dev);
3424 err_media_unregister:
3425 media_device_unregister(&unicam->mdev);
3426 err_v4l2_unregister:
3427 v4l2_device_unregister(&unicam->v4l2_dev);
/*
 * Platform remove: unwind probe in reverse — unregister the notifier,
 * the v4l2/media devices and the nodes, then disable runtime PM.
 * Remaining memory is freed via the kref when the last user closes.
 */
3434 static int unicam_remove(struct platform_device *pdev)
3436 struct unicam_device *unicam = platform_get_drvdata(pdev);
3438 unicam_dbg(2, unicam, "%s\n", __func__);
3440 v4l2_async_notifier_unregister(&unicam->notifier);
3441 v4l2_device_unregister(&unicam->v4l2_dev);
3442 media_device_unregister(&unicam->mdev);
3443 unregister_nodes(unicam);
3445 pm_runtime_disable(&pdev->dev);
/* Device-tree match table: binds against the bcm2835 Unicam node. */
3450 static const struct of_device_id unicam_of_match[] = {
3451 { .compatible = "brcm,bcm2835-unicam", },
3454 MODULE_DEVICE_TABLE(of, unicam_of_match);
/* Platform driver glue and module metadata. */
3456 static struct platform_driver unicam_driver = {
3457 .probe = unicam_probe,
3458 .remove = unicam_remove,
3460 .name = UNICAM_MODULE_NAME,
3461 .of_match_table = of_match_ptr(unicam_of_match),
3465 module_platform_driver(unicam_driver);
3467 MODULE_AUTHOR("Dave Stevenson <dave.stevenson@raspberrypi.com>");
3468 MODULE_DESCRIPTION("BCM2835 Unicam driver");
3469 MODULE_LICENSE("GPL");
3470 MODULE_VERSION(UNICAM_VERSION);