drivers: media: bcm2835_unicam: Improve frame sequence count handling
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * BCM283x / BCM271x Unicam Capture Driver
4  *
5  * Copyright (C) 2017-2020 - Raspberry Pi (Trading) Ltd.
6  *
7  * Dave Stevenson <dave.stevenson@raspberrypi.com>
8  *
9  * Based on TI am437x driver by
10  *   Benoit Parrot <bparrot@ti.com>
11  *   Lad, Prabhakar <prabhakar.csengg@gmail.com>
12  *
13  * and TI CAL camera interface driver by
14  *    Benoit Parrot <bparrot@ti.com>
15  *
16  *
17  * There are two camera drivers in the kernel for BCM283x - this one
18  * and bcm2835-camera (currently in staging).
19  *
20  * This driver directly controls the Unicam peripheral - there is no
21  * involvement with the VideoCore firmware. Unicam receives CSI-2 or
22  * CCP2 data and writes it into SDRAM.
23  * The only potential processing options are repacking Bayer data into an
24  * alternate format, and applying windowing.
25  * The repacking does not shift the data, so can repack V4L2_PIX_FMT_Sxxxx10P
26  * to V4L2_PIX_FMT_Sxxxx10, or V4L2_PIX_FMT_Sxxxx12P to V4L2_PIX_FMT_Sxxxx12,
27  * but not generically up to V4L2_PIX_FMT_Sxxxx16. The driver will add both
28  * formats where the relevant formats are defined, and will automatically
29  * configure the repacking as required.
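 *
 * For example (taken from the format table below), a 10-bit Bayer stream
 * (MEDIA_BUS_FMT_SBGGR10_1X10) can be captured either as
 * V4L2_PIX_FMT_SBGGR10P (packed) or, with repacking enabled, as
 * V4L2_PIX_FMT_SBGGR10 (one 16-bit word per sample).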
30  * Support for windowing may be added later.
31  *
32  * It should be possible to connect this driver to any sensor with a
33  * suitable output interface and V4L2 subdevice driver.
34  *
35  * bcm2835-camera uses the VideoCore firmware to control the sensor,
36  * Unicam, ISP, and all tuner control loops. Fully processed frames are
37  * delivered to the driver by the firmware. It only has sensor drivers
38  * for the Omnivision OV5647 and Sony IMX219 sensors.
39  *
40  * The two drivers are mutually exclusive for the same Unicam instance.
41  * The VideoCore firmware checks the device tree configuration during boot.
42  * If it finds device tree nodes called csi0 or csi1 it will block the
43  * firmware from accessing the peripheral, and bcm2835-camera will
44  * not be able to stream data.
45  */
46
47 #include <linux/clk.h>
48 #include <linux/delay.h>
49 #include <linux/device.h>
50 #include <linux/dma-mapping.h>
51 #include <linux/err.h>
52 #include <linux/init.h>
53 #include <linux/interrupt.h>
54 #include <linux/io.h>
55 #include <linux/module.h>
56 #include <linux/of_device.h>
57 #include <linux/of_graph.h>
58 #include <linux/pinctrl/consumer.h>
59 #include <linux/platform_device.h>
60 #include <linux/pm_runtime.h>
61 #include <linux/slab.h>
62 #include <linux/uaccess.h>
63 #include <linux/videodev2.h>
64
65 #include <media/mipi-csi2.h>
66 #include <media/v4l2-common.h>
67 #include <media/v4l2-ctrls.h>
68 #include <media/v4l2-dev.h>
69 #include <media/v4l2-device.h>
70 #include <media/v4l2-dv-timings.h>
71 #include <media/v4l2-event.h>
72 #include <media/v4l2-ioctl.h>
73 #include <media/v4l2-fwnode.h>
74 #include <media/videobuf2-dma-contig.h>
75
76 #include <media/v4l2-async.h>
77
78 #include "vc4-regs-unicam.h"
79
80 #define UNICAM_MODULE_NAME      "unicam"
81 #define UNICAM_VERSION          "0.1.0"
82
83 static int debug;
84 module_param(debug, int, 0644);
85 MODULE_PARM_DESC(debug, "Debug level 0-3");
86
87 static int media_controller;
88 module_param(media_controller, int, 0644);
89 MODULE_PARM_DESC(media_controller, "Use media controller API");
90
91 #define unicam_dbg(level, dev, fmt, arg...)     \
92                 v4l2_dbg(level, debug, &(dev)->v4l2_dev, fmt, ##arg)
93 #define unicam_info(dev, fmt, arg...)   \
94                 v4l2_info(&(dev)->v4l2_dev, fmt, ##arg)
95 #define unicam_err(dev, fmt, arg...)    \
96                 v4l2_err(&(dev)->v4l2_dev, fmt, ##arg)
97
98 /*
99  * Unicam must request a minimum of 250MHz from the VPU clock.
100  * Otherwise the input FIFOs overrun and cause image corruption.
101  */
102 #define MIN_VPU_CLOCK_RATE (250 * 1000 * 1000)
103 /*
104  * To protect against a dodgy sensor driver never returning an error from
105  * enum_mbus_code, set a maximum index value to be used.
106  */
107 #define MAX_ENUM_MBUS_CODE      128
108
109 /*
110  * Stride is a 16 bit register, but also has to be a multiple of 32.
111  */
112 #define BPL_ALIGNMENT           32
113 #define MAX_BYTESPERLINE        ((1 << 16) - BPL_ALIGNMENT)
114 /*
115  * Max width is therefore determined by the max stride divided by
116  * the number of bits per pixel. Take 32bpp as a
117  * worst case.
118  * No imposed limit on the height, so adopt a square image for want
119  * of anything better.
120  */
121 #define MAX_WIDTH               (MAX_BYTESPERLINE / 4)
122 #define MAX_HEIGHT              MAX_WIDTH
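/*
 * Worked example of the above limits: MAX_BYTESPERLINE = 65536 - 32 = 65504
 * bytes, so with the assumed 32bpp worst case MAX_WIDTH = 65504 / 4 = 16376
 * pixels (and MAX_HEIGHT the same).
 */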
123 /* Define a nominal minimum image size */
124 #define MIN_WIDTH               16
125 #define MIN_HEIGHT              16
126 /* Default size of the embedded buffer */
127 #define UNICAM_EMBEDDED_SIZE    16384
128
129 /*
130  * Size of the dummy buffer allocation.
131  *
132  * Due to a HW bug causing buffer overruns in circular buffer mode under certain
133  * (not yet fully known) conditions, the dummy buffer allocation is set to
134  * a single page size, but the hardware gets programmed with a buffer size of 0.
135  */
136 #define DUMMY_BUF_SIZE          (PAGE_SIZE)
137
138 enum pad_types {
139         IMAGE_PAD,
140         METADATA_PAD,
141         MAX_NODES
142 };
143
144 #define MASK_CS_DEFAULT         BIT(V4L2_COLORSPACE_DEFAULT)
145 #define MASK_CS_SMPTE170M       BIT(V4L2_COLORSPACE_SMPTE170M)
146 #define MASK_CS_SMPTE240M       BIT(V4L2_COLORSPACE_SMPTE240M)
147 #define MASK_CS_REC709          BIT(V4L2_COLORSPACE_REC709)
148 #define MASK_CS_BT878           BIT(V4L2_COLORSPACE_BT878)
149 #define MASK_CS_470_M           BIT(V4L2_COLORSPACE_470_SYSTEM_M)
150 #define MASK_CS_470_BG          BIT(V4L2_COLORSPACE_470_SYSTEM_BG)
151 #define MASK_CS_JPEG            BIT(V4L2_COLORSPACE_JPEG)
152 #define MASK_CS_SRGB            BIT(V4L2_COLORSPACE_SRGB)
153 #define MASK_CS_OPRGB           BIT(V4L2_COLORSPACE_OPRGB)
154 #define MASK_CS_BT2020          BIT(V4L2_COLORSPACE_BT2020)
155 #define MASK_CS_RAW             BIT(V4L2_COLORSPACE_RAW)
156 #define MASK_CS_DCI_P3          BIT(V4L2_COLORSPACE_DCI_P3)
157
158 #define MAX_COLORSPACE          32
159
160 /*
161  * struct unicam_fmt - Unicam media bus format information
162  * @fourcc: V4L2 pixel format FourCC identifier. 0 if n/a.
163  * @repacked_fourcc: V4L2 pixel format FourCC identifier if the data is expanded
164  * out to 16bpp. 0 if n/a.
165  * @code: V4L2 media bus format code.
166  * @depth: Bits per pixel as delivered from the source.
167  * @csi_dt: CSI data type.
168  * @valid_colorspaces: Bitmask of valid colorspaces so that the Media Controller
169  *              centric try_fmt can validate the colorspace and pass
170  *              v4l2-compliance.
171  * @check_variants: Flag to denote that there are multiple mediabus formats
172  *              still in the list that could match this V4L2 format.
173  * @mc_skip: Media Controller shouldn't list this format via ENUM_FMT as it is
174  *              a duplicate of an earlier format.
175  * @metadata_fmt: This format only applies to the metadata pad.
176  */
177 struct unicam_fmt {
178         u32     fourcc;
179         u32     repacked_fourcc;
180         u32     code;
181         u8      depth;
182         u8      csi_dt;
183         u32     valid_colorspaces;
184         u8      check_variants:1;
185         u8      mc_skip:1;
186         u8      metadata_fmt:1;
187 };
188
189 static const struct unicam_fmt formats[] = {
190         /* YUV Formats */
191         {
192                 .fourcc         = V4L2_PIX_FMT_YUYV,
193                 .code           = MEDIA_BUS_FMT_YUYV8_2X8,
194                 .depth          = 16,
195                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
196                 .check_variants = 1,
197                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
198                                      MASK_CS_JPEG,
199         }, {
200                 .fourcc         = V4L2_PIX_FMT_UYVY,
201                 .code           = MEDIA_BUS_FMT_UYVY8_2X8,
202                 .depth          = 16,
203                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
204                 .check_variants = 1,
205                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
206                                      MASK_CS_JPEG,
207         }, {
208                 .fourcc         = V4L2_PIX_FMT_YVYU,
209                 .code           = MEDIA_BUS_FMT_YVYU8_2X8,
210                 .depth          = 16,
211                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
212                 .check_variants = 1,
213                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
214                                      MASK_CS_JPEG,
215         }, {
216                 .fourcc         = V4L2_PIX_FMT_VYUY,
217                 .code           = MEDIA_BUS_FMT_VYUY8_2X8,
218                 .depth          = 16,
219                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
220                 .check_variants = 1,
221                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
222                                      MASK_CS_JPEG,
223         }, {
224                 .fourcc         = V4L2_PIX_FMT_YUYV,
225                 .code           = MEDIA_BUS_FMT_YUYV8_1X16,
226                 .depth          = 16,
227                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
228                 .mc_skip        = 1,
229                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
230                                      MASK_CS_JPEG,
231         }, {
232                 .fourcc         = V4L2_PIX_FMT_UYVY,
233                 .code           = MEDIA_BUS_FMT_UYVY8_1X16,
234                 .depth          = 16,
235                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
236                 .mc_skip        = 1,
237                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
238                                      MASK_CS_JPEG,
239         }, {
240                 .fourcc         = V4L2_PIX_FMT_YVYU,
241                 .code           = MEDIA_BUS_FMT_YVYU8_1X16,
242                 .depth          = 16,
243                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
244                 .mc_skip        = 1,
245                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
246                                      MASK_CS_JPEG,
247         }, {
248                 .fourcc         = V4L2_PIX_FMT_VYUY,
249                 .code           = MEDIA_BUS_FMT_VYUY8_1X16,
250                 .depth          = 16,
251                 .csi_dt         = MIPI_CSI2_DT_YUV422_8B,
252                 .mc_skip        = 1,
253                 .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
254                                      MASK_CS_JPEG,
255         }, {
256         /* RGB Formats */
257                 .fourcc         = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
258                 .code           = MEDIA_BUS_FMT_RGB565_2X8_LE,
259                 .depth          = 16,
260                 .csi_dt         = MIPI_CSI2_DT_RGB565,
261                 .valid_colorspaces = MASK_CS_SRGB,
262         }, {
263                 .fourcc         = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
264                 .code           = MEDIA_BUS_FMT_RGB565_2X8_BE,
265                 .depth          = 16,
266                 .csi_dt         = MIPI_CSI2_DT_RGB565,
267                 .valid_colorspaces = MASK_CS_SRGB,
268         }, {
269                 .fourcc         = V4L2_PIX_FMT_RGB555, /* gggbbbbb arrrrrgg */
270                 .code           = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE,
271                 .depth          = 16,
272                 .csi_dt         = MIPI_CSI2_DT_RGB555,
273                 .valid_colorspaces = MASK_CS_SRGB,
274         }, {
275                 .fourcc         = V4L2_PIX_FMT_RGB555X, /* arrrrrgg gggbbbbb */
276                 .code           = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE,
277                 .depth          = 16,
278                 .csi_dt         = MIPI_CSI2_DT_RGB555,
279                 .valid_colorspaces = MASK_CS_SRGB,
280         }, {
281                 .fourcc         = V4L2_PIX_FMT_RGB24, /* rgb */
282                 .code           = MEDIA_BUS_FMT_RGB888_1X24,
283                 .depth          = 24,
284                 .csi_dt         = MIPI_CSI2_DT_RGB888,
285                 .valid_colorspaces = MASK_CS_SRGB,
286         }, {
287                 .fourcc         = V4L2_PIX_FMT_BGR24, /* bgr */
288                 .code           = MEDIA_BUS_FMT_BGR888_1X24,
289                 .depth          = 24,
290                 .csi_dt         = MIPI_CSI2_DT_RGB888,
291                 .valid_colorspaces = MASK_CS_SRGB,
292         }, {
293                 .fourcc         = V4L2_PIX_FMT_RGB32, /* argb */
294                 .code           = MEDIA_BUS_FMT_ARGB8888_1X32,
295                 .depth          = 32,
296                 .csi_dt         = 0x0,
297                 .valid_colorspaces = MASK_CS_SRGB,
298         }, {
299         /* Bayer Formats */
300                 .fourcc         = V4L2_PIX_FMT_SBGGR8,
301                 .code           = MEDIA_BUS_FMT_SBGGR8_1X8,
302                 .depth          = 8,
303                 .csi_dt         = MIPI_CSI2_DT_RAW8,
304                 .valid_colorspaces = MASK_CS_RAW,
305         }, {
306                 .fourcc         = V4L2_PIX_FMT_SGBRG8,
307                 .code           = MEDIA_BUS_FMT_SGBRG8_1X8,
308                 .depth          = 8,
309                 .csi_dt         = MIPI_CSI2_DT_RAW8,
310                 .valid_colorspaces = MASK_CS_RAW,
311         }, {
312                 .fourcc         = V4L2_PIX_FMT_SGRBG8,
313                 .code           = MEDIA_BUS_FMT_SGRBG8_1X8,
314                 .depth          = 8,
315                 .csi_dt         = MIPI_CSI2_DT_RAW8,
316                 .valid_colorspaces = MASK_CS_RAW,
317         }, {
318                 .fourcc         = V4L2_PIX_FMT_SRGGB8,
319                 .code           = MEDIA_BUS_FMT_SRGGB8_1X8,
320                 .depth          = 8,
321                 .csi_dt         = MIPI_CSI2_DT_RAW8,
322                 .valid_colorspaces = MASK_CS_RAW,
323         }, {
324                 .fourcc         = V4L2_PIX_FMT_SBGGR10P,
325                 .repacked_fourcc = V4L2_PIX_FMT_SBGGR10,
326                 .code           = MEDIA_BUS_FMT_SBGGR10_1X10,
327                 .depth          = 10,
328                 .csi_dt         = MIPI_CSI2_DT_RAW10,
329                 .valid_colorspaces = MASK_CS_RAW,
330         }, {
331                 .fourcc         = V4L2_PIX_FMT_SGBRG10P,
332                 .repacked_fourcc = V4L2_PIX_FMT_SGBRG10,
333                 .code           = MEDIA_BUS_FMT_SGBRG10_1X10,
334                 .depth          = 10,
335                 .csi_dt         = MIPI_CSI2_DT_RAW10,
336                 .valid_colorspaces = MASK_CS_RAW,
337         }, {
338                 .fourcc         = V4L2_PIX_FMT_SGRBG10P,
339                 .repacked_fourcc = V4L2_PIX_FMT_SGRBG10,
340                 .code           = MEDIA_BUS_FMT_SGRBG10_1X10,
341                 .depth          = 10,
342                 .csi_dt         = MIPI_CSI2_DT_RAW10,
343                 .valid_colorspaces = MASK_CS_RAW,
344         }, {
345                 .fourcc         = V4L2_PIX_FMT_SRGGB10P,
346                 .repacked_fourcc = V4L2_PIX_FMT_SRGGB10,
347                 .code           = MEDIA_BUS_FMT_SRGGB10_1X10,
348                 .depth          = 10,
349                 .csi_dt         = MIPI_CSI2_DT_RAW10,
350                 .valid_colorspaces = MASK_CS_RAW,
351         }, {
352                 .fourcc         = V4L2_PIX_FMT_SBGGR12P,
353                 .repacked_fourcc = V4L2_PIX_FMT_SBGGR12,
354                 .code           = MEDIA_BUS_FMT_SBGGR12_1X12,
355                 .depth          = 12,
356                 .csi_dt         = MIPI_CSI2_DT_RAW12,
357                 .valid_colorspaces = MASK_CS_RAW,
358         }, {
359                 .fourcc         = V4L2_PIX_FMT_SGBRG12P,
360                 .repacked_fourcc = V4L2_PIX_FMT_SGBRG12,
361                 .code           = MEDIA_BUS_FMT_SGBRG12_1X12,
362                 .depth          = 12,
363                 .csi_dt         = MIPI_CSI2_DT_RAW12,
364                 .valid_colorspaces = MASK_CS_RAW,
365         }, {
366                 .fourcc         = V4L2_PIX_FMT_SGRBG12P,
367                 .repacked_fourcc = V4L2_PIX_FMT_SGRBG12,
368                 .code           = MEDIA_BUS_FMT_SGRBG12_1X12,
369                 .depth          = 12,
370                 .csi_dt         = MIPI_CSI2_DT_RAW12,
371                 .valid_colorspaces = MASK_CS_RAW,
372         }, {
373                 .fourcc         = V4L2_PIX_FMT_SRGGB12P,
374                 .repacked_fourcc = V4L2_PIX_FMT_SRGGB12,
375                 .code           = MEDIA_BUS_FMT_SRGGB12_1X12,
376                 .depth          = 12,
377                 .csi_dt         = MIPI_CSI2_DT_RAW12,
378                 .valid_colorspaces = MASK_CS_RAW,
379         }, {
380                 .fourcc         = V4L2_PIX_FMT_SBGGR14P,
381                 .repacked_fourcc = V4L2_PIX_FMT_SBGGR14,
382                 .code           = MEDIA_BUS_FMT_SBGGR14_1X14,
383                 .depth          = 14,
384                 .csi_dt         = MIPI_CSI2_DT_RAW14,
385                 .valid_colorspaces = MASK_CS_RAW,
386         }, {
387                 .fourcc         = V4L2_PIX_FMT_SGBRG14P,
388                 .repacked_fourcc = V4L2_PIX_FMT_SGBRG14,
389                 .code           = MEDIA_BUS_FMT_SGBRG14_1X14,
390                 .depth          = 14,
391                 .csi_dt         = MIPI_CSI2_DT_RAW14,
392                 .valid_colorspaces = MASK_CS_RAW,
393         }, {
394                 .fourcc         = V4L2_PIX_FMT_SGRBG14P,
395                 .repacked_fourcc = V4L2_PIX_FMT_SGRBG14,
396                 .code           = MEDIA_BUS_FMT_SGRBG14_1X14,
397                 .depth          = 14,
398                 .csi_dt         = MIPI_CSI2_DT_RAW14,
399                 .valid_colorspaces = MASK_CS_RAW,
400         }, {
401                 .fourcc         = V4L2_PIX_FMT_SRGGB14P,
402                 .repacked_fourcc = V4L2_PIX_FMT_SRGGB14,
403                 .code           = MEDIA_BUS_FMT_SRGGB14_1X14,
404                 .depth          = 14,
405                 .csi_dt         = MIPI_CSI2_DT_RAW14,
406                 .valid_colorspaces = MASK_CS_RAW,
407         }, {
408                 .fourcc         = V4L2_PIX_FMT_SBGGR16,
409                 .code           = MEDIA_BUS_FMT_SBGGR16_1X16,
410                 .depth          = 16,
411                 .csi_dt         = MIPI_CSI2_DT_RAW16,
412                 .valid_colorspaces = MASK_CS_RAW,
413         }, {
414                 .fourcc         = V4L2_PIX_FMT_SGBRG16,
415                 .code           = MEDIA_BUS_FMT_SGBRG16_1X16,
416                 .depth          = 16,
417                 .csi_dt         = MIPI_CSI2_DT_RAW16,
418                 .valid_colorspaces = MASK_CS_RAW,
419         }, {
420                 .fourcc         = V4L2_PIX_FMT_SGRBG16,
421                 .code           = MEDIA_BUS_FMT_SGRBG16_1X16,
422                 .depth          = 16,
423                 .csi_dt         = MIPI_CSI2_DT_RAW16,
424                 .valid_colorspaces = MASK_CS_RAW,
425         }, {
426                 .fourcc         = V4L2_PIX_FMT_SRGGB16,
427                 .code           = MEDIA_BUS_FMT_SRGGB16_1X16,
428                 .depth          = 16,
429                 .csi_dt         = MIPI_CSI2_DT_RAW16,
430                 .valid_colorspaces = MASK_CS_RAW,
431         }, {
432
433         /* Greyscale formats */
434                 .fourcc         = V4L2_PIX_FMT_GREY,
435                 .code           = MEDIA_BUS_FMT_Y8_1X8,
436                 .depth          = 8,
437                 .csi_dt         = MIPI_CSI2_DT_RAW8,
438                 .valid_colorspaces = MASK_CS_RAW,
439         }, {
440                 .fourcc         = V4L2_PIX_FMT_Y10P,
441                 .repacked_fourcc = V4L2_PIX_FMT_Y10,
442                 .code           = MEDIA_BUS_FMT_Y10_1X10,
443                 .depth          = 10,
444                 .csi_dt         = MIPI_CSI2_DT_RAW10,
445                 .valid_colorspaces = MASK_CS_RAW,
446         }, {
447                 .fourcc         = V4L2_PIX_FMT_Y12P,
448                 .repacked_fourcc = V4L2_PIX_FMT_Y12,
449                 .code           = MEDIA_BUS_FMT_Y12_1X12,
450                 .depth          = 12,
451                 .csi_dt         = MIPI_CSI2_DT_RAW12,
452                 .valid_colorspaces = MASK_CS_RAW,
453         }, {
454                 .fourcc         = V4L2_PIX_FMT_Y14P,
455                 .repacked_fourcc = V4L2_PIX_FMT_Y14,
456                 .code           = MEDIA_BUS_FMT_Y14_1X14,
457                 .depth          = 14,
458                 .csi_dt         = MIPI_CSI2_DT_RAW14,
459                 .valid_colorspaces = MASK_CS_RAW,
460         }, {
461                 .fourcc         = V4L2_PIX_FMT_Y16,
462                 .code           = MEDIA_BUS_FMT_Y16_1X16,
463                 .depth          = 16,
464                 .csi_dt         = MIPI_CSI2_DT_RAW16,
465                 .valid_colorspaces = MASK_CS_RAW,
466         },
467         /* Embedded data format */
468         {
469                 .fourcc         = V4L2_META_FMT_SENSOR_DATA,
470                 .code           = MEDIA_BUS_FMT_SENSOR_DATA,
471                 .depth          = 8,
472                 .metadata_fmt   = 1,
473         }
474 };
475
476 struct unicam_buffer {
477         struct vb2_v4l2_buffer vb;
478         struct list_head list;
479 };
480
481 static inline struct unicam_buffer *to_unicam_buffer(struct vb2_buffer *vb)
482 {
483         return container_of(vb, struct unicam_buffer, vb.vb2_buf);
484 }
485
486 struct unicam_node {
487         bool registered;
488         int open;
489         bool streaming;
490         unsigned int pad_id;
491         /* Source pad id on the sensor for this node */
492         unsigned int src_pad_id;
493         /* Pointer pointing to current v4l2_buffer */
494         struct unicam_buffer *cur_frm;
495         /* Pointer pointing to next v4l2_buffer */
496         struct unicam_buffer *next_frm;
497         /* video capture */
498         const struct unicam_fmt *fmt;
499         /* Used to store current pixel format */
500         struct v4l2_format v_fmt;
501         /* Used to store current mbus frame format */
502         struct v4l2_mbus_framefmt m_fmt;
503         /* Buffer queue used in video-buf */
504         struct vb2_queue buffer_queue;
505         /* Queue of filled frames */
506         struct list_head dma_queue;
507         /* IRQ lock for DMA queue */
508         spinlock_t dma_queue_lock;
509         /* lock used to access this structure */
510         struct mutex lock;
511         /* Identifies video device for this channel */
512         struct video_device video_dev;
513         /* Pointer to the parent handle */
514         struct unicam_device *dev;
515         struct media_pad pad;
516         unsigned int embedded_lines;
517         struct media_pipeline pipe;
518         /*
519          * Dummy buffer intended to be used by unicam
520          * if we have no other queued buffers to swap to.
521          */
522         void *dummy_buf_cpu_addr;
523         dma_addr_t dummy_buf_dma_addr;
524 };
525
526 struct unicam_device {
527         struct kref kref;
528
529         /* V4l2 specific parameters */
530         struct v4l2_async_subdev asd;
531
532         /* peripheral base address */
533         void __iomem *base;
534         /* clock gating base address */
535         void __iomem *clk_gate_base;
536         /* lp clock handle */
537         struct clk *clock;
538         /* vpu clock handle */
539         struct clk *vpu_clock;
540         /* clock status for error handling */
541         bool clocks_enabled;
542         /* V4l2 device */
543         struct v4l2_device v4l2_dev;
544         struct media_device mdev;
545
546         /* parent device */
547         struct platform_device *pdev;
548         /* subdevice async Notifier */
549         struct v4l2_async_notifier notifier;
550         unsigned int sequence;
551         bool frame_started;
552
553         /* Pointer to the sensor subdevice */
554         struct v4l2_subdev *sensor;
555         /* Pad config for the sensor */
556         struct v4l2_subdev_state *sensor_state;
557
558         enum v4l2_mbus_type bus_type;
559         /*
560          * Stores bus.mipi_csi2.flags for CSI2 sensors, or
561          * bus.mipi_csi1.strobe for CCP2.
562          */
563         unsigned int bus_flags;
564         unsigned int max_data_lanes;
565         unsigned int active_data_lanes;
566         bool sensor_embedded_data;
567
568         struct unicam_node node[MAX_NODES];
569         struct v4l2_ctrl_handler ctrl_handler;
570
571         bool mc_api;
572 };
573
574 static inline struct unicam_device *
575 to_unicam_device(struct v4l2_device *v4l2_dev)
576 {
577         return container_of(v4l2_dev, struct unicam_device, v4l2_dev);
578 }
579
580 /* Hardware access */
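/*
 * Note: the OR with 0x5a000000 below matches the BCM283x clock manager
 * password (0x5a in bits 31:24), which the clock manager requires on every
 * register write for it to take effect.
 */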
581 static inline void clk_write(struct unicam_device *dev, u32 val)
582 {
583         writel(val | 0x5a000000, dev->clk_gate_base);
584 }
585
586 static inline u32 reg_read(struct unicam_device *dev, u32 offset)
587 {
588         return readl(dev->base + offset);
589 }
590
591 static inline void reg_write(struct unicam_device *dev, u32 offset, u32 val)
592 {
593         writel(val, dev->base + offset);
594 }
595
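/*
 * Bitfield helpers. As an illustration, with mask 0x0000ff00 (__ffs = 8):
 * get_field(0x1234, 0x0000ff00) returns 0x12, while set_field(&val, 0x56,
 * 0x0000ff00) replaces bits 15:8 of val with 0x56.
 */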
596 static inline int get_field(u32 value, u32 mask)
597 {
598         return (value & mask) >> __ffs(mask);
599 }
600
601 static inline void set_field(u32 *valp, u32 field, u32 mask)
602 {
603         u32 val = *valp;
604
605         val &= ~mask;
606         val |= (field << __ffs(mask)) & mask;
607         *valp = val;
608 }
609
610 static inline u32 reg_read_field(struct unicam_device *dev, u32 offset,
611                                  u32 mask)
612 {
613         return get_field(reg_read(dev, offset), mask);
614 }
615
616 static inline void reg_write_field(struct unicam_device *dev, u32 offset,
617                                    u32 field, u32 mask)
618 {
619         u32 val = reg_read(dev, offset);
620
621         set_field(&val, field, mask);
622         reg_write(dev, offset, val);
623 }
624
625 /* Power management functions */
626 static inline int unicam_runtime_get(struct unicam_device *dev)
627 {
628         return pm_runtime_get_sync(&dev->pdev->dev);
629 }
630
631 static inline void unicam_runtime_put(struct unicam_device *dev)
632 {
633         pm_runtime_put_sync(&dev->pdev->dev);
634 }
635
636 /* Format setup functions */
637 static const struct unicam_fmt *find_format_by_code(u32 code)
638 {
639         unsigned int i;
640
641         for (i = 0; i < ARRAY_SIZE(formats); i++) {
642                 if (formats[i].code == code)
643                         return &formats[i];
644         }
645
646         return NULL;
647 }
648
649 static int check_mbus_format(struct unicam_device *dev,
650                              const struct unicam_fmt *format)
651 {
652         unsigned int i;
653         int ret = 0;
654
655         for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
656                 struct v4l2_subdev_mbus_code_enum mbus_code = {
657                         .index = i,
658                         .pad = IMAGE_PAD,
659                         .which = V4L2_SUBDEV_FORMAT_ACTIVE,
660                 };
661
662                 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
663                                        NULL, &mbus_code);
664
665                 if (!ret && mbus_code.code == format->code)
666                         return 1;
667         }
668
669         return 0;
670 }
671
672 static const struct unicam_fmt *find_format_by_pix(struct unicam_device *dev,
673                                                    u32 pixelformat)
674 {
675         unsigned int i;
676
677         for (i = 0; i < ARRAY_SIZE(formats); i++) {
678                 if (formats[i].fourcc == pixelformat ||
679                     formats[i].repacked_fourcc == pixelformat) {
680                         if (formats[i].check_variants &&
681                             !check_mbus_format(dev, &formats[i]))
682                                 continue;
683                         return &formats[i];
684                 }
685         }
686
687         return NULL;
688 }
689
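/*
 * Example: for a 640-pixel-wide 10-bit Bayer line, the packed format needs
 * ALIGN(640 * 10 / 8, 32) = 800 bytes per line, while the repacked 16bpp
 * variant needs ALIGN(640 * 2, 32) = 1280 bytes.
 */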
690 static unsigned int bytes_per_line(u32 width, const struct unicam_fmt *fmt,
691                                    u32 v4l2_fourcc)
692 {
693         if (v4l2_fourcc == fmt->repacked_fourcc)
694                 /* Repacking always goes to 16bpp */
695                 return ALIGN(width << 1, BPL_ALIGNMENT);
696         else
697                 return ALIGN((width * fmt->depth) >> 3, BPL_ALIGNMENT);
698 }
699
700 static int __subdev_get_format(struct unicam_device *dev,
701                                struct v4l2_mbus_framefmt *fmt, int pad_id)
702 {
703         struct v4l2_subdev_format sd_fmt = {
704                 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
705                 .pad = dev->node[pad_id].src_pad_id,
706         };
707         int ret;
708
709         ret = v4l2_subdev_call(dev->sensor, pad, get_fmt, dev->sensor_state,
710                                &sd_fmt);
711         if (ret < 0)
712                 return ret;
713
714         *fmt = sd_fmt.format;
715
716         unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__,
717                    fmt->width, fmt->height, fmt->code);
718
719         return 0;
720 }
721
722 static int __subdev_set_format(struct unicam_device *dev,
723                                struct v4l2_mbus_framefmt *fmt, int pad_id)
724 {
725         struct v4l2_subdev_format sd_fmt = {
726                 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
727                 .pad = dev->node[pad_id].src_pad_id,
728         };
729         int ret;
730
731         sd_fmt.format = *fmt;
732
733         ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
734                                &sd_fmt);
735         if (ret < 0)
736                 return ret;
737
738         *fmt = sd_fmt.format;
739
740         if (pad_id == IMAGE_PAD)
741                 unicam_dbg(1, dev, "%s %dx%d code:%04x\n", __func__, fmt->width,
742                            fmt->height, fmt->code);
743         else
744                 unicam_dbg(1, dev, "%s Embedded data code:%04x\n", __func__,
745                            sd_fmt.format.code);
746
747         return 0;
748 }
749
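/*
 * For instance, a 1920x1080 V4L2_PIX_FMT_SRGGB10P capture at the minimum
 * stride gives bytesperline = ALIGN(1920 * 10 / 8, 32) = 2400 and
 * sizeimage = 2400 * 1080 = 2592000 bytes.
 */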
750 static int unicam_calc_format_size_bpl(struct unicam_device *dev,
751                                        const struct unicam_fmt *fmt,
752                                        struct v4l2_format *f)
753 {
754         unsigned int min_bytesperline;
755
756         v4l_bound_align_image(&f->fmt.pix.width, MIN_WIDTH, MAX_WIDTH, 2,
757                               &f->fmt.pix.height, MIN_HEIGHT, MAX_HEIGHT, 0,
758                               0);
759
760         min_bytesperline = bytes_per_line(f->fmt.pix.width, fmt,
761                                           f->fmt.pix.pixelformat);
762
763         if (f->fmt.pix.bytesperline > min_bytesperline &&
764             f->fmt.pix.bytesperline <= MAX_BYTESPERLINE)
765                 f->fmt.pix.bytesperline = ALIGN(f->fmt.pix.bytesperline,
766                                                 BPL_ALIGNMENT);
767         else
768                 f->fmt.pix.bytesperline = min_bytesperline;
769
770         f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
771
772         unicam_dbg(3, dev, "%s: fourcc: %08X size: %dx%d bpl:%d img_size:%d\n",
773                    __func__,
774                    f->fmt.pix.pixelformat,
775                    f->fmt.pix.width, f->fmt.pix.height,
776                    f->fmt.pix.bytesperline, f->fmt.pix.sizeimage);
777
778         return 0;
779 }
780
781 static int unicam_reset_format(struct unicam_node *node)
782 {
783         struct unicam_device *dev = node->dev;
784         struct v4l2_mbus_framefmt mbus_fmt;
785         int ret;
786
787         if (dev->sensor_embedded_data || node->pad_id != METADATA_PAD) {
788                 ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
789                 if (ret) {
790                         unicam_err(dev, "Failed to get_format - ret %d\n", ret);
791                         return ret;
792                 }
793
794                 if (mbus_fmt.code != node->fmt->code) {
795                         unicam_err(dev, "code mismatch - fmt->code %08x, mbus_fmt.code %08x\n",
796                                    node->fmt->code, mbus_fmt.code);
797                         return -EINVAL;
798                 }
799         }
800
801         if (node->pad_id == IMAGE_PAD) {
802                 v4l2_fill_pix_format(&node->v_fmt.fmt.pix, &mbus_fmt);
803                 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
804                 unicam_calc_format_size_bpl(dev, node->fmt, &node->v_fmt);
805         } else {
806                 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
807                 node->v_fmt.fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
808                 if (dev->sensor_embedded_data) {
809                         node->v_fmt.fmt.meta.buffersize =
810                                         mbus_fmt.width * mbus_fmt.height;
811                         node->embedded_lines = mbus_fmt.height;
812                 } else {
813                         node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
814                         node->embedded_lines = 1;
815                 }
816         }
817
818         node->m_fmt = mbus_fmt;
819         return 0;
820 }
821
822 static void unicam_wr_dma_addr(struct unicam_device *dev, dma_addr_t dmaaddr,
823                                unsigned int buffer_size, int pad_id)
824 {
825         dma_addr_t endaddr = dmaaddr + buffer_size;
826
827         if (pad_id == IMAGE_PAD) {
828                 reg_write(dev, UNICAM_IBSA0, dmaaddr);
829                 reg_write(dev, UNICAM_IBEA0, endaddr);
830         } else {
831                 reg_write(dev, UNICAM_DBSA0, dmaaddr);
832                 reg_write(dev, UNICAM_DBEA0, endaddr);
833         }
834 }
835
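/*
 * The line count below is derived from the hardware image write pointer
 * (UNICAM_IBWP): lines done = (current write address - buffer start) / stride.
 */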
836 static unsigned int unicam_get_lines_done(struct unicam_device *dev)
837 {
838         dma_addr_t start_addr, cur_addr;
839         unsigned int stride = dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline;
840         struct unicam_buffer *frm = dev->node[IMAGE_PAD].cur_frm;
841
842         if (!frm)
843                 return 0;
844
845         start_addr = vb2_dma_contig_plane_dma_addr(&frm->vb.vb2_buf, 0);
846         cur_addr = reg_read(dev, UNICAM_IBWP);
847         return (unsigned int)(cur_addr - start_addr) / stride;
848 }
849
850 static void unicam_schedule_next_buffer(struct unicam_node *node)
851 {
852         struct unicam_device *dev = node->dev;
853         struct unicam_buffer *buf;
854         unsigned int size;
855         dma_addr_t addr;
856
857         buf = list_first_entry(&node->dma_queue, struct unicam_buffer, list);
858         node->next_frm = buf;
859         list_del(&buf->list);
860
861         addr = vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
862         size = (node->pad_id == IMAGE_PAD) ?
863                         node->v_fmt.fmt.pix.sizeimage :
864                         node->v_fmt.fmt.meta.buffersize;
865
866         unicam_wr_dma_addr(dev, addr, size, node->pad_id);
867 }
868
869 static void unicam_schedule_dummy_buffer(struct unicam_node *node)
870 {
871         struct unicam_device *dev = node->dev;
872
873         unicam_dbg(3, dev, "Scheduling dummy buffer for node %d\n",
874                    node->pad_id);
875
876         unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr, 0, node->pad_id);
877         node->next_frm = NULL;
878 }
879
880 static void unicam_process_buffer_complete(struct unicam_node *node,
881                                            unsigned int sequence)
882 {
883         node->cur_frm->vb.field = node->m_fmt.field;
884         node->cur_frm->vb.sequence = sequence;
885
886         vb2_buffer_done(&node->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE);
887 }
888
889 static void unicam_queue_event_sof(struct unicam_device *unicam)
890 {
891         struct v4l2_event event = {
892                 .type = V4L2_EVENT_FRAME_SYNC,
893                 .u.frame_sync.frame_sequence = unicam->sequence,
894         };
895
896         v4l2_event_queue(&unicam->node[IMAGE_PAD].video_dev, &event);
897 }
898
899 /*
900  * unicam_isr : ISR handler for unicam capture
901  * @irq: irq number
902  * @dev_id: dev_id ptr
903  *
904  * It changes the status of the captured buffer, takes the next buffer from the
905  * queue and sets its address in the Unicam registers.
906  */
907 static irqreturn_t unicam_isr(int irq, void *dev)
908 {
909         struct unicam_device *unicam = dev;
910         unsigned int lines_done = unicam_get_lines_done(dev);
911         unsigned int sequence = unicam->sequence;
912         unsigned int i;
913         u32 ista, sta;
914         bool fe;
915         u64 ts;
916
917         sta = reg_read(unicam, UNICAM_STA);
918         /* Write value back to clear the interrupts */
919         reg_write(unicam, UNICAM_STA, sta);
920
921         ista = reg_read(unicam, UNICAM_ISTA);
922         /* Write value back to clear the interrupts */
923         reg_write(unicam, UNICAM_ISTA, ista);
924
925         unicam_dbg(3, unicam, "ISR: ISTA: 0x%X, STA: 0x%X, sequence %d, lines done %d",
926                    ista, sta, sequence, lines_done);
927
928         if (!(sta & (UNICAM_IS | UNICAM_PI0)))
929                 return IRQ_HANDLED;
930
931         /*
932          * Look for either the Frame End interrupt or the Packet Capture status
933          * to signal a frame end.
934          */
935         fe = (ista & UNICAM_FEI || sta & UNICAM_PI0);
936
937         /*
938          * We must run the frame end handler first. If we have a valid next_frm
939  * and we get a simultaneous FE + FS interrupt, running the FS handler
940          * first would null out the next_frm ptr and we would have lost the
941          * buffer forever.
942          */
943         if (fe) {
944                 bool inc_seq = unicam->frame_started;
945
946                 /*
947                  * Ensure we have swapped buffers already as we can't
948                  * stop the peripheral. If no buffer is available, use a
949                  * dummy buffer to dump out frames until we get a new buffer
950                  * to use.
951                  */
952                 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
953                         struct unicam_node *node = &unicam->node[i];
954
955                         if (!node->streaming)
956                                 continue;
957
958                         /*
959                          * If cur_frm == next_frm, it means we have not had
960                          * a chance to swap buffers, likely due to having
961                          * multiple interrupts occurring simultaneously (like FE
962                          * + FS + LS). In this case, we cannot signal the buffer
963                          * as complete, as the HW will reuse that buffer.
964                          */
965                         if (node->cur_frm && node->cur_frm != node->next_frm) {
966                                 /*
967                                  * This condition checks if FE + FS for the same
968                                  * frame has occurred. In such cases, we cannot
969                                  * return out the frame, as no buffer handling
970                                  * or timestamping has yet been done as part of
971                                  * the FS handler.
972                                  */
973                                 if (!node->cur_frm->vb.vb2_buf.timestamp) {
974                                         unicam_dbg(2, unicam, "ISR: FE without FS, dropping frame\n");
975                                         continue;
976                                 }
977
978                                 unicam_process_buffer_complete(node, sequence);
979                                 node->cur_frm = node->next_frm;
980                                 node->next_frm = NULL;
981                                 inc_seq = true;
982                         } else {
983                                 node->cur_frm = node->next_frm;
984                         }
985                 }
986
987                 /*
988                  * Increment the sequence number conditionally on either a FS
989                  * having already occurred, or in the FE + FS condition as
990                  * caught in the FE handler above. This ensures the sequence
991                  * number corresponds to the frames generated by the sensor, not
992                  * the frames dequeued to userland.
993                  */
994                 if (inc_seq) {
995                         unicam->sequence++;
996                         unicam->frame_started = false;
997                 }
998         }
999
1000         if (ista & UNICAM_FSI) {
1001                 /*
1002                  * Timestamp is to be when the first data byte was captured,
1003                  * aka frame start.
1004                  */
1005                 ts = ktime_get_ns();
1006                 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
1007                         if (!unicam->node[i].streaming)
1008                                 continue;
1009
1010                         if (unicam->node[i].cur_frm)
1011                                 unicam->node[i].cur_frm->vb.vb2_buf.timestamp =
1012                                                                 ts;
1013                         else
1014                                 unicam_dbg(2, unicam, "ISR: [%d] Dropping frame, buffer not available at FS\n",
1015                                            i);
1016                         /*
1017                          * Set the next frame output to go to a dummy frame
1018                          * if no buffer currently queued.
1019                          */
1020                         if (!unicam->node[i].next_frm ||
1021                             unicam->node[i].next_frm == unicam->node[i].cur_frm) {
1022                                 unicam_schedule_dummy_buffer(&unicam->node[i]);
1023                         } else if (unicam->node[i].cur_frm) {
1024                                 /*
1025                                  * Repeated FS without FE. Hardware will have
1026                                  * swapped buffers, but the cur_frm doesn't
1027                                  * contain valid data. Return cur_frm to the
1028                                  * queue.
1029                                  */
1030                                 spin_lock(&unicam->node[i].dma_queue_lock);
1031                                 list_add_tail(&unicam->node[i].cur_frm->list,
1032                                               &unicam->node[i].dma_queue);
1033                                 spin_unlock(&unicam->node[i].dma_queue_lock);
1034                                 unicam->node[i].cur_frm = unicam->node[i].next_frm;
1035                                 unicam->node[i].next_frm = NULL;
1036                         }
1037                 }
1038
1039                 unicam_queue_event_sof(unicam);
1040                 unicam->frame_started = true;
1041         }
1042
1043         /*
1044          * Cannot swap buffer at frame end, there may be a race condition
1045          * where the HW does not actually swap it if the new frame has
1046          * already started.
1047          */
1048         if (ista & (UNICAM_FSI | UNICAM_LCI) && !fe) {
1049                 for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
1050                         if (!unicam->node[i].streaming)
1051                                 continue;
1052
1053                         spin_lock(&unicam->node[i].dma_queue_lock);
1054                         if (!list_empty(&unicam->node[i].dma_queue) &&
1055                             !unicam->node[i].next_frm)
1056                                 unicam_schedule_next_buffer(&unicam->node[i]);
1057                         spin_unlock(&unicam->node[i].dma_queue_lock);
1058                 }
1059         }
1060
1061         return IRQ_HANDLED;
1062 }
1063
1064 /* V4L2 Common IOCTLs */
1065 static int unicam_querycap(struct file *file, void *priv,
1066                            struct v4l2_capability *cap)
1067 {
1068         struct unicam_node *node = video_drvdata(file);
1069         struct unicam_device *dev = node->dev;
1070
1071         strscpy(cap->driver, UNICAM_MODULE_NAME, sizeof(cap->driver));
1072         strscpy(cap->card, UNICAM_MODULE_NAME, sizeof(cap->card));
1073
1074         snprintf(cap->bus_info, sizeof(cap->bus_info),
1075                  "platform:%s", dev_name(&dev->pdev->dev));
1076
1077         cap->capabilities |= V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_META_CAPTURE;
1078
1079         return 0;
1080 }
1081
1082 static int unicam_log_status(struct file *file, void *fh)
1083 {
1084         struct unicam_node *node = video_drvdata(file);
1085         struct unicam_device *dev = node->dev;
1086         u32 reg;
1087
1088         /* status for sub devices */
1089         v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
1090
1091         unicam_info(dev, "-----Receiver status-----\n");
1092         unicam_info(dev, "V4L2 width/height:   %ux%u\n",
1093                     node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
1094         unicam_info(dev, "Mediabus format:     %08x\n", node->fmt->code);
1095         unicam_info(dev, "V4L2 format:         %08x\n",
1096                     node->v_fmt.fmt.pix.pixelformat);
1097         reg = reg_read(dev, UNICAM_IPIPE);
1098         unicam_info(dev, "Unpacking/packing:   %u / %u\n",
1099                     get_field(reg, UNICAM_PUM_MASK),
1100                     get_field(reg, UNICAM_PPM_MASK));
1101         unicam_info(dev, "----Live data----\n");
1102         unicam_info(dev, "Programmed stride:   %4u\n",
1103                     reg_read(dev, UNICAM_IBLS));
1104         unicam_info(dev, "Detected resolution: %ux%u\n",
1105                     reg_read(dev, UNICAM_IHSTA),
1106                     reg_read(dev, UNICAM_IVSTA));
1107         unicam_info(dev, "Write pointer:       %08x\n",
1108                     reg_read(dev, UNICAM_IBWP));
1109
1110         return 0;
1111 }
1112
1113 /* V4L2 Video Centric IOCTLs */
1114 static int unicam_enum_fmt_vid_cap(struct file *file, void  *priv,
1115                                    struct v4l2_fmtdesc *f)
1116 {
1117         struct unicam_node *node = video_drvdata(file);
1118         struct unicam_device *dev = node->dev;
1119         unsigned int index = 0;
1120         unsigned int i;
1121         int ret = 0;
1122
1123         if (node->pad_id != IMAGE_PAD)
1124                 return -EINVAL;
1125
1126         for (i = 0; !ret && i < MAX_ENUM_MBUS_CODE; i++) {
1127                 struct v4l2_subdev_mbus_code_enum mbus_code = {
1128                         .index = i,
1129                         .pad = IMAGE_PAD,
1130                         .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1131                 };
1132                 const struct unicam_fmt *fmt;
1133
1134                 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code,
1135                                        NULL, &mbus_code);
1136                 if (ret < 0) {
1137                         unicam_dbg(2, dev,
1138                                    "subdev->enum_mbus_code idx %d returned %d - index invalid\n",
1139                                    i, ret);
1140                         return -EINVAL;
1141                 }
1142
1143                 fmt = find_format_by_code(mbus_code.code);
1144                 if (fmt) {
1145                         if (fmt->fourcc) {
1146                                 if (index == f->index) {
1147                                         f->pixelformat = fmt->fourcc;
1148                                         break;
1149                                 }
1150                                 index++;
1151                         }
1152                         if (fmt->repacked_fourcc) {
1153                                 if (index == f->index) {
1154                                         f->pixelformat = fmt->repacked_fourcc;
1155                                         break;
1156                                 }
1157                                 index++;
1158                         }
1159                 }
1160         }
1161
1162         return 0;
1163 }
1164
1165 static int unicam_g_fmt_vid_cap(struct file *file, void *priv,
1166                                 struct v4l2_format *f)
1167 {
1168         struct v4l2_mbus_framefmt mbus_fmt = {0};
1169         struct unicam_node *node = video_drvdata(file);
1170         struct unicam_device *dev = node->dev;
1171         const struct unicam_fmt *fmt = NULL;
1172         int ret;
1173
1174         if (node->pad_id != IMAGE_PAD)
1175                 return -EINVAL;
1176
1177         /*
1178          * If a flip has occurred in the sensor, the fmt code might have
1179          * changed. So we will need to re-fetch the format from the subdevice.
1180          */
1181         ret = __subdev_get_format(dev, &mbus_fmt, node->pad_id);
1182         if (ret)
1183                 return -EINVAL;
1184
1185         /* Find the V4L2 format from mbus code. We must match a known format. */
1186         fmt = find_format_by_code(mbus_fmt.code);
1187         if (!fmt)
1188                 return -EINVAL;
1189
1190         if (node->fmt != fmt) {
1191                 /*
1192                  * The sensor format has changed so the pixelformat needs to
1193                  * be updated. Try and retain the packed/unpacked choice if
1194                  * at all possible.
1195                  */
1196                 if (node->fmt->repacked_fourcc ==
1197                                                 node->v_fmt.fmt.pix.pixelformat)
1198                         /* Using the repacked format */
1199                         node->v_fmt.fmt.pix.pixelformat = fmt->repacked_fourcc;
1200                 else
1201                         /* Using the native format */
1202                         node->v_fmt.fmt.pix.pixelformat = fmt->fourcc;
1203
1204                 node->fmt = fmt;
1205         }
1206
1207         *f = node->v_fmt;
1208
1209         return 0;
1210 }
1211
1212 static const struct unicam_fmt *
1213 get_first_supported_format(struct unicam_device *dev)
1214 {
1215         struct v4l2_subdev_mbus_code_enum mbus_code;
1216         const struct unicam_fmt *fmt = NULL;
1217         unsigned int i;
1218         int ret = 0;
1219
1220         for (i = 0; ret != -EINVAL && ret != -ENOIOCTLCMD; ++i) {
1221                 memset(&mbus_code, 0, sizeof(mbus_code));
1222                 mbus_code.index = i;
1223                 mbus_code.pad = IMAGE_PAD;
1224                 mbus_code.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1225
1226                 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1227                                        &mbus_code);
1228                 if (ret < 0) {
1229                         unicam_dbg(2, dev,
1230                                    "subdev->enum_mbus_code idx %u returned %d - continue\n",
1231                                    i, ret);
1232                         continue;
1233                 }
1234
1235                 unicam_dbg(2, dev, "subdev %s: code: 0x%08x idx: %u\n",
1236                            dev->sensor->name, mbus_code.code, i);
1237
1238                 fmt = find_format_by_code(mbus_code.code);
1239                 unicam_dbg(2, dev, "fmt 0x%08x returned as %p, V4L2 FOURCC 0x%08x, csi_dt 0x%02x\n",
1240                            mbus_code.code, fmt, fmt ? fmt->fourcc : 0,
1241                            fmt ? fmt->csi_dt : 0);
1242                 if (fmt)
1243                         return fmt;
1244         }
1245
1246         return NULL;
1247 }
1248
1249 static int unicam_try_fmt_vid_cap(struct file *file, void *priv,
1250                                   struct v4l2_format *f)
1251 {
1252         struct unicam_node *node = video_drvdata(file);
1253         struct unicam_device *dev = node->dev;
1254         struct v4l2_subdev_format sd_fmt = {
1255                 .which = V4L2_SUBDEV_FORMAT_TRY,
1256                 .pad = IMAGE_PAD
1257         };
1258         struct v4l2_mbus_framefmt *mbus_fmt = &sd_fmt.format;
1259         const struct unicam_fmt *fmt;
1260         int ret;
1261
1262         if (node->pad_id != IMAGE_PAD)
1263                 return -EINVAL;
1264
1265         fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1266         if (!fmt) {
1267                 /*
1268                  * Pixel format not supported by unicam. Choose the first
1269                  * supported format, and let the sensor choose something else.
1270                  */
1271                 unicam_dbg(3, dev, "Fourcc format (0x%08x) not found. Use first format.\n",
1272                            f->fmt.pix.pixelformat);
1273
1274                 fmt = &formats[0];
1275                 f->fmt.pix.pixelformat = fmt->fourcc;
1276         }
1277
1278         v4l2_fill_mbus_format(mbus_fmt, &f->fmt.pix, fmt->code);
1279         /*
1280          * No support for receiving interlaced video, so never
1281          * request it from the sensor subdev.
1282          */
1283         mbus_fmt->field = V4L2_FIELD_NONE;
1284
1285         ret = v4l2_subdev_call(dev->sensor, pad, set_fmt, dev->sensor_state,
1286                                &sd_fmt);
1287         if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1288                 return ret;
1289
1290         if (mbus_fmt->field != V4L2_FIELD_NONE)
1291                 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1292
1293         v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1294         if (mbus_fmt->code != fmt->code) {
1295                 /* Sensor has returned an alternate format */
1296                 fmt = find_format_by_code(mbus_fmt->code);
1297                 if (!fmt) {
1298                         /*
1299                          * The alternate format is one unicam can't support.
1300                          * Find the first format that is supported by both, and
1301                          * then set that.
1302                          */
1303                         fmt = get_first_supported_format(dev);
                             if (!fmt)
                                     /* No format supported by both sensor and Unicam */
                                     return -EINVAL;
1304                         mbus_fmt->code = fmt->code;
1305
1306                         ret = v4l2_subdev_call(dev->sensor, pad, set_fmt,
1307                                                dev->sensor_state, &sd_fmt);
1308                         if (ret && ret != -ENOIOCTLCMD && ret != -ENODEV)
1309                                 return ret;
1310
1311                         if (mbus_fmt->field != V4L2_FIELD_NONE)
1312                                 unicam_info(dev, "Sensor trying to send interlaced video - results may be unpredictable\n");
1313
1314                         v4l2_fill_pix_format(&f->fmt.pix, &sd_fmt.format);
1315
1316                         if (mbus_fmt->code != fmt->code) {
1317                                 /*
1318                                  * We've set a format that the sensor reports
1319                                  * as being supported, but it refuses to set it.
1320                                  * Not much else we can do.
1321                                  * Assume that the sensor driver may accept the
1322                                  * format when it is set (rather than tried).
1323                                  */
1324                                 unicam_err(dev, "Sensor won't accept default format, and Unicam can't support sensor default\n");
1325                         }
1326                 }
1327
1328                 if (fmt->fourcc)
1329                         f->fmt.pix.pixelformat = fmt->fourcc;
1330                 else
1331                         f->fmt.pix.pixelformat = fmt->repacked_fourcc;
1332         }
1333
1334         return unicam_calc_format_size_bpl(dev, fmt, f);
1335 }
1336
1337 static int unicam_s_fmt_vid_cap(struct file *file, void *priv,
1338                                 struct v4l2_format *f)
1339 {
1340         struct unicam_node *node = video_drvdata(file);
1341         struct unicam_device *dev = node->dev;
1342         struct vb2_queue *q = &node->buffer_queue;
1343         struct v4l2_mbus_framefmt mbus_fmt = {0};
1344         const struct unicam_fmt *fmt;
1345         int ret;
1346
1347         if (vb2_is_busy(q))
1348                 return -EBUSY;
1349
1350         ret = unicam_try_fmt_vid_cap(file, priv, f);
1351         if (ret < 0)
1352                 return ret;
1353
1354         fmt = find_format_by_pix(dev, f->fmt.pix.pixelformat);
1355         if (!fmt) {
1356                 /*
1357                  * Unknown pixel format - adopt a default.
1358                  * This shouldn't happen as try_fmt should have resolved any
1359                  * issues first.
1360                  */
1361                 fmt = get_first_supported_format(dev);
1362                 if (!fmt)
1363                         /*
1364                          * It shouldn't be possible to get here with no
1365                          * supported formats
1366                          */
1367                         return -EINVAL;
1368                 f->fmt.pix.pixelformat = fmt->fourcc;
1369                 return -EINVAL;
1370         }
1371
1372         v4l2_fill_mbus_format(&mbus_fmt, &f->fmt.pix, fmt->code);
1373
1374         ret = __subdev_set_format(dev, &mbus_fmt, node->pad_id);
1375         if (ret) {
1376                 unicam_dbg(3, dev, "%s __subdev_set_format failed %d\n",
1377                            __func__, ret);
1378                 return ret;
1379         }
1380
1381         /* Just double check nothing has gone wrong */
1382         if (mbus_fmt.code != fmt->code) {
1383                 unicam_dbg(3, dev,
1384                            "%s subdev changed format on us, this should not happen\n",
1385                            __func__);
1386                 return -EINVAL;
1387         }
1388
1389         node->fmt = fmt;
1390         node->v_fmt.fmt.pix.pixelformat = f->fmt.pix.pixelformat;
1391         node->v_fmt.fmt.pix.bytesperline = f->fmt.pix.bytesperline;
1392         unicam_reset_format(node);
1393
1394         unicam_dbg(3, dev,
1395                    "%s %dx%d, mbus_fmt 0x%08X, V4L2 pix 0x%08X.\n",
1396                    __func__, node->v_fmt.fmt.pix.width,
1397                    node->v_fmt.fmt.pix.height, mbus_fmt.code,
1398                    node->v_fmt.fmt.pix.pixelformat);
1399
1400         *f = node->v_fmt;
1401
1402         return 0;
1403 }
1404
1405 static int unicam_enum_fmt_meta_cap(struct file *file, void *priv,
1406                                     struct v4l2_fmtdesc *f)
1407 {
1408         struct unicam_node *node = video_drvdata(file);
1409         struct unicam_device *dev = node->dev;
1410         const struct unicam_fmt *fmt;
1411         u32 code;
1412         int ret = 0;
1413
1414         if (node->pad_id != METADATA_PAD || f->index != 0)
1415                 return -EINVAL;
1416
1417         if (dev->sensor_embedded_data) {
1418                 struct v4l2_subdev_mbus_code_enum mbus_code = {
1419                         .index = f->index,
1420                         .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1421                         .pad = METADATA_PAD,
1422                 };
1423
1424                 ret = v4l2_subdev_call(dev->sensor, pad, enum_mbus_code, NULL,
1425                                        &mbus_code);
1426                 if (ret < 0) {
1427                         unicam_dbg(2, dev,
1428                                    "subdev->enum_mbus_code idx 0 returned %d - index invalid\n",
1429                                    ret);
1430                         return -EINVAL;
1431                 }
1432
1433                 code = mbus_code.code;
1434         } else {
1435                 code = MEDIA_BUS_FMT_SENSOR_DATA;
1436         }
1437
1438         fmt = find_format_by_code(code);
1439         if (fmt)
1440                 f->pixelformat = fmt->fourcc;
1441
1442         return 0;
1443 }
1444
1445 static int unicam_g_fmt_meta_cap(struct file *file, void *priv,
1446                                  struct v4l2_format *f)
1447 {
1448         struct unicam_node *node = video_drvdata(file);
1449
1450         if (node->pad_id != METADATA_PAD)
1451                 return -EINVAL;
1452
1453         *f = node->v_fmt;
1454
1455         return 0;
1456 }
1457
1458 static int unicam_enum_input(struct file *file, void *priv,
1459                              struct v4l2_input *inp)
1460 {
1461         struct unicam_node *node = video_drvdata(file);
1462         struct unicam_device *dev = node->dev;
1463         int ret;
1464
1465         if (inp->index != 0)
1466                 return -EINVAL;
1467
1468         inp->type = V4L2_INPUT_TYPE_CAMERA;
1469         if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
1470                 inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
1471                 inp->std = 0;
1472         } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
1473                 inp->capabilities = V4L2_IN_CAP_STD;
1474                 if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
1475                         inp->std = V4L2_STD_ALL;
1476         } else {
1477                 inp->capabilities = 0;
1478                 inp->std = 0;
1479         }
1480
1481         if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
1482                 ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
1483                                        &inp->status);
1484                 if (ret < 0)
1485                         return ret;
1486         }
1487
1488         snprintf(inp->name, sizeof(inp->name), "Camera 0");
1489         return 0;
1490 }
1491
1492 static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
1493 {
1494         *i = 0;
1495
1496         return 0;
1497 }
1498
1499 static int unicam_s_input(struct file *file, void *priv, unsigned int i)
1500 {
1501         /*
1502          * FIXME: Ideally we would like to be able to query the source
1503          * subdevice for information about the input connectors it supports,
1504          * and map that through into a call to video_ops->s_routing.
1505          * There is no infrastructure for describing that in devicetree at
1506          * present. Until that is implemented we can't map a user-visible
1507          * physical connector number to an s_routing input number.
1508          */
1509         if (i > 0)
1510                 return -EINVAL;
1511
1512         return 0;
1513 }
1514
1515 static int unicam_querystd(struct file *file, void *priv,
1516                            v4l2_std_id *std)
1517 {
1518         struct unicam_node *node = video_drvdata(file);
1519         struct unicam_device *dev = node->dev;
1520
1521         return v4l2_subdev_call(dev->sensor, video, querystd, std);
1522 }
1523
1524 static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
1525 {
1526         struct unicam_node *node = video_drvdata(file);
1527         struct unicam_device *dev = node->dev;
1528
1529         return v4l2_subdev_call(dev->sensor, video, g_std, std);
1530 }
1531
1532 static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
1533 {
1534         struct unicam_node *node = video_drvdata(file);
1535         struct unicam_device *dev = node->dev;
1536         int ret;
1537         v4l2_std_id current_std;
1538
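             /*
              * Changing the standard forces a format reset, so skip the work
              * (and the busy check) if the requested standard is already the
              * active one.
              */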
1539         ret = v4l2_subdev_call(dev->sensor, video, g_std, &current_std);
1540         if (ret)
1541                 return ret;
1542
1543         if (std == current_std)
1544                 return 0;
1545
1546         if (vb2_is_busy(&node->buffer_queue))
1547                 return -EBUSY;
1548
1549         ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
1550
1551         /* Force recomputation of bytesperline */
1552         node->v_fmt.fmt.pix.bytesperline = 0;
1553
1554         unicam_reset_format(node);
1555
1556         return ret;
1557 }
1558
1559 static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1560 {
1561         struct unicam_node *node = video_drvdata(file);
1562         struct unicam_device *dev = node->dev;
1563
1564         return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
1565 }
1566
1567 static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1568 {
1569         struct unicam_node *node = video_drvdata(file);
1570         struct unicam_device *dev = node->dev;
1571
1572         return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
1573 }
1574
1575 static int unicam_s_selection(struct file *file, void *priv,
1576                               struct v4l2_selection *sel)
1577 {
1578         struct unicam_node *node = video_drvdata(file);
1579         struct unicam_device *dev = node->dev;
1580         struct v4l2_subdev_selection sdsel = {
1581                 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1582                 .target = sel->target,
1583                 .flags = sel->flags,
1584                 .r = sel->r,
1585         };
1586
1587         if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1588                 return -EINVAL;
1589
1590         return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
1591 }
1592
1593 static int unicam_g_selection(struct file *file, void *priv,
1594                               struct v4l2_selection *sel)
1595 {
1596         struct unicam_node *node = video_drvdata(file);
1597         struct unicam_device *dev = node->dev;
1598         struct v4l2_subdev_selection sdsel = {
1599                 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1600                 .target = sel->target,
1601         };
1602         int ret;
1603
1604         if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1605                 return -EINVAL;
1606
1607         ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
1608         if (!ret)
1609                 sel->r = sdsel.r;
1610
1611         return ret;
1612 }
1613
1614 static int unicam_enum_framesizes(struct file *file, void *priv,
1615                                   struct v4l2_frmsizeenum *fsize)
1616 {
1617         struct unicam_node *node = video_drvdata(file);
1618         struct unicam_device *dev = node->dev;
1619         const struct unicam_fmt *fmt;
1620         struct v4l2_subdev_frame_size_enum fse;
1621         int ret;
1622
1623         /* check for valid format */
1624         fmt = find_format_by_pix(dev, fsize->pixel_format);
1625         if (!fmt) {
1626                 unicam_dbg(3, dev, "Invalid pixel code: %x\n",
1627                            fsize->pixel_format);
1628                 return -EINVAL;
1629         }
1630         fse.code = fmt->code;
1631
1632         fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1633         fse.index = fsize->index;
1634         fse.pad = node->src_pad_id;
1635
1636         ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
1637         if (ret)
1638                 return ret;
1639
1640         unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
1641                    __func__, fse.index, fse.code, fse.min_width, fse.max_width,
1642                    fse.min_height, fse.max_height);
1643
1644         fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1645         fsize->discrete.width = fse.max_width;
1646         fsize->discrete.height = fse.max_height;
1647
1648         return 0;
1649 }
1650
1651 static int unicam_enum_frameintervals(struct file *file, void *priv,
1652                                       struct v4l2_frmivalenum *fival)
1653 {
1654         struct unicam_node *node = video_drvdata(file);
1655         struct unicam_device *dev = node->dev;
1656         const struct unicam_fmt *fmt;
1657         struct v4l2_subdev_frame_interval_enum fie = {
1658                 .index = fival->index,
1659                 .pad = node->src_pad_id,
1660                 .width = fival->width,
1661                 .height = fival->height,
1662                 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1663         };
1664         int ret;
1665
1666         fmt = find_format_by_pix(dev, fival->pixel_format);
1667         if (!fmt)
1668                 return -EINVAL;
1669
1670         fie.code = fmt->code;
1671         ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
1672                                NULL, &fie);
1673         if (ret)
1674                 return ret;
1675
1676         fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1677         fival->discrete = fie.interval;
1678
1679         return 0;
1680 }
1681
1682 static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1683 {
1684         struct unicam_node *node = video_drvdata(file);
1685         struct unicam_device *dev = node->dev;
1686
1687         return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
1688 }
1689
1690 static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1691 {
1692         struct unicam_node *node = video_drvdata(file);
1693         struct unicam_device *dev = node->dev;
1694
1695         return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
1696 }
1697
1698 static int unicam_g_dv_timings(struct file *file, void *priv,
1699                                struct v4l2_dv_timings *timings)
1700 {
1701         struct unicam_node *node = video_drvdata(file);
1702         struct unicam_device *dev = node->dev;
1703
1704         return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
1705 }
1706
1707 static int unicam_s_dv_timings(struct file *file, void *priv,
1708                                struct v4l2_dv_timings *timings)
1709 {
1710         struct unicam_node *node = video_drvdata(file);
1711         struct unicam_device *dev = node->dev;
1712         struct v4l2_dv_timings current_timings;
1713         int ret;
1714
1715         ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
1716                                &current_timings);
1717
1718         if (ret < 0)
1719                 return ret;
1720
1721         if (v4l2_match_dv_timings(timings, &current_timings, 0, false))
1722                 return 0;
1723
1724         if (vb2_is_busy(&node->buffer_queue))
1725                 return -EBUSY;
1726
1727         ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
1728
1729         /* Force recomputation of bytesperline */
1730         node->v_fmt.fmt.pix.bytesperline = 0;
1731
1732         unicam_reset_format(node);
1733
1734         return ret;
1735 }
1736
1737 static int unicam_query_dv_timings(struct file *file, void *priv,
1738                                    struct v4l2_dv_timings *timings)
1739 {
1740         struct unicam_node *node = video_drvdata(file);
1741         struct unicam_device *dev = node->dev;
1742
1743         return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
1744 }
1745
1746 static int unicam_enum_dv_timings(struct file *file, void *priv,
1747                                   struct v4l2_enum_dv_timings *timings)
1748 {
1749         struct unicam_node *node = video_drvdata(file);
1750         struct unicam_device *dev = node->dev;
1751         int ret;
1752
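             /*
              * The subdev expects its own source pad index, so swap it in for
              * the query and restore the video node pad before returning.
              */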
1753         timings->pad = node->src_pad_id;
1754         ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
1755         timings->pad = node->pad_id;
1756
1757         return ret;
1758 }
1759
1760 static int unicam_dv_timings_cap(struct file *file, void *priv,
1761                                  struct v4l2_dv_timings_cap *cap)
1762 {
1763         struct unicam_node *node = video_drvdata(file);
1764         struct unicam_device *dev = node->dev;
1765         int ret;
1766
1767         cap->pad = node->src_pad_id;
1768         ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
1769         cap->pad = node->pad_id;
1770
1771         return ret;
1772 }
1773
1774 static int unicam_subscribe_event(struct v4l2_fh *fh,
1775                                   const struct v4l2_event_subscription *sub)
1776 {
1777         switch (sub->type) {
1778         case V4L2_EVENT_FRAME_SYNC:
1779                 return v4l2_event_subscribe(fh, sub, 2, NULL);
1780         case V4L2_EVENT_SOURCE_CHANGE:
1781                 return v4l2_event_subscribe(fh, sub, 4, NULL);
1782         }
1783
1784         return v4l2_ctrl_subscribe_event(fh, sub);
1785 }
1786
1787 static void unicam_notify(struct v4l2_subdev *sd,
1788                           unsigned int notification, void *arg)
1789 {
1790         struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
1791
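             /*
              * Forward V4L2 events raised by the sensor subdev to the image
              * node's video device.
              */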
1792         switch (notification) {
1793         case V4L2_DEVICE_NOTIFY_EVENT:
1794                 v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
1795                 break;
1796         default:
1797                 break;
1798         }
1799 }
1800
1801 /* unicam capture ioctl operations */
1802 static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
1803         .vidioc_querycap                = unicam_querycap,
1804         .vidioc_enum_fmt_vid_cap        = unicam_enum_fmt_vid_cap,
1805         .vidioc_g_fmt_vid_cap           = unicam_g_fmt_vid_cap,
1806         .vidioc_s_fmt_vid_cap           = unicam_s_fmt_vid_cap,
1807         .vidioc_try_fmt_vid_cap         = unicam_try_fmt_vid_cap,
1808
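             /*
              * The metadata format is fixed, so the set and try handlers
              * simply report the active format via the g_fmt handler.
              */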
1809         .vidioc_enum_fmt_meta_cap       = unicam_enum_fmt_meta_cap,
1810         .vidioc_g_fmt_meta_cap          = unicam_g_fmt_meta_cap,
1811         .vidioc_s_fmt_meta_cap          = unicam_g_fmt_meta_cap,
1812         .vidioc_try_fmt_meta_cap        = unicam_g_fmt_meta_cap,
1813
1814         .vidioc_enum_input              = unicam_enum_input,
1815         .vidioc_g_input                 = unicam_g_input,
1816         .vidioc_s_input                 = unicam_s_input,
1817
1818         .vidioc_querystd                = unicam_querystd,
1819         .vidioc_s_std                   = unicam_s_std,
1820         .vidioc_g_std                   = unicam_g_std,
1821
1822         .vidioc_g_edid                  = unicam_g_edid,
1823         .vidioc_s_edid                  = unicam_s_edid,
1824
1825         .vidioc_enum_framesizes         = unicam_enum_framesizes,
1826         .vidioc_enum_frameintervals     = unicam_enum_frameintervals,
1827
1828         .vidioc_g_selection             = unicam_g_selection,
1829         .vidioc_s_selection             = unicam_s_selection,
1830
1831         .vidioc_g_parm                  = unicam_g_parm,
1832         .vidioc_s_parm                  = unicam_s_parm,
1833
1834         .vidioc_s_dv_timings            = unicam_s_dv_timings,
1835         .vidioc_g_dv_timings            = unicam_g_dv_timings,
1836         .vidioc_query_dv_timings        = unicam_query_dv_timings,
1837         .vidioc_enum_dv_timings         = unicam_enum_dv_timings,
1838         .vidioc_dv_timings_cap          = unicam_dv_timings_cap,
1839
1840         .vidioc_reqbufs                 = vb2_ioctl_reqbufs,
1841         .vidioc_create_bufs             = vb2_ioctl_create_bufs,
1842         .vidioc_prepare_buf             = vb2_ioctl_prepare_buf,
1843         .vidioc_querybuf                = vb2_ioctl_querybuf,
1844         .vidioc_qbuf                    = vb2_ioctl_qbuf,
1845         .vidioc_dqbuf                   = vb2_ioctl_dqbuf,
1846         .vidioc_expbuf                  = vb2_ioctl_expbuf,
1847         .vidioc_streamon                = vb2_ioctl_streamon,
1848         .vidioc_streamoff               = vb2_ioctl_streamoff,
1849
1850         .vidioc_log_status              = unicam_log_status,
1851         .vidioc_subscribe_event         = unicam_subscribe_event,
1852         .vidioc_unsubscribe_event       = v4l2_event_unsubscribe,
1853 };
1854
1855 /* V4L2 Media Controller Centric IOCTLs */
1856
1857 static int unicam_mc_enum_fmt_vid_cap(struct file *file, void  *priv,
1858                                       struct v4l2_fmtdesc *f)
1859 {
1860         int i, j;
1861
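             /*
              * A single mediabus code may expose both a native and a repacked
              * pixel format; both count towards the enumeration index.
              */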
1862         for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
1863                 if (f->mbus_code && formats[i].code != f->mbus_code)
1864                         continue;
1865                 if (formats[i].mc_skip || formats[i].metadata_fmt)
1866                         continue;
1867
1868                 if (formats[i].fourcc) {
1869                         if (j == f->index) {
1870                                 f->pixelformat = formats[i].fourcc;
1871                                 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1872                                 return 0;
1873                         }
1874                         j++;
1875                 }
1876                 if (formats[i].repacked_fourcc) {
1877                         if (j == f->index) {
1878                                 f->pixelformat = formats[i].repacked_fourcc;
1879                                 f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1880                                 return 0;
1881                         }
1882                         j++;
1883                 }
1884         }
1885
1886         return -EINVAL;
1887 }
1888
1889 static int unicam_mc_g_fmt_vid_cap(struct file *file, void *priv,
1890                                    struct v4l2_format *f)
1891 {
1892         struct unicam_node *node = video_drvdata(file);
1893
1894         if (node->pad_id != IMAGE_PAD)
1895                 return -EINVAL;
1896
1897         *f = node->v_fmt;
1898
1899         return 0;
1900 }
1901
1902 static void unicam_mc_try_fmt(struct unicam_node *node, struct v4l2_format *f,
1903                               const struct unicam_fmt **ret_fmt)
1904 {
1905         struct v4l2_pix_format *v4l2_format = &f->fmt.pix;
1906         struct unicam_device *dev = node->dev;
1907         const struct unicam_fmt *fmt;
1908         int is_rgb;
1909
1910         /*
1911          * Default to the first format if the requested pixel format code isn't
1912          * supported.
1913          */
1914         fmt = find_format_by_pix(dev, v4l2_format->pixelformat);
1915         if (!fmt) {
1916                 fmt = &formats[0];
1917                 v4l2_format->pixelformat = fmt->fourcc;
1918         }
1919
1920         unicam_calc_format_size_bpl(dev, fmt, f);
1921
1922         if (v4l2_format->field == V4L2_FIELD_ANY)
1923                 v4l2_format->field = V4L2_FIELD_NONE;
1924
1925         if (ret_fmt)
1926                 *ret_fmt = fmt;
1927
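             /*
              * If the requested colorspace is not valid for this format, fall
              * back to the first supported colorspace and derive the default
              * transfer function, YCbCr encoding and quantization from it.
              */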
1928         if (v4l2_format->colorspace >= MAX_COLORSPACE ||
1929             !(fmt->valid_colorspaces & (1 << v4l2_format->colorspace))) {
1930                 v4l2_format->colorspace = __ffs(fmt->valid_colorspaces);
1931
1932                 v4l2_format->xfer_func =
1933                         V4L2_MAP_XFER_FUNC_DEFAULT(v4l2_format->colorspace);
1934                 v4l2_format->ycbcr_enc =
1935                         V4L2_MAP_YCBCR_ENC_DEFAULT(v4l2_format->colorspace);
1936                 is_rgb = v4l2_format->colorspace == V4L2_COLORSPACE_SRGB;
1937                 v4l2_format->quantization =
1938                         V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,
1939                                                       v4l2_format->colorspace,
1940                                                       v4l2_format->ycbcr_enc);
1941         }
1942
1943         unicam_dbg(3, dev, "%s: %08x %ux%u (bytesperline %u sizeimage %u)\n",
1944                    __func__, v4l2_format->pixelformat,
1945                    v4l2_format->width, v4l2_format->height,
1946                    v4l2_format->bytesperline, v4l2_format->sizeimage);
1947 }
1948
1949 static int unicam_mc_try_fmt_vid_cap(struct file *file, void *priv,
1950                                      struct v4l2_format *f)
1951 {
1952         struct unicam_node *node = video_drvdata(file);
1953
1954         unicam_mc_try_fmt(node, f, NULL);
1955         return 0;
1956 }
1957
1958 static int unicam_mc_s_fmt_vid_cap(struct file *file, void *priv,
1959                                    struct v4l2_format *f)
1960 {
1961         struct unicam_node *node = video_drvdata(file);
1962         struct unicam_device *dev = node->dev;
1963         const struct unicam_fmt *fmt;
1964
1965         if (vb2_is_busy(&node->buffer_queue)) {
1966                 unicam_dbg(3, dev, "%s device busy\n", __func__);
1967                 return -EBUSY;
1968         }
1969
1970         unicam_mc_try_fmt(node, f, &fmt);
1971
1972         node->v_fmt = *f;
1973         node->fmt = fmt;
1974
1975         return 0;
1976 }
1977
1978 static int unicam_mc_enum_framesizes(struct file *file, void *fh,
1979                                      struct v4l2_frmsizeenum *fsize)
1980 {
1981         struct unicam_node *node = video_drvdata(file);
1982         struct unicam_device *dev = node->dev;
1983
1984         if (fsize->index > 0)
1985                 return -EINVAL;
1986
1987         if (!find_format_by_pix(dev, fsize->pixel_format)) {
1988                 unicam_dbg(3, dev, "Invalid pixel format 0x%08x\n",
1989                            fsize->pixel_format);
1990                 return -EINVAL;
1991         }
1992
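             /*
              * In media-controller mode the sensor mode is configured through
              * the subdevs, so advertise the full range the peripheral
              * supports rather than querying the sensor.
              */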
1993         fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
1994         fsize->stepwise.min_width = MIN_WIDTH;
1995         fsize->stepwise.max_width = MAX_WIDTH;
1996         fsize->stepwise.step_width = 1;
1997         fsize->stepwise.min_height = MIN_HEIGHT;
1998         fsize->stepwise.max_height = MAX_HEIGHT;
1999         fsize->stepwise.step_height = 1;
2000
2001         return 0;
2002 }
2003
2004 static int unicam_mc_enum_fmt_meta_cap(struct file *file, void  *priv,
2005                                        struct v4l2_fmtdesc *f)
2006 {
2007         int i, j;
2008
2009         for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
2010                 if (f->mbus_code && formats[i].code != f->mbus_code)
2011                         continue;
2012                 if (!formats[i].metadata_fmt)
2013                         continue;
2014
2015                 if (formats[i].fourcc) {
2016                         if (j == f->index) {
2017                                 f->pixelformat = formats[i].fourcc;
2018                                 f->type = V4L2_BUF_TYPE_META_CAPTURE;
2019                                 return 0;
2020                         }
2021                         j++;
2022                 }
2023         }
2024
2025         return -EINVAL;
2026 }
2027
2028 static int unicam_mc_g_fmt_meta_cap(struct file *file, void *priv,
2029                                     struct v4l2_format *f)
2030 {
2031         struct unicam_node *node = video_drvdata(file);
2032
2033         if (node->pad_id != METADATA_PAD)
2034                 return -EINVAL;
2035
2036         *f = node->v_fmt;
2037
2038         return 0;
2039 }
2040
2041 static int unicam_mc_try_fmt_meta_cap(struct file *file, void *priv,
2042                                       struct v4l2_format *f)
2043 {
2044         struct unicam_node *node = video_drvdata(file);
2045
2046         if (node->pad_id != METADATA_PAD)
2047                 return -EINVAL;
2048
2049         f->fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
2050
2051         return 0;
2052 }
2053
2054 static int unicam_mc_s_fmt_meta_cap(struct file *file, void *priv,
2055                                     struct v4l2_format *f)
2056 {
2057         struct unicam_node *node = video_drvdata(file);
2058
2059         if (node->pad_id != METADATA_PAD)
2060                 return -EINVAL;
2061
2062         unicam_mc_try_fmt_meta_cap(file, priv, f);
2063
2064         node->v_fmt = *f;
2065
2066         return 0;
2067 }
2068
2069 static const struct v4l2_ioctl_ops unicam_mc_ioctl_ops = {
2070         .vidioc_querycap      = unicam_querycap,
2071         .vidioc_enum_fmt_vid_cap  = unicam_mc_enum_fmt_vid_cap,
2072         .vidioc_g_fmt_vid_cap     = unicam_mc_g_fmt_vid_cap,
2073         .vidioc_try_fmt_vid_cap   = unicam_mc_try_fmt_vid_cap,
2074         .vidioc_s_fmt_vid_cap     = unicam_mc_s_fmt_vid_cap,
2075
2076         .vidioc_enum_fmt_meta_cap       = unicam_mc_enum_fmt_meta_cap,
2077         .vidioc_g_fmt_meta_cap          = unicam_mc_g_fmt_meta_cap,
2078         .vidioc_try_fmt_meta_cap        = unicam_mc_try_fmt_meta_cap,
2079         .vidioc_s_fmt_meta_cap          = unicam_mc_s_fmt_meta_cap,
2080
2081         .vidioc_enum_framesizes   = unicam_mc_enum_framesizes,
2082         .vidioc_reqbufs       = vb2_ioctl_reqbufs,
2083         .vidioc_create_bufs   = vb2_ioctl_create_bufs,
2084         .vidioc_prepare_buf   = vb2_ioctl_prepare_buf,
2085         .vidioc_querybuf      = vb2_ioctl_querybuf,
2086         .vidioc_qbuf          = vb2_ioctl_qbuf,
2087         .vidioc_dqbuf         = vb2_ioctl_dqbuf,
2088         .vidioc_expbuf        = vb2_ioctl_expbuf,
2089         .vidioc_streamon      = vb2_ioctl_streamon,
2090         .vidioc_streamoff     = vb2_ioctl_streamoff,
2091
2092         .vidioc_log_status              = unicam_log_status,
2093         .vidioc_subscribe_event         = unicam_subscribe_event,
2094         .vidioc_unsubscribe_event       = v4l2_event_unsubscribe,
2095 };
2096
2097 static int
2098 unicam_mc_subdev_link_validate_get_format(struct media_pad *pad,
2099                                           struct v4l2_subdev_format *fmt)
2100 {
2101         if (is_media_entity_v4l2_subdev(pad->entity)) {
2102                 struct v4l2_subdev *sd =
2103                         media_entity_to_v4l2_subdev(pad->entity);
2104
2105                 fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
2106                 fmt->pad = pad->index;
2107                 return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
2108         }
2109
2110         return -EINVAL;
2111 }
2112
2113 static int unicam_mc_video_link_validate(struct media_link *link)
2114 {
2115         struct video_device *vd = container_of(link->sink->entity,
2116                                                 struct video_device, entity);
2117         struct unicam_node *node = container_of(vd, struct unicam_node,
2118                                                 video_dev);
2119         struct unicam_device *unicam = node->dev;
2120         struct v4l2_subdev_format source_fmt;
2121         int ret;
2122
2123         if (!media_entity_remote_source_pad_unique(link->sink->entity)) {
2124                 unicam_dbg(1, unicam,
2125                            "video node %s pad not connected\n", vd->name);
2126                 return -ENOTCONN;
2127         }
2128
2129         ret = unicam_mc_subdev_link_validate_get_format(link->source,
2130                                                         &source_fmt);
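             /* If the remote pad cannot report a format, treat the link as valid. */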
2131         if (ret < 0)
2132                 return 0;
2133
2134         if (node->pad_id == IMAGE_PAD) {
2135                 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2136                 const struct unicam_fmt *fmt;
2137
2138                 if (source_fmt.format.width != pix_fmt->width ||
2139                     source_fmt.format.height != pix_fmt->height) {
2140                         unicam_err(unicam,
2141                                    "Wrong width or height %ux%u (remote pad set to %ux%u)\n",
2142                                    pix_fmt->width, pix_fmt->height,
2143                                    source_fmt.format.width,
2144                                    source_fmt.format.height);
2145                         return -EINVAL;
2146                 }
2147
2148                 fmt = find_format_by_code(source_fmt.format.code);
2149
2150                 if (!fmt || (fmt->fourcc != pix_fmt->pixelformat &&
2151                              fmt->repacked_fourcc != pix_fmt->pixelformat))
2152                         return -EINVAL;
2153         } else {
2154                 struct v4l2_meta_format *meta_fmt = &node->v_fmt.fmt.meta;
2155
2156                 if (source_fmt.format.width != meta_fmt->buffersize ||
2157                     source_fmt.format.height != 1 ||
2158                     source_fmt.format.code != MEDIA_BUS_FMT_SENSOR_DATA) {
2159                         unicam_err(unicam,
2160                                    "Wrong metadata width/height/code %ux%u %08x (remote pad set to %ux%u %08x)\n",
2161                                    meta_fmt->buffersize, 1,
2162                                    MEDIA_BUS_FMT_SENSOR_DATA,
2163                                    source_fmt.format.width,
2164                                    source_fmt.format.height,
2165                                    source_fmt.format.code);
2166                         return -EINVAL;
2167                 }
2168         }
2169
2170         return 0;
2171 }
2172
2173 static const struct media_entity_operations unicam_mc_entity_ops = {
2174         .link_validate = unicam_mc_video_link_validate,
2175 };
2176
2177 /* videobuf2 Operations */
2178
2179 static int unicam_queue_setup(struct vb2_queue *vq,
2180                               unsigned int *nbuffers,
2181                               unsigned int *nplanes,
2182                               unsigned int sizes[],
2183                               struct device *alloc_devs[])
2184 {
2185         struct unicam_node *node = vb2_get_drv_priv(vq);
2186         struct unicam_device *dev = node->dev;
2187         unsigned int size = node->pad_id == IMAGE_PAD ?
2188                                     node->v_fmt.fmt.pix.sizeimage :
2189                                     node->v_fmt.fmt.meta.buffersize;
2190
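             /* Make sure at least three buffers are allocated in total. */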
2191         if (vq->num_buffers + *nbuffers < 3)
2192                 *nbuffers = 3 - vq->num_buffers;
2193
2194         if (*nplanes) {
2195                 if (sizes[0] < size) {
2196                         unicam_err(dev, "sizes[0] %u < size %u\n", sizes[0],
2197                                    size);
2198                         return -EINVAL;
2199                 }
2200                 size = sizes[0];
2201         }
2202
2203         *nplanes = 1;
2204         sizes[0] = size;
2205
2206         return 0;
2207 }
2208
2209 static int unicam_buffer_prepare(struct vb2_buffer *vb)
2210 {
2211         struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2212         struct unicam_device *dev = node->dev;
2213         struct unicam_buffer *buf = to_unicam_buffer(vb);
2214         unsigned long size;
2215
2216         if (WARN_ON(!node->fmt))
2217                 return -EINVAL;
2218
2219         size = node->pad_id == IMAGE_PAD ? node->v_fmt.fmt.pix.sizeimage :
2220                                            node->v_fmt.fmt.meta.buffersize;
2221         if (vb2_plane_size(vb, 0) < size) {
2222                 unicam_err(dev, "data will not fit into plane (%lu < %lu)\n",
2223                            vb2_plane_size(vb, 0), size);
2224                 return -EINVAL;
2225         }
2226
2227         vb2_set_plane_payload(&buf->vb.vb2_buf, 0, size);
2228         return 0;
2229 }
2230
2231 static void unicam_buffer_queue(struct vb2_buffer *vb)
2232 {
2233         struct unicam_node *node = vb2_get_drv_priv(vb->vb2_queue);
2234         struct unicam_buffer *buf = to_unicam_buffer(vb);
2235         unsigned long flags;
2236
2237         spin_lock_irqsave(&node->dma_queue_lock, flags);
2238         list_add_tail(&buf->list, &node->dma_queue);
2239         spin_unlock_irqrestore(&node->dma_queue_lock, flags);
2240 }
2241
2242 static void unicam_set_packing_config(struct unicam_device *dev)
2243 {
2244         u32 pack, unpack;
2245         u32 val;
2246
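             /*
              * If userspace selected the native (packed) fourcc, pass the data
              * through untouched. Otherwise unpack from the sensor bit depth
              * and repack into the 16-bit container format.
              */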
2247         if (dev->node[IMAGE_PAD].v_fmt.fmt.pix.pixelformat ==
2248             dev->node[IMAGE_PAD].fmt->fourcc) {
2249                 unpack = UNICAM_PUM_NONE;
2250                 pack = UNICAM_PPM_NONE;
2251         } else {
2252                 switch (dev->node[IMAGE_PAD].fmt->depth) {
2253                 case 8:
2254                         unpack = UNICAM_PUM_UNPACK8;
2255                         break;
2256                 case 10:
2257                         unpack = UNICAM_PUM_UNPACK10;
2258                         break;
2259                 case 12:
2260                         unpack = UNICAM_PUM_UNPACK12;
2261                         break;
2262                 case 14:
2263                         unpack = UNICAM_PUM_UNPACK14;
2264                         break;
2265                 case 16:
2266                         unpack = UNICAM_PUM_UNPACK16;
2267                         break;
2268                 default:
2269                         unpack = UNICAM_PUM_NONE;
2270                         break;
2271                 }
2272
2273                 /* Repacking is always to 16bpp */
2274                 pack = UNICAM_PPM_PACK16;
2275         }
2276
2277         val = 0;
2278         set_field(&val, unpack, UNICAM_PUM_MASK);
2279         set_field(&val, pack, UNICAM_PPM_MASK);
2280         reg_write(dev, UNICAM_IPIPE, val);
2281 }
2282
2283 static void unicam_cfg_image_id(struct unicam_device *dev)
2284 {
2285         if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2286                 /* CSI2 mode, hardcode VC 0 for now. */
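                     /*
                      * The image data identifier presumably follows the CSI-2
                      * ID byte layout: virtual channel in bits 7:6 and data
                      * type in bits 5:0.
                      */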
2287                 reg_write(dev, UNICAM_IDI0,
2288                           (0 << 6) | dev->node[IMAGE_PAD].fmt->csi_dt);
2289         } else {
2290                 /* CCP2 mode */
2291                 reg_write(dev, UNICAM_IDI0,
2292                           0x80 | dev->node[IMAGE_PAD].fmt->csi_dt);
2293         }
2294 }
2295
2296 static void unicam_enable_ed(struct unicam_device *dev)
2297 {
2298         u32 val = reg_read(dev, UNICAM_DCS);
2299
2300         set_field(&val, 2, UNICAM_EDL_MASK);
2301         /* Do not wrap at the end of the embedded data buffer */
2302         set_field(&val, 0, UNICAM_DBOB);
2303
2304         reg_write(dev, UNICAM_DCS, val);
2305 }
2306
2307 static void unicam_start_rx(struct unicam_device *dev, dma_addr_t *addr)
2308 {
2309         int line_int_freq = dev->node[IMAGE_PAD].v_fmt.fmt.pix.height >> 2;
2310         unsigned int size, i;
2311         u32 val;
2312
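             /*
              * Program a line-count interrupt roughly every quarter of a
              * frame, but no more often than every 128 lines.
              */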
2313         if (line_int_freq < 128)
2314                 line_int_freq = 128;
2315
2316         /* Enable lane clocks */
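             /*
              * Build the clock enable mask: one bit for the clock lane plus
              * one for each active data lane, spaced two bits apart.
              */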
2317         val = 1;
2318         for (i = 0; i < dev->active_data_lanes; i++)
2319                 val = val << 2 | 1;
2320         clk_write(dev, val);
2321
2322         /* Basic init */
2323         reg_write(dev, UNICAM_CTRL, UNICAM_MEM);
2324
2325         /* Enable analogue control, and leave in reset. */
2326         val = UNICAM_AR;
2327         set_field(&val, 7, UNICAM_CTATADJ_MASK);
2328         set_field(&val, 7, UNICAM_PTATADJ_MASK);
2329         reg_write(dev, UNICAM_ANA, val);
2330         usleep_range(1000, 2000);
2331
2332         /* Come out of reset */
2333         reg_write_field(dev, UNICAM_ANA, 0, UNICAM_AR);
2334
2335         /* Peripheral reset */
2336         reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2337         reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2338
2339         reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2340
2341         /* Enable Rx control. */
2342         val = reg_read(dev, UNICAM_CTRL);
2343         if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2344                 set_field(&val, UNICAM_CPM_CSI2, UNICAM_CPM_MASK);
2345                 set_field(&val, UNICAM_DCM_STROBE, UNICAM_DCM_MASK);
2346         } else {
2347                 set_field(&val, UNICAM_CPM_CCP2, UNICAM_CPM_MASK);
2348                 set_field(&val, dev->bus_flags, UNICAM_DCM_MASK);
2349         }
2350         /* Packet framer timeout */
2351         set_field(&val, 0xf, UNICAM_PFT_MASK);
2352         set_field(&val, 128, UNICAM_OET_MASK);
2353         reg_write(dev, UNICAM_CTRL, val);
2354
2355         reg_write(dev, UNICAM_IHWIN, 0);
2356         reg_write(dev, UNICAM_IVWIN, 0);
2357
2358         /* AXI bus access QoS setup */
2359         val = reg_read(dev, UNICAM_PRI);
2360         set_field(&val, 0, UNICAM_BL_MASK);
2361         set_field(&val, 0, UNICAM_BS_MASK);
2362         set_field(&val, 0xe, UNICAM_PP_MASK);
2363         set_field(&val, 8, UNICAM_NP_MASK);
2364         set_field(&val, 2, UNICAM_PT_MASK);
2365         set_field(&val, 1, UNICAM_PE);
2366         reg_write(dev, UNICAM_PRI, val);
2367
2368         reg_write_field(dev, UNICAM_ANA, 0, UNICAM_DDL);
2369
2370         val = UNICAM_FSIE | UNICAM_FEIE | UNICAM_IBOB;
2371         set_field(&val, line_int_freq, UNICAM_LCIE_MASK);
2372         reg_write(dev, UNICAM_ICTL, val);
2373         reg_write(dev, UNICAM_STA, UNICAM_STA_MASK_ALL);
2374         reg_write(dev, UNICAM_ISTA, UNICAM_ISTA_MASK_ALL);
2375
2376         /* tclk_term_en */
2377         reg_write_field(dev, UNICAM_CLT, 2, UNICAM_CLT1_MASK);
2378         /* tclk_settle */
2379         reg_write_field(dev, UNICAM_CLT, 6, UNICAM_CLT2_MASK);
2380         /* td_term_en */
2381         reg_write_field(dev, UNICAM_DLT, 2, UNICAM_DLT1_MASK);
2382         /* ths_settle */
2383         reg_write_field(dev, UNICAM_DLT, 6, UNICAM_DLT2_MASK);
2384         /* trx_enable */
2385         reg_write_field(dev, UNICAM_DLT, 0, UNICAM_DLT3_MASK);
2386
2387         reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_SOE);
2388
2389         /* Packet compare setup - required to avoid missing frame ends */
2390         val = 0;
2391         set_field(&val, 1, UNICAM_PCE);
2392         set_field(&val, 1, UNICAM_GI);
2393         set_field(&val, 1, UNICAM_CPH);
2394         set_field(&val, 0, UNICAM_PCVC_MASK);
2395         set_field(&val, 1, UNICAM_PCDT_MASK);
2396         reg_write(dev, UNICAM_CMP0, val);
2397
2398         /* Enable clock lane and set up terminations */
2399         val = 0;
2400         if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2401                 /* CSI2 */
2402                 set_field(&val, 1, UNICAM_CLE);
2403                 set_field(&val, 1, UNICAM_CLLPE);
2404                 if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
2405                         set_field(&val, 1, UNICAM_CLTRE);
2406                         set_field(&val, 1, UNICAM_CLHSE);
2407                 }
2408         } else {
2409                 /* CCP2 */
2410                 set_field(&val, 1, UNICAM_CLE);
2411                 set_field(&val, 1, UNICAM_CLHSE);
2412                 set_field(&val, 1, UNICAM_CLTRE);
2413         }
2414         reg_write(dev, UNICAM_CLK, val);
2415
2416         /*
2417          * Enable required data lanes with appropriate terminations.
2418          * The same value needs to be written to UNICAM_DATn registers for
2419          * the active lanes, and 0 for inactive ones.
2420          */
2421         val = 0;
2422         if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2423                 /* CSI2 */
2424                 set_field(&val, 1, UNICAM_DLE);
2425                 set_field(&val, 1, UNICAM_DLLPE);
2426                 if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
2427                         set_field(&val, 1, UNICAM_DLTRE);
2428                         set_field(&val, 1, UNICAM_DLHSE);
2429                 }
2430         } else {
2431                 /* CCP2 */
2432                 set_field(&val, 1, UNICAM_DLE);
2433                 set_field(&val, 1, UNICAM_DLHSE);
2434                 set_field(&val, 1, UNICAM_DLTRE);
2435         }
2436         reg_write(dev, UNICAM_DAT0, val);
2437
2438         if (dev->active_data_lanes == 1)
2439                 val = 0;
2440         reg_write(dev, UNICAM_DAT1, val);
2441
2442         if (dev->max_data_lanes > 2) {
2443                 /*
2444                  * Registers UNICAM_DAT2 and UNICAM_DAT3 are only valid if the
2445                  * instance supports more than two data lanes.
2446                  */
2447                 if (dev->active_data_lanes == 2)
2448                         val = 0;
2449                 reg_write(dev, UNICAM_DAT2, val);
2450
2451                 if (dev->active_data_lanes == 3)
2452                         val = 0;
2453                 reg_write(dev, UNICAM_DAT3, val);
2454         }
2455
2456         reg_write(dev, UNICAM_IBLS,
2457                   dev->node[IMAGE_PAD].v_fmt.fmt.pix.bytesperline);
2458         size = dev->node[IMAGE_PAD].v_fmt.fmt.pix.sizeimage;
2459         unicam_wr_dma_addr(dev, addr[IMAGE_PAD], size, IMAGE_PAD);
2460         unicam_set_packing_config(dev);
2461         unicam_cfg_image_id(dev);
2462
2463         val = reg_read(dev, UNICAM_MISC);
2464         set_field(&val, 1, UNICAM_FL0);
2465         set_field(&val, 1, UNICAM_FL1);
2466         reg_write(dev, UNICAM_MISC, val);
2467
2468         if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data) {
2469                 size = dev->node[METADATA_PAD].v_fmt.fmt.meta.buffersize;
2470                 unicam_enable_ed(dev);
2471                 unicam_wr_dma_addr(dev, addr[METADATA_PAD], size, METADATA_PAD);
2472         }
2473
2474         /* Enable peripheral */
2475         reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPE);
2476
2477         /* Load image pointers */
2478         reg_write_field(dev, UNICAM_ICTL, 1, UNICAM_LIP_MASK);
2479
2480         /* Load embedded data buffer pointers if needed */
2481         if (dev->node[METADATA_PAD].streaming && dev->sensor_embedded_data)
2482                 reg_write_field(dev, UNICAM_DCS, 1, UNICAM_LDP);
2483 }
2484
2485 static void unicam_disable(struct unicam_device *dev)
2486 {
2487         /* Analogue lane control disable */
2488         reg_write_field(dev, UNICAM_ANA, 1, UNICAM_DDL);
2489
2490         /* Stop the output engine */
2491         reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_SOE);
2492
2493         /* Disable the data lanes. */
2494         reg_write(dev, UNICAM_DAT0, 0);
2495         reg_write(dev, UNICAM_DAT1, 0);
2496
2497         if (dev->max_data_lanes > 2) {
2498                 reg_write(dev, UNICAM_DAT2, 0);
2499                 reg_write(dev, UNICAM_DAT3, 0);
2500         }
2501
2502         /* Peripheral reset */
2503         reg_write_field(dev, UNICAM_CTRL, 1, UNICAM_CPR);
2504         usleep_range(50, 100);
2505         reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPR);
2506
2507         /* Disable peripheral */
2508         reg_write_field(dev, UNICAM_CTRL, 0, UNICAM_CPE);
2509
2510         /* Clear ED setup */
2511         reg_write(dev, UNICAM_DCS, 0);
2512
2513         /* Disable all lane clocks */
2514         clk_write(dev, 0);
2515 }
2516
2517 static void unicam_return_buffers(struct unicam_node *node,
2518                                   enum vb2_buffer_state state)
2519 {
2520         struct unicam_buffer *buf, *tmp;
2521         unsigned long flags;
2522
2523         spin_lock_irqsave(&node->dma_queue_lock, flags);
2524         list_for_each_entry_safe(buf, tmp, &node->dma_queue, list) {
2525                 list_del(&buf->list);
2526                 vb2_buffer_done(&buf->vb.vb2_buf, state);
2527         }
2528
2529         if (node->cur_frm)
2530                 vb2_buffer_done(&node->cur_frm->vb.vb2_buf,
2531                                 state);
2532         if (node->next_frm && node->cur_frm != node->next_frm)
2533                 vb2_buffer_done(&node->next_frm->vb.vb2_buf,
2534                                 state);
2535
2536         node->cur_frm = NULL;
2537         node->next_frm = NULL;
2538         spin_unlock_irqrestore(&node->dma_queue_lock, flags);
2539 }
2540
2541 static int unicam_start_streaming(struct vb2_queue *vq, unsigned int count)
2542 {
2543         struct unicam_node *node = vb2_get_drv_priv(vq);
2544         struct unicam_device *dev = node->dev;
2545         dma_addr_t buffer_addr[MAX_NODES] = { 0 };
2546         unsigned long flags;
2547         unsigned int i;
2548         int ret;
2549
2550         node->streaming = true;
2551         if (!(dev->node[IMAGE_PAD].open && dev->node[IMAGE_PAD].streaming &&
2552               (!dev->node[METADATA_PAD].open ||
2553                dev->node[METADATA_PAD].streaming))) {
2554                 /*
2555                  * The metadata pad must be enabled before the image pad
2556                  * if embedded data capture is wanted.
2557                  */
2558                 unicam_dbg(3, dev, "Not all nodes are streaming yet.\n");
2559                 return 0;
2560         }
2561
2562         dev->sequence = 0;
2563         ret = unicam_runtime_get(dev);
2564         if (ret < 0) {
2565                 unicam_dbg(3, dev, "unicam_runtime_get failed\n");
2566                 goto err_streaming;
2567         }
2568
2569         ret = media_pipeline_start(dev->node[IMAGE_PAD].video_dev.entity.pads,
2570                                    &dev->node[IMAGE_PAD].pipe);
2571         if (ret < 0) {
2572                 unicam_err(dev, "Failed to start media pipeline: %d\n", ret);
2573                 goto err_pm_put;
2574         }
2575
2576         dev->active_data_lanes = dev->max_data_lanes;
2577
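             /*
              * For CSI-2 ask the sensor how many data lanes it will actually
              * drive, falling back to the number configured in DT if it does
              * not say.
              */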
2578         if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
2579                 struct v4l2_mbus_config mbus_config = { 0 };
2580
2581                 ret = v4l2_subdev_call(dev->sensor, pad, get_mbus_config,
2582                                        0, &mbus_config);
2583                 if (ret < 0 && ret != -ENOIOCTLCMD) {
2584                         unicam_dbg(3, dev, "g_mbus_config failed\n");
2585                         goto error_pipeline;
2586                 }
2587
2588                 dev->active_data_lanes = mbus_config.bus.mipi_csi2.num_data_lanes;
2589                 if (!dev->active_data_lanes)
2590                         dev->active_data_lanes = dev->max_data_lanes;
2591                 if (dev->active_data_lanes > dev->max_data_lanes) {
2592                         unicam_err(dev, "Device has requested %u data lanes, more than the %u configured in DT\n",
2593                                    dev->active_data_lanes,
2594                                    dev->max_data_lanes);
2595                         ret = -EINVAL;
2596                         goto error_pipeline;
2597                 }
2598         }
2599
2600         unicam_dbg(1, dev, "Running with %u data lanes\n",
2601                    dev->active_data_lanes);
2602
2603         ret = clk_set_min_rate(dev->vpu_clock, MIN_VPU_CLOCK_RATE);
2604         if (ret) {
2605                 unicam_err(dev, "failed to set up VPU clock\n");
2606                 goto error_pipeline;
2607         }
2608
2609         ret = clk_prepare_enable(dev->vpu_clock);
2610         if (ret) {
2611                 unicam_err(dev, "Failed to enable VPU clock: %d\n", ret);
2612                 goto error_pipeline;
2613         }
2614
2615         ret = clk_set_rate(dev->clock, 100 * 1000 * 1000);
2616         if (ret) {
2617                 unicam_err(dev, "failed to set up CSI clock\n");
2618                 goto err_vpu_clock;
2619         }
2620
2621         ret = clk_prepare_enable(dev->clock);
2622         if (ret) {
2623                 unicam_err(dev, "Failed to enable CSI clock: %d\n", ret);
2624                 goto err_vpu_clock;
2625         }
2626
2627         for (i = 0; i < ARRAY_SIZE(dev->node); i++) {
2628                 struct unicam_buffer *buf;
2629
2630                 if (!dev->node[i].streaming)
2631                         continue;
2632
2633                 spin_lock_irqsave(&dev->node[i].dma_queue_lock, flags);
2634                 buf = list_first_entry(&dev->node[i].dma_queue,
2635                                        struct unicam_buffer, list);
2636                 dev->node[i].cur_frm = buf;
2637                 dev->node[i].next_frm = buf;
2638                 list_del(&buf->list);
2639                 spin_unlock_irqrestore(&dev->node[i].dma_queue_lock, flags);
2640
2641                 buffer_addr[i] =
2642                         vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
2643         }
2644
2645         dev->frame_started = false;
2646         unicam_start_rx(dev, buffer_addr);
2647
2648         ret = v4l2_subdev_call(dev->sensor, video, s_stream, 1);
2649         if (ret < 0) {
2650                 unicam_err(dev, "stream on failed in subdev\n");
2651                 goto err_disable_unicam;
2652         }
2653
2654         dev->clocks_enabled = true;
2655         return 0;
2656
2657 err_disable_unicam:
2658         unicam_disable(dev);
2659         clk_disable_unprepare(dev->clock);
2660 err_vpu_clock:
2661         if (clk_set_min_rate(dev->vpu_clock, 0))
2662                 unicam_err(dev, "failed to reset the VPU clock\n");
2663         clk_disable_unprepare(dev->vpu_clock);
2664 error_pipeline:
2665         if (node->pad_id == IMAGE_PAD)
2666                 media_pipeline_stop(dev->node[IMAGE_PAD].video_dev.entity.pads);
2667 err_pm_put:
2668         unicam_runtime_put(dev);
2669 err_streaming:
2670         unicam_return_buffers(node, VB2_BUF_STATE_QUEUED);
2671         node->streaming = false;
2672
2673         return ret;
2674 }
2675
2676 static void unicam_stop_streaming(struct vb2_queue *vq)
2677 {
2678         struct unicam_node *node = vb2_get_drv_priv(vq);
2679         struct unicam_device *dev = node->dev;
2680
2681         node->streaming = false;
2682
2683         if (node->pad_id == IMAGE_PAD) {
2684                 /*
2685                  * Stop streaming the sensor and disable the peripheral.
2686                  * We cannot continue streaming embedded data with the
2687                  * image pad disabled.
2688                  */
2689                 if (v4l2_subdev_call(dev->sensor, video, s_stream, 0) < 0)
2690                         unicam_err(dev, "stream off failed in subdev\n");
2691
2692                 unicam_disable(dev);
2693
2694                 media_pipeline_stop(node->video_dev.entity.pads);
2695
2696                 if (dev->clocks_enabled) {
2697                         if (clk_set_min_rate(dev->vpu_clock, 0))
2698                                 unicam_err(dev, "failed to reset the min VPU clock\n");
2699
2700                         clk_disable_unprepare(dev->vpu_clock);
2701                         clk_disable_unprepare(dev->clock);
2702                         dev->clocks_enabled = false;
2703                 }
2704                 unicam_runtime_put(dev);
2705
2706         } else if (node->pad_id == METADATA_PAD) {
2707                 /*
2708                  * Allow the hardware to spin in the dummy buffer.
2709                  * This is only really needed if the embedded data pad is
2710                  * disabled before the image pad.
2711                  */
2712                 unicam_wr_dma_addr(dev, node->dummy_buf_dma_addr, 0,
2713                                    METADATA_PAD);
2714         }
2715
2716         /* Clear all queued buffers for the node */
2717         unicam_return_buffers(node, VB2_BUF_STATE_ERROR);
2718 }
2719
2720
2721 static const struct vb2_ops unicam_video_qops = {
2722         .wait_prepare           = vb2_ops_wait_prepare,
2723         .wait_finish            = vb2_ops_wait_finish,
2724         .queue_setup            = unicam_queue_setup,
2725         .buf_prepare            = unicam_buffer_prepare,
2726         .buf_queue              = unicam_buffer_queue,
2727         .start_streaming        = unicam_start_streaming,
2728         .stop_streaming         = unicam_stop_streaming,
2729 };
2730
2731 /*
2732  * unicam_v4l2_open: This function is based on the v4l2_fh_open helper
2733  * function. It has been augmented to handle sensor subdevice power management.
2734  */
2735 static int unicam_v4l2_open(struct file *file)
2736 {
2737         struct unicam_node *node = video_drvdata(file);
2738         struct unicam_device *dev = node->dev;
2739         int ret;
2740
2741         mutex_lock(&node->lock);
2742
2743         ret = v4l2_fh_open(file);
2744         if (ret) {
2745                 unicam_err(dev, "v4l2_fh_open failed\n");
2746                 goto unlock;
2747         }
2748
2749         node->open++;
2750
2751         if (!v4l2_fh_is_singular_file(file))
2752                 goto unlock;
2753
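             /* Power up the sensor only on the first open of this node. */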
2754         ret = v4l2_subdev_call(dev->sensor, core, s_power, 1);
2755         if (ret < 0 && ret != -ENOIOCTLCMD) {
2756                 v4l2_fh_release(file);
2757                 node->open--;
2758                 goto unlock;
2759         }
2760
2761         ret = 0;
2762
2763 unlock:
2764         mutex_unlock(&node->lock);
2765         return ret;
2766 }
2767
2768 static int unicam_v4l2_release(struct file *file)
2769 {
2770         struct unicam_node *node = video_drvdata(file);
2771         struct unicam_device *dev = node->dev;
2772         struct v4l2_subdev *sd = dev->sensor;
2773         bool fh_singular;
2774         int ret;
2775
2776         mutex_lock(&node->lock);
2777
2778         fh_singular = v4l2_fh_is_singular_file(file);
2779
2780         ret = _vb2_fop_release(file, NULL);
2781
2782         if (fh_singular)
2783                 v4l2_subdev_call(sd, core, s_power, 0);
2784
2785         node->open--;
2786         mutex_unlock(&node->lock);
2787
2788         return ret;
2789 }
2790
2791 /* unicam capture driver file operations */
2792 static const struct v4l2_file_operations unicam_fops = {
2793         .owner          = THIS_MODULE,
2794         .open           = unicam_v4l2_open,
2795         .release        = unicam_v4l2_release,
2796         .read           = vb2_fop_read,
2797         .poll           = vb2_fop_poll,
2798         .unlocked_ioctl = video_ioctl2,
2799         .mmap           = vb2_fop_mmap,
2800 };
2801
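/*
 * Async notifier "bound" callback. Only a single sensor subdevice is
 * supported, so the first subdevice to bind is adopted and any further
 * subdevices are rejected with an informational message.
 */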
2802 static int
2803 unicam_async_bound(struct v4l2_async_notifier *notifier,
2804                    struct v4l2_subdev *subdev,
2805                    struct v4l2_async_subdev *asd)
2806 {
2807         struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
2808
2809         if (unicam->sensor) {
2810                 unicam_info(unicam, "Rejecting subdev %s (already set)\n",
2811                             subdev->name);
2812                 return 0;
2813         }
2814
2815         unicam->sensor = subdev;
2816         unicam_dbg(1, unicam, "Using sensor %s for capture\n", subdev->name);
2817
2818         return 0;
2819 }
2820
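/*
 * Final kref release: free the control handler, clean up the media device
 * and any cached sensor subdev state, then free the unicam_device itself.
 */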
2821 static void unicam_release(struct kref *kref)
2822 {
2823         struct unicam_device *unicam =
2824                 container_of(kref, struct unicam_device, kref);
2825
2826         v4l2_ctrl_handler_free(&unicam->ctrl_handler);
2827         media_device_cleanup(&unicam->mdev);
2828
2829         if (unicam->sensor_state)
2830                 __v4l2_subdev_state_free(unicam->sensor_state);
2831
2832         kfree(unicam);
2833 }
2834
2835 static void unicam_put(struct unicam_device *unicam)
2836 {
2837         kref_put(&unicam->kref, unicam_release);
2838 }
2839
2840 static void unicam_get(struct unicam_device *unicam)
2841 {
2842         kref_get(&unicam->kref);
2843 }
2844
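/* video_device release callback - drop the node's reference on the device. */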
2845 static void unicam_node_release(struct video_device *vdev)
2846 {
2847         struct unicam_node *node = video_get_drvdata(vdev);
2848
2849         unicam_put(node->dev);
2850 }
2851
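/*
 * Choose an initial format for a node when using the video-centric (non-MC)
 * API. For the image pad the sensor's current mbus format is queried; if
 * Unicam cannot capture it, the first mutually supported format is programmed
 * into the sensor instead, and interlaced field formats are disabled. The
 * metadata pad is always fixed to the embedded data format.
 */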
2852 static int unicam_set_default_format(struct unicam_device *unicam,
2853                                      struct unicam_node *node,
2854                                      int pad_id,
2855                                      const struct unicam_fmt **ret_fmt)
2856 {
2857         struct v4l2_mbus_framefmt mbus_fmt = {0};
2858         const struct unicam_fmt *fmt;
2859         int ret;
2860
2861         if (pad_id == IMAGE_PAD) {
2862                 ret = __subdev_get_format(unicam, &mbus_fmt, pad_id);
2863                 if (ret) {
2864                         unicam_err(unicam, "Failed to get_format - ret %d\n",
2865                                    ret);
2866                         return ret;
2867                 }
2868
2869                 fmt = find_format_by_code(mbus_fmt.code);
2870                 if (!fmt) {
2871                         /*
2872                          * Find the first format that the sensor and unicam both
2873                          * support
2874                          */
2875                         fmt = get_first_supported_format(unicam);
2876
2877                         if (fmt) {
2878                                 mbus_fmt.code = fmt->code;
2879                                 ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
2880                                 if (ret)
2881                                         return -EINVAL;
2882                         }
2883                 }
2884                 if (mbus_fmt.field != V4L2_FIELD_NONE) {
2885                         /* Interlaced not supported - disable it now. */
2886                         mbus_fmt.field = V4L2_FIELD_NONE;
2887                         ret = __subdev_set_format(unicam, &mbus_fmt, pad_id);
2888                         if (ret)
2889                                 return -EINVAL;
2890                 }
2891
2892                 if (fmt)
2893                         node->v_fmt.fmt.pix.pixelformat = fmt->fourcc ? fmt->fourcc
2894                                                 : fmt->repacked_fourcc;
2895         } else {
2896                 /* Fix this node format as embedded data. */
2897                 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2898                 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
2899         }
2900
2901         *ret_fmt = fmt;
2902
2903         return 0;
2904 }
2905
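/*
 * Default formats when using the media controller API: the image node starts
 * as 640x480 in the first entry of the format table, and the metadata node is
 * fixed to the embedded data format with a single line of
 * UNICAM_EMBEDDED_SIZE bytes.
 */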
2906 static void unicam_mc_set_default_format(struct unicam_node *node, int pad_id)
2907 {
2908         if (pad_id == IMAGE_PAD) {
2909                 struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
2910
2911                 pix_fmt->width = 640;
2912                 pix_fmt->height = 480;
2913                 pix_fmt->field = V4L2_FIELD_NONE;
2914                 pix_fmt->colorspace = V4L2_COLORSPACE_SRGB;
2915                 pix_fmt->ycbcr_enc = V4L2_YCBCR_ENC_601;
2916                 pix_fmt->quantization = V4L2_QUANTIZATION_LIM_RANGE;
2917                 pix_fmt->xfer_func = V4L2_XFER_FUNC_SRGB;
2918                 pix_fmt->pixelformat = formats[0].fourcc;
2919                 unicam_calc_format_size_bpl(node->dev, &formats[0],
2920                                             &node->v_fmt);
2921                 node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
2922
2923                 node->fmt = &formats[0];
2924         } else {
2925                 const struct unicam_fmt *fmt;
2926
2927                 /* Fix this node format as embedded data. */
2928                 fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
2929                 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
2930                 node->fmt = fmt;
2931
2932                 node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
2933                 node->embedded_lines = 1;
2934                 node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
2935         }
2936 }
2937
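/*
 * Create and register one video device node (image or embedded data): pick
 * its default format, initialise the vb2 queue and dummy DMA buffer, disable
 * the ioctls the sensor cannot back, and link the node to the sensor's
 * source pad in the media graph.
 */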
2938 static int register_node(struct unicam_device *unicam, struct unicam_node *node,
2939                          enum v4l2_buf_type type, int pad_id)
2940 {
2941         struct video_device *vdev;
2942         struct vb2_queue *q;
2943         int ret;
2944
2945         node->dev = unicam;
2946         node->pad_id = pad_id;
2947
2948         if (!unicam->mc_api) {
2949                 const struct unicam_fmt *fmt;
2950
2951                 ret = unicam_set_default_format(unicam, node, pad_id, &fmt);
2952                 if (ret)
2953                         return ret;
2954                 node->fmt = fmt;
2955                 /* Read current subdev format */
2956                 if (fmt)
2957                         unicam_reset_format(node);
2958         } else {
2959                 unicam_mc_set_default_format(node, pad_id);
2960         }
2961
2962         if (!unicam->mc_api &&
2963             v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
2964                 v4l2_std_id tvnorms;
2965
2966                 /*
2967                  * A subdevice that advertises s_std must also provide
2968                  * g_tvnorms.
2969                  */
2970                 if (WARN_ON(!v4l2_subdev_has_op(unicam->sensor, video,
2971                                                 g_tvnorms)))
2972                         return -EINVAL;
2973
2974                 ret = v4l2_subdev_call(unicam->sensor, video,
2975                                        g_tvnorms, &tvnorms);
2976                 if (WARN_ON(ret))
2977                         return -EINVAL;
2978                 node->video_dev.tvnorms |= tvnorms;
2979         }
2980
2981         spin_lock_init(&node->dma_queue_lock);
2982         mutex_init(&node->lock);
2983
2984         vdev = &node->video_dev;
2985         if (pad_id == IMAGE_PAD) {
2986                 if (!unicam->mc_api) {
2987                         /* Add controls from the subdevice */
2988                         ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
2989                                                     unicam->sensor->ctrl_handler,
2990                                                     NULL,
2991                                                     true);
2992                         if (ret < 0)
2993                                 return ret;
2994                 }
2995
2996                 /*
2997                  * If the sensor subdevice has any controls, associate the node
2998                  * with the ctrl handler to allow access from userland.
2999                  */
3000                 if (!list_empty(&unicam->ctrl_handler.ctrls))
3001                         vdev->ctrl_handler = &unicam->ctrl_handler;
3002         }
3003
3004         q = &node->buffer_queue;
3005         q->type = type;
3006         q->io_modes = VB2_MMAP | VB2_DMABUF | VB2_READ;
3007         q->drv_priv = node;
3008         q->ops = &unicam_video_qops;
3009         q->mem_ops = &vb2_dma_contig_memops;
3010         q->buf_struct_size = sizeof(struct unicam_buffer);
3011         q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
3012         q->lock = &node->lock;
3013         q->min_buffers_needed = 1;
3014         q->dev = &unicam->pdev->dev;
3015
3016         ret = vb2_queue_init(q);
3017         if (ret) {
3018                 unicam_err(unicam, "vb2_queue_init() failed\n");
3019                 return ret;
3020         }
3021
3022         INIT_LIST_HEAD(&node->dma_queue);
3023
3024         vdev->release = unicam_node_release;
3025         vdev->fops = &unicam_fops;
3026         vdev->ioctl_ops = unicam->mc_api ? &unicam_mc_ioctl_ops :
3027                                            &unicam_ioctl_ops;
3028         vdev->v4l2_dev = &unicam->v4l2_dev;
3029         vdev->vfl_dir = VFL_DIR_RX;
3030         vdev->queue = q;
3031         vdev->lock = &node->lock;
3032         vdev->device_caps = (pad_id == IMAGE_PAD) ?
3033                                 V4L2_CAP_VIDEO_CAPTURE : V4L2_CAP_META_CAPTURE;
3034         vdev->device_caps |= V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
3035         if (unicam->mc_api) {
3036                 vdev->device_caps |= V4L2_CAP_IO_MC;
3037                 vdev->entity.ops = &unicam_mc_entity_ops;
3038         }
3039
3040         /* Define the device names */
3041         snprintf(vdev->name, sizeof(vdev->name), "%s-%s", UNICAM_MODULE_NAME,
3042                  pad_id == IMAGE_PAD ? "image" : "embedded");
3043
3044         video_set_drvdata(vdev, node);
3045         if (pad_id == IMAGE_PAD)
3046                 vdev->entity.flags |= MEDIA_ENT_FL_DEFAULT;
3047         node->pad.flags = MEDIA_PAD_FL_SINK;
3048         media_entity_pads_init(&vdev->entity, 1, &node->pad);
3049
3050         node->dummy_buf_cpu_addr = dma_alloc_coherent(&unicam->pdev->dev,
3051                                                       DUMMY_BUF_SIZE,
3052                                                       &node->dummy_buf_dma_addr,
3053                                                       GFP_KERNEL);
3054         if (!node->dummy_buf_cpu_addr) {
3055                 unicam_err(unicam, "Unable to allocate dummy buffer.\n");
3056                 return -ENOMEM;
3057         }
3058         if (!unicam->mc_api) {
3059                 if (pad_id == METADATA_PAD ||
3060                     !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
3061                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
3062                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
3063                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
3064                 }
3065                 if (pad_id == METADATA_PAD ||
3066                     !v4l2_subdev_has_op(unicam->sensor, video, querystd))
3067                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
3068                 if (pad_id == METADATA_PAD ||
3069                     !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
3070                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
3071                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
3072                         v4l2_disable_ioctl(&node->video_dev,
3073                                            VIDIOC_DV_TIMINGS_CAP);
3074                         v4l2_disable_ioctl(&node->video_dev,
3075                                            VIDIOC_G_DV_TIMINGS);
3076                         v4l2_disable_ioctl(&node->video_dev,
3077                                            VIDIOC_S_DV_TIMINGS);
3078                         v4l2_disable_ioctl(&node->video_dev,
3079                                            VIDIOC_ENUM_DV_TIMINGS);
3080                         v4l2_disable_ioctl(&node->video_dev,
3081                                            VIDIOC_QUERY_DV_TIMINGS);
3082                 }
3083                 if (pad_id == METADATA_PAD ||
3084                     !v4l2_subdev_has_op(unicam->sensor, pad,
3085                                         enum_frame_interval))
3086                         v4l2_disable_ioctl(&node->video_dev,
3087                                            VIDIOC_ENUM_FRAMEINTERVALS);
3088                 if (pad_id == METADATA_PAD ||
3089                     !v4l2_subdev_has_op(unicam->sensor, video,
3090                                         g_frame_interval))
3091                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
3092                 if (pad_id == METADATA_PAD ||
3093                     !v4l2_subdev_has_op(unicam->sensor, video,
3094                                         s_frame_interval))
3095                         v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
3096
3097                 if (pad_id == METADATA_PAD ||
3098                     !v4l2_subdev_has_op(unicam->sensor, pad,
3099                                         enum_frame_size))
3100                         v4l2_disable_ioctl(&node->video_dev,
3101                                            VIDIOC_ENUM_FRAMESIZES);
3102
3103                 if (node->pad_id == METADATA_PAD ||
3104                     !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
3105                         v4l2_disable_ioctl(&node->video_dev,
3106                                            VIDIOC_S_SELECTION);
3107
3108                 if (node->pad_id == METADATA_PAD ||
3109                     !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
3110                         v4l2_disable_ioctl(&node->video_dev,
3111                                            VIDIOC_G_SELECTION);
3112         }
3113
3114         ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
3115         if (ret) {
3116                 unicam_err(unicam, "Unable to register video device %s\n",
3117                            vdev->name);
3118                 return ret;
3119         }
3120
3121         /*
3122          * Acquire a reference to unicam, which will be released when the video
3123          * device is unregistered and userspace has closed all open file
3124          * handles.
3125          */
3126         unicam_get(unicam);
3127         node->registered = true;
3128
3129         if (pad_id != METADATA_PAD || unicam->sensor_embedded_data) {
3130                 ret = media_create_pad_link(&unicam->sensor->entity,
3131                                             node->src_pad_id,
3132                                             &node->video_dev.entity, 0,
3133                                             MEDIA_LNK_FL_ENABLED |
3134                                             MEDIA_LNK_FL_IMMUTABLE);
3135                 if (ret)
3136                         unicam_err(unicam, "Unable to create pad link for %s\n",
3137                                    vdev->name);
3138         }
3139
3140         return ret;
3141 }
3142
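/* Free the per-node dummy buffers and unregister any registered nodes. */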
3143 static void unregister_nodes(struct unicam_device *unicam)
3144 {
3145         unsigned int i;
3146
3147         for (i = 0; i < ARRAY_SIZE(unicam->node); i++) {
3148                 struct unicam_node *node = &unicam->node[i];
3149
3150                 if (node->dummy_buf_cpu_addr) {
3151                         dma_free_coherent(&unicam->pdev->dev, DUMMY_BUF_SIZE,
3152                                           node->dummy_buf_cpu_addr,
3153                                           node->dummy_buf_dma_addr);
3154                 }
3155
3156                 if (node->registered) {
3157                         node->registered = false;
3158                         video_unregister_device(&node->video_dev);
3159                 }
3160         }
3161 }
3162
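/*
 * Async notifier "complete" callback: allocate a subdev state for the bound
 * sensor, map its source pads to our nodes, register the image node (and the
 * metadata node if the sensor exposes a second source pad for embedded data)
 * and finally expose the subdevice nodes to userspace.
 */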
3163 static int unicam_async_complete(struct v4l2_async_notifier *notifier)
3164 {
3165         static struct lock_class_key key;
3166         struct unicam_device *unicam = to_unicam_device(notifier->v4l2_dev);
3167         unsigned int i, source_pads = 0;
3168         int ret;
3169
3170         unicam->v4l2_dev.notify = unicam_notify;
3171
3172         unicam->sensor_state = __v4l2_subdev_state_alloc(unicam->sensor,
3173                                                          "unicam:async->lock", &key);
3174         if (!unicam->sensor_state)
3175                 return -ENOMEM;
3176
3177         for (i = 0; i < unicam->sensor->entity.num_pads; i++) {
3178                 if (unicam->sensor->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) {
3179                         if (source_pads < MAX_NODES) {
3180                                 unicam->node[source_pads].src_pad_id = i;
3181                                 unicam_dbg(3, unicam, "source pad %u is index %u\n",
3182                                            source_pads, i);
3183                         }
3184                         source_pads++;
3185                 }
3186         }
3187         if (!source_pads) {
3188                 unicam_err(unicam, "No source pads on sensor.\n");
3189                 ret = -ENODEV;
3190                 goto unregister;
3191         }
3192
3193         ret = register_node(unicam, &unicam->node[IMAGE_PAD],
3194                             V4L2_BUF_TYPE_VIDEO_CAPTURE, IMAGE_PAD);
3195         if (ret) {
3196                 unicam_err(unicam, "Unable to register image video device.\n");
3197                 goto unregister;
3198         }
3199
3200         if (source_pads >= 2) {
3201                 unicam->sensor_embedded_data = true;
3202
3203                 ret = register_node(unicam, &unicam->node[METADATA_PAD],
3204                                     V4L2_BUF_TYPE_META_CAPTURE, METADATA_PAD);
3205                 if (ret) {
3206                         unicam_err(unicam, "Unable to register metadata video device.\n");
3207                         goto unregister;
3208                 }
3209         }
3210
3211         if (unicam->mc_api)
3212                 ret = v4l2_device_register_subdev_nodes(&unicam->v4l2_dev);
3213         else
3214                 ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
3215         if (ret) {
3216                 unicam_err(unicam, "Unable to register subdev nodes.\n");
3217                 goto unregister;
3218         }
3219
3220         /*
3221          * Release the initial reference; all references are now owned by the
3222          * video devices.
3223          */
3224         unicam_put(unicam);
3225         return 0;
3226
3227 unregister:
3228         unregister_nodes(unicam);
3229         unicam_put(unicam);
3230
3231         return ret;
3232 }
3233
3234 static const struct v4l2_async_notifier_operations unicam_async_ops = {
3235         .bound = unicam_async_bound,
3236         .complete = unicam_async_complete,
3237 };
3238
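/*
 * Parse the Unicam node's local endpoint from the device tree, validate the
 * CSI-2 D-PHY or CCP2 bus configuration, and register an async notifier that
 * waits for the remote sensor subdevice to probe.
 *
 * A purely illustrative sketch of the kind of endpoint this expects (node
 * labels and the remote sensor endpoint name are hypothetical):
 *
 *	&csi1 {
 *		brcm,num-data-lanes = <2>;
 *		port {
 *			csi1_ep: endpoint {
 *				remote-endpoint = <&sensor_out>;
 *				data-lanes = <1 2>;
 *			};
 *		};
 *	};
 */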
3239 static int of_unicam_connect_subdevs(struct unicam_device *dev)
3240 {
3241         struct platform_device *pdev = dev->pdev;
3242         struct v4l2_fwnode_endpoint ep = { };
3243         struct device_node *ep_node;
3244         struct device_node *sensor_node;
3245         unsigned int lane;
3246         int ret = -EINVAL;
3247
3248         if (of_property_read_u32(pdev->dev.of_node, "brcm,num-data-lanes",
3249                                  &dev->max_data_lanes) < 0) {
3250                 unicam_err(dev, "number of data lanes not set\n");
3251                 return -EINVAL;
3252         }
3253
3254         /* Get the local endpoint and remote device. */
3255         ep_node = of_graph_get_next_endpoint(pdev->dev.of_node, NULL);
3256         if (!ep_node) {
3257                 unicam_dbg(3, dev, "can't get next endpoint\n");
3258                 return -EINVAL;
3259         }
3260
3261         unicam_dbg(3, dev, "ep_node is %pOF\n", ep_node);
3262
3263         sensor_node = of_graph_get_remote_port_parent(ep_node);
3264         if (!sensor_node) {
3265                 unicam_dbg(3, dev, "can't get remote parent\n");
3266                 goto cleanup_exit;
3267         }
3268
3269         unicam_dbg(1, dev, "found subdevice %pOF\n", sensor_node);
3270
3271         /* Parse the local endpoint and validate its configuration. */
3272         v4l2_fwnode_endpoint_parse(of_fwnode_handle(ep_node), &ep);
3273
3274         unicam_dbg(3, dev, "parsed local endpoint, bus_type %u\n",
3275                    ep.bus_type);
3276
3277         dev->bus_type = ep.bus_type;
3278
3279         switch (ep.bus_type) {
3280         case V4L2_MBUS_CSI2_DPHY:
3281                 switch (ep.bus.mipi_csi2.num_data_lanes) {
3282                 case 1:
3283                 case 2:
3284                 case 4:
3285                         break;
3286
3287                 default:
3288                         unicam_err(dev, "subdevice %pOF: %u data lanes not supported\n",
3289                                    sensor_node,
3290                                    ep.bus.mipi_csi2.num_data_lanes);
3291                         goto cleanup_exit;
3292                 }
3293
3294                 for (lane = 0; lane < ep.bus.mipi_csi2.num_data_lanes; lane++) {
3295                         if (ep.bus.mipi_csi2.data_lanes[lane] != lane + 1) {
3296                                 unicam_err(dev, "subdevice %pOF: data lanes reordering not supported\n",
3297                                            sensor_node);
3298                                 goto cleanup_exit;
3299                         }
3300                 }
3301
3302                 if (ep.bus.mipi_csi2.num_data_lanes > dev->max_data_lanes) {
3303                         unicam_err(dev, "subdevice requires %u data lanes when only %u are supported\n",
3304                                    ep.bus.mipi_csi2.num_data_lanes,
3305                                    dev->max_data_lanes);
3306                         goto cleanup_exit;
3307                 }
3307
3308                 dev->max_data_lanes = ep.bus.mipi_csi2.num_data_lanes;
3309                 dev->bus_flags = ep.bus.mipi_csi2.flags;
3310
3311                 break;
3312
3313         case V4L2_MBUS_CCP2:
3314                 if (ep.bus.mipi_csi1.clock_lane != 0 ||
3315                     ep.bus.mipi_csi1.data_lane != 1) {
3316                         unicam_err(dev, "subdevice %pOF: unsupported lanes configuration\n",
3317                                    sensor_node);
3318                         goto cleanup_exit;
3319                 }
3320
3321                 dev->max_data_lanes = 1;
3322                 dev->bus_flags = ep.bus.mipi_csi1.strobe;
3323                 break;
3324
3325         default:
3326                 /* Unsupported bus type */
3327                 unicam_err(dev, "subdevice %pOF: unsupported bus type %u\n",
3328                            sensor_node, ep.bus_type);
3329                 goto cleanup_exit;
3330         }
3331
3332         unicam_dbg(3, dev, "subdevice %pOF: %s bus, %u data lanes, flags=0x%08x\n",
3333                    sensor_node,
3334                    dev->bus_type == V4L2_MBUS_CSI2_DPHY ? "CSI-2" : "CCP2",
3335                    dev->max_data_lanes, dev->bus_flags);
3336
3337         /* Initialize and register the async notifier. */
3338         v4l2_async_nf_init(&dev->notifier);
3339         dev->notifier.ops = &unicam_async_ops;
3340
3341         dev->asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
3342         dev->asd.match.fwnode = fwnode_graph_get_remote_endpoint(of_fwnode_handle(ep_node));
3343         ret = __v4l2_async_nf_add_subdev(&dev->notifier, &dev->asd);
3344         if (ret) {
3345                 unicam_err(dev, "Error adding subdevice: %d\n", ret);
3346                 goto cleanup_exit;
3347         }
3348
3349         ret = v4l2_async_nf_register(&dev->v4l2_dev, &dev->notifier);
3350         if (ret) {
3351                 unicam_err(dev, "Error registering async notifier: %d\n", ret);
3352                 ret = -EINVAL;
3353         }
3354
3355 cleanup_exit:
3356         of_node_put(sensor_node);
3357         of_node_put(ep_node);
3358
3359         return ret;
3360 }
3361
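/*
 * Probe: map the register blocks, claim the "lp" and "vpu" clocks and the
 * capture interrupt, register the media controller and V4L2 devices, then
 * parse the device tree to hook up the sensor via the async notifier. The
 * video nodes are only registered later, from unicam_async_complete(), once
 * the sensor subdevice has bound.
 */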
3362 static int unicam_probe(struct platform_device *pdev)
3363 {
3364         struct unicam_device *unicam;
3365         int ret;
3366
3367         unicam = kzalloc(sizeof(*unicam), GFP_KERNEL);
3368         if (!unicam)
3369                 return -ENOMEM;
3370
3371         kref_init(&unicam->kref);
3372         unicam->pdev = pdev;
3373
3374         /*
3375          * Adopt the current setting of the module parameter, and check
3376          * whether the device tree requests the media controller API.
3377          */
3378         unicam->mc_api = media_controller;
3379         if (of_property_read_bool(pdev->dev.of_node, "brcm,media-controller"))
3380                 unicam->mc_api = true;
3381
3382         unicam->base = devm_platform_ioremap_resource(pdev, 0);
3383         if (IS_ERR(unicam->base)) {
3384                 unicam_err(unicam, "Failed to get main io block\n");
3385                 ret = PTR_ERR(unicam->base);
3386                 goto err_unicam_put;
3387         }
3388
3389         unicam->clk_gate_base = devm_platform_ioremap_resource(pdev, 1);
3390         if (IS_ERR(unicam->clk_gate_base)) {
3391                 unicam_err(unicam, "Failed to get 2nd io block\n");
3392                 ret = PTR_ERR(unicam->clk_gate_base);
3393                 goto err_unicam_put;
3394         }
3395
3396         unicam->clock = devm_clk_get(&pdev->dev, "lp");
3397         if (IS_ERR(unicam->clock)) {
3398                 unicam_err(unicam, "Failed to get lp clock\n");
3399                 ret = PTR_ERR(unicam->clock);
3400                 goto err_unicam_put;
3401         }
3402
3403         unicam->vpu_clock = devm_clk_get(&pdev->dev, "vpu");
3404         if (IS_ERR(unicam->vpu_clock)) {
3405                 unicam_err(unicam, "Failed to get vpu clock\n");
3406                 ret = PTR_ERR(unicam->vpu_clock);
3407                 goto err_unicam_put;
3408         }
3409
3410         ret = platform_get_irq(pdev, 0);
3411         if (ret <= 0) {
3412                 dev_err(&pdev->dev, "No IRQ resource\n");
3413                 ret = -EINVAL;
3414                 goto err_unicam_put;
3415         }
3416
3417         ret = devm_request_irq(&pdev->dev, ret, unicam_isr, 0,
3418                                "unicam_capture0", unicam);
3419         if (ret) {
3420                 dev_err(&pdev->dev, "Unable to request interrupt\n");
3421                 ret = -EINVAL;
3422                 goto err_unicam_put;
3423         }
3424
3425         unicam->mdev.dev = &pdev->dev;
3426         strscpy(unicam->mdev.model, UNICAM_MODULE_NAME,
3427                 sizeof(unicam->mdev.model));
3428         strscpy(unicam->mdev.serial, "", sizeof(unicam->mdev.serial));
3429         snprintf(unicam->mdev.bus_info, sizeof(unicam->mdev.bus_info),
3430                  "platform:%s", dev_name(&pdev->dev));
3431         unicam->mdev.hw_revision = 0;
3432
3433         media_device_init(&unicam->mdev);
3434
3435         unicam->v4l2_dev.mdev = &unicam->mdev;
3436
3437         ret = v4l2_device_register(&pdev->dev, &unicam->v4l2_dev);
3438         if (ret) {
3439                 unicam_err(unicam,
3440                            "Unable to register v4l2 device.\n");
3441                 goto err_unicam_put;
3442         }
3443
3444         ret = media_device_register(&unicam->mdev);
3445         if (ret < 0) {
3446                 unicam_err(unicam,
3447                            "Unable to register media-controller device.\n");
3448                 goto err_v4l2_unregister;
3449         }
3450
3451         /* Reserve space for the controls */
3452         ret = v4l2_ctrl_handler_init(&unicam->ctrl_handler, 16);
3453         if (ret < 0)
3454                 goto err_media_unregister;
3455
3456         /* set the driver data in platform device */
3457         platform_set_drvdata(pdev, unicam);
3458
3459         ret = of_unicam_connect_subdevs(unicam);
3460         if (ret) {
3461                 dev_err(&pdev->dev, "Failed to connect subdevs\n");
3462                 goto err_media_unregister;
3463         }
3464
3465         /* Enable the block power domain */
3466         pm_runtime_enable(&pdev->dev);
3467
3468         return 0;
3469
3470 err_media_unregister:
3471         media_device_unregister(&unicam->mdev);
3472 err_v4l2_unregister:
3473         v4l2_device_unregister(&unicam->v4l2_dev);
3474 err_unicam_put:
3475         unicam_put(unicam);
3476
3477         return ret;
3478 }
3479
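/*
 * Remove: tear down in roughly the reverse order of probe - unregister the
 * async notifier, the V4L2 and media devices and the video nodes, then
 * disable runtime PM.
 */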
3480 static int unicam_remove(struct platform_device *pdev)
3481 {
3482         struct unicam_device *unicam = platform_get_drvdata(pdev);
3483
3484         unicam_dbg(2, unicam, "%s\n", __func__);
3485
3486         v4l2_async_nf_unregister(&unicam->notifier);
3487         v4l2_device_unregister(&unicam->v4l2_dev);
3488         media_device_unregister(&unicam->mdev);
3489         unregister_nodes(unicam);
3490
3491         pm_runtime_disable(&pdev->dev);
3492
3493         return 0;
3494 }
3495
3496 static const struct of_device_id unicam_of_match[] = {
3497         { .compatible = "brcm,bcm2835-unicam", },
3498         { /* sentinel */ },
3499 };
3500 MODULE_DEVICE_TABLE(of, unicam_of_match);
3501
3502 static struct platform_driver unicam_driver = {
3503         .probe          = unicam_probe,
3504         .remove         = unicam_remove,
3505         .driver = {
3506                 .name   = UNICAM_MODULE_NAME,
3507                 .of_match_table = of_match_ptr(unicam_of_match),
3508         },
3509 };
3510
3511 module_platform_driver(unicam_driver);
3512
3513 MODULE_AUTHOR("Dave Stevenson <dave.stevenson@raspberrypi.com>");
3514 MODULE_DESCRIPTION("BCM2835 Unicam driver");
3515 MODULE_LICENSE("GPL");
3516 MODULE_VERSION(UNICAM_VERSION);