Merge remote-tracking branch 'stable/linux-5.15.y' into rpi-5.15.y
[platform/kernel/linux-rpi.git] / drivers / gpu / drm / vc4 / vc4_hvs.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) 2015 Broadcom
4  */
5
6 /**
7  * DOC: VC4 HVS module.
8  *
9  * The Hardware Video Scaler (HVS) is the piece of hardware that does
10  * translation, scaling, colorspace conversion, and compositing of
11  * pixels stored in framebuffers into a FIFO of pixels going out to
12  * the Pixel Valve (CRTC).  It operates at the system clock rate (the
13  * system audio clock gate, specifically), which is much higher than
14  * the pixel clock rate.
15  *
16  * There is a single global HVS, with multiple output FIFOs that can
17  * be consumed by the PVs.  This file just manages the resources for
18  * the HVS, while the vc4_crtc.c code actually drives HVS setup for
19  * each CRTC.
20  */
21
22 #include <linux/bitfield.h>
23 #include <linux/clk.h>
24 #include <linux/component.h>
25 #include <linux/platform_device.h>
26
27 #include <drm/drm_atomic_helper.h>
28 #include <drm/drm_drv.h>
29 #include <drm/drm_vblank.h>
30
31 #include "vc4_drv.h"
32 #include "vc4_regs.h"
33
/*
 * Register set exposed through debugfs and dumped by vc4_hvs_dump_state():
 * the global HVS control/status registers, the three per-channel display
 * FIFO banks (CTRL/BKGND/STAT/BASE), and the OLED offset/matrix registers.
 */
static const struct debugfs_reg32 hvs_regs[] = {
	VC4_REG32(SCALER_DISPCTRL),
	VC4_REG32(SCALER_DISPSTAT),
	VC4_REG32(SCALER_DISPID),
	VC4_REG32(SCALER_DISPECTRL),
	VC4_REG32(SCALER_DISPPROF),
	VC4_REG32(SCALER_DISPDITHER),
	VC4_REG32(SCALER_DISPEOLN),
	VC4_REG32(SCALER_DISPLIST0),
	VC4_REG32(SCALER_DISPLIST1),
	VC4_REG32(SCALER_DISPLIST2),
	VC4_REG32(SCALER_DISPLSTAT),
	VC4_REG32(SCALER_DISPLACT0),
	VC4_REG32(SCALER_DISPLACT1),
	VC4_REG32(SCALER_DISPLACT2),
	VC4_REG32(SCALER_DISPCTRL0),
	VC4_REG32(SCALER_DISPBKGND0),
	VC4_REG32(SCALER_DISPSTAT0),
	VC4_REG32(SCALER_DISPBASE0),
	VC4_REG32(SCALER_DISPCTRL1),
	VC4_REG32(SCALER_DISPBKGND1),
	VC4_REG32(SCALER_DISPSTAT1),
	VC4_REG32(SCALER_DISPBASE1),
	VC4_REG32(SCALER_DISPCTRL2),
	VC4_REG32(SCALER_DISPBKGND2),
	VC4_REG32(SCALER_DISPSTAT2),
	VC4_REG32(SCALER_DISPBASE2),
	VC4_REG32(SCALER_DISPALPHA2),
	VC4_REG32(SCALER_OLEDOFFS),
	VC4_REG32(SCALER_OLEDCOEF0),
	VC4_REG32(SCALER_OLEDCOEF1),
	VC4_REG32(SCALER_OLEDCOEF2),
};
67
68 void vc4_hvs_dump_state(struct vc4_hvs *hvs)
69 {
70         struct drm_device *drm = &hvs->vc4->base;
71         struct drm_printer p = drm_info_printer(&hvs->pdev->dev);
72         int idx, i;
73
74         drm_print_regset32(&p, &hvs->regset);
75
76         if (!drm_dev_enter(drm, &idx))
77                 return;
78
79         DRM_INFO("HVS ctx:\n");
80         for (i = 0; i < 64; i += 4) {
81                 DRM_INFO("0x%08x (%s): 0x%08x 0x%08x 0x%08x 0x%08x\n",
82                          i * 4, i < HVS_BOOTLOADER_DLIST_END ? "B" : "D",
83                          readl((u32 __iomem *)hvs->dlist + i + 0),
84                          readl((u32 __iomem *)hvs->dlist + i + 1),
85                          readl((u32 __iomem *)hvs->dlist + i + 2),
86                          readl((u32 __iomem *)hvs->dlist + i + 3));
87         }
88
89         drm_dev_exit(idx);
90 }
91
92 static int vc4_hvs_debugfs_underrun(struct seq_file *m, void *data)
93 {
94         struct drm_info_node *node = m->private;
95         struct drm_device *dev = node->minor->dev;
96         struct vc4_dev *vc4 = to_vc4_dev(dev);
97         struct drm_printer p = drm_seq_file_printer(m);
98
99         drm_printf(&p, "%d\n", atomic_read(&vc4->underrun));
100
101         return 0;
102 }
103
104 static int vc4_hvs_debugfs_dlist(struct seq_file *m, void *data)
105 {
106         struct drm_info_node *node = m->private;
107         struct drm_device *dev = node->minor->dev;
108         struct vc4_dev *vc4 = to_vc4_dev(dev);
109         struct vc4_hvs *hvs = vc4->hvs;
110         struct drm_printer p = drm_seq_file_printer(m);
111         unsigned int next_entry_start = 0;
112         unsigned int i, j;
113         u32 dlist_word, dispstat;
114
115         for (i = 0; i < SCALER_CHANNELS_COUNT; i++) {
116                 dispstat = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(i)),
117                                          SCALER_DISPSTATX_MODE);
118                 if (dispstat == SCALER_DISPSTATX_MODE_DISABLED ||
119                     dispstat == SCALER_DISPSTATX_MODE_EOF) {
120                         drm_printf(&p, "HVS chan %u disabled\n", i);
121                         continue;
122                 }
123
124                 drm_printf(&p, "HVS chan %u:\n", i);
125
126                 for (j = HVS_READ(SCALER_DISPLISTX(i)); j < 256; j++) {
127                         dlist_word = readl((u32 __iomem *)vc4->hvs->dlist + j);
128                         drm_printf(&p, "dlist: %02d: 0x%08x\n", j,
129                                    dlist_word);
130                         if (!next_entry_start ||
131                             next_entry_start == j) {
132                                 if (dlist_word & SCALER_CTL0_END)
133                                         break;
134                                 next_entry_start = j +
135                                         VC4_GET_FIELD(dlist_word,
136                                                       SCALER_CTL0_SIZE);
137                         }
138                 }
139         }
140
141         return 0;
142 }
143
144 static int vc5_hvs_debugfs_gamma(struct seq_file *m, void *data)
145 {
146         struct drm_info_node *node = m->private;
147         struct drm_device *dev = node->minor->dev;
148         struct vc4_dev *vc4 = to_vc4_dev(dev);
149         struct vc4_hvs *hvs = vc4->hvs;
150         struct drm_printer p = drm_seq_file_printer(m);
151         unsigned int i, chan;
152         u32 dispstat, dispbkgndx;
153
154         for (chan = 0; chan < SCALER_CHANNELS_COUNT; chan++) {
155                 u32 x_c, grad;
156                 u32 offset = SCALER5_DSPGAMMA_START +
157                         chan * SCALER5_DSPGAMMA_CHAN_OFFSET;
158
159                 dispstat = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(chan)),
160                                          SCALER_DISPSTATX_MODE);
161                 if (dispstat == SCALER_DISPSTATX_MODE_DISABLED ||
162                     dispstat == SCALER_DISPSTATX_MODE_EOF) {
163                         drm_printf(&p, "HVS channel %u: Channel disabled\n", chan);
164                         continue;
165                 }
166
167                 dispbkgndx = HVS_READ(SCALER_DISPBKGNDX(chan));
168                 if (!(dispbkgndx & SCALER_DISPBKGND_GAMMA)) {
169                         drm_printf(&p, "HVS channel %u: Gamma disabled\n", chan);
170                         continue;
171                 }
172
173                 drm_printf(&p, "HVS channel %u:\n", chan);
174                 drm_printf(&p, "  red:\n");
175                 for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8) {
176                         x_c = HVS_READ(offset);
177                         grad = HVS_READ(offset + 4);
178                         drm_printf(&p, "  %08x %08x - x %u, c %u, grad %u\n",
179                                    x_c, grad,
180                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_X),
181                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_C),
182                                    grad);
183                 }
184                 drm_printf(&p, "  green:\n");
185                 for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8) {
186                         x_c = HVS_READ(offset);
187                         grad = HVS_READ(offset + 4);
188                         drm_printf(&p, "  %08x %08x - x %u, c %u, grad %u\n",
189                                    x_c, grad,
190                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_X),
191                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_C),
192                                    grad);
193                 }
194                 drm_printf(&p, "  blue:\n");
195                 for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8) {
196                         x_c = HVS_READ(offset);
197                         grad = HVS_READ(offset + 4);
198                         drm_printf(&p, "  %08x %08x - x %u, c %u, grad %u\n",
199                                    x_c, grad,
200                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_X),
201                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_C),
202                                    grad);
203                 }
204
205                 /* Alpha only valid on channel 2 */
206                 if (chan != 2)
207                         continue;
208
209                 drm_printf(&p, "  alpha:\n");
210                 for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8) {
211                         x_c = HVS_READ(offset);
212                         grad = HVS_READ(offset + 4);
213                         drm_printf(&p, "  %08x %08x - x %u, c %u, grad %u\n",
214                                    x_c, grad,
215                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_X),
216                                    VC4_GET_FIELD(x_c, SCALER5_DSPGAMMA_OFF_C),
217                                    grad);
218                 }
219         }
220         return 0;
221 }
222
223 /* The filter kernel is composed of dwords each containing 3 9-bit
224  * signed integers packed next to each other.
225  */
226 #define VC4_INT_TO_COEFF(coeff) (coeff & 0x1ff)
227 #define VC4_PPF_FILTER_WORD(c0, c1, c2)                         \
228         ((((c0) & 0x1ff) << 0) |                                \
229          (((c1) & 0x1ff) << 9) |                                \
230          (((c2) & 0x1ff) << 18))
231
232 /* The whole filter kernel is arranged as the coefficients 0-16 going
233  * up, then a pad, then 17-31 going down and reversed within the
234  * dwords.  This means that a linear phase kernel (where it's
235  * symmetrical at the boundary between 15 and 16) has the last 5
236  * dwords matching the first 5, but reversed.
237  */
238 #define VC4_LINEAR_PHASE_KERNEL(c0, c1, c2, c3, c4, c5, c6, c7, c8,     \
239                                 c9, c10, c11, c12, c13, c14, c15)       \
240         {VC4_PPF_FILTER_WORD(c0, c1, c2),                               \
241          VC4_PPF_FILTER_WORD(c3, c4, c5),                               \
242          VC4_PPF_FILTER_WORD(c6, c7, c8),                               \
243          VC4_PPF_FILTER_WORD(c9, c10, c11),                             \
244          VC4_PPF_FILTER_WORD(c12, c13, c14),                            \
245          VC4_PPF_FILTER_WORD(c15, c15, 0)}
246
247 #define VC4_LINEAR_PHASE_KERNEL_DWORDS 6
248 #define VC4_KERNEL_DWORDS (VC4_LINEAR_PHASE_KERNEL_DWORDS * 2 - 1)
249
250 /* Recommended B=1/3, C=1/3 filter choice from Mitchell/Netravali.
251  * http://www.cs.utexas.edu/~fussell/courses/cs384g/lectures/mitchell/Mitchell.pdf
252  */
253 static const u32 mitchell_netravali_1_3_1_3_kernel[] =
254         VC4_LINEAR_PHASE_KERNEL(0, -2, -6, -8, -10, -8, -3, 2, 18,
255                                 50, 82, 119, 155, 187, 213, 227);
256
257 static int vc4_hvs_upload_linear_kernel(struct vc4_hvs *hvs,
258                                         struct drm_mm_node *space,
259                                         const u32 *kernel)
260 {
261         int ret, i;
262         u32 __iomem *dst_kernel;
263
264         /*
265          * NOTE: We don't need a call to drm_dev_enter()/drm_dev_exit()
266          * here since that function is only called from vc4_hvs_bind().
267          */
268
269         ret = drm_mm_insert_node(&hvs->dlist_mm, space, VC4_KERNEL_DWORDS);
270         if (ret) {
271                 DRM_ERROR("Failed to allocate space for filter kernel: %d\n",
272                           ret);
273                 return ret;
274         }
275
276         dst_kernel = hvs->dlist + space->start;
277
278         for (i = 0; i < VC4_KERNEL_DWORDS; i++) {
279                 if (i < VC4_LINEAR_PHASE_KERNEL_DWORDS)
280                         writel(kernel[i], &dst_kernel[i]);
281                 else {
282                         writel(kernel[VC4_KERNEL_DWORDS - i - 1],
283                                &dst_kernel[i]);
284                 }
285         }
286
287         return 0;
288 }
289
/*
 * Load the CRTC's cached 8-bit gamma tables (lut_r/g/b) into the HVS
 * gamma SRAM for the channel assigned to this CRTC.  VC4 path only;
 * VC5 uses the PWL registers via vc5_hvs_lut_load() instead (see
 * vc4_hvs_init_channel()).
 */
static void vc4_hvs_lut_load(struct vc4_hvs *hvs,
			     struct vc4_crtc *vc4_crtc)
{
	struct drm_device *drm = &hvs->vc4->base;
	struct drm_crtc *crtc = &vc4_crtc->base;
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
	int idx;
	u32 i;

	/* Skip all register access if the device has been unplugged. */
	if (!drm_dev_enter(drm, &idx))
		return;

	/* The LUT memory is laid out with each HVS channel in order,
	 * each of which takes 256 writes for R, 256 for G, then 256
	 * for B.
	 */
	HVS_WRITE(SCALER_GAMADDR,
		  SCALER_GAMADDR_AUTOINC |
		  (vc4_state->assigned_channel * 3 * crtc->gamma_size));

	/* AUTOINC advances the SRAM address on each GAMDATA write. */
	for (i = 0; i < crtc->gamma_size; i++)
		HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_r[i]);
	for (i = 0; i < crtc->gamma_size; i++)
		HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_g[i]);
	for (i = 0; i < crtc->gamma_size; i++)
		HVS_WRITE(SCALER_GAMDATA, vc4_crtc->lut_b[i]);

	drm_dev_exit(idx);
}
319
320 static void vc4_hvs_update_gamma_lut(struct vc4_hvs *hvs,
321                                      struct vc4_crtc *vc4_crtc)
322 {
323         struct drm_crtc_state *crtc_state = vc4_crtc->base.state;
324         struct drm_color_lut *lut = crtc_state->gamma_lut->data;
325         u32 length = drm_color_lut_size(crtc_state->gamma_lut);
326         u32 i;
327
328         for (i = 0; i < length; i++) {
329                 vc4_crtc->lut_r[i] = drm_color_lut_extract(lut[i].red, 8);
330                 vc4_crtc->lut_g[i] = drm_color_lut_extract(lut[i].green, 8);
331                 vc4_crtc->lut_b[i] = drm_color_lut_extract(lut[i].blue, 8);
332         }
333
334         vc4_hvs_lut_load(hvs, vc4_crtc);
335 }
336
/* Write one piecewise-linear gamma point: the packed x/c word at
 * @offset, then the gradient word in the adjacent register (+4).
 */
static void vc5_hvs_write_gamma_entry(struct vc4_hvs *hvs,
				      u32 offset,
				      struct vc5_gamma_entry *gamma)
{
	HVS_WRITE(offset, gamma->x_c_terms);
	HVS_WRITE(offset + 4, gamma->grad_term);
}
344
/*
 * Load the CRTC's cached PWL gamma tables (pwl_r/g/b, plus pwl_a on
 * channel 2 only) into the per-channel gamma register block.  VC5 path;
 * see vc4_hvs_lut_load() for the VC4 SRAM equivalent.
 */
static void vc5_hvs_lut_load(struct vc4_hvs *hvs,
			     struct vc4_crtc *vc4_crtc)
{
	struct drm_crtc_state *crtc_state = vc4_crtc->base.state;
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc_state);
	u32 i;
	/* Per-channel block: R, G, B (and A on chan 2) PWLs back to
	 * back, two 32-bit registers per point (hence offset += 8).
	 */
	u32 offset = SCALER5_DSPGAMMA_START +
		vc4_state->assigned_channel * SCALER5_DSPGAMMA_CHAN_OFFSET;

	for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8)
		vc5_hvs_write_gamma_entry(hvs, offset, &vc4_crtc->pwl_r[i]);
	for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8)
		vc5_hvs_write_gamma_entry(hvs, offset, &vc4_crtc->pwl_g[i]);
	for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8)
		vc5_hvs_write_gamma_entry(hvs, offset, &vc4_crtc->pwl_b[i]);

	if (vc4_state->assigned_channel == 2) {
		/* Alpha only valid on channel 2 */
		for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++, offset += 8)
			vc5_hvs_write_gamma_entry(hvs, offset, &vc4_crtc->pwl_a[i]);
	}
}
367
/*
 * Build the VC5 16-point piecewise-linear gamma tables from the CRTC's
 * gamma LUT and load them into hardware via vc5_hvs_lut_load().
 */
static void vc5_hvs_update_gamma_lut(struct vc4_hvs *hvs,
				     struct vc4_crtc *vc4_crtc)
{
	struct drm_crtc *crtc = &vc4_crtc->base;
	struct drm_color_lut *lut = crtc->state->gamma_lut->data;
	unsigned int step, i;
	u32 start, end;

/* For PWL point i, take the first and last LUT entries of the i-th
 * step-sized slice as the segment endpoints.  Uses the enclosing
 * function's i, step, start, end and lut.
 * NOTE(review): the gradient divides by (step - 1), so this assumes
 * crtc->gamma_size > SCALER5_DSPGAMMA_NUM_POINTS (step >= 2) — confirm
 * against the gamma_size set at CRTC init.
 */
#define VC5_HVS_UPDATE_GAMMA_ENTRY_FROM_LUT(pwl, chan)			\
	start = drm_color_lut_extract(lut[i * step].chan, 12);		\
	end = drm_color_lut_extract(lut[(i + 1) * step - 1].chan, 12);	\
									\
	/* Negative gradients not permitted by the hardware, so		\
	 * flatten such points out.					\
	 */								\
	if (end < start)						\
		end = start;						\
									\
	/* Assume 12bit pipeline.					\
	 * X evenly spread over full range (12 bit).			\
	 * C as U12.4 format.						\
	 * Gradient as U4.8 format.					\
	*/								\
	vc4_crtc->pwl[i] =						\
		VC5_HVS_SET_GAMMA_ENTRY(i << 8, start << 4,		\
				((end - start) << 4) / (step - 1))

	/* HVS5 has a 16 point piecewise linear function for each colour
	 * channel (including alpha on channel 2) on each display channel.
	 *
	 * Currently take a crude subsample of the gamma LUT, but this could
	 * be improved to implement curve fitting.
	 */
	step = crtc->gamma_size / SCALER5_DSPGAMMA_NUM_POINTS;
	for (i = 0; i < SCALER5_DSPGAMMA_NUM_POINTS; i++) {
		VC5_HVS_UPDATE_GAMMA_ENTRY_FROM_LUT(pwl_r, red);
		VC5_HVS_UPDATE_GAMMA_ENTRY_FROM_LUT(pwl_g, green);
		VC5_HVS_UPDATE_GAMMA_ENTRY_FROM_LUT(pwl_b, blue);
	}

	vc5_hvs_lut_load(hvs, vc4_crtc);
}
410
411 u8 vc4_hvs_get_fifo_frame_count(struct vc4_hvs *hvs, unsigned int fifo)
412 {
413         struct drm_device *drm = &hvs->vc4->base;
414         u8 field = 0;
415         int idx;
416
417         if (!drm_dev_enter(drm, &idx))
418                 return 0;
419
420         switch (fifo) {
421         case 0:
422                 field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT1),
423                                       SCALER_DISPSTAT1_FRCNT0);
424                 break;
425         case 1:
426                 field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT1),
427                                       SCALER_DISPSTAT1_FRCNT1);
428                 break;
429         case 2:
430                 field = VC4_GET_FIELD(HVS_READ(SCALER_DISPSTAT2),
431                                       SCALER_DISPSTAT2_FRCNT2);
432                 break;
433         }
434
435         drm_dev_exit(idx);
436         return field;
437 }
438
/*
 * Map an HVS output to the FIFO (display channel) currently feeding
 * it, or -EPIPE when nothing drives that output.  On VC4 the mapping
 * is a fixed identity; on VC5 outputs 2-5 are muxed and the routing is
 * read back from the DISPECTRL/DISPCTRL/DISPEOLN/DISPDITHER mux
 * fields.
 */
int vc4_hvs_get_fifo_from_output(struct vc4_hvs *hvs, unsigned int output)
{
	struct vc4_dev *vc4 = hvs->vc4;
	u32 reg;
	int ret;

	if (!vc4->is_vc5)
		return output;

	/*
	 * NOTE: We should probably use drm_dev_enter()/drm_dev_exit()
	 * here, but this function is only used during the DRM device
	 * initialization, so we should be fine.
	 */

	switch (output) {
	case 0:
		return 0;

	case 1:
		return 1;

	case 2:
		/* Output 2: mux value 0 selects FIFO 2, anything else
		 * routes FIFO 0 here.
		 */
		reg = HVS_READ(SCALER_DISPECTRL);
		ret = FIELD_GET(SCALER_DISPECTRL_DSP2_MUX_MASK, reg);
		if (ret == 0)
			return 2;

		return 0;

	case 3:
		/* NOTE(review): for outputs 3-5, mux value 3 is treated
		 * as "not driven by any FIFO" — confirm against the HVS
		 * register documentation.
		 */
		reg = HVS_READ(SCALER_DISPCTRL);
		ret = FIELD_GET(SCALER_DISPCTRL_DSP3_MUX_MASK, reg);
		if (ret == 3)
			return -EPIPE;

		return ret;

	case 4:
		reg = HVS_READ(SCALER_DISPEOLN);
		ret = FIELD_GET(SCALER_DISPEOLN_DSP4_MUX_MASK, reg);
		if (ret == 3)
			return -EPIPE;

		return ret;

	case 5:
		reg = HVS_READ(SCALER_DISPDITHER);
		ret = FIELD_GET(SCALER_DISPDITHER_DSP5_MUX_MASK, reg);
		if (ret == 3)
			return -EPIPE;

		return ret;

	default:
		return -EPIPE;
	}
}
497
/*
 * Program and enable the HVS channel assigned to @crtc for @mode.
 * @oneshot selects single-frame operation (used when feeding the
 * transposer) instead of free-running composition.
 *
 * Returns 0 on success, -ENODEV if the device has been unplugged.
 */
static int vc4_hvs_init_channel(struct vc4_hvs *hvs, struct drm_crtc *crtc,
				struct drm_display_mode *mode, bool oneshot)
{
	struct vc4_dev *vc4 = hvs->vc4;
	struct drm_device *drm = &vc4->base;
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct vc4_crtc_state *vc4_crtc_state = to_vc4_crtc_state(crtc->state);
	unsigned int chan = vc4_crtc_state->assigned_channel;
	bool interlace = mode->flags & DRM_MODE_FLAG_INTERLACE;
	u32 dispbkgndx;
	u32 dispctrl;
	int idx;

	if (!drm_dev_enter(drm, &idx))
		return -ENODEV;

	/* Pulse RESET (0 -> RESET -> 0) to put the channel in a known
	 * state before reconfiguring it.
	 */
	HVS_WRITE(SCALER_DISPCTRLX(chan), 0);
	HVS_WRITE(SCALER_DISPCTRLX(chan), SCALER_DISPCTRLX_RESET);
	HVS_WRITE(SCALER_DISPCTRLX(chan), 0);

	/* Turn on the scaler, which will wait for vstart to start
	 * compositing.
	 * When feeding the transposer, we should operate in oneshot
	 * mode.
	 */
	dispctrl = SCALER_DISPCTRLX_ENABLE;
	dispbkgndx = HVS_READ(SCALER_DISPBKGNDX(chan));

	/* VC4 and VC5 use different field layouts for width/height and
	 * the oneshot bit.
	 */
	if (!vc4->is_vc5) {
		dispctrl |= VC4_SET_FIELD(mode->hdisplay,
					  SCALER_DISPCTRLX_WIDTH) |
			    VC4_SET_FIELD(mode->vdisplay,
					  SCALER_DISPCTRLX_HEIGHT) |
			    (oneshot ? SCALER_DISPCTRLX_ONESHOT : 0);
		dispbkgndx |= SCALER_DISPBKGND_AUTOHS;
	} else {
		dispctrl |= VC4_SET_FIELD(mode->hdisplay,
					  SCALER5_DISPCTRLX_WIDTH) |
			    VC4_SET_FIELD(mode->vdisplay,
					  SCALER5_DISPCTRLX_HEIGHT) |
			    (oneshot ? SCALER5_DISPCTRLX_ONESHOT : 0);
		dispbkgndx &= ~SCALER5_DISPBKGND_BCK2BCK;
	}

	HVS_WRITE(SCALER_DISPCTRLX(chan), dispctrl);

	/* Recompute gamma/interlace bits from the new state rather than
	 * inheriting whatever was latched before.
	 */
	dispbkgndx &= ~SCALER_DISPBKGND_GAMMA;
	dispbkgndx &= ~SCALER_DISPBKGND_INTERLACE;

	if (crtc->state->gamma_lut)
		/* Enable gamma on if required */
		dispbkgndx |= SCALER_DISPBKGND_GAMMA;

	HVS_WRITE(SCALER_DISPBKGNDX(chan), dispbkgndx |
		  (interlace ? SCALER_DISPBKGND_INTERLACE : 0));

	/* Reload the LUT, since the SRAMs would have been disabled if
	 * all CRTCs had SCALER_DISPBKGND_GAMMA unset at once.
	 */
	if (!vc4->is_vc5)
		vc4_hvs_lut_load(hvs, vc4_crtc);
	else
		vc5_hvs_lut_load(hvs, vc4_crtc);

	drm_dev_exit(idx);

	return 0;
}
566
567 void vc4_hvs_stop_channel(struct vc4_hvs *hvs, unsigned int chan)
568 {
569         struct drm_device *drm = &hvs->vc4->base;
570         int idx;
571
572         if (!drm_dev_enter(drm, &idx))
573                 return;
574
575         if (HVS_READ(SCALER_DISPCTRLX(chan)) & SCALER_DISPCTRLX_ENABLE)
576                 goto out;
577
578         HVS_WRITE(SCALER_DISPCTRLX(chan),
579                   HVS_READ(SCALER_DISPCTRLX(chan)) | SCALER_DISPCTRLX_RESET);
580         HVS_WRITE(SCALER_DISPCTRLX(chan),
581                   HVS_READ(SCALER_DISPCTRLX(chan)) & ~SCALER_DISPCTRLX_ENABLE);
582
583         /* Once we leave, the scaler should be disabled and its fifo empty. */
584         WARN_ON_ONCE(HVS_READ(SCALER_DISPCTRLX(chan)) & SCALER_DISPCTRLX_RESET);
585
586         WARN_ON_ONCE(VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(chan)),
587                                    SCALER_DISPSTATX_MODE) !=
588                      SCALER_DISPSTATX_MODE_DISABLED);
589
590         WARN_ON_ONCE((HVS_READ(SCALER_DISPSTATX(chan)) &
591                       (SCALER_DISPSTATX_FULL | SCALER_DISPSTATX_EMPTY)) !=
592                      SCALER_DISPSTATX_EMPTY);
593
594 out:
595         drm_dev_exit(idx);
596 }
597
598 static int vc4_hvs_gamma_check(struct drm_crtc *crtc,
599                                struct drm_atomic_state *state)
600 {
601         struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
602         struct drm_connector_state *conn_state;
603         struct drm_connector *connector;
604         struct drm_device *dev = crtc->dev;
605         struct vc4_dev *vc4 = to_vc4_dev(dev);
606
607         if (!vc4->is_vc5)
608                 return 0;
609
610         if (!crtc_state->color_mgmt_changed)
611                 return 0;
612
613         if (crtc_state->gamma_lut) {
614                 unsigned int len = drm_color_lut_size(crtc_state->gamma_lut);
615
616                 if (len != crtc->gamma_size) {
617                         DRM_DEBUG_KMS("Invalid LUT size; got %u, expected %u\n",
618                                       len, crtc->gamma_size);
619                         return -EINVAL;
620                 }
621         }
622
623         connector = vc4_get_crtc_connector(crtc, crtc_state);
624         if (!connector)
625                 return -EINVAL;
626
627         if (!(connector->connector_type == DRM_MODE_CONNECTOR_HDMIA))
628                 return 0;
629
630         conn_state = drm_atomic_get_connector_state(state, connector);
631         if (!conn_state)
632                 return -EINVAL;
633
634         crtc_state->mode_changed = true;
635         return 0;
636 }
637
/*
 * CRTC atomic_check: reject multi-connector configurations, reserve
 * display-list memory sized for all planes in this state, then run the
 * VC5 gamma checks.  Returns 0 or a negative error code.
 */
int vc4_hvs_atomic_check(struct drm_crtc *crtc, struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc_state);
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct drm_plane *plane;
	unsigned long flags;
	const struct drm_plane_state *plane_state;
	u32 dlist_count = 0;
	int ret;

	/* The pixelvalve can only feed one encoder (and encoders are
	 * 1:1 with connectors.)
	 */
	if (hweight32(crtc_state->connector_mask) > 1)
		return -EINVAL;

	/* Sum the dlist words each plane will emit during the flush. */
	drm_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state)
		dlist_count += vc4_plane_dlist_size(plane_state);

	dlist_count++; /* Account for SCALER_CTL0_END. */

	/* mm_lock serializes dlist_mm allocations.
	 * NOTE(review): irqsave suggests the allocator is also touched
	 * from IRQ context elsewhere — confirm against the free path.
	 */
	spin_lock_irqsave(&vc4->hvs->mm_lock, flags);
	ret = drm_mm_insert_node(&vc4->hvs->dlist_mm, &vc4_state->mm,
				 dlist_count);
	spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags);
	if (ret)
		return ret;

	return vc4_hvs_gamma_check(crtc, state);
}
670
671 static void vc4_hvs_install_dlist(struct drm_crtc *crtc)
672 {
673         struct drm_device *dev = crtc->dev;
674         struct vc4_dev *vc4 = to_vc4_dev(dev);
675         struct vc4_hvs *hvs = vc4->hvs;
676         struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
677         int idx;
678
679         if (!drm_dev_enter(dev, &idx))
680                 return;
681
682         HVS_WRITE(SCALER_DISPLISTX(vc4_state->assigned_channel),
683                   vc4_state->mm.start);
684
685         drm_dev_exit(idx);
686 }
687
/*
 * Stage the CRTC's new display list: hand any pending flip event over
 * to the vblank path and record the new dlist start for the CRTC IRQ
 * handler.
 */
static void vc4_hvs_update_dlist(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
	unsigned long flags;

	if (crtc->state->event) {
		crtc->state->event->pipe = drm_crtc_index(crtc);

		/* Hold a vblank reference so the event can be completed. */
		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irqsave(&dev->event_lock, flags);

		/* When feeding the transposer the event is only handed
		 * over once the TXP has been armed (txp_armed); normal
		 * CRTCs take it unconditionally.
		 */
		if (!vc4_crtc->feeds_txp || vc4_state->txp_armed) {
			vc4_crtc->event = crtc->state->event;
			crtc->state->event = NULL;
		}

		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/* Publish the new dlist address under irq_lock. */
	spin_lock_irqsave(&vc4_crtc->irq_lock, flags);
	vc4_crtc->current_dlist = vc4_state->mm.start;
	spin_unlock_irqrestore(&vc4_crtc->irq_lock, flags);
}
714
/*
 * CRTC atomic_begin: publish the HVS channel assigned by atomic_check,
 * under irq_lock.
 */
void vc4_hvs_atomic_begin(struct drm_crtc *crtc,
			  struct drm_atomic_state *state)
{
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
	unsigned long flags;

	spin_lock_irqsave(&vc4_crtc->irq_lock, flags);
	vc4_crtc->current_hvs_channel = vc4_state->assigned_channel;
	spin_unlock_irqrestore(&vc4_crtc->irq_lock, flags);
}
726
727 void vc4_hvs_atomic_enable(struct drm_crtc *crtc,
728                            struct drm_atomic_state *state)
729 {
730         struct drm_device *dev = crtc->dev;
731         struct vc4_dev *vc4 = to_vc4_dev(dev);
732         struct drm_display_mode *mode = &crtc->state->adjusted_mode;
733         struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
734         bool oneshot = vc4_crtc->feeds_txp;
735
736         vc4_hvs_install_dlist(crtc);
737         vc4_hvs_update_dlist(crtc);
738         vc4_hvs_init_channel(vc4->hvs, crtc, mode, oneshot);
739 }
740
741 void vc4_hvs_atomic_disable(struct drm_crtc *crtc,
742                             struct drm_atomic_state *state)
743 {
744         struct drm_device *dev = crtc->dev;
745         struct vc4_dev *vc4 = to_vc4_dev(dev);
746         struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state, crtc);
747         struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(old_state);
748         unsigned int chan = vc4_state->assigned_channel;
749
750         vc4_hvs_stop_channel(vc4->hvs, chan);
751 }
752
/*
 * vc4_hvs_atomic_flush() - Write the active planes' display lists into
 * the region reserved for this CRTC in the hardware display list, then
 * apply background-fill and gamma updates for the assigned channel.
 */
void vc4_hvs_atomic_flush(struct drm_crtc *crtc,
			  struct drm_atomic_state *state)
{
	struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state,
									 crtc);
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_hvs *hvs = vc4->hvs;
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc->state);
	unsigned int channel = vc4_state->assigned_channel;
	struct drm_plane *plane;
	struct vc4_plane_state *vc4_plane_state;
	bool debug_dump_regs = false; /* flip to true for before/after dumps */
	bool enable_bg_fill = false;
	/* Cursor into the dlist slot reserved in vc4_state->mm. */
	u32 __iomem *dlist_start = vc4->hvs->dlist + vc4_state->mm.start;
	u32 __iomem *dlist_next = dlist_start;
	unsigned int zpos = 0;
	bool found = false;
	int idx;

	/* Device already gone: still send the vblank event so the atomic
	 * commit can complete.
	 */
	if (!drm_dev_enter(dev, &idx)) {
		vc4_crtc_send_vblank(crtc);
		return;
	}

	/* No channel assigned, nothing to program. */
	if (vc4_state->assigned_channel == VC4_HVS_CHANNEL_DISABLED) {
		drm_dev_exit(idx);
		return;
	}

	if (debug_dump_regs) {
		DRM_INFO("CRTC %d HVS before:\n", drm_crtc_index(crtc));
		vc4_hvs_dump_state(hvs);
	}

	/* Copy all the active planes' dlist contents to the hardware dlist,
	 * walking the planes in increasing normalized_zpos order.
	 */
	do {
		found = false;

		drm_atomic_crtc_for_each_plane(plane, crtc) {
			if (plane->state->normalized_zpos != zpos)
				continue;

			/* Is this the first active plane? */
			if (dlist_next == dlist_start) {
				/* We need to enable background fill when a plane
				 * could be alpha blending from the background, i.e.
				 * where no other plane is underneath. It suffices to
				 * consider the first active plane here since we set
				 * needs_bg_fill such that either the first plane
				 * already needs it or all planes on top blend from
				 * the first or a lower plane.
				 */
				vc4_plane_state = to_vc4_plane_state(plane->state);
				enable_bg_fill = vc4_plane_state->needs_bg_fill;
			}

			dlist_next += vc4_plane_write_dlist(plane, dlist_next);

			found = true;
		}

		zpos++;
	} while (found);

	/* Terminate the hardware display list. */
	writel(SCALER_CTL0_END, dlist_next);
	dlist_next++;

	/* What we wrote must exactly fill the reserved dlist region. */
	WARN_ON_ONCE(dlist_next - dlist_start != vc4_state->mm.size);

	if (enable_bg_fill)
		/* This sets a black background color fill, as is the case
		 * with other DRM drivers.
		 */
		HVS_WRITE(SCALER_DISPBKGNDX(channel),
			  HVS_READ(SCALER_DISPBKGNDX(channel)) |
			  SCALER_DISPBKGND_FILL);

	/* Only update DISPLIST if the CRTC was already running and is not
	 * being disabled.
	 * vc4_crtc_enable() takes care of updating the dlist just after
	 * re-enabling VBLANK interrupts and before enabling the engine.
	 * If the CRTC is being disabled, there's no point in updating this
	 * information.
	 */
	if (crtc->state->active && old_state->active) {
		vc4_hvs_install_dlist(crtc);
		vc4_hvs_update_dlist(crtc);
	}

	if (crtc->state->color_mgmt_changed) {
		u32 dispbkgndx = HVS_READ(SCALER_DISPBKGNDX(channel));

		if (crtc->state->gamma_lut) {
			if (!vc4->is_vc5) {
				vc4_hvs_update_gamma_lut(hvs, vc4_crtc);
				dispbkgndx |= SCALER_DISPBKGND_GAMMA;
			} else {
				vc5_hvs_update_gamma_lut(hvs, vc4_crtc);
			}
		} else {
			/* Unsetting DISPBKGND_GAMMA skips the gamma lut step
			 * in hardware, which is the same as a linear lut that
			 * DRM expects us to use in absence of a user lut.
			 *
			 * Do NOT change state dynamically for hvs5 as it
			 * inserts a delay in the pipeline that will cause
			 * stalls if enabled/disabled whilst running. The other
			 * should already be disabling/enabling the pipeline
			 * when gamma changes.
			 */
			if (!vc4->is_vc5)
				dispbkgndx &= ~SCALER_DISPBKGND_GAMMA;
		}
		HVS_WRITE(SCALER_DISPBKGNDX(channel), dispbkgndx);
	}

	if (debug_dump_regs) {
		DRM_INFO("CRTC %d HVS after:\n", drm_crtc_index(crtc));
		vc4_hvs_dump_state(hvs);
	}

	drm_dev_exit(idx);
}
878
879 void vc4_hvs_mask_underrun(struct vc4_hvs *hvs, int channel)
880 {
881         struct drm_device *drm = &hvs->vc4->base;
882         u32 dispctrl;
883         int idx;
884
885         if (!drm_dev_enter(drm, &idx))
886                 return;
887
888         dispctrl = HVS_READ(SCALER_DISPCTRL);
889         dispctrl &= ~(hvs->vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) :
890                                          SCALER_DISPCTRL_DSPEISLUR(channel));
891
892         HVS_WRITE(SCALER_DISPCTRL, dispctrl);
893
894         drm_dev_exit(idx);
895 }
896
897 void vc4_hvs_unmask_underrun(struct vc4_hvs *hvs, int channel)
898 {
899         struct drm_device *drm = &hvs->vc4->base;
900         u32 dispctrl;
901         int idx;
902
903         if (!drm_dev_enter(drm, &idx))
904                 return;
905
906         dispctrl = HVS_READ(SCALER_DISPCTRL);
907         dispctrl |= (hvs->vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) :
908                                         SCALER_DISPCTRL_DSPEISLUR(channel));
909
910         HVS_WRITE(SCALER_DISPSTAT,
911                   SCALER_DISPSTAT_EUFLOW(channel));
912         HVS_WRITE(SCALER_DISPCTRL, dispctrl);
913
914         drm_dev_exit(idx);
915 }
916
/* Account an HVS underrun: bump the debugfs-visible counter and log it. */
static void vc4_hvs_report_underrun(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	atomic_inc(&vc4->underrun);
	DRM_DEV_ERROR(dev->dev, "HVS underrun\n");
}
924
925 static irqreturn_t vc4_hvs_irq_handler(int irq, void *data)
926 {
927         struct drm_device *dev = data;
928         struct vc4_dev *vc4 = to_vc4_dev(dev);
929         struct vc4_hvs *hvs = vc4->hvs;
930         irqreturn_t irqret = IRQ_NONE;
931         int channel;
932         u32 control;
933         u32 status;
934         u32 dspeislur;
935
936         /*
937          * NOTE: We don't need to protect the register access using
938          * drm_dev_enter() there because the interrupt handler lifetime
939          * is tied to the device itself, and not to the DRM device.
940          *
941          * So when the device will be gone, one of the first thing we
942          * will be doing will be to unregister the interrupt handler,
943          * and then unregister the DRM device. drm_dev_enter() would
944          * thus always succeed if we are here.
945          */
946
947         status = HVS_READ(SCALER_DISPSTAT);
948         control = HVS_READ(SCALER_DISPCTRL);
949
950         for (channel = 0; channel < SCALER_CHANNELS_COUNT; channel++) {
951                 dspeislur = vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) :
952                                           SCALER_DISPCTRL_DSPEISLUR(channel);
953                 /* Interrupt masking is not always honored, so check it here. */
954                 if (status & SCALER_DISPSTAT_EUFLOW(channel) &&
955                     control & dspeislur) {
956                         vc4_hvs_mask_underrun(hvs, channel);
957                         vc4_hvs_report_underrun(dev);
958
959                         irqret = IRQ_HANDLED;
960                 }
961         }
962
963         /* Clear every per-channel interrupt flag. */
964         HVS_WRITE(SCALER_DISPSTAT, SCALER_DISPSTAT_IRQMASK(0) |
965                                    SCALER_DISPSTAT_IRQMASK(1) |
966                                    SCALER_DISPSTAT_IRQMASK(2));
967
968         return irqret;
969 }
970
971 int vc4_hvs_debugfs_init(struct drm_minor *minor)
972 {
973         struct drm_device *drm = minor->dev;
974         struct vc4_dev *vc4 = to_vc4_dev(drm);
975         struct vc4_hvs *hvs = vc4->hvs;
976         int ret;
977
978         if (vc4->firmware_kms)
979                 return 0;
980
981         if (!vc4->hvs)
982                 return -ENODEV;
983
984         if (!vc4->is_vc5)
985                 debugfs_create_bool("hvs_load_tracker", S_IRUGO | S_IWUSR,
986                                     minor->debugfs_root,
987                                     &vc4->load_tracker_enabled);
988
989         if (vc4->is_vc5)
990                 vc4_debugfs_add_file(minor, "hvs_gamma",
991                                      vc5_hvs_debugfs_gamma, NULL);
992
993         ret = vc4_debugfs_add_file(minor, "hvs_underrun",
994                                    vc4_hvs_debugfs_underrun, NULL);
995         if (ret)
996                 return ret;
997
998         ret = vc4_debugfs_add_regset32(minor, "hvs_regs",
999                                        &hvs->regset);
1000         if (ret)
1001                 return ret;
1002
1003         ret = vc4_debugfs_add_file(minor, "hvs_dlists",
1004                                    vc4_hvs_debugfs_dlist, NULL);
1005         if (ret)
1006                 return ret;
1007
1008         return 0;
1009 }
1010
/*
 * vc4_hvs_bind() - Component bind callback for the HVS.
 *
 * Maps the register space, grabs/enables the core clock (VC5 only),
 * sets up the dlist and LBM allocators, uploads the scaling filter
 * kernel, programs the initial output-mux routing, interrupt masks,
 * AXI panic thresholds and COB allocations, then installs the
 * underrun IRQ handler.
 */
static int vc4_hvs_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = dev_get_drvdata(master);
	struct vc4_dev *vc4 = to_vc4_dev(drm);
	struct vc4_hvs *hvs = NULL;
	int ret;
	u32 dispctrl;
	u32 reg, top;

	/* DRM-managed allocation: released with the DRM device. */
	hvs = drmm_kzalloc(drm, sizeof(*hvs), GFP_KERNEL);
	if (!hvs)
		return -ENOMEM;
	hvs->vc4 = vc4;
	hvs->pdev = pdev;

	hvs->regs = vc4_ioremap_regs(pdev, 0);
	if (IS_ERR(hvs->regs))
		return PTR_ERR(hvs->regs);

	/* Back the debugfs "hvs_regs" dump with the mapped registers. */
	hvs->regset.base = hvs->regs;
	hvs->regset.regs = hvs_regs;
	hvs->regset.nregs = ARRAY_SIZE(hvs_regs);

	if (vc4->is_vc5) {
		unsigned long min_rate;
		unsigned long max_rate;

		hvs->core_clk = devm_clk_get(&pdev->dev, NULL);
		if (IS_ERR(hvs->core_clk)) {
			dev_err(&pdev->dev, "Couldn't get core clock\n");
			return PTR_ERR(hvs->core_clk);
		}

		/* NOTE(review): the 550/600 MHz thresholds gate HDMI
		 * scrambling and 4096x2160 support on the achievable core
		 * clock range — confirm against the clock provider.
		 */
		max_rate = clk_get_max_rate(hvs->core_clk);
		if (max_rate >= 550000000)
			hvs->vc5_hdmi_enable_scrambling = true;

		min_rate = clk_get_min_rate(hvs->core_clk);
		if (min_rate >= 600000000)
			hvs->vc5_hdmi_enable_4096by2160 = true;

		ret = clk_prepare_enable(hvs->core_clk);
		if (ret) {
			dev_err(&pdev->dev, "Couldn't enable the core clock\n");
			return ret;
		}
	}

	/* The display list memory sits at a different offset on VC5. */
	if (!vc4->is_vc5)
		hvs->dlist = hvs->regs + SCALER_DLIST_START;
	else
		hvs->dlist = hvs->regs + SCALER5_DLIST_START;

	spin_lock_init(&hvs->mm_lock);

	/* Set up the HVS display list memory manager.  We never
	 * overwrite the setup from the bootloader (just 128b out of
	 * our 16K), since we don't want to scramble the screen when
	 * transitioning from the firmware's boot setup to runtime.
	 */
	drm_mm_init(&hvs->dlist_mm,
		    HVS_BOOTLOADER_DLIST_END,
		    (SCALER_DLIST_SIZE >> 2) - HVS_BOOTLOADER_DLIST_END);

	/* Set up the HVS LBM memory manager.  We could have some more
	 * complicated data structure that allowed reuse of LBM areas
	 * between planes when they don't overlap on the screen, but
	 * for now we just allocate globally.
	 */
	if (!vc4->is_vc5)
		/* 48k words of 2x12-bit pixels */
		drm_mm_init(&hvs->lbm_mm, 0, 48 * 1024);
	else
		/* 60k words of 4x12-bit pixels */
		drm_mm_init(&hvs->lbm_mm, 0, 60 * 1024);

	/* Upload filter kernels.  We only have the one for now, so we
	 * keep it around for the lifetime of the driver.
	 */
	ret = vc4_hvs_upload_linear_kernel(hvs,
					   &hvs->mitchell_netravali_filter,
					   mitchell_netravali_1_3_1_3_kernel);
	if (ret)
		return ret;

	vc4->hvs = hvs;

	/* Program the DSP2..DSP5 output mux fields, preserving the other
	 * bits of each register (read-modify-write).
	 */
	reg = HVS_READ(SCALER_DISPECTRL);
	reg &= ~SCALER_DISPECTRL_DSP2_MUX_MASK;
	HVS_WRITE(SCALER_DISPECTRL,
		  reg | VC4_SET_FIELD(0, SCALER_DISPECTRL_DSP2_MUX));

	reg = HVS_READ(SCALER_DISPCTRL);
	reg &= ~SCALER_DISPCTRL_DSP3_MUX_MASK;
	HVS_WRITE(SCALER_DISPCTRL,
		  reg | VC4_SET_FIELD(3, SCALER_DISPCTRL_DSP3_MUX));

	reg = HVS_READ(SCALER_DISPEOLN);
	reg &= ~SCALER_DISPEOLN_DSP4_MUX_MASK;
	HVS_WRITE(SCALER_DISPEOLN,
		  reg | VC4_SET_FIELD(3, SCALER_DISPEOLN_DSP4_MUX));

	reg = HVS_READ(SCALER_DISPDITHER);
	reg &= ~SCALER_DISPDITHER_DSP5_MUX_MASK;
	HVS_WRITE(SCALER_DISPDITHER,
		  reg | VC4_SET_FIELD(3, SCALER_DISPDITHER_DSP5_MUX));

	dispctrl = HVS_READ(SCALER_DISPCTRL);

	/* Enable the scaler and the per-channel DISPEIRQ enables. */
	dispctrl |= SCALER_DISPCTRL_ENABLE;
	dispctrl |= SCALER_DISPCTRL_DISPEIRQ(0) |
		    SCALER_DISPCTRL_DISPEIRQ(1) |
		    SCALER_DISPCTRL_DISPEIRQ(2);

	/* Mask the remaining interrupt sources; the underrun (DSPEISLUR)
	 * ones are unmasked later per channel by vc4_hvs_unmask_underrun().
	 */
	if (!vc4->is_vc5)
		dispctrl &= ~(SCALER_DISPCTRL_DMAEIRQ |
			      SCALER_DISPCTRL_SLVWREIRQ |
			      SCALER_DISPCTRL_SLVRDEIRQ |
			      SCALER_DISPCTRL_DSPEIEOF(0) |
			      SCALER_DISPCTRL_DSPEIEOF(1) |
			      SCALER_DISPCTRL_DSPEIEOF(2) |
			      SCALER_DISPCTRL_DSPEIEOLN(0) |
			      SCALER_DISPCTRL_DSPEIEOLN(1) |
			      SCALER_DISPCTRL_DSPEIEOLN(2) |
			      SCALER_DISPCTRL_DSPEISLUR(0) |
			      SCALER_DISPCTRL_DSPEISLUR(1) |
			      SCALER_DISPCTRL_DSPEISLUR(2) |
			      SCALER_DISPCTRL_SCLEIRQ);
	else
		dispctrl &= ~(SCALER_DISPCTRL_DMAEIRQ |
			      SCALER5_DISPCTRL_SLVEIRQ |
			      SCALER5_DISPCTRL_DSPEIEOF(0) |
			      SCALER5_DISPCTRL_DSPEIEOF(1) |
			      SCALER5_DISPCTRL_DSPEIEOF(2) |
			      SCALER5_DISPCTRL_DSPEIEOLN(0) |
			      SCALER5_DISPCTRL_DSPEIEOLN(1) |
			      SCALER5_DISPCTRL_DSPEIEOLN(2) |
			      SCALER5_DISPCTRL_DSPEISLUR(0) |
			      SCALER5_DISPCTRL_DSPEISLUR(1) |
			      SCALER5_DISPCTRL_DSPEISLUR(2) |
			      SCALER_DISPCTRL_SCLEIRQ);


	/* Set AXI panic mode.
	 * VC4 panics when < 2 lines in FIFO.
	 * VC5 panics when less than 1 line in the FIFO.
	 */
	dispctrl &= ~(SCALER_DISPCTRL_PANIC0_MASK |
		      SCALER_DISPCTRL_PANIC1_MASK |
		      SCALER_DISPCTRL_PANIC2_MASK);
	dispctrl |= VC4_SET_FIELD(2, SCALER_DISPCTRL_PANIC0);
	dispctrl |= VC4_SET_FIELD(2, SCALER_DISPCTRL_PANIC1);
	dispctrl |= VC4_SET_FIELD(2, SCALER_DISPCTRL_PANIC2);

	HVS_WRITE(SCALER_DISPCTRL, dispctrl);

	/* Recompute Composite Output Buffer (COB) allocations for the displays
	 */
	if (!vc4->is_vc5) {
		/* The COB is 20736 pixels, or just over 10 lines at 2048 wide.
		 * The bottom 2048 pixels are full 32bpp RGBA (intended for the
		 * TXP composing RGBA to memory), whilst the remainder are only
		 * 24bpp RGB.
		 *
		 * Assign 3 lines to channels 1 & 2, and just over 4 lines to
		 * channel 0.
		 */
		#define VC4_COB_SIZE		20736
		#define VC4_COB_LINE_WIDTH	2048
		#define VC4_COB_NUM_LINES	3
		reg = 0;
		top = VC4_COB_LINE_WIDTH * VC4_COB_NUM_LINES;
		reg |= (top - 1) << 16;
		HVS_WRITE(SCALER_DISPBASE2, reg);
		reg = top;
		top += VC4_COB_LINE_WIDTH * VC4_COB_NUM_LINES;
		reg |= (top - 1) << 16;
		HVS_WRITE(SCALER_DISPBASE1, reg);
		reg = top;
		top = VC4_COB_SIZE;
		reg |= (top - 1) << 16;
		HVS_WRITE(SCALER_DISPBASE0, reg);
	} else {
		/* The COB is 44416 pixels, or 10.8 lines at 4096 wide.
		 * The bottom 4096 pixels are full RGBA (intended for the TXP
		 * composing RGBA to memory), whilst the remainder are only
		 * RGB. Addressing is always pixel wide.
		 *
		 * Assign 3 lines of 4096 to channels 1 & 2, and just over 4
		 * lines. to channel 0.
		 */
		#define VC5_COB_SIZE		44416
		#define VC5_COB_LINE_WIDTH	4096
		#define VC5_COB_NUM_LINES	3
		reg = 0;
		top = VC5_COB_LINE_WIDTH * VC5_COB_NUM_LINES;
		reg |= top << 16;
		HVS_WRITE(SCALER_DISPBASE2, reg);
		top += 16;
		reg = top;
		top += VC5_COB_LINE_WIDTH * VC5_COB_NUM_LINES;
		reg |= top << 16;
		HVS_WRITE(SCALER_DISPBASE1, reg);
		top += 16;
		reg = top;
		top = VC5_COB_SIZE;
		reg |= top << 16;
		HVS_WRITE(SCALER_DISPBASE0, reg);
	}

	/* Request the IRQ last, once the HVS is fully set up. */
	ret = devm_request_irq(dev, platform_get_irq(pdev, 0),
			       vc4_hvs_irq_handler, 0, "vc4 hvs", drm);
	if (ret)
		return ret;

	return 0;
}
1229
1230 static void vc4_hvs_unbind(struct device *dev, struct device *master,
1231                            void *data)
1232 {
1233         struct drm_device *drm = dev_get_drvdata(master);
1234         struct vc4_dev *vc4 = to_vc4_dev(drm);
1235         struct vc4_hvs *hvs = vc4->hvs;
1236         struct drm_mm_node *node, *next;
1237
1238         if (drm_mm_node_allocated(&vc4->hvs->mitchell_netravali_filter))
1239                 drm_mm_remove_node(&vc4->hvs->mitchell_netravali_filter);
1240
1241         drm_mm_for_each_node_safe(node, next, &vc4->hvs->dlist_mm)
1242                 drm_mm_remove_node(node);
1243
1244         drm_mm_takedown(&vc4->hvs->dlist_mm);
1245
1246         drm_mm_for_each_node_safe(node, next, &vc4->hvs->lbm_mm)
1247                 drm_mm_remove_node(node);
1248         drm_mm_takedown(&vc4->hvs->lbm_mm);
1249
1250         clk_disable_unprepare(hvs->core_clk);
1251
1252         vc4->hvs = NULL;
1253 }
1254
/* Component callbacks tying the HVS to the top-level vc4 DRM device. */
static const struct component_ops vc4_hvs_ops = {
	.bind   = vc4_hvs_bind,
	.unbind = vc4_hvs_unbind,
};
1259
/* Register as a component; real setup happens in vc4_hvs_bind(). */
static int vc4_hvs_dev_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &vc4_hvs_ops);
}
1264
/* Unregister from the component framework; teardown is in unbind. */
static int vc4_hvs_dev_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &vc4_hvs_ops);
	return 0;
}
1270
/* HVS instances handled here: BCM2711 (VC5) and BCM2835 (VC4). */
static const struct of_device_id vc4_hvs_dt_match[] = {
	{ .compatible = "brcm,bcm2711-hvs" },
	{ .compatible = "brcm,bcm2835-hvs" },
	{}
};
1276
/* Platform driver glue; registered by the top-level vc4 driver. */
struct platform_driver vc4_hvs_driver = {
	.probe = vc4_hvs_dev_probe,
	.remove = vc4_hvs_dev_remove,
	.driver = {
		.name = "vc4_hvs",
		.of_match_table = vc4_hvs_dt_match,
	},
};