drivers/gpu/drm/msm/dp/dp_catalog.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5
6 #define pr_fmt(fmt)     "[drm-dp] %s: " fmt, __func__
7
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/display/drm_dp_helper.h>
14 #include <drm/drm_print.h>
15
16 #include "dp_catalog.h"
17 #include "dp_reg.h"
18
19 #define POLLING_SLEEP_US                        1000
20 #define POLLING_TIMEOUT_US                      10000
21
22 #define SCRAMBLER_RESET_COUNT_VALUE             0xFC
23
24 #define DP_INTERRUPT_STATUS_ACK_SHIFT   1
25 #define DP_INTERRUPT_STATUS_MASK_SHIFT  2
26
27 #define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
28
29 #define DP_INTERRUPT_STATUS1 \
30         (DP_INTR_AUX_XFER_DONE | \
31         DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
32         DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
33         DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
34         DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
35
36 #define DP_INTERRUPT_STATUS1_ACK \
37         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
38 #define DP_INTERRUPT_STATUS1_MASK \
39         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
40
41 #define DP_INTERRUPT_STATUS2 \
42         (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
43         DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
44
45 #define DP_INTERRUPT_STATUS2_ACK \
46         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
47 #define DP_INTERRUPT_STATUS2_MASK \
48         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
49
50 #define DP_INTERRUPT_STATUS4 \
51         (PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
52         PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
53
54 #define DP_INTERRUPT_MASK4 \
55         (PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
56         PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
57
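/*
 * Layout note (illustrative): for every event bit in REG_DP_INTR_STATUS and
 * REG_DP_INTR_STATUS2, the write-1-to-ack bit sits one position to the left
 * and the unmask bit two positions to the left, which is what the
 * DP_INTERRUPT_STATUS_ACK_SHIFT/_MASK_SHIFT values above encode. A minimal
 * sketch of acknowledging pending events while keeping the group unmasked:
 *
 *      u32 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
 *      u32 ack  = (intr & DP_INTERRUPT_STATUS1) << DP_INTERRUPT_STATUS_ACK_SHIFT;
 *
 *      dp_write_ahb(catalog, REG_DP_INTR_STATUS, ack | DP_INTERRUPT_STATUS1_MASK);
 *
 * dp_catalog_aux_get_irq() and dp_catalog_ctrl_get_interrupt() below follow
 * this pattern.
 */
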
58 struct dp_catalog_private {
59         struct device *dev;
60         struct drm_device *drm_dev;
61         struct dp_io *io;
62         u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
63         struct dp_catalog dp_catalog;
64         u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
65 };
66
67 void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
68 {
69         struct dp_catalog_private *catalog = container_of(dp_catalog,
70                         struct dp_catalog_private, dp_catalog);
71         struct dss_io_data *dss = &catalog->io->dp_controller;
72
73         msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
74         msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
75         msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
76         msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
77 }
78
79 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
80 {
81         return readl_relaxed(catalog->io->dp_controller.aux.base + offset);
82 }
83
84 static inline void dp_write_aux(struct dp_catalog_private *catalog,
85                                u32 offset, u32 data)
86 {
87         /*
88          * To make sure aux register writes happen before any other operation,
89          * this function uses writel() instead of writel_relaxed()
90          */
91         writel(data, catalog->io->dp_controller.aux.base + offset);
92 }
93
94 static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
95 {
96         return readl_relaxed(catalog->io->dp_controller.ahb.base + offset);
97 }
98
99 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
100                                u32 offset, u32 data)
101 {
102         /*
103          * To make sure phy register writes happen before any other operation,
104          * this function uses writel() instead of writel_relaxed()
105          */
106         writel(data, catalog->io->dp_controller.ahb.base + offset);
107 }
108
109 static inline void dp_write_p0(struct dp_catalog_private *catalog,
110                                u32 offset, u32 data)
111 {
112         /*
113          * To make sure interface register writes happen before any other
114          * operation, this function uses writel() instead of writel_relaxed()
115          */
116         writel(data, catalog->io->dp_controller.p0.base + offset);
117 }
118
119 static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
120                                u32 offset)
121 {
122         /*
123          * Plain interface register read; no ordering against other operations
124          * needs to be enforced here, so readl_relaxed() is sufficient
125          */
126         return readl_relaxed(catalog->io->dp_controller.p0.base + offset);
127 }
128
129 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
130 {
131         return readl_relaxed(catalog->io->dp_controller.link.base + offset);
132 }
133
134 static inline void dp_write_link(struct dp_catalog_private *catalog,
135                                u32 offset, u32 data)
136 {
137         /*
138          * To make sure link register writes happen before any other operation,
139          * this function uses writel() instead of writel_relaxed()
140          */
141         writel(data, catalog->io->dp_controller.link.base + offset);
142 }
143
144 /* aux related catalog functions */
145 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
146 {
147         struct dp_catalog_private *catalog = container_of(dp_catalog,
148                                 struct dp_catalog_private, dp_catalog);
149
150         return dp_read_aux(catalog, REG_DP_AUX_DATA);
151 }
152
153 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
154 {
155         struct dp_catalog_private *catalog = container_of(dp_catalog,
156                                 struct dp_catalog_private, dp_catalog);
157
158         dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
159         return 0;
160 }
161
162 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
163 {
164         struct dp_catalog_private *catalog = container_of(dp_catalog,
165                                 struct dp_catalog_private, dp_catalog);
166
167         dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
168         return 0;
169 }
170
171 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
172 {
173         u32 data;
174         struct dp_catalog_private *catalog = container_of(dp_catalog,
175                                 struct dp_catalog_private, dp_catalog);
176
177         if (read) {
178                 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
179                 data &= ~DP_AUX_TRANS_CTRL_GO;
180                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
181         } else {
182                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
183         }
184         return 0;
185 }
186
187 int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
188 {
189         struct dp_catalog_private *catalog = container_of(dp_catalog,
190                                 struct dp_catalog_private, dp_catalog);
191
192         dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
193         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
194         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
195         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
196         return 0;
197 }
198
199 /**
200  * dp_catalog_aux_reset() - reset AUX controller
201  *
202  * @dp_catalog: DP catalog structure
203  *
204  * Return: void
205  *
206  * This function resets the AUX controller
207  *
208  * NOTE: resetting the AUX controller will also clear any pending HPD related
209  * interrupts
210  */
211 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
212 {
213         u32 aux_ctrl;
214         struct dp_catalog_private *catalog = container_of(dp_catalog,
215                                 struct dp_catalog_private, dp_catalog);
216
217         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
218
219         aux_ctrl |= DP_AUX_CTRL_RESET;
220         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
221         usleep_range(1000, 1100); /* h/w recommended delay */
222
223         aux_ctrl &= ~DP_AUX_CTRL_RESET;
224         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
225 }
226
227 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
228 {
229         u32 aux_ctrl;
230         struct dp_catalog_private *catalog = container_of(dp_catalog,
231                                 struct dp_catalog_private, dp_catalog);
232
233         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
234
235         if (enable) {
236                 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
237                 dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
238                 aux_ctrl |= DP_AUX_CTRL_ENABLE;
239         } else {
240                 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
241         }
242
243         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
244 }
245
246 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
247 {
248         struct dp_catalog_private *catalog = container_of(dp_catalog,
249                                 struct dp_catalog_private, dp_catalog);
250         struct dp_io *dp_io = catalog->io;
251         struct phy *phy = dp_io->phy;
252
253         phy_calibrate(phy);
254 }
255
256 int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog)
257 {
258         u32 state;
259         struct dp_catalog_private *catalog = container_of(dp_catalog,
260                                 struct dp_catalog_private, dp_catalog);
261
262         /* poll for hpd connected status every 2ms and timeout after 500ms */
263         return readl_poll_timeout(catalog->io->dp_controller.aux.base +
264                                 REG_DP_DP_HPD_INT_STATUS,
265                                 state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
266                                 2000, 500000);
267 }
268
269 static void dump_regs(void __iomem *base, int len)
270 {
271         int i;
272         u32 x0, x4, x8, xc;
273         u32 addr_off = 0;
274
275         len = DIV_ROUND_UP(len, 16);
276         for (i = 0; i < len; i++) {
277                 x0 = readl_relaxed(base + addr_off);
278                 x4 = readl_relaxed(base + addr_off + 0x04);
279                 x8 = readl_relaxed(base + addr_off + 0x08);
280                 xc = readl_relaxed(base + addr_off + 0x0c);
281
282                 pr_info("%08x: %08x %08x %08x %08x\n", addr_off, x0, x4, x8, xc);
283                 addr_off += 16;
284         }
285 }
286
287 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
288 {
289         struct dp_catalog_private *catalog = container_of(dp_catalog,
290                 struct dp_catalog_private, dp_catalog);
291         struct dss_io_data *io = &catalog->io->dp_controller;
292
293         pr_info("AHB regs\n");
294         dump_regs(io->ahb.base, io->ahb.len);
295
296         pr_info("AUXCLK regs\n");
297         dump_regs(io->aux.base, io->aux.len);
298
299         pr_info("LCLK regs\n");
300         dump_regs(io->link.base, io->link.len);
301
302         pr_info("P0CLK regs\n");
303         dump_regs(io->p0.base, io->p0.len);
304 }
305
306 u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
307 {
308         struct dp_catalog_private *catalog = container_of(dp_catalog,
309                                 struct dp_catalog_private, dp_catalog);
310         u32 intr, intr_ack;
311
312         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
313         intr &= ~DP_INTERRUPT_STATUS1_MASK;
314         intr_ack = (intr & DP_INTERRUPT_STATUS1)
315                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
316         dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
317                         DP_INTERRUPT_STATUS1_MASK);
318
319         return intr;
320
321 }
322
323 /* controller related catalog functions */
324 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
325                                 u32 dp_tu, u32 valid_boundary,
326                                 u32 valid_boundary2)
327 {
328         struct dp_catalog_private *catalog = container_of(dp_catalog,
329                                 struct dp_catalog_private, dp_catalog);
330
331         dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
332         dp_write_link(catalog, REG_DP_TU, dp_tu);
333         dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
334 }
335
336 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
337 {
338         struct dp_catalog_private *catalog = container_of(dp_catalog,
339                                 struct dp_catalog_private, dp_catalog);
340
341         dp_write_link(catalog, REG_DP_STATE_CTRL, state);
342 }
343
344 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
345 {
346         struct dp_catalog_private *catalog = container_of(dp_catalog,
347                                 struct dp_catalog_private, dp_catalog);
348
349         drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);
350
351         dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
352 }
353
354 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
355 {
356         struct dp_catalog_private *catalog = container_of(dp_catalog,
357                                 struct dp_catalog_private, dp_catalog);
358         u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
359         u32 ln_mapping;
360
361         ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
362         ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
363         ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
364         ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
365
366         dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
367                         ln_mapping);
368 }
369
370 void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
371                                                 bool enable)
372 {
373         u32 val;
374         struct dp_catalog_private *catalog = container_of(dp_catalog,
375                                 struct dp_catalog_private, dp_catalog);
376
377         val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
378
379         if (enable)
380                 val |= DP_MAINLINK_CTRL_ENABLE;
381         else
382                 val &= ~DP_MAINLINK_CTRL_ENABLE;
383
384         dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
385 }
386
387 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
388                                                 bool enable)
389 {
390         u32 mainlink_ctrl;
391         struct dp_catalog_private *catalog = container_of(dp_catalog,
392                                 struct dp_catalog_private, dp_catalog);
393
394         drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
395         if (enable) {
396                 /*
397                  * To make sure link register writes happen before any other
398                  * operation, dp_write_link() uses writel()
399                  */
400                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
401
402                 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
403                                                 DP_MAINLINK_CTRL_ENABLE);
404                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
405
406                 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
407                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
408
409                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
410                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
411
412                 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
413                                         DP_MAINLINK_FB_BOUNDARY_SEL);
414                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
415         } else {
416                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
417                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
418                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
419         }
420 }
421
422 void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
423                                         u32 colorimetry_cfg,
424                                         u32 test_bits_depth)
425 {
426         u32 misc_val;
427         struct dp_catalog_private *catalog = container_of(dp_catalog,
428                                 struct dp_catalog_private, dp_catalog);
429
430         misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);
431
432         /* clear bpp bits */
433         misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
434         misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
435         misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
436         /* Configure clock to synchronous mode */
437         misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
438
439         drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
440         dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
441 }
442
443 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
444                                         u32 rate, u32 stream_rate_khz,
445                                         bool fixed_nvid)
446 {
447         u32 pixel_m, pixel_n;
448         u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
449         u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
450         u32 const link_rate_hbr2 = 540000;
451         u32 const link_rate_hbr3 = 810000;
452         unsigned long den, num;
453
454         struct dp_catalog_private *catalog = container_of(dp_catalog,
455                                 struct dp_catalog_private, dp_catalog);
456
457         if (rate == link_rate_hbr3)
458                 pixel_div = 6;
459         else if (rate == 162000 || rate == 270000)
460                 pixel_div = 2;
461         else if (rate == link_rate_hbr2)
462                 pixel_div = 4;
463         else
464                 DRM_ERROR("Invalid pixel mux divider\n");
465
466         dispcc_input_rate = (rate * 10) / pixel_div;
467
468         rational_best_approximation(dispcc_input_rate, stream_rate_khz,
469                         (unsigned long)(1 << 16) - 1,
470                         (unsigned long)(1 << 16) - 1, &den, &num);
471
472         den = ~(den - num);
473         den = den & 0xFFFF;
474         pixel_m = num;
475         pixel_n = den;
476
477         mvid = (pixel_m & 0xFFFF) * 5;
478         nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
479
480         if (nvid < nvid_fixed) {
481                 u32 temp;
482
483                 temp = (nvid_fixed / nvid) * nvid;
484                 mvid = (nvid_fixed / nvid) * mvid;
485                 nvid = temp;
486         }
487
488         if (link_rate_hbr2 == rate)
489                 nvid *= 2;
490
491         if (link_rate_hbr3 == rate)
492                 nvid *= 3;
493
494         drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
495         dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
496         dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
497         dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
498 }
499
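/*
 * Worked example with illustrative numbers (assuming DP_LINK_CONSTANT_N_VALUE
 * is 0x8000): an HBR link (rate = 270000) carrying a 148500 kHz stream gives
 * pixel_div = 2 and dispcc_input_rate = 1350000. The rational approximation
 * of 1350000/148500 is exactly 100/11, so mvid starts out as 5 * 11 = 55 and
 * nvid as 100. Both are then scaled by nvid_fixed / nvid (0x8000 / 100 = 327),
 * giving the programmed pair mvid = 17985 and nvid = 32700, which preserves
 * the 148500/270000 = 0.55 stream-to-link ratio written to
 * REG_DP_SOFTWARE_MVID/REG_DP_SOFTWARE_NVID.
 */
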
500 int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
501                                         u32 state_bit)
502 {
503         int bit, ret;
504         u32 data;
505         struct dp_catalog_private *catalog = container_of(dp_catalog,
506                                 struct dp_catalog_private, dp_catalog);
507
508         bit = BIT(state_bit - 1);
509         drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
510         dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
511
512         bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
513
514         /* Poll for mainlink ready status */
515         ret = readx_poll_timeout(readl, catalog->io->dp_controller.link.base +
516                                         REG_DP_MAINLINK_READY,
517                                         data, data & bit,
518                                         POLLING_SLEEP_US, POLLING_TIMEOUT_US);
519         if (ret < 0) {
520                 DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
521                 return ret;
522         }
523         return 0;
524 }
525
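/*
 * Illustrative call: dp_catalog_ctrl_set_pattern_state_bit(dp_catalog, 1)
 * writes BIT(0) (link training pattern 1) to REG_DP_STATE_CTRL and then polls
 * for BIT(0) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT in REG_DP_MAINLINK_READY,
 * giving up after POLLING_TIMEOUT_US microseconds.
 */
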
526 /**
527  * dp_catalog_hw_revision() - retrieve DP hw revision
528  *
529  * @dp_catalog: DP catalog structure
530  *
531  * Return: DP controller hw revision
532  *
533  */
534 u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
535 {
536         const struct dp_catalog_private *catalog = container_of(dp_catalog,
537                                 struct dp_catalog_private, dp_catalog);
538
539         return dp_read_ahb(catalog, REG_DP_HW_VERSION);
540 }
541
542 /**
543  * dp_catalog_ctrl_reset() - reset DP controller
544  *
545  * @dp_catalog: DP catalog structure
546  *
547  * Return: void
548  *
549  * This function resets the DP controller
550  *
551  * NOTE: resetting the DP controller will also clear any pending HPD related
552  * interrupts
553  */
554 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
555 {
556         u32 sw_reset;
557         struct dp_catalog_private *catalog = container_of(dp_catalog,
558                                 struct dp_catalog_private, dp_catalog);
559
560         sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
561
562         sw_reset |= DP_SW_RESET;
563         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
564         usleep_range(1000, 1100); /* h/w recommended delay */
565
566         sw_reset &= ~DP_SW_RESET;
567         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
568 }
569
570 bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
571 {
572         u32 data;
573         int ret;
574         struct dp_catalog_private *catalog = container_of(dp_catalog,
575                                 struct dp_catalog_private, dp_catalog);
576
577         /* Poll for mainlink ready status */
578         ret = readl_poll_timeout(catalog->io->dp_controller.link.base +
579                                 REG_DP_MAINLINK_READY,
580                                 data, data & DP_MAINLINK_READY_FOR_VIDEO,
581                                 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
582         if (ret < 0) {
583                 DRM_ERROR("mainlink not ready\n");
584                 return false;
585         }
586
587         return true;
588 }
589
590 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
591                                                 bool enable)
592 {
593         struct dp_catalog_private *catalog = container_of(dp_catalog,
594                                 struct dp_catalog_private, dp_catalog);
595
596         if (enable) {
597                 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
598                                 DP_INTERRUPT_STATUS1_MASK);
599                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
600                                 DP_INTERRUPT_STATUS2_MASK);
601         } else {
602                 dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
603                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
604         }
605 }
606
607 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
608                         u32 intr_mask, bool en)
609 {
610         struct dp_catalog_private *catalog = container_of(dp_catalog,
611                                 struct dp_catalog_private, dp_catalog);
612
613         u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
614
615         config = (en ? config | intr_mask : config & ~intr_mask);
616
617         drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
618                                         intr_mask, config);
619         dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
620                                 config & DP_DP_HPD_INT_MASK);
621 }
622
623 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
624 {
625         struct dp_catalog_private *catalog = container_of(dp_catalog,
626                                 struct dp_catalog_private, dp_catalog);
627
628         u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
629
630         /* Configure REFTIMER and enable it */
631         reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
632         dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
633
634         /* Enable HPD */
635         dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
636 }
637
638 static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
639 {
640         /* trigger sdp */
641         dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
642         dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
643 }
644
645 void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
646 {
647         struct dp_catalog_private *catalog = container_of(dp_catalog,
648                                 struct dp_catalog_private, dp_catalog);
649         u32 config;
650
651         /* enable PSR1 function */
652         config = dp_read_link(catalog, REG_PSR_CONFIG);
653         config |= PSR1_SUPPORTED;
654         dp_write_link(catalog, REG_PSR_CONFIG, config);
655
656         dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
657         dp_catalog_enable_sdp(catalog);
658 }
659
660 void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
661 {
662         struct dp_catalog_private *catalog = container_of(dp_catalog,
663                         struct dp_catalog_private, dp_catalog);
664         u32 cmd;
665
666         cmd = dp_read_link(catalog, REG_PSR_CMD);
667
668         cmd &= ~(PSR_ENTER | PSR_EXIT);
669
670         if (enter)
671                 cmd |= PSR_ENTER;
672         else
673                 cmd |= PSR_EXIT;
674
675         dp_catalog_enable_sdp(catalog);
676         dp_write_link(catalog, REG_PSR_CMD, cmd);
677 }
678
679 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
680 {
681         struct dp_catalog_private *catalog = container_of(dp_catalog,
682                                 struct dp_catalog_private, dp_catalog);
683         u32 status;
684
685         status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
686         drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
687         status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
688         status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
689
690         return status;
691 }
692
693 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
694 {
695         struct dp_catalog_private *catalog = container_of(dp_catalog,
696                                 struct dp_catalog_private, dp_catalog);
697         int isr, mask;
698
699         isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
700         dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
701                                  (isr & DP_DP_HPD_INT_MASK));
702         mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
703
704         /*
705          * We only want to return interrupts that are unmasked to the caller.
706          * However, the interrupt status field also contains other
707          * informational bits about the HPD state status, so we only mask
708          * out the part of the register that tells us about which interrupts
709          * are pending.
710          */
711         return isr & (mask | ~DP_DP_HPD_INT_MASK);
712 }
713
714 u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
715 {
716         struct dp_catalog_private *catalog = container_of(dp_catalog,
717                                 struct dp_catalog_private, dp_catalog);
718         u32 intr, intr_ack;
719
720         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
721         intr_ack = (intr & DP_INTERRUPT_STATUS4)
722                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
723         dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);
724
725         return intr;
726 }
727
728 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
729 {
730         struct dp_catalog_private *catalog = container_of(dp_catalog,
731                                 struct dp_catalog_private, dp_catalog);
732         u32 intr, intr_ack;
733
734         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
735         intr &= ~DP_INTERRUPT_STATUS2_MASK;
736         intr_ack = (intr & DP_INTERRUPT_STATUS2)
737                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
738         dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
739                         intr_ack | DP_INTERRUPT_STATUS2_MASK);
740
741         return intr;
742 }
743
744 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
745 {
746         struct dp_catalog_private *catalog = container_of(dp_catalog,
747                                 struct dp_catalog_private, dp_catalog);
748
749         dp_write_ahb(catalog, REG_DP_PHY_CTRL,
750                         DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
751         usleep_range(1000, 1100); /* h/w recommended delay */
752         dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
753 }
754
755 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
756                 u8 v_level, u8 p_level)
757 {
758         struct dp_catalog_private *catalog = container_of(dp_catalog,
759                                 struct dp_catalog_private, dp_catalog);
760         struct dp_io *dp_io = catalog->io;
761         struct phy *phy = dp_io->phy;
762         struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
763
764         /* TODO: Update for all lanes instead of just first one */
765         opts_dp->voltage[0] = v_level;
766         opts_dp->pre[0] = p_level;
767         opts_dp->set_voltages = 1;
768         phy_configure(phy, &dp_io->phy_opts);
769         opts_dp->set_voltages = 0;
770
771         return 0;
772 }
773
774 void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
775                         u32 pattern)
776 {
777         struct dp_catalog_private *catalog = container_of(dp_catalog,
778                                 struct dp_catalog_private, dp_catalog);
779         u32 value = 0x0;
780
781         /* Make sure to clear the current pattern before starting a new one */
782         dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
783
784         drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
785         switch (pattern) {
786         case DP_PHY_TEST_PATTERN_D10_2:
787                 dp_write_link(catalog, REG_DP_STATE_CTRL,
788                                 DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
789                 break;
790         case DP_PHY_TEST_PATTERN_ERROR_COUNT:
791                 value &= ~(1 << 16);
792                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
793                                         value);
794                 value |= SCRAMBLER_RESET_COUNT_VALUE;
795                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
796                                         value);
797                 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
798                                         DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
799                 dp_write_link(catalog, REG_DP_STATE_CTRL,
800                                         DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
801                 break;
802         case DP_PHY_TEST_PATTERN_PRBS7:
803                 dp_write_link(catalog, REG_DP_STATE_CTRL,
804                                 DP_STATE_CTRL_LINK_PRBS7);
805                 break;
806         case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
807                 dp_write_link(catalog, REG_DP_STATE_CTRL,
808                                 DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
809                 /* 00111110000011111000001111100000 */
810                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
811                                 0x3E0F83E0);
812                 /* 00001111100000111110000011111000 */
813                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
814                                 0x0F83E0F8);
815                 /* 1111100000111110 */
816                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
817                                 0x0000F83E);
818                 break;
819         case DP_PHY_TEST_PATTERN_CP2520:
820                 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
821                 value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
822                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
823
824                 value = DP_HBR2_ERM_PATTERN;
825                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
826                                 value);
827                 value |= SCRAMBLER_RESET_COUNT_VALUE;
828                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
829                                         value);
830                 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
831                                         DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
832                 dp_write_link(catalog, REG_DP_STATE_CTRL,
833                                         DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
834                 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
835                 value |= DP_MAINLINK_CTRL_ENABLE;
836                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
837                 break;
838         case DP_PHY_TEST_PATTERN_SEL_MASK:
839                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
840                                 DP_MAINLINK_CTRL_ENABLE);
841                 dp_write_link(catalog, REG_DP_STATE_CTRL,
842                                 DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
843                 break;
844         default:
845                 drm_dbg_dp(catalog->drm_dev,
846                                 "No valid test pattern requested: %#x\n", pattern);
847                 break;
848         }
849 }
850
851 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
852 {
853         struct dp_catalog_private *catalog = container_of(dp_catalog,
854                                 struct dp_catalog_private, dp_catalog);
855
856         return dp_read_link(catalog, REG_DP_MAINLINK_READY);
857 }
858
859 /* panel related catalog functions */
860 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
861 {
862         struct dp_catalog_private *catalog = container_of(dp_catalog,
863                                 struct dp_catalog_private, dp_catalog);
864         u32 reg;
865
866         dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
867                                 dp_catalog->total);
868         dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
869                                 dp_catalog->sync_start);
870         dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
871                                 dp_catalog->width_blanking);
872         dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
873
874         reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
875
876         if (dp_catalog->wide_bus_en)
877                 reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
878         else
879                 reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
880
881
882         DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
883
884         dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
885         return 0;
886 }
887
888 void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
889                                 struct drm_display_mode *drm_mode)
890 {
891         struct dp_catalog_private *catalog = container_of(dp_catalog,
892                                 struct dp_catalog_private, dp_catalog);
893         u32 hsync_period, vsync_period;
894         u32 display_v_start, display_v_end;
895         u32 hsync_start_x, hsync_end_x;
896         u32 v_sync_width;
897         u32 hsync_ctl;
898         u32 display_hctl;
899
900         /* TPG config parameters */
901         hsync_period = drm_mode->htotal;
902         vsync_period = drm_mode->vtotal;
903
904         display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
905                                         hsync_period);
906         display_v_end = ((vsync_period - (drm_mode->vsync_start -
907                                         drm_mode->vdisplay))
908                                         * hsync_period) - 1;
909
910         display_v_start += drm_mode->htotal - drm_mode->hsync_start;
911         display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
912
913         hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
914         hsync_end_x = hsync_period - (drm_mode->hsync_start -
915                                         drm_mode->hdisplay) - 1;
916
917         v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
918
919         hsync_ctl = (hsync_period << 16) |
920                         (drm_mode->hsync_end - drm_mode->hsync_start);
921         display_hctl = (hsync_end_x << 16) | hsync_start_x;
922
923
924         dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
925         dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
926         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
927                         hsync_period);
928         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
929                         hsync_period);
930         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
931         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
932         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
933         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
934         dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
935         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
936         dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
937         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
938         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
939         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
940         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
941         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
942         dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
943
944         dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
945                                 DP_TPG_CHECKERED_RECT_PATTERN);
946         dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
947                                 DP_TPG_VIDEO_CONFIG_BPP_8BIT |
948                                 DP_TPG_VIDEO_CONFIG_RGB);
949         dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
950                                 DP_BIST_ENABLE_DPBIST_EN);
951         dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
952                                 DP_TIMING_ENGINE_EN_EN);
953         drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
954 }
955
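/*
 * Worked example with illustrative CEA 1080p60 timings (htotal = 2200,
 * hsync_start = 2008, hdisplay = 1920, vtotal = 1125, vsync_start = 1084,
 * vsync_end = 1089, vdisplay = 1080): hsync_period = 2200, so
 * display_v_start = (1125 - 1084) * 2200 + (2200 - 2008) = 90392 and
 * display_v_end = (1125 - 4) * 2200 - 1 - 88 = 2466111, the values written
 * to MMSS_INTF_DISPLAY_V_START_F0 / MMSS_DP_INTF_DISPLAY_V_END_F0 above.
 */
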
956 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
957 {
958         struct dp_catalog_private *catalog = container_of(dp_catalog,
959                                 struct dp_catalog_private, dp_catalog);
960
961         dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
962         dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
963         dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
964 }
965
966 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
967 {
968         struct dp_catalog_private *catalog;
969
970         if (!io) {
971                 DRM_ERROR("invalid input\n");
972                 return ERR_PTR(-EINVAL);
973         }
974
975         catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
976         if (!catalog)
977                 return ERR_PTR(-ENOMEM);
978
979         catalog->dev = dev;
980         catalog->io = io;
981
982         return &catalog->dp_catalog;
983 }
984
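/*
 * Usage sketch (hypothetical caller, not part of this file): the DP driver's
 * probe path fills in a struct dp_io and then drives the hardware through the
 * helpers exported here, for example:
 *
 *      struct dp_catalog *dp_catalog = dp_catalog_get(dev, io);
 *
 *      if (IS_ERR(dp_catalog))
 *              return PTR_ERR(dp_catalog);
 *      dp_catalog_ctrl_enable_irq(dp_catalog, true);
 *
 * All per-instance state stays in the wrapping dp_catalog_private, which each
 * helper recovers with container_of().
 */
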
985 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
986 {
987         struct dp_catalog_private *catalog;
988         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
989         enum dp_catalog_audio_sdp_type sdp;
990         enum dp_catalog_audio_header_type header;
991
992         if (!dp_catalog)
993                 return;
994
995         catalog = container_of(dp_catalog,
996                 struct dp_catalog_private, dp_catalog);
997
998         sdp_map = catalog->audio_map;
999         sdp     = dp_catalog->sdp_type;
1000         header  = dp_catalog->sdp_header;
1001
1002         dp_catalog->audio_data = dp_read_link(catalog,
1003                         sdp_map[sdp][header]);
1004 }
1005
1006 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
1007 {
1008         struct dp_catalog_private *catalog;
1009         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1010         enum dp_catalog_audio_sdp_type sdp;
1011         enum dp_catalog_audio_header_type header;
1012         u32 data;
1013
1014         if (!dp_catalog)
1015                 return;
1016
1017         catalog = container_of(dp_catalog,
1018                 struct dp_catalog_private, dp_catalog);
1019
1020         sdp_map = catalog->audio_map;
1021         sdp     = dp_catalog->sdp_type;
1022         header  = dp_catalog->sdp_header;
1023         data    = dp_catalog->audio_data;
1024
1025         dp_write_link(catalog, sdp_map[sdp][header], data);
1026 }
1027
1028 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
1029 {
1030         struct dp_catalog_private *catalog;
1031         u32 acr_ctrl, select;
1032
1033         if (!dp_catalog)
1034                 return;
1035
1036         catalog = container_of(dp_catalog,
1037                 struct dp_catalog_private, dp_catalog);
1038
1039         select = dp_catalog->audio_data;
1040         acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1041
1042         drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1043                                         select, acr_ctrl);
1044
1045         dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1046 }
1047
1048 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
1049 {
1050         struct dp_catalog_private *catalog;
1051         bool enable;
1052         u32 audio_ctrl;
1053
1054         if (!dp_catalog)
1055                 return;
1056
1057         catalog = container_of(dp_catalog,
1058                 struct dp_catalog_private, dp_catalog);
1059
1060         enable = !!dp_catalog->audio_data;
1061         audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1062
1063         if (enable)
1064                 audio_ctrl |= BIT(0);
1065         else
1066                 audio_ctrl &= ~BIT(0);
1067
1068         drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1069
1070         dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1071         /* make sure audio engine is disabled */
1072         wmb();
1073 }
1074
1075 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1076 {
1077         struct dp_catalog_private *catalog;
1078         u32 sdp_cfg = 0;
1079         u32 sdp_cfg2 = 0;
1080
1081         if (!dp_catalog)
1082                 return;
1083
1084         catalog = container_of(dp_catalog,
1085                 struct dp_catalog_private, dp_catalog);
1086
1087         sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1088         /* AUDIO_TIMESTAMP_SDP_EN */
1089         sdp_cfg |= BIT(1);
1090         /* AUDIO_STREAM_SDP_EN */
1091         sdp_cfg |= BIT(2);
1092         /* AUDIO_COPY_MANAGEMENT_SDP_EN */
1093         sdp_cfg |= BIT(5);
1094         /* AUDIO_ISRC_SDP_EN  */
1095         sdp_cfg |= BIT(6);
1096         /* AUDIO_INFOFRAME_SDP_EN  */
1097         sdp_cfg |= BIT(20);
1098
1099         drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1100
1101         dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1102
1103         sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1104         /* IFRM_REGSRC -> Do not use reg values */
1105         sdp_cfg2 &= ~BIT(0);
1106         /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1107         sdp_cfg2 &= ~BIT(1);
1108
1109         drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1110
1111         dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1112 }
1113
1114 void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
1115 {
1116         struct dp_catalog_private *catalog;
1117
1118         static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
1119                 {
1120                         MMSS_DP_AUDIO_STREAM_0,
1121                         MMSS_DP_AUDIO_STREAM_1,
1122                         MMSS_DP_AUDIO_STREAM_1,
1123                 },
1124                 {
1125                         MMSS_DP_AUDIO_TIMESTAMP_0,
1126                         MMSS_DP_AUDIO_TIMESTAMP_1,
1127                         MMSS_DP_AUDIO_TIMESTAMP_1,
1128                 },
1129                 {
1130                         MMSS_DP_AUDIO_INFOFRAME_0,
1131                         MMSS_DP_AUDIO_INFOFRAME_1,
1132                         MMSS_DP_AUDIO_INFOFRAME_1,
1133                 },
1134                 {
1135                         MMSS_DP_AUDIO_COPYMANAGEMENT_0,
1136                         MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1137                         MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1138                 },
1139                 {
1140                         MMSS_DP_AUDIO_ISRC_0,
1141                         MMSS_DP_AUDIO_ISRC_1,
1142                         MMSS_DP_AUDIO_ISRC_1,
1143                 },
1144         };
1145
1146         if (!dp_catalog)
1147                 return;
1148
1149         catalog = container_of(dp_catalog,
1150                 struct dp_catalog_private, dp_catalog);
1151
1152         catalog->audio_map = sdp_map;
1153 }
1154
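/*
 * Example (illustrative): with the table above installed, programming the
 * second header word of the audio stream SDP means setting
 * dp_catalog->sdp_type to the stream row (index 0) and dp_catalog->sdp_header
 * to the second column (index 1) before calling dp_catalog_audio_set_header();
 * the write then lands on MMSS_DP_AUDIO_STREAM_1.
 */
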
1155 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1156 {
1157         struct dp_catalog_private *catalog;
1158         u32 mainlink_levels, safe_to_exit_level;
1159
1160         if (!dp_catalog)
1161                 return;
1162
1163         catalog = container_of(dp_catalog,
1164                 struct dp_catalog_private, dp_catalog);
1165
1166         safe_to_exit_level = dp_catalog->audio_data;
1167         mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1168         mainlink_levels &= 0xFE0;
1169         mainlink_levels |= safe_to_exit_level;
1170
1171         drm_dbg_dp(catalog->drm_dev,
1172                         "mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1173                          mainlink_levels, safe_to_exit_level);
1174
1175         dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1176 }