Merge tag 'backport/v3.14.24-ltsi-rc1/phy-rcar-gen2-usb-to-v3.15' into backport/v3...
[platform/adaptation/renesas_rcar/renesas_kernel.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
/* MMIO register-block offset for each of the six Evergreen display
 * controllers (CRTCs), indexed by CRTC id 0-5.  Per-CRTC registers are
 * accessed as base register + crtc_offsets[crtc]. */
38 static const u32 crtc_offsets[6] =
39 {
40         EVERGREEN_CRTC0_REGISTER_OFFSET,
41         EVERGREEN_CRTC1_REGISTER_OFFSET,
42         EVERGREEN_CRTC2_REGISTER_OFFSET,
43         EVERGREEN_CRTC3_REGISTER_OFFSET,
44         EVERGREEN_CRTC4_REGISTER_OFFSET,
45         EVERGREEN_CRTC5_REGISTER_OFFSET
46 };
47
48 #include "clearstate_evergreen.h"
49
/* List of register offsets (one u32 offset per entry) used for the RLC
 * save/restore list on Sumo-class parts.
 * NOTE(review): the meaning of each offset comes from the evergreend.h
 * register map and the RLC firmware contract, neither visible here —
 * entries are presumably GFX state registers the RLC must save/restore
 * around power gating; do not reorder or edit without that reference. */
50 static const u32 sumo_rlc_save_restore_register_list[] =
51 {
52         0x98fc,
53         0x9830,
54         0x9834,
55         0x9838,
56         0x9870,
57         0x9874,
58         0x8a14,
59         0x8b24,
60         0x8bcc,
61         0x8b10,
62         0x8d00,
63         0x8d04,
64         0x8c00,
65         0x8c04,
66         0x8c08,
67         0x8c0c,
68         0x8d8c,
69         0x8c20,
70         0x8c24,
71         0x8c28,
72         0x8c18,
73         0x8c1c,
74         0x8cf0,
75         0x8e2c,
76         0x8e38,
77         0x8c30,
78         0x9508,
79         0x9688,
80         0x9608,
81         0x960c,
82         0x9610,
83         0x9614,
84         0x88c4,
85         0x88d4,
86         0xa008,
87         0x900c,
88         0x9100,
89         0x913c,
90         0x98f8,
91         0x98f4,
92         0x9b7c,
93         0x3f8c,
94         0x8950,
95         0x8954,
96         0x8a18,
97         0x8b28,
98         0x9144,
99         0x9148,
100         0x914c,
101         0x3f90,
102         0x3f94,
103         0x915c,
104         0x9160,
105         0x9178,
106         0x917c,
107         0x9180,
108         0x918c,
109         0x9190,
110         0x9194,
111         0x9198,
112         0x919c,
113         0x91a8,
114         0x91ac,
115         0x91b0,
116         0x91b4,
117         0x91b8,
118         0x91c4,
119         0x91c8,
120         0x91cc,
121         0x91d0,
122         0x91d4,
123         0x91e0,
124         0x91e4,
125         0x91ec,
126         0x91f0,
127         0x91f4,
128         0x9200,
129         0x9204,
130         0x929c,
131         0x9150,
132         0x802c,
133 };
134
/* Forward declarations for functions defined later in this file. */
135 static void evergreen_gpu_init(struct radeon_device *rdev);
136 void evergreen_fini(struct radeon_device *rdev);
137 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
138 void evergreen_program_aspm(struct radeon_device *rdev);
/* Cross-ASIC helpers defined in other radeon translation units
 * (cayman, cik, si, rv770) that Evergreen code paths reuse. */
139 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140                                      int ring, u32 cp_int_cntl);
141 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142                                    u32 status, u32 addr);
143 void cik_init_cp_pg_table(struct radeon_device *rdev);
144
145 extern u32 si_get_csb_size(struct radeon_device *rdev);
146 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
147 extern u32 cik_get_csb_size(struct radeon_device *rdev);
148 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
149 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
150
/* "Golden" register settings for Evergreen-family chips.
 * Layout appears to be triples of {register offset, writable-bit mask,
 * value} — matches the three-values-per-line formatting; presumably
 * consumed by radeon_program_register_sequence(), verify against caller.
 * Values are vendor-recommended hardware magic; do not edit. */
151 static const u32 evergreen_golden_registers[] =
152 {
153         0x3f90, 0xffff0000, 0xff000000,
154         0x9148, 0xffff0000, 0xff000000,
155         0x3f94, 0xffff0000, 0xff000000,
156         0x914c, 0xffff0000, 0xff000000,
157         0x9b7c, 0xffffffff, 0x00000000,
158         0x8a14, 0xffffffff, 0x00000007,
159         0x8b10, 0xffffffff, 0x00000000,
160         0x960c, 0xffffffff, 0x54763210,
161         0x88c4, 0xffffffff, 0x000000c2,
162         0x88d4, 0xffffffff, 0x00000010,
163         0x8974, 0xffffffff, 0x00000000,
164         0xc78, 0x00000080, 0x00000080,
165         0x5eb4, 0xffffffff, 0x00000002,
166         0x5e78, 0xffffffff, 0x001000f0,
167         0x6104, 0x01000300, 0x00000000,
168         0x5bc0, 0x00300000, 0x00000000,
169         0x7030, 0xffffffff, 0x00000011,
170         0x7c30, 0xffffffff, 0x00000011,
171         0x10830, 0xffffffff, 0x00000011,
172         0x11430, 0xffffffff, 0x00000011,
173         0x12030, 0xffffffff, 0x00000011,
174         0x12c30, 0xffffffff, 0x00000011,
175         0xd02c, 0xffffffff, 0x08421000,
176         0x240c, 0xffffffff, 0x00000380,
177         0x8b24, 0xffffffff, 0x00ff0fff,
178         0x28a4c, 0x06000000, 0x06000000,
179         0x10c, 0x00000001, 0x00000001,
180         0x8d00, 0xffffffff, 0x100e4848,
181         0x8d04, 0xffffffff, 0x00164745,
182         0x8c00, 0xffffffff, 0xe4000003,
183         0x8c04, 0xffffffff, 0x40600060,
184         0x8c08, 0xffffffff, 0x001c001c,
185         0x8cf0, 0xffffffff, 0x08e00620,
186         0x8c20, 0xffffffff, 0x00800080,
187         0x8c24, 0xffffffff, 0x00800080,
188         0x8c18, 0xffffffff, 0x20202078,
189         0x8c1c, 0xffffffff, 0x00001010,
190         0x28350, 0xffffffff, 0x00000000,
191         0xa008, 0xffffffff, 0x00010000,
192         0x5c4, 0xffffffff, 0x00000001,
193         0x9508, 0xffffffff, 0x00000002,
194         0x913c, 0x0000000f, 0x0000000a
195 };
196
/* Second golden-register table for Evergreen, same apparent
 * {offset, mask, value} triple layout as evergreen_golden_registers.
 * All entries clear the full register to zero. */
197 static const u32 evergreen_golden_registers2[] =
198 {
199         0x2f4c, 0xffffffff, 0x00000000,
200         0x54f4, 0xffffffff, 0x00000000,
201         0x54f0, 0xffffffff, 0x00000000,
202         0x5498, 0xffffffff, 0x00000000,
203         0x549c, 0xffffffff, 0x00000000,
204         0x5494, 0xffffffff, 0x00000000,
205         0x53cc, 0xffffffff, 0x00000000,
206         0x53c8, 0xffffffff, 0x00000000,
207         0x53c4, 0xffffffff, 0x00000000,
208         0x53c0, 0xffffffff, 0x00000000,
209         0x53bc, 0xffffffff, 0x00000000,
210         0x53b8, 0xffffffff, 0x00000000,
211         0x53b4, 0xffffffff, 0x00000000,
212         0x53b0, 0xffffffff, 0x00000000
213 };
214
/* Medium-grain clock gating (MGCG) init sequence for Cypress.
 * Same apparent {offset, mask, value} triple layout as the golden
 * register tables.  Note the repeated writes to 0x802c interleaved in
 * the sequence — presumably a bank/index select that makes the
 * following writes land in different register banks, so ordering is
 * significant; do not reorder or dedupe entries. */
215 static const u32 cypress_mgcg_init[] =
216 {
217         0x802c, 0xffffffff, 0xc0000000,
218         0x5448, 0xffffffff, 0x00000100,
219         0x55e4, 0xffffffff, 0x00000100,
220         0x160c, 0xffffffff, 0x00000100,
221         0x5644, 0xffffffff, 0x00000100,
222         0xc164, 0xffffffff, 0x00000100,
223         0x8a18, 0xffffffff, 0x00000100,
224         0x897c, 0xffffffff, 0x06000100,
225         0x8b28, 0xffffffff, 0x00000100,
226         0x9144, 0xffffffff, 0x00000100,
227         0x9a60, 0xffffffff, 0x00000100,
228         0x9868, 0xffffffff, 0x00000100,
229         0x8d58, 0xffffffff, 0x00000100,
230         0x9510, 0xffffffff, 0x00000100,
231         0x949c, 0xffffffff, 0x00000100,
232         0x9654, 0xffffffff, 0x00000100,
233         0x9030, 0xffffffff, 0x00000100,
234         0x9034, 0xffffffff, 0x00000100,
235         0x9038, 0xffffffff, 0x00000100,
236         0x903c, 0xffffffff, 0x00000100,
237         0x9040, 0xffffffff, 0x00000100,
238         0xa200, 0xffffffff, 0x00000100,
239         0xa204, 0xffffffff, 0x00000100,
240         0xa208, 0xffffffff, 0x00000100,
241         0xa20c, 0xffffffff, 0x00000100,
242         0x971c, 0xffffffff, 0x00000100,
243         0x977c, 0xffffffff, 0x00000100,
244         0x3f80, 0xffffffff, 0x00000100,
245         0xa210, 0xffffffff, 0x00000100,
246         0xa214, 0xffffffff, 0x00000100,
247         0x4d8, 0xffffffff, 0x00000100,
248         0x9784, 0xffffffff, 0x00000100,
249         0x9698, 0xffffffff, 0x00000100,
250         0x4d4, 0xffffffff, 0x00000200,
251         0x30cc, 0xffffffff, 0x00000100,
252         0xd0c0, 0xffffffff, 0xff000100,
253         0x802c, 0xffffffff, 0x40000000,
254         0x915c, 0xffffffff, 0x00010000,
255         0x9160, 0xffffffff, 0x00030002,
256         0x9178, 0xffffffff, 0x00070000,
257         0x917c, 0xffffffff, 0x00030002,
258         0x9180, 0xffffffff, 0x00050004,
259         0x918c, 0xffffffff, 0x00010006,
260         0x9190, 0xffffffff, 0x00090008,
261         0x9194, 0xffffffff, 0x00070000,
262         0x9198, 0xffffffff, 0x00030002,
263         0x919c, 0xffffffff, 0x00050004,
264         0x91a8, 0xffffffff, 0x00010006,
265         0x91ac, 0xffffffff, 0x00090008,
266         0x91b0, 0xffffffff, 0x00070000,
267         0x91b4, 0xffffffff, 0x00030002,
268         0x91b8, 0xffffffff, 0x00050004,
269         0x91c4, 0xffffffff, 0x00010006,
270         0x91c8, 0xffffffff, 0x00090008,
271         0x91cc, 0xffffffff, 0x00070000,
272         0x91d0, 0xffffffff, 0x00030002,
273         0x91d4, 0xffffffff, 0x00050004,
274         0x91e0, 0xffffffff, 0x00010006,
275         0x91e4, 0xffffffff, 0x00090008,
276         0x91e8, 0xffffffff, 0x00000000,
277         0x91ec, 0xffffffff, 0x00070000,
278         0x91f0, 0xffffffff, 0x00030002,
279         0x91f4, 0xffffffff, 0x00050004,
280         0x9200, 0xffffffff, 0x00010006,
281         0x9204, 0xffffffff, 0x00090008,
282         0x9208, 0xffffffff, 0x00070000,
283         0x920c, 0xffffffff, 0x00030002,
284         0x9210, 0xffffffff, 0x00050004,
285         0x921c, 0xffffffff, 0x00010006,
286         0x9220, 0xffffffff, 0x00090008,
287         0x9224, 0xffffffff, 0x00070000,
288         0x9228, 0xffffffff, 0x00030002,
289         0x922c, 0xffffffff, 0x00050004,
290         0x9238, 0xffffffff, 0x00010006,
291         0x923c, 0xffffffff, 0x00090008,
292         0x9240, 0xffffffff, 0x00070000,
293         0x9244, 0xffffffff, 0x00030002,
294         0x9248, 0xffffffff, 0x00050004,
295         0x9254, 0xffffffff, 0x00010006,
296         0x9258, 0xffffffff, 0x00090008,
297         0x925c, 0xffffffff, 0x00070000,
298         0x9260, 0xffffffff, 0x00030002,
299         0x9264, 0xffffffff, 0x00050004,
300         0x9270, 0xffffffff, 0x00010006,
301         0x9274, 0xffffffff, 0x00090008,
302         0x9278, 0xffffffff, 0x00070000,
303         0x927c, 0xffffffff, 0x00030002,
304         0x9280, 0xffffffff, 0x00050004,
305         0x928c, 0xffffffff, 0x00010006,
306         0x9290, 0xffffffff, 0x00090008,
307         0x9294, 0xffffffff, 0x00000000,
308         0x929c, 0xffffffff, 0x00000001,
309         0x802c, 0xffffffff, 0x40010000,
310         0x915c, 0xffffffff, 0x00010000,
311         0x9160, 0xffffffff, 0x00030002,
312         0x9178, 0xffffffff, 0x00070000,
313         0x917c, 0xffffffff, 0x00030002,
314         0x9180, 0xffffffff, 0x00050004,
315         0x918c, 0xffffffff, 0x00010006,
316         0x9190, 0xffffffff, 0x00090008,
317         0x9194, 0xffffffff, 0x00070000,
318         0x9198, 0xffffffff, 0x00030002,
319         0x919c, 0xffffffff, 0x00050004,
320         0x91a8, 0xffffffff, 0x00010006,
321         0x91ac, 0xffffffff, 0x00090008,
322         0x91b0, 0xffffffff, 0x00070000,
323         0x91b4, 0xffffffff, 0x00030002,
324         0x91b8, 0xffffffff, 0x00050004,
325         0x91c4, 0xffffffff, 0x00010006,
326         0x91c8, 0xffffffff, 0x00090008,
327         0x91cc, 0xffffffff, 0x00070000,
328         0x91d0, 0xffffffff, 0x00030002,
329         0x91d4, 0xffffffff, 0x00050004,
330         0x91e0, 0xffffffff, 0x00010006,
331         0x91e4, 0xffffffff, 0x00090008,
332         0x91e8, 0xffffffff, 0x00000000,
333         0x91ec, 0xffffffff, 0x00070000,
334         0x91f0, 0xffffffff, 0x00030002,
335         0x91f4, 0xffffffff, 0x00050004,
336         0x9200, 0xffffffff, 0x00010006,
337         0x9204, 0xffffffff, 0x00090008,
338         0x9208, 0xffffffff, 0x00070000,
339         0x920c, 0xffffffff, 0x00030002,
340         0x9210, 0xffffffff, 0x00050004,
341         0x921c, 0xffffffff, 0x00010006,
342         0x9220, 0xffffffff, 0x00090008,
343         0x9224, 0xffffffff, 0x00070000,
344         0x9228, 0xffffffff, 0x00030002,
345         0x922c, 0xffffffff, 0x00050004,
346         0x9238, 0xffffffff, 0x00010006,
347         0x923c, 0xffffffff, 0x00090008,
348         0x9240, 0xffffffff, 0x00070000,
349         0x9244, 0xffffffff, 0x00030002,
350         0x9248, 0xffffffff, 0x00050004,
351         0x9254, 0xffffffff, 0x00010006,
352         0x9258, 0xffffffff, 0x00090008,
353         0x925c, 0xffffffff, 0x00070000,
354         0x9260, 0xffffffff, 0x00030002,
355         0x9264, 0xffffffff, 0x00050004,
356         0x9270, 0xffffffff, 0x00010006,
357         0x9274, 0xffffffff, 0x00090008,
358         0x9278, 0xffffffff, 0x00070000,
359         0x927c, 0xffffffff, 0x00030002,
360         0x9280, 0xffffffff, 0x00050004,
361         0x928c, 0xffffffff, 0x00010006,
362         0x9290, 0xffffffff, 0x00090008,
363         0x9294, 0xffffffff, 0x00000000,
364         0x929c, 0xffffffff, 0x00000001,
365         0x802c, 0xffffffff, 0xc0000000
366 };
367
/* MGCG init sequence for Redwood — shorter variant of the Cypress
 * table (fewer per-bank entries); same apparent {offset, mask, value}
 * triple layout.  Ordering around the 0x802c writes is significant. */
368 static const u32 redwood_mgcg_init[] =
369 {
370         0x802c, 0xffffffff, 0xc0000000,
371         0x5448, 0xffffffff, 0x00000100,
372         0x55e4, 0xffffffff, 0x00000100,
373         0x160c, 0xffffffff, 0x00000100,
374         0x5644, 0xffffffff, 0x00000100,
375         0xc164, 0xffffffff, 0x00000100,
376         0x8a18, 0xffffffff, 0x00000100,
377         0x897c, 0xffffffff, 0x06000100,
378         0x8b28, 0xffffffff, 0x00000100,
379         0x9144, 0xffffffff, 0x00000100,
380         0x9a60, 0xffffffff, 0x00000100,
381         0x9868, 0xffffffff, 0x00000100,
382         0x8d58, 0xffffffff, 0x00000100,
383         0x9510, 0xffffffff, 0x00000100,
384         0x949c, 0xffffffff, 0x00000100,
385         0x9654, 0xffffffff, 0x00000100,
386         0x9030, 0xffffffff, 0x00000100,
387         0x9034, 0xffffffff, 0x00000100,
388         0x9038, 0xffffffff, 0x00000100,
389         0x903c, 0xffffffff, 0x00000100,
390         0x9040, 0xffffffff, 0x00000100,
391         0xa200, 0xffffffff, 0x00000100,
392         0xa204, 0xffffffff, 0x00000100,
393         0xa208, 0xffffffff, 0x00000100,
394         0xa20c, 0xffffffff, 0x00000100,
395         0x971c, 0xffffffff, 0x00000100,
396         0x977c, 0xffffffff, 0x00000100,
397         0x3f80, 0xffffffff, 0x00000100,
398         0xa210, 0xffffffff, 0x00000100,
399         0xa214, 0xffffffff, 0x00000100,
400         0x4d8, 0xffffffff, 0x00000100,
401         0x9784, 0xffffffff, 0x00000100,
402         0x9698, 0xffffffff, 0x00000100,
403         0x4d4, 0xffffffff, 0x00000200,
404         0x30cc, 0xffffffff, 0x00000100,
405         0xd0c0, 0xffffffff, 0xff000100,
406         0x802c, 0xffffffff, 0x40000000,
407         0x915c, 0xffffffff, 0x00010000,
408         0x9160, 0xffffffff, 0x00030002,
409         0x9178, 0xffffffff, 0x00070000,
410         0x917c, 0xffffffff, 0x00030002,
411         0x9180, 0xffffffff, 0x00050004,
412         0x918c, 0xffffffff, 0x00010006,
413         0x9190, 0xffffffff, 0x00090008,
414         0x9194, 0xffffffff, 0x00070000,
415         0x9198, 0xffffffff, 0x00030002,
416         0x919c, 0xffffffff, 0x00050004,
417         0x91a8, 0xffffffff, 0x00010006,
418         0x91ac, 0xffffffff, 0x00090008,
419         0x91b0, 0xffffffff, 0x00070000,
420         0x91b4, 0xffffffff, 0x00030002,
421         0x91b8, 0xffffffff, 0x00050004,
422         0x91c4, 0xffffffff, 0x00010006,
423         0x91c8, 0xffffffff, 0x00090008,
424         0x91cc, 0xffffffff, 0x00070000,
425         0x91d0, 0xffffffff, 0x00030002,
426         0x91d4, 0xffffffff, 0x00050004,
427         0x91e0, 0xffffffff, 0x00010006,
428         0x91e4, 0xffffffff, 0x00090008,
429         0x91e8, 0xffffffff, 0x00000000,
430         0x91ec, 0xffffffff, 0x00070000,
431         0x91f0, 0xffffffff, 0x00030002,
432         0x91f4, 0xffffffff, 0x00050004,
433         0x9200, 0xffffffff, 0x00010006,
434         0x9204, 0xffffffff, 0x00090008,
435         0x9294, 0xffffffff, 0x00000000,
436         0x929c, 0xffffffff, 0x00000001,
437         0x802c, 0xffffffff, 0xc0000000
438 };
439
/* Golden register settings for Cedar; same apparent {offset, mask,
 * value} triple layout.  Differs from evergreen_golden_registers in a
 * few entries (e.g. 0x88d4, 0x8cf0) and omits two display-block
 * entries, consistent with Cedar's smaller configuration. */
440 static const u32 cedar_golden_registers[] =
441 {
442         0x3f90, 0xffff0000, 0xff000000,
443         0x9148, 0xffff0000, 0xff000000,
444         0x3f94, 0xffff0000, 0xff000000,
445         0x914c, 0xffff0000, 0xff000000,
446         0x9b7c, 0xffffffff, 0x00000000,
447         0x8a14, 0xffffffff, 0x00000007,
448         0x8b10, 0xffffffff, 0x00000000,
449         0x960c, 0xffffffff, 0x54763210,
450         0x88c4, 0xffffffff, 0x000000c2,
451         0x88d4, 0xffffffff, 0x00000000,
452         0x8974, 0xffffffff, 0x00000000,
453         0xc78, 0x00000080, 0x00000080,
454         0x5eb4, 0xffffffff, 0x00000002,
455         0x5e78, 0xffffffff, 0x001000f0,
456         0x6104, 0x01000300, 0x00000000,
457         0x5bc0, 0x00300000, 0x00000000,
458         0x7030, 0xffffffff, 0x00000011,
459         0x7c30, 0xffffffff, 0x00000011,
460         0x10830, 0xffffffff, 0x00000011,
461         0x11430, 0xffffffff, 0x00000011,
462         0xd02c, 0xffffffff, 0x08421000,
463         0x240c, 0xffffffff, 0x00000380,
464         0x8b24, 0xffffffff, 0x00ff0fff,
465         0x28a4c, 0x06000000, 0x06000000,
466         0x10c, 0x00000001, 0x00000001,
467         0x8d00, 0xffffffff, 0x100e4848,
468         0x8d04, 0xffffffff, 0x00164745,
469         0x8c00, 0xffffffff, 0xe4000003,
470         0x8c04, 0xffffffff, 0x40600060,
471         0x8c08, 0xffffffff, 0x001c001c,
472         0x8cf0, 0xffffffff, 0x08e00410,
473         0x8c20, 0xffffffff, 0x00800080,
474         0x8c24, 0xffffffff, 0x00800080,
475         0x8c18, 0xffffffff, 0x20202078,
476         0x8c1c, 0xffffffff, 0x00001010,
477         0x28350, 0xffffffff, 0x00000000,
478         0xa008, 0xffffffff, 0x00010000,
479         0x5c4, 0xffffffff, 0x00000001,
480         0x9508, 0xffffffff, 0x00000002
481 };
482
/* MGCG init sequence for Cedar; same apparent {offset, mask, value}
 * triple layout.  Ordering around the 0x802c writes is significant. */
483 static const u32 cedar_mgcg_init[] =
484 {
485         0x802c, 0xffffffff, 0xc0000000,
486         0x5448, 0xffffffff, 0x00000100,
487         0x55e4, 0xffffffff, 0x00000100,
488         0x160c, 0xffffffff, 0x00000100,
489         0x5644, 0xffffffff, 0x00000100,
490         0xc164, 0xffffffff, 0x00000100,
491         0x8a18, 0xffffffff, 0x00000100,
492         0x897c, 0xffffffff, 0x06000100,
493         0x8b28, 0xffffffff, 0x00000100,
494         0x9144, 0xffffffff, 0x00000100,
495         0x9a60, 0xffffffff, 0x00000100,
496         0x9868, 0xffffffff, 0x00000100,
497         0x8d58, 0xffffffff, 0x00000100,
498         0x9510, 0xffffffff, 0x00000100,
499         0x949c, 0xffffffff, 0x00000100,
500         0x9654, 0xffffffff, 0x00000100,
501         0x9030, 0xffffffff, 0x00000100,
502         0x9034, 0xffffffff, 0x00000100,
503         0x9038, 0xffffffff, 0x00000100,
504         0x903c, 0xffffffff, 0x00000100,
505         0x9040, 0xffffffff, 0x00000100,
506         0xa200, 0xffffffff, 0x00000100,
507         0xa204, 0xffffffff, 0x00000100,
508         0xa208, 0xffffffff, 0x00000100,
509         0xa20c, 0xffffffff, 0x00000100,
510         0x971c, 0xffffffff, 0x00000100,
511         0x977c, 0xffffffff, 0x00000100,
512         0x3f80, 0xffffffff, 0x00000100,
513         0xa210, 0xffffffff, 0x00000100,
514         0xa214, 0xffffffff, 0x00000100,
515         0x4d8, 0xffffffff, 0x00000100,
516         0x9784, 0xffffffff, 0x00000100,
517         0x9698, 0xffffffff, 0x00000100,
518         0x4d4, 0xffffffff, 0x00000200,
519         0x30cc, 0xffffffff, 0x00000100,
520         0xd0c0, 0xffffffff, 0xff000100,
521         0x802c, 0xffffffff, 0x40000000,
522         0x915c, 0xffffffff, 0x00010000,
523         0x9178, 0xffffffff, 0x00050000,
524         0x917c, 0xffffffff, 0x00030002,
525         0x918c, 0xffffffff, 0x00010004,
526         0x9190, 0xffffffff, 0x00070006,
527         0x9194, 0xffffffff, 0x00050000,
528         0x9198, 0xffffffff, 0x00030002,
529         0x91a8, 0xffffffff, 0x00010004,
530         0x91ac, 0xffffffff, 0x00070006,
531         0x91e8, 0xffffffff, 0x00000000,
532         0x9294, 0xffffffff, 0x00000000,
533         0x929c, 0xffffffff, 0x00000001,
534         0x802c, 0xffffffff, 0xc0000000
535 };
536
/* MGCG init sequence for Juniper; same apparent {offset, mask, value}
 * triple layout.  Unlike the Cypress/Redwood tables, several 0x00000100
 * enable writes (0x977c, 0x3f80, 0xa210 ...) appear after the banked
 * section rather than before it — preserve the ordering as-is. */
537 static const u32 juniper_mgcg_init[] =
538 {
539         0x802c, 0xffffffff, 0xc0000000,
540         0x5448, 0xffffffff, 0x00000100,
541         0x55e4, 0xffffffff, 0x00000100,
542         0x160c, 0xffffffff, 0x00000100,
543         0x5644, 0xffffffff, 0x00000100,
544         0xc164, 0xffffffff, 0x00000100,
545         0x8a18, 0xffffffff, 0x00000100,
546         0x897c, 0xffffffff, 0x06000100,
547         0x8b28, 0xffffffff, 0x00000100,
548         0x9144, 0xffffffff, 0x00000100,
549         0x9a60, 0xffffffff, 0x00000100,
550         0x9868, 0xffffffff, 0x00000100,
551         0x8d58, 0xffffffff, 0x00000100,
552         0x9510, 0xffffffff, 0x00000100,
553         0x949c, 0xffffffff, 0x00000100,
554         0x9654, 0xffffffff, 0x00000100,
555         0x9030, 0xffffffff, 0x00000100,
556         0x9034, 0xffffffff, 0x00000100,
557         0x9038, 0xffffffff, 0x00000100,
558         0x903c, 0xffffffff, 0x00000100,
559         0x9040, 0xffffffff, 0x00000100,
560         0xa200, 0xffffffff, 0x00000100,
561         0xa204, 0xffffffff, 0x00000100,
562         0xa208, 0xffffffff, 0x00000100,
563         0xa20c, 0xffffffff, 0x00000100,
564         0x971c, 0xffffffff, 0x00000100,
565         0xd0c0, 0xffffffff, 0xff000100,
566         0x802c, 0xffffffff, 0x40000000,
567         0x915c, 0xffffffff, 0x00010000,
568         0x9160, 0xffffffff, 0x00030002,
569         0x9178, 0xffffffff, 0x00070000,
570         0x917c, 0xffffffff, 0x00030002,
571         0x9180, 0xffffffff, 0x00050004,
572         0x918c, 0xffffffff, 0x00010006,
573         0x9190, 0xffffffff, 0x00090008,
574         0x9194, 0xffffffff, 0x00070000,
575         0x9198, 0xffffffff, 0x00030002,
576         0x919c, 0xffffffff, 0x00050004,
577         0x91a8, 0xffffffff, 0x00010006,
578         0x91ac, 0xffffffff, 0x00090008,
579         0x91b0, 0xffffffff, 0x00070000,
580         0x91b4, 0xffffffff, 0x00030002,
581         0x91b8, 0xffffffff, 0x00050004,
582         0x91c4, 0xffffffff, 0x00010006,
583         0x91c8, 0xffffffff, 0x00090008,
584         0x91cc, 0xffffffff, 0x00070000,
585         0x91d0, 0xffffffff, 0x00030002,
586         0x91d4, 0xffffffff, 0x00050004,
587         0x91e0, 0xffffffff, 0x00010006,
588         0x91e4, 0xffffffff, 0x00090008,
589         0x91e8, 0xffffffff, 0x00000000,
590         0x91ec, 0xffffffff, 0x00070000,
591         0x91f0, 0xffffffff, 0x00030002,
592         0x91f4, 0xffffffff, 0x00050004,
593         0x9200, 0xffffffff, 0x00010006,
594         0x9204, 0xffffffff, 0x00090008,
595         0x9208, 0xffffffff, 0x00070000,
596         0x920c, 0xffffffff, 0x00030002,
597         0x9210, 0xffffffff, 0x00050004,
598         0x921c, 0xffffffff, 0x00010006,
599         0x9220, 0xffffffff, 0x00090008,
600         0x9224, 0xffffffff, 0x00070000,
601         0x9228, 0xffffffff, 0x00030002,
602         0x922c, 0xffffffff, 0x00050004,
603         0x9238, 0xffffffff, 0x00010006,
604         0x923c, 0xffffffff, 0x00090008,
605         0x9240, 0xffffffff, 0x00070000,
606         0x9244, 0xffffffff, 0x00030002,
607         0x9248, 0xffffffff, 0x00050004,
608         0x9254, 0xffffffff, 0x00010006,
609         0x9258, 0xffffffff, 0x00090008,
610         0x925c, 0xffffffff, 0x00070000,
611         0x9260, 0xffffffff, 0x00030002,
612         0x9264, 0xffffffff, 0x00050004,
613         0x9270, 0xffffffff, 0x00010006,
614         0x9274, 0xffffffff, 0x00090008,
615         0x9278, 0xffffffff, 0x00070000,
616         0x927c, 0xffffffff, 0x00030002,
617         0x9280, 0xffffffff, 0x00050004,
618         0x928c, 0xffffffff, 0x00010006,
619         0x9290, 0xffffffff, 0x00090008,
620         0x9294, 0xffffffff, 0x00000000,
621         0x929c, 0xffffffff, 0x00000001,
622         0x802c, 0xffffffff, 0xc0000000,
623         0x977c, 0xffffffff, 0x00000100,
624         0x3f80, 0xffffffff, 0x00000100,
625         0xa210, 0xffffffff, 0x00000100,
626         0xa214, 0xffffffff, 0x00000100,
627         0x4d8, 0xffffffff, 0x00000100,
628         0x9784, 0xffffffff, 0x00000100,
629         0x9698, 0xffffffff, 0x00000100,
630         0x4d4, 0xffffffff, 0x00000200,
631         0x30cc, 0xffffffff, 0x00000100,
632         0x802c, 0xffffffff, 0xc0000000
633 };
634
/* Golden register settings for SuperSumo; same apparent {offset, mask,
 * value} triple layout.  Vendor-recommended hardware magic — do not
 * edit or reorder without the register reference. */
635 static const u32 supersumo_golden_registers[] =
636 {
637         0x5eb4, 0xffffffff, 0x00000002,
638         0x5c4, 0xffffffff, 0x00000001,
639         0x7030, 0xffffffff, 0x00000011,
640         0x7c30, 0xffffffff, 0x00000011,
641         0x6104, 0x01000300, 0x00000000,
642         0x5bc0, 0x00300000, 0x00000000,
643         0x8c04, 0xffffffff, 0x40600060,
644         0x8c08, 0xffffffff, 0x001c001c,
645         0x8c20, 0xffffffff, 0x00800080,
646         0x8c24, 0xffffffff, 0x00800080,
647         0x8c18, 0xffffffff, 0x20202078,
648         0x8c1c, 0xffffffff, 0x00001010,
649         0x918c, 0xffffffff, 0x00010006,
650         0x91a8, 0xffffffff, 0x00010006,
651         0x91c4, 0xffffffff, 0x00010006,
652         0x91e0, 0xffffffff, 0x00010006,
653         0x9200, 0xffffffff, 0x00010006,
654         0x9150, 0xffffffff, 0x6e944040,
655         0x917c, 0xffffffff, 0x00030002,
656         0x9180, 0xffffffff, 0x00050004,
657         0x9198, 0xffffffff, 0x00030002,
658         0x919c, 0xffffffff, 0x00050004,
659         0x91b4, 0xffffffff, 0x00030002,
660         0x91b8, 0xffffffff, 0x00050004,
661         0x91d0, 0xffffffff, 0x00030002,
662         0x91d4, 0xffffffff, 0x00050004,
663         0x91f0, 0xffffffff, 0x00030002,
664         0x91f4, 0xffffffff, 0x00050004,
665         0x915c, 0xffffffff, 0x00010000,
666         0x9160, 0xffffffff, 0x00030002,
667         0x3f90, 0xffff0000, 0xff000000,
668         0x9178, 0xffffffff, 0x00070000,
669         0x9194, 0xffffffff, 0x00070000,
670         0x91b0, 0xffffffff, 0x00070000,
671         0x91cc, 0xffffffff, 0x00070000,
672         0x91ec, 0xffffffff, 0x00070000,
673         0x9148, 0xffff0000, 0xff000000,
674         0x9190, 0xffffffff, 0x00090008,
675         0x91ac, 0xffffffff, 0x00090008,
676         0x91c8, 0xffffffff, 0x00090008,
677         0x91e4, 0xffffffff, 0x00090008,
678         0x9204, 0xffffffff, 0x00090008,
679         0x3f94, 0xffff0000, 0xff000000,
680         0x914c, 0xffff0000, 0xff000000,
681         0x929c, 0xffffffff, 0x00000001,
682         0x8a18, 0xffffffff, 0x00000100,
683         0x8b28, 0xffffffff, 0x00000100,
684         0x9144, 0xffffffff, 0x00000100,
685         0x5644, 0xffffffff, 0x00000100,
686         0x9b7c, 0xffffffff, 0x00000000,
687         0x8030, 0xffffffff, 0x0000100a,
688         0x8a14, 0xffffffff, 0x00000007,
689         0x8b24, 0xffffffff, 0x00ff0fff,
690         0x8b10, 0xffffffff, 0x00000000,
691         0x28a4c, 0x06000000, 0x06000000,
692         0x4d8, 0xffffffff, 0x00000100,
693         0x913c, 0xffff000f, 0x0100000a,
694         0x960c, 0xffffffff, 0x54763210,
695         0x88c4, 0xffffffff, 0x000000c2,
696         0x88d4, 0xffffffff, 0x00000010,
697         0x8974, 0xffffffff, 0x00000000,
698         0xc78, 0x00000080, 0x00000080,
699         0x5e78, 0xffffffff, 0x001000f0,
700         0xd02c, 0xffffffff, 0x08421000,
701         0xa008, 0xffffffff, 0x00010000,
702         0x8d00, 0xffffffff, 0x100e4848,
703         0x8d04, 0xffffffff, 0x00164745,
704         0x8c00, 0xffffffff, 0xe4000003,
705         0x8cf0, 0x1fffffff, 0x08e00620,
706         0x28350, 0xffffffff, 0x00000000,
707         0x9508, 0xffffffff, 0x00000002
708 };
709
/* Additional golden register settings applied on Sumo, on top of the
 * SuperSumo table; same apparent {offset, mask, value} triple layout. */
710 static const u32 sumo_golden_registers[] =
711 {
712         0x900c, 0x00ffffff, 0x0017071f,
713         0x8c18, 0xffffffff, 0x10101060,
714         0x8c1c, 0xffffffff, 0x00001010,
715         0x8c30, 0x0000000f, 0x00000005,
716         0x9688, 0x0000000f, 0x00000007
717 };
718
/* Golden register settings for Wrestler; same apparent {offset, mask,
 * value} triple layout.  Reduced variant of the SuperSumo table (fewer
 * pipe entries, e.g. 0x8a14 is 1 here vs 7 on SuperSumo). */
719 static const u32 wrestler_golden_registers[] =
720 {
721         0x5eb4, 0xffffffff, 0x00000002,
722         0x5c4, 0xffffffff, 0x00000001,
723         0x7030, 0xffffffff, 0x00000011,
724         0x7c30, 0xffffffff, 0x00000011,
725         0x6104, 0x01000300, 0x00000000,
726         0x5bc0, 0x00300000, 0x00000000,
727         0x918c, 0xffffffff, 0x00010006,
728         0x91a8, 0xffffffff, 0x00010006,
729         0x9150, 0xffffffff, 0x6e944040,
730         0x917c, 0xffffffff, 0x00030002,
731         0x9198, 0xffffffff, 0x00030002,
732         0x915c, 0xffffffff, 0x00010000,
733         0x3f90, 0xffff0000, 0xff000000,
734         0x9178, 0xffffffff, 0x00070000,
735         0x9194, 0xffffffff, 0x00070000,
736         0x9148, 0xffff0000, 0xff000000,
737         0x9190, 0xffffffff, 0x00090008,
738         0x91ac, 0xffffffff, 0x00090008,
739         0x3f94, 0xffff0000, 0xff000000,
740         0x914c, 0xffff0000, 0xff000000,
741         0x929c, 0xffffffff, 0x00000001,
742         0x8a18, 0xffffffff, 0x00000100,
743         0x8b28, 0xffffffff, 0x00000100,
744         0x9144, 0xffffffff, 0x00000100,
745         0x9b7c, 0xffffffff, 0x00000000,
746         0x8030, 0xffffffff, 0x0000100a,
747         0x8a14, 0xffffffff, 0x00000001,
748         0x8b24, 0xffffffff, 0x00ff0fff,
749         0x8b10, 0xffffffff, 0x00000000,
750         0x28a4c, 0x06000000, 0x06000000,
751         0x4d8, 0xffffffff, 0x00000100,
752         0x913c, 0xffff000f, 0x0100000a,
753         0x960c, 0xffffffff, 0x54763210,
754         0x88c4, 0xffffffff, 0x000000c2,
755         0x88d4, 0xffffffff, 0x00000010,
756         0x8974, 0xffffffff, 0x00000000,
757         0xc78, 0x00000080, 0x00000080,
758         0x5e78, 0xffffffff, 0x001000f0,
759         0xd02c, 0xffffffff, 0x08421000,
760         0xa008, 0xffffffff, 0x00010000,
761         0x8d00, 0xffffffff, 0x100e4848,
762         0x8d04, 0xffffffff, 0x00164745,
763         0x8c00, 0xffffffff, 0xe4000003,
764         0x8cf0, 0x1fffffff, 0x08e00410,
765         0x28350, 0xffffffff, 0x00000000,
766         0x9508, 0xffffffff, 0x00000002,
767         0x900c, 0xffffffff, 0x0017071f,
768         0x8c18, 0xffffffff, 0x10101060,
769         0x8c1c, 0xffffffff, 0x00001010
770 };
771
/* Golden register settings for Barts (Northern Islands); same apparent
 * {offset, mask, value} triple layout.  Note these entries use partial
 * masks (e.g. 0x8f311ff1) rather than 0xffffffff — only the masked
 * bits are meant to be updated, the rest preserved. */
772 static const u32 barts_golden_registers[] =
773 {
774         0x5eb4, 0xffffffff, 0x00000002,
775         0x5e78, 0x8f311ff1, 0x001000f0,
776         0x3f90, 0xffff0000, 0xff000000,
777         0x9148, 0xffff0000, 0xff000000,
778         0x3f94, 0xffff0000, 0xff000000,
779         0x914c, 0xffff0000, 0xff000000,
780         0xc78, 0x00000080, 0x00000080,
781         0xbd4, 0x70073777, 0x00010001,
782         0xd02c, 0xbfffff1f, 0x08421000,
783         0xd0b8, 0x03773777, 0x02011003,
784         0x5bc0, 0x00200000, 0x50100000,
785         0x98f8, 0x33773777, 0x02011003,
786         0x98fc, 0xffffffff, 0x76543210,
787         0x7030, 0x31000311, 0x00000011,
788         0x2f48, 0x00000007, 0x02011003,
789         0x6b28, 0x00000010, 0x00000012,
790         0x7728, 0x00000010, 0x00000012,
791         0x10328, 0x00000010, 0x00000012,
792         0x10f28, 0x00000010, 0x00000012,
793         0x11b28, 0x00000010, 0x00000012,
794         0x12728, 0x00000010, 0x00000012,
795         0x240c, 0x000007ff, 0x00000380,
796         0x8a14, 0xf000001f, 0x00000007,
797         0x8b24, 0x3fff3fff, 0x00ff0fff,
798         0x8b10, 0x0000ff0f, 0x00000000,
799         0x28a4c, 0x07ffffff, 0x06000000,
800         0x10c, 0x00000001, 0x00010003,
801         0xa02c, 0xffffffff, 0x0000009b,
802         0x913c, 0x0000000f, 0x0100000a,
803         0x8d00, 0xffff7f7f, 0x100e4848,
804         0x8d04, 0x00ffffff, 0x00164745,
805         0x8c00, 0xfffc0003, 0xe4000003,
806         0x8c04, 0xf8ff00ff, 0x40600060,
807         0x8c08, 0x00ff00ff, 0x001c001c,
808         0x8cf0, 0x1fff1fff, 0x08e00620,
809         0x8c20, 0x0fff0fff, 0x00800080,
810         0x8c24, 0x0fff0fff, 0x00800080,
811         0x8c18, 0xffffffff, 0x20202078,
812         0x8c1c, 0x0000ffff, 0x00001010,
813         0x28350, 0x00000f01, 0x00000000,
814         0x9508, 0x3700001f, 0x00000002,
815         0x960c, 0xffffffff, 0x54763210,
816         0x88c4, 0x001f3ae3, 0x000000c2,
817         0x88d4, 0x0000001f, 0x00000010,
818         0x8974, 0xffffffff, 0x00000000
819 };
820
/* golden register fixups for BTC "Turks" asics; entries appear to be
 * {register offset, and-mask, value} triples, programmed at init time by
 * evergreen_init_golden_registers() via radeon_program_register_sequence()
 */
static const u32 turks_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x8c8, 0x00003000, 0x00001070,
        0x8cc, 0x000fffff, 0x00040035,
        0x3f90, 0xffff0000, 0xfff00000,
        0x9148, 0xffff0000, 0xfff00000,
        0x3f94, 0xffff0000, 0xfff00000,
        0x914c, 0xffff0000, 0xfff00000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x00073007, 0x00010002,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02010002,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x00010002,
        0x98fc, 0xffffffff, 0x33221100,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x33773777, 0x00010002,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000007,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00410,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
871
/* golden register fixups for BTC "Caicos" asics; entries appear to be
 * {register offset, and-mask, value} triples, programmed at init time by
 * evergreen_init_golden_registers() via radeon_program_register_sequence()
 */
static const u32 caicos_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x8c8, 0x00003420, 0x00001450,
        0x8cc, 0x000fffff, 0x00040035,
        0x3f90, 0xffff0000, 0xfffc0000,
        0x9148, 0xffff0000, 0xfffc0000,
        0x3f94, 0xffff0000, 0xfffc0000,
        0x914c, 0xffff0000, 0xfffc0000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x00073007, 0x00010001,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02010001,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x02010001,
        0x98fc, 0xffffffff, 0x33221100,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x33773777, 0x02010001,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000001,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00410,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
922
923 static void evergreen_init_golden_registers(struct radeon_device *rdev)
924 {
925         switch (rdev->family) {
926         case CHIP_CYPRESS:
927         case CHIP_HEMLOCK:
928                 radeon_program_register_sequence(rdev,
929                                                  evergreen_golden_registers,
930                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
931                 radeon_program_register_sequence(rdev,
932                                                  evergreen_golden_registers2,
933                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
934                 radeon_program_register_sequence(rdev,
935                                                  cypress_mgcg_init,
936                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
937                 break;
938         case CHIP_JUNIPER:
939                 radeon_program_register_sequence(rdev,
940                                                  evergreen_golden_registers,
941                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
942                 radeon_program_register_sequence(rdev,
943                                                  evergreen_golden_registers2,
944                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
945                 radeon_program_register_sequence(rdev,
946                                                  juniper_mgcg_init,
947                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
948                 break;
949         case CHIP_REDWOOD:
950                 radeon_program_register_sequence(rdev,
951                                                  evergreen_golden_registers,
952                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
953                 radeon_program_register_sequence(rdev,
954                                                  evergreen_golden_registers2,
955                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
956                 radeon_program_register_sequence(rdev,
957                                                  redwood_mgcg_init,
958                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
959                 break;
960         case CHIP_CEDAR:
961                 radeon_program_register_sequence(rdev,
962                                                  cedar_golden_registers,
963                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
964                 radeon_program_register_sequence(rdev,
965                                                  evergreen_golden_registers2,
966                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
967                 radeon_program_register_sequence(rdev,
968                                                  cedar_mgcg_init,
969                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
970                 break;
971         case CHIP_PALM:
972                 radeon_program_register_sequence(rdev,
973                                                  wrestler_golden_registers,
974                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
975                 break;
976         case CHIP_SUMO:
977                 radeon_program_register_sequence(rdev,
978                                                  supersumo_golden_registers,
979                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
980                 break;
981         case CHIP_SUMO2:
982                 radeon_program_register_sequence(rdev,
983                                                  supersumo_golden_registers,
984                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
985                 radeon_program_register_sequence(rdev,
986                                                  sumo_golden_registers,
987                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
988                 break;
989         case CHIP_BARTS:
990                 radeon_program_register_sequence(rdev,
991                                                  barts_golden_registers,
992                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
993                 break;
994         case CHIP_TURKS:
995                 radeon_program_register_sequence(rdev,
996                                                  turks_golden_registers,
997                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
998                 break;
999         case CHIP_CAICOS:
1000                 radeon_program_register_sequence(rdev,
1001                                                  caicos_golden_registers,
1002                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1003                 break;
1004         default:
1005                 break;
1006         }
1007 }
1008
1009 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1010                              unsigned *bankh, unsigned *mtaspect,
1011                              unsigned *tile_split)
1012 {
1013         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1014         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1015         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1016         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1017         switch (*bankw) {
1018         default:
1019         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1020         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1021         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1022         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1023         }
1024         switch (*bankh) {
1025         default:
1026         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1027         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1028         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1029         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1030         }
1031         switch (*mtaspect) {
1032         default:
1033         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1034         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1035         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1036         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1037         }
1038 }
1039
1040 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1041                               u32 cntl_reg, u32 status_reg)
1042 {
1043         int r, i;
1044         struct atom_clock_dividers dividers;
1045
1046         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1047                                            clock, false, &dividers);
1048         if (r)
1049                 return r;
1050
1051         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1052
1053         for (i = 0; i < 100; i++) {
1054                 if (RREG32(status_reg) & DCLK_STATUS)
1055                         break;
1056                 mdelay(10);
1057         }
1058         if (i == 100)
1059                 return -ETIMEDOUT;
1060
1061         return 0;
1062 }
1063
1064 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1065 {
1066         int r = 0;
1067         u32 cg_scratch = RREG32(CG_SCRATCH1);
1068
1069         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1070         if (r)
1071                 goto done;
1072         cg_scratch &= 0xffff0000;
1073         cg_scratch |= vclk / 100; /* Mhz */
1074
1075         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1076         if (r)
1077                 goto done;
1078         cg_scratch &= 0x0000ffff;
1079         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1080
1081 done:
1082         WREG32(CG_SCRATCH1, cg_scratch);
1083
1084         return r;
1085 }
1086
/**
 * evergreen_set_uvd_clocks - reprogram the UPLL driving the UVD clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested VCLK rate; if either clock is 0 the PLL is put to sleep
 * @dclk: requested DCLK rate; if either clock is 0 the PLL is put to sleep
 *
 * Routes VCLK/DCLK to the bypass clock, recomputes and programs the UPLL
 * dividers for the requested rates, then switches the clocks back to the
 * PLL outputs.  Returns 0 on success or a negative error code propagated
 * from the divider calculation / PLL control requests.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
        /* start off with something large */
        unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
        int r;

        /* bypass vclk and dclk with bclk */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
                ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

        /* put PLL in bypass mode */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

        if (!vclk || !dclk) {
                /* keep the Bypass mode, put PLL to sleep */
                WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
                return 0;
        }

        /* NOTE(review): the constants look like VCO range / fb-divider
         * limits for this PLL - confirm against the
         * radeon_uvd_calc_upll_dividers() contract */
        r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
                                          16384, 0x03FFFFFF, 0, 128, 5,
                                          &fb_div, &vclk_div, &dclk_div);
        if (r)
                return r;

        /* set VCO_MODE to 1 */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

        /* toggle UPLL_SLEEP to 1 then back to 0 */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

        /* deassert UPLL_RESET */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

        mdelay(1);

        r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
        if (r)
                return r;

        /* assert UPLL_RESET again */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

        /* disable spread spectrum. */
        WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

        /* set feedback divider */
        WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

        /* set ref divider to 0 */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

        /* NOTE(review): the 307200 threshold appears to select a spare PLL
         * setting for small feedback dividers - confirm against UPLL docs */
        if (fb_div < 307200)
                WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
        else
                WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

        /* set PDIV_A and PDIV_B */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
                ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

        /* give the PLL some time to settle */
        mdelay(15);

        /* deassert PLL_RESET */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

        mdelay(15);

        /* switch from bypass mode to normal mode */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

        r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
        if (r)
                return r;

        /* switch VCLK and DCLK selection */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
                ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

        mdelay(100);

        return 0;
}
1175
1176 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1177 {
1178         int readrq;
1179         u16 v;
1180
1181         readrq = pcie_get_readrq(rdev->pdev);
1182         v = ffs(readrq) - 8;
1183         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1184          * to avoid hangs or perfomance issues
1185          */
1186         if ((v == 0) || (v == 6) || (v == 7))
1187                 pcie_set_readrq(rdev->pdev, 512);
1188 }
1189
1190 void dce4_program_fmt(struct drm_encoder *encoder)
1191 {
1192         struct drm_device *dev = encoder->dev;
1193         struct radeon_device *rdev = dev->dev_private;
1194         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1195         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1196         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1197         int bpc = 0;
1198         u32 tmp = 0;
1199         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1200
1201         if (connector) {
1202                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1203                 bpc = radeon_get_monitor_bpc(connector);
1204                 dither = radeon_connector->dither;
1205         }
1206
1207         /* LVDS/eDP FMT is set up by atom */
1208         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1209                 return;
1210
1211         /* not needed for analog */
1212         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1213             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1214                 return;
1215
1216         if (bpc == 0)
1217                 return;
1218
1219         switch (bpc) {
1220         case 6:
1221                 if (dither == RADEON_FMT_DITHER_ENABLE)
1222                         /* XXX sort out optimal dither settings */
1223                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1224                                 FMT_SPATIAL_DITHER_EN);
1225                 else
1226                         tmp |= FMT_TRUNCATE_EN;
1227                 break;
1228         case 8:
1229                 if (dither == RADEON_FMT_DITHER_ENABLE)
1230                         /* XXX sort out optimal dither settings */
1231                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1232                                 FMT_RGB_RANDOM_ENABLE |
1233                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1234                 else
1235                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1236                 break;
1237         case 10:
1238         default:
1239                 /* not needed */
1240                 break;
1241         }
1242
1243         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1244 }
1245
1246 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1247 {
1248         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1249                 return true;
1250         else
1251                 return false;
1252 }
1253
1254 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1255 {
1256         u32 pos1, pos2;
1257
1258         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1260
1261         if (pos1 != pos2)
1262                 return true;
1263         else
1264                 return false;
1265 }
1266
1267 /**
1268  * dce4_wait_for_vblank - vblank wait asic callback.
1269  *
1270  * @rdev: radeon_device pointer
1271  * @crtc: crtc to wait for vblank on
1272  *
1273  * Wait for vblank on the requested crtc (evergreen+).
1274  */
1275 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1276 {
1277         unsigned i = 0;
1278
1279         if (crtc >= rdev->num_crtc)
1280                 return;
1281
1282         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1283                 return;
1284
1285         /* depending on when we hit vblank, we may be close to active; if so,
1286          * wait for another frame.
1287          */
1288         while (dce4_is_in_vblank(rdev, crtc)) {
1289                 if (i++ % 100 == 0) {
1290                         if (!dce4_is_counter_moving(rdev, crtc))
1291                                 break;
1292                 }
1293         }
1294
1295         while (!dce4_is_in_vblank(rdev, crtc)) {
1296                 if (i++ % 100 == 0) {
1297                         if (!dce4_is_counter_moving(rdev, crtc))
1298                                 break;
1299                 }
1300         }
1301 }
1302
1303 /**
1304  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1305  *
1306  * @rdev: radeon_device pointer
1307  * @crtc: crtc to prepare for pageflip on
1308  *
1309  * Pre-pageflip callback (evergreen+).
1310  * Enables the pageflip irq (vblank irq).
1311  */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
        /* enable the pflip int */
        /* pairs with the radeon_irq_kms_pflip_irq_put() call in
         * evergreen_post_page_flip() once the flip has completed */
        radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1317
1318 /**
 * evergreen_post_page_flip - post-pageflip callback.
1320  *
1321  * @rdev: radeon_device pointer
1322  * @crtc: crtc to cleanup pageflip on
1323  *
1324  * Post-pageflip callback (evergreen+).
1325  * Disables the pageflip irq (vblank irq).
1326  */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
        /* disable the pflip int */
        /* drops the reference taken by evergreen_pre_page_flip() */
        radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
1332
1333 /**
1334  * evergreen_page_flip - pageflip callback.
1335  *
1336  * @rdev: radeon_device pointer
1337  * @crtc_id: crtc to cleanup pageflip on
1338  * @crtc_base: new address of the crtc (GPU MC address)
1339  *
1340  * Does the actual pageflip (evergreen+).
1341  * During vblank we take the crtc lock and wait for the update_pending
1342  * bit to go high, when it does, we release the lock, and allow the
1343  * double buffered update to take place.
1344  * Returns the current update pending status.
1345  */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
        struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
        u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
        int i;

        /* Lock the graphics update lock */
        tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
        WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

        /* update the scanout addresses */
        /* NOTE(review): both the secondary and primary surface address
         * registers are written, presumably so the new base takes effect
         * regardless of which surface is being scanned out - confirm */
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
               upper_32_bits(crtc_base));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
               (u32)crtc_base);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
               upper_32_bits(crtc_base));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
               (u32)crtc_base);

        /* Wait for update_pending to go high. */
        /* bounded busy-wait; on timeout we fall through and unlock anyway */
        for (i = 0; i < rdev->usec_timeout; i++) {
                if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
                        break;
                udelay(1);
        }
        DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

        /* Unlock the lock, so double-buffering can take place inside vblank */
        tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
        WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

        /* Return current update_pending status: */
        return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1382
1383 /* get temperature in millidegrees */
1384 int evergreen_get_temp(struct radeon_device *rdev)
1385 {
1386         u32 temp, toffset;
1387         int actual_temp = 0;
1388
1389         if (rdev->family == CHIP_JUNIPER) {
1390                 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1391                         TOFFSET_SHIFT;
1392                 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1393                         TS0_ADC_DOUT_SHIFT;
1394
1395                 if (toffset & 0x100)
1396                         actual_temp = temp / 2 - (0x200 - toffset);
1397                 else
1398                         actual_temp = temp / 2 + toffset;
1399
1400                 actual_temp = actual_temp * 1000;
1401
1402         } else {
1403                 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1404                         ASIC_T_SHIFT;
1405
1406                 if (temp & 0x400)
1407                         actual_temp = -256;
1408                 else if (temp & 0x200)
1409                         actual_temp = 255;
1410                 else if (temp & 0x100) {
1411                         actual_temp = temp & 0x1ff;
1412                         actual_temp |= ~0x1ff;
1413                 } else
1414                         actual_temp = temp & 0xff;
1415
1416                 actual_temp = (actual_temp * 1000) / 2;
1417         }
1418
1419         return actual_temp;
1420 }
1421
1422 int sumo_get_temp(struct radeon_device *rdev)
1423 {
1424         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1425         int actual_temp = temp - 49;
1426
1427         return actual_temp * 1000;
1428 }
1429
1430 /**
1431  * sumo_pm_init_profile - Initialize power profiles callback.
1432  *
1433  * @rdev: radeon_device pointer
1434  *
1435  * Initialize the power states used in profile mode
1436  * (sumo, trinity, SI).
1437  * Used for profile mode only.
1438  */
1439 void sumo_pm_init_profile(struct radeon_device *rdev)
1440 {
1441         int idx;
1442
1443         /* default */
1444         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1445         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1446         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1447         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1448
1449         /* low,mid sh/mh */
1450         if (rdev->flags & RADEON_IS_MOBILITY)
1451                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1452         else
1453                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1454
1455         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1456         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1457         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1458         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1459
1460         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1461         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1462         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1463         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1464
1465         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1466         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1467         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1468         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1469
1470         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1471         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1472         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1473         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1474
1475         /* high sh/mh */
1476         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1477         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1478         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1479         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1480         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1481                 rdev->pm.power_state[idx].num_clock_modes - 1;
1482
1483         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1484         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1485         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1486         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1487                 rdev->pm.power_state[idx].num_clock_modes - 1;
1488 }
1489
1490 /**
1491  * btc_pm_init_profile - Initialize power profiles callback.
1492  *
1493  * @rdev: radeon_device pointer
1494  *
1495  * Initialize the power states used in profile mode
1496  * (BTC, cayman).
1497  * Used for profile mode only.
1498  */
1499 void btc_pm_init_profile(struct radeon_device *rdev)
1500 {
1501         int idx;
1502
1503         /* default */
1504         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1508         /* starting with BTC, there is one state that is used for both
1509          * MH and SH.  Difference is that we always use the high clock index for
1510          * mclk.
1511          */
1512         if (rdev->flags & RADEON_IS_MOBILITY)
1513                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1514         else
1515                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1516         /* low sh */
1517         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1518         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1519         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1520         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1521         /* mid sh */
1522         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1523         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1524         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1525         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1526         /* high sh */
1527         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1528         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1529         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1530         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1531         /* low mh */
1532         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1533         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1534         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1535         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1536         /* mid mh */
1537         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1538         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1539         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1540         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1541         /* high mh */
1542         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1543         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1544         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1545         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1546 }
1547
1548 /**
1549  * evergreen_pm_misc - set additional pm hw parameters callback.
1550  *
1551  * @rdev: radeon_device pointer
1552  *
1553  * Set non-clock parameters associated with a power state
1554  * (voltage, etc.) (evergreen+).
1555  */
1556 void evergreen_pm_misc(struct radeon_device *rdev)
1557 {
1558         int req_ps_idx = rdev->pm.requested_power_state_index;
1559         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1560         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1561         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1562
1563         if (voltage->type == VOLTAGE_SW) {
1564                 /* 0xff0x are flags rather then an actual voltage */
1565                 if ((voltage->voltage & 0xff00) == 0xff00)
1566                         return;
1567                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1568                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1569                         rdev->pm.current_vddc = voltage->voltage;
1570                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1571                 }
1572
1573                 /* starting with BTC, there is one state that is used for both
1574                  * MH and SH.  Difference is that we always use the high clock index for
1575                  * mclk and vddci.
1576                  */
1577                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1578                     (rdev->family >= CHIP_BARTS) &&
1579                     rdev->pm.active_crtc_count &&
1580                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1581                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1582                         voltage = &rdev->pm.power_state[req_ps_idx].
1583                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1584
1585                 /* 0xff0x are flags rather then an actual voltage */
1586                 if ((voltage->vddci & 0xff00) == 0xff00)
1587                         return;
1588                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1589                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1590                         rdev->pm.current_vddci = voltage->vddci;
1591                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1592                 }
1593         }
1594 }
1595
1596 /**
1597  * evergreen_pm_prepare - pre-power state change callback.
1598  *
1599  * @rdev: radeon_device pointer
1600  *
1601  * Prepare for a power state change (evergreen+).
1602  */
1603 void evergreen_pm_prepare(struct radeon_device *rdev)
1604 {
1605         struct drm_device *ddev = rdev->ddev;
1606         struct drm_crtc *crtc;
1607         struct radeon_crtc *radeon_crtc;
1608         u32 tmp;
1609
1610         /* disable any active CRTCs */
1611         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1612                 radeon_crtc = to_radeon_crtc(crtc);
1613                 if (radeon_crtc->enabled) {
1614                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1615                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1616                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1617                 }
1618         }
1619 }
1620
1621 /**
1622  * evergreen_pm_finish - post-power state change callback.
1623  *
1624  * @rdev: radeon_device pointer
1625  *
1626  * Clean up after a power state change (evergreen+).
1627  */
1628 void evergreen_pm_finish(struct radeon_device *rdev)
1629 {
1630         struct drm_device *ddev = rdev->ddev;
1631         struct drm_crtc *crtc;
1632         struct radeon_crtc *radeon_crtc;
1633         u32 tmp;
1634
1635         /* enable any active CRTCs */
1636         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1637                 radeon_crtc = to_radeon_crtc(crtc);
1638                 if (radeon_crtc->enabled) {
1639                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1640                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1641                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1642                 }
1643         }
1644 }
1645
1646 /**
1647  * evergreen_hpd_sense - hpd sense callback.
1648  *
1649  * @rdev: radeon_device pointer
1650  * @hpd: hpd (hotplug detect) pin
1651  *
1652  * Checks if a digital monitor is connected (evergreen+).
1653  * Returns true if connected, false if not connected.
1654  */
1655 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1656 {
1657         bool connected = false;
1658
1659         switch (hpd) {
1660         case RADEON_HPD_1:
1661                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1662                         connected = true;
1663                 break;
1664         case RADEON_HPD_2:
1665                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1666                         connected = true;
1667                 break;
1668         case RADEON_HPD_3:
1669                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1670                         connected = true;
1671                 break;
1672         case RADEON_HPD_4:
1673                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1674                         connected = true;
1675                 break;
1676         case RADEON_HPD_5:
1677                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1678                         connected = true;
1679                 break;
1680         case RADEON_HPD_6:
1681                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1682                         connected = true;
1683                 break;
1684         default:
1685                 break;
1686         }
1687
1688         return connected;
1689 }
1690
1691 /**
1692  * evergreen_hpd_set_polarity - hpd set polarity callback.
1693  *
1694  * @rdev: radeon_device pointer
1695  * @hpd: hpd (hotplug detect) pin
1696  *
1697  * Set the polarity of the hpd pin (evergreen+).
1698  */
1699 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1700                                 enum radeon_hpd_id hpd)
1701 {
1702         u32 tmp;
1703         bool connected = evergreen_hpd_sense(rdev, hpd);
1704
1705         switch (hpd) {
1706         case RADEON_HPD_1:
1707                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1708                 if (connected)
1709                         tmp &= ~DC_HPDx_INT_POLARITY;
1710                 else
1711                         tmp |= DC_HPDx_INT_POLARITY;
1712                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1713                 break;
1714         case RADEON_HPD_2:
1715                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1716                 if (connected)
1717                         tmp &= ~DC_HPDx_INT_POLARITY;
1718                 else
1719                         tmp |= DC_HPDx_INT_POLARITY;
1720                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1721                 break;
1722         case RADEON_HPD_3:
1723                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1724                 if (connected)
1725                         tmp &= ~DC_HPDx_INT_POLARITY;
1726                 else
1727                         tmp |= DC_HPDx_INT_POLARITY;
1728                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1729                 break;
1730         case RADEON_HPD_4:
1731                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1732                 if (connected)
1733                         tmp &= ~DC_HPDx_INT_POLARITY;
1734                 else
1735                         tmp |= DC_HPDx_INT_POLARITY;
1736                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1737                 break;
1738         case RADEON_HPD_5:
1739                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1740                 if (connected)
1741                         tmp &= ~DC_HPDx_INT_POLARITY;
1742                 else
1743                         tmp |= DC_HPDx_INT_POLARITY;
1744                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1745                         break;
1746         case RADEON_HPD_6:
1747                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1748                 if (connected)
1749                         tmp &= ~DC_HPDx_INT_POLARITY;
1750                 else
1751                         tmp |= DC_HPDx_INT_POLARITY;
1752                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1753                 break;
1754         default:
1755                 break;
1756         }
1757 }
1758
1759 /**
1760  * evergreen_hpd_init - hpd setup callback.
1761  *
1762  * @rdev: radeon_device pointer
1763  *
1764  * Setup the hpd pins used by the card (evergreen+).
1765  * Enable the pin, set the polarity, and enable the hpd interrupts.
1766  */
1767 void evergreen_hpd_init(struct radeon_device *rdev)
1768 {
1769         struct drm_device *dev = rdev->ddev;
1770         struct drm_connector *connector;
1771         unsigned enabled = 0;
1772         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1773                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1774
1775         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1776                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1777
1778                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1779                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1780                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1781                          * aux dp channel on imac and help (but not completely fix)
1782                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1783                          * also avoid interrupt storms during dpms.
1784                          */
1785                         continue;
1786                 }
1787                 switch (radeon_connector->hpd.hpd) {
1788                 case RADEON_HPD_1:
1789                         WREG32(DC_HPD1_CONTROL, tmp);
1790                         break;
1791                 case RADEON_HPD_2:
1792                         WREG32(DC_HPD2_CONTROL, tmp);
1793                         break;
1794                 case RADEON_HPD_3:
1795                         WREG32(DC_HPD3_CONTROL, tmp);
1796                         break;
1797                 case RADEON_HPD_4:
1798                         WREG32(DC_HPD4_CONTROL, tmp);
1799                         break;
1800                 case RADEON_HPD_5:
1801                         WREG32(DC_HPD5_CONTROL, tmp);
1802                         break;
1803                 case RADEON_HPD_6:
1804                         WREG32(DC_HPD6_CONTROL, tmp);
1805                         break;
1806                 default:
1807                         break;
1808                 }
1809                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1810                 enabled |= 1 << radeon_connector->hpd.hpd;
1811         }
1812         radeon_irq_kms_enable_hpd(rdev, enabled);
1813 }
1814
1815 /**
1816  * evergreen_hpd_fini - hpd tear down callback.
1817  *
1818  * @rdev: radeon_device pointer
1819  *
1820  * Tear down the hpd pins used by the card (evergreen+).
1821  * Disable the hpd interrupts.
1822  */
1823 void evergreen_hpd_fini(struct radeon_device *rdev)
1824 {
1825         struct drm_device *dev = rdev->ddev;
1826         struct drm_connector *connector;
1827         unsigned disabled = 0;
1828
1829         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1830                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1831                 switch (radeon_connector->hpd.hpd) {
1832                 case RADEON_HPD_1:
1833                         WREG32(DC_HPD1_CONTROL, 0);
1834                         break;
1835                 case RADEON_HPD_2:
1836                         WREG32(DC_HPD2_CONTROL, 0);
1837                         break;
1838                 case RADEON_HPD_3:
1839                         WREG32(DC_HPD3_CONTROL, 0);
1840                         break;
1841                 case RADEON_HPD_4:
1842                         WREG32(DC_HPD4_CONTROL, 0);
1843                         break;
1844                 case RADEON_HPD_5:
1845                         WREG32(DC_HPD5_CONTROL, 0);
1846                         break;
1847                 case RADEON_HPD_6:
1848                         WREG32(DC_HPD6_CONTROL, 0);
1849                         break;
1850                 default:
1851                         break;
1852                 }
1853                 disabled |= 1 << radeon_connector->hpd.hpd;
1854         }
1855         radeon_irq_kms_disable_hpd(rdev, disabled);
1856 }
1857
1858 /* watermark setup */
1859
1860 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1861                                         struct radeon_crtc *radeon_crtc,
1862                                         struct drm_display_mode *mode,
1863                                         struct drm_display_mode *other_mode)
1864 {
1865         u32 tmp, buffer_alloc, i;
1866         u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1867         /*
1868          * Line Buffer Setup
1869          * There are 3 line buffers, each one shared by 2 display controllers.
1870          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1871          * the display controllers.  The paritioning is done via one of four
1872          * preset allocations specified in bits 2:0:
1873          * first display controller
1874          *  0 - first half of lb (3840 * 2)
1875          *  1 - first 3/4 of lb (5760 * 2)
1876          *  2 - whole lb (7680 * 2), other crtc must be disabled
1877          *  3 - first 1/4 of lb (1920 * 2)
1878          * second display controller
1879          *  4 - second half of lb (3840 * 2)
1880          *  5 - second 3/4 of lb (5760 * 2)
1881          *  6 - whole lb (7680 * 2), other crtc must be disabled
1882          *  7 - last 1/4 of lb (1920 * 2)
1883          */
1884         /* this can get tricky if we have two large displays on a paired group
1885          * of crtcs.  Ideally for multiple large displays we'd assign them to
1886          * non-linked crtcs for maximum line buffer allocation.
1887          */
1888         if (radeon_crtc->base.enabled && mode) {
1889                 if (other_mode) {
1890                         tmp = 0; /* 1/2 */
1891                         buffer_alloc = 1;
1892                 } else {
1893                         tmp = 2; /* whole */
1894                         buffer_alloc = 2;
1895                 }
1896         } else {
1897                 tmp = 0;
1898                 buffer_alloc = 0;
1899         }
1900
1901         /* second controller of the pair uses second half of the lb */
1902         if (radeon_crtc->crtc_id % 2)
1903                 tmp += 4;
1904         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1905
1906         if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1907                 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1908                        DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1909                 for (i = 0; i < rdev->usec_timeout; i++) {
1910                         if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1911                             DMIF_BUFFERS_ALLOCATED_COMPLETED)
1912                                 break;
1913                         udelay(1);
1914                 }
1915         }
1916
1917         if (radeon_crtc->base.enabled && mode) {
1918                 switch (tmp) {
1919                 case 0:
1920                 case 4:
1921                 default:
1922                         if (ASIC_IS_DCE5(rdev))
1923                                 return 4096 * 2;
1924                         else
1925                                 return 3840 * 2;
1926                 case 1:
1927                 case 5:
1928                         if (ASIC_IS_DCE5(rdev))
1929                                 return 6144 * 2;
1930                         else
1931                                 return 5760 * 2;
1932                 case 2:
1933                 case 6:
1934                         if (ASIC_IS_DCE5(rdev))
1935                                 return 8192 * 2;
1936                         else
1937                                 return 7680 * 2;
1938                 case 3:
1939                 case 7:
1940                         if (ASIC_IS_DCE5(rdev))
1941                                 return 2048 * 2;
1942                         else
1943                                 return 1920 * 2;
1944                 }
1945         }
1946
1947         /* controller not enabled, so no lb used */
1948         return 0;
1949 }
1950
1951 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1952 {
1953         u32 tmp = RREG32(MC_SHARED_CHMAP);
1954
1955         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1956         case 0:
1957         default:
1958                 return 1;
1959         case 1:
1960                 return 2;
1961         case 2:
1962                 return 4;
1963         case 3:
1964                 return 8;
1965         }
1966 }
1967
/* Inputs to the evergreen display watermark calculations below.
 * Clocks are in kHz, times in ns; vsc is a 20.12 fixed point ratio.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1983
1984 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1985 {
1986         /* Calculate DRAM Bandwidth and the part allocated to display. */
1987         fixed20_12 dram_efficiency; /* 0.7 */
1988         fixed20_12 yclk, dram_channels, bandwidth;
1989         fixed20_12 a;
1990
1991         a.full = dfixed_const(1000);
1992         yclk.full = dfixed_const(wm->yclk);
1993         yclk.full = dfixed_div(yclk, a);
1994         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1995         a.full = dfixed_const(10);
1996         dram_efficiency.full = dfixed_const(7);
1997         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1998         bandwidth.full = dfixed_mul(dram_channels, yclk);
1999         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2000
2001         return dfixed_trunc(bandwidth);
2002 }
2003
2004 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2005 {
2006         /* Calculate DRAM Bandwidth and the part allocated to display. */
2007         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2008         fixed20_12 yclk, dram_channels, bandwidth;
2009         fixed20_12 a;
2010
2011         a.full = dfixed_const(1000);
2012         yclk.full = dfixed_const(wm->yclk);
2013         yclk.full = dfixed_div(yclk, a);
2014         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2015         a.full = dfixed_const(10);
2016         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2017         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2018         bandwidth.full = dfixed_mul(dram_channels, yclk);
2019         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2020
2021         return dfixed_trunc(bandwidth);
2022 }
2023
2024 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2025 {
2026         /* Calculate the display Data return Bandwidth */
2027         fixed20_12 return_efficiency; /* 0.8 */
2028         fixed20_12 sclk, bandwidth;
2029         fixed20_12 a;
2030
2031         a.full = dfixed_const(1000);
2032         sclk.full = dfixed_const(wm->sclk);
2033         sclk.full = dfixed_div(sclk, a);
2034         a.full = dfixed_const(10);
2035         return_efficiency.full = dfixed_const(8);
2036         return_efficiency.full = dfixed_div(return_efficiency, a);
2037         a.full = dfixed_const(32);
2038         bandwidth.full = dfixed_mul(a, sclk);
2039         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2040
2041         return dfixed_trunc(bandwidth);
2042 }
2043
2044 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2045 {
2046         /* Calculate the DMIF Request Bandwidth */
2047         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2048         fixed20_12 disp_clk, bandwidth;
2049         fixed20_12 a;
2050
2051         a.full = dfixed_const(1000);
2052         disp_clk.full = dfixed_const(wm->disp_clk);
2053         disp_clk.full = dfixed_div(disp_clk, a);
2054         a.full = dfixed_const(10);
2055         disp_clk_request_efficiency.full = dfixed_const(8);
2056         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2057         a.full = dfixed_const(32);
2058         bandwidth.full = dfixed_mul(a, disp_clk);
2059         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2060
2061         return dfixed_trunc(bandwidth);
2062 }
2063
2064 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2065 {
2066         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2067         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2068         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2069         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2070
2071         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2072 }
2073
2074 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2075 {
2076         /* Calculate the display mode Average Bandwidth
2077          * DisplayMode should contain the source and destination dimensions,
2078          * timing, etc.
2079          */
2080         fixed20_12 bpp;
2081         fixed20_12 line_time;
2082         fixed20_12 src_width;
2083         fixed20_12 bandwidth;
2084         fixed20_12 a;
2085
2086         a.full = dfixed_const(1000);
2087         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2088         line_time.full = dfixed_div(line_time, a);
2089         bpp.full = dfixed_const(wm->bytes_per_pixel);
2090         src_width.full = dfixed_const(wm->src_width);
2091         bandwidth.full = dfixed_mul(src_width, bpp);
2092         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2093         bandwidth.full = dfixed_div(bandwidth, line_time);
2094
2095         return dfixed_trunc(bandwidth);
2096 }
2097
/* Worst-case latency (in ns) this head's line buffer must hide,
 * extended by any extra time needed when the lb fills slower than
 * the active display period.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* NOTE(review): available_bandwidth divides the two values below
	 * before the num_heads == 0 early-out — verify it cannot be zero.
	 */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time spent servicing the other heads' chunk and cursor requests */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many vtaps, or interlacing need up to 4
	 * source lines per destination line; otherwise 2 suffice.
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill bandwidth: per-head share of the available bandwidth... */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* ...capped by what the display clock can move (disp_clk * bpp) */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fetch the worst-case source lines for one output line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* charge any fill time that exceeds the active period as latency */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2150
2151 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2152 {
2153         if (evergreen_average_bandwidth(wm) <=
2154             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2155                 return true;
2156         else
2157                 return false;
2158 };
2159
2160 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2161 {
2162         if (evergreen_average_bandwidth(wm) <=
2163             (evergreen_available_bandwidth(wm) / wm->num_heads))
2164                 return true;
2165         else
2166                 return false;
2167 };
2168
2169 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2170 {
2171         u32 lb_partitions = wm->lb_size / wm->src_width;
2172         u32 line_time = wm->active_time + wm->blank_time;
2173         u32 latency_tolerant_lines;
2174         u32 latency_hiding;
2175         fixed20_12 a;
2176
2177         a.full = dfixed_const(1);
2178         if (wm->vsc.full > a.full)
2179                 latency_tolerant_lines = 1;
2180         else {
2181                 if (lb_partitions <= (wm->vtaps + 1))
2182                         latency_tolerant_lines = 1;
2183                 else
2184                         latency_tolerant_lines = 2;
2185         }
2186
2187         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2188
2189         if (evergreen_latency_watermark(wm) <= latency_hiding)
2190                 return true;
2191         else
2192                 return false;
2193 }
2194
2195 static void evergreen_program_watermarks(struct radeon_device *rdev,
2196                                          struct radeon_crtc *radeon_crtc,
2197                                          u32 lb_size, u32 num_heads)
2198 {
2199         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2200         struct evergreen_wm_params wm_low, wm_high;
2201         u32 dram_channels;
2202         u32 pixel_period;
2203         u32 line_time = 0;
2204         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2205         u32 priority_a_mark = 0, priority_b_mark = 0;
2206         u32 priority_a_cnt = PRIORITY_OFF;
2207         u32 priority_b_cnt = PRIORITY_OFF;
2208         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2209         u32 tmp, arb_control3;
2210         fixed20_12 a, b, c;
2211
2212         if (radeon_crtc->base.enabled && num_heads && mode) {
2213                 pixel_period = 1000000 / (u32)mode->clock;
2214                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2215                 priority_a_cnt = 0;
2216                 priority_b_cnt = 0;
2217                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2218
2219                 /* watermark for high clocks */
2220                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2221                         wm_high.yclk =
2222                                 radeon_dpm_get_mclk(rdev, false) * 10;
2223                         wm_high.sclk =
2224                                 radeon_dpm_get_sclk(rdev, false) * 10;
2225                 } else {
2226                         wm_high.yclk = rdev->pm.current_mclk * 10;
2227                         wm_high.sclk = rdev->pm.current_sclk * 10;
2228                 }
2229
2230                 wm_high.disp_clk = mode->clock;
2231                 wm_high.src_width = mode->crtc_hdisplay;
2232                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2233                 wm_high.blank_time = line_time - wm_high.active_time;
2234                 wm_high.interlaced = false;
2235                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2236                         wm_high.interlaced = true;
2237                 wm_high.vsc = radeon_crtc->vsc;
2238                 wm_high.vtaps = 1;
2239                 if (radeon_crtc->rmx_type != RMX_OFF)
2240                         wm_high.vtaps = 2;
2241                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2242                 wm_high.lb_size = lb_size;
2243                 wm_high.dram_channels = dram_channels;
2244                 wm_high.num_heads = num_heads;
2245
2246                 /* watermark for low clocks */
2247                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2248                         wm_low.yclk =
2249                                 radeon_dpm_get_mclk(rdev, true) * 10;
2250                         wm_low.sclk =
2251                                 radeon_dpm_get_sclk(rdev, true) * 10;
2252                 } else {
2253                         wm_low.yclk = rdev->pm.current_mclk * 10;
2254                         wm_low.sclk = rdev->pm.current_sclk * 10;
2255                 }
2256
2257                 wm_low.disp_clk = mode->clock;
2258                 wm_low.src_width = mode->crtc_hdisplay;
2259                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2260                 wm_low.blank_time = line_time - wm_low.active_time;
2261                 wm_low.interlaced = false;
2262                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2263                         wm_low.interlaced = true;
2264                 wm_low.vsc = radeon_crtc->vsc;
2265                 wm_low.vtaps = 1;
2266                 if (radeon_crtc->rmx_type != RMX_OFF)
2267                         wm_low.vtaps = 2;
2268                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2269                 wm_low.lb_size = lb_size;
2270                 wm_low.dram_channels = dram_channels;
2271                 wm_low.num_heads = num_heads;
2272
2273                 /* set for high clocks */
2274                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2275                 /* set for low clocks */
2276                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2277
2278                 /* possibly force display priority to high */
2279                 /* should really do this at mode validation time... */
2280                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2281                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2282                     !evergreen_check_latency_hiding(&wm_high) ||
2283                     (rdev->disp_priority == 2)) {
2284                         DRM_DEBUG_KMS("force priority a to high\n");
2285                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2286                 }
2287                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2288                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2289                     !evergreen_check_latency_hiding(&wm_low) ||
2290                     (rdev->disp_priority == 2)) {
2291                         DRM_DEBUG_KMS("force priority b to high\n");
2292                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2293                 }
2294
2295                 a.full = dfixed_const(1000);
2296                 b.full = dfixed_const(mode->clock);
2297                 b.full = dfixed_div(b, a);
2298                 c.full = dfixed_const(latency_watermark_a);
2299                 c.full = dfixed_mul(c, b);
2300                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2301                 c.full = dfixed_div(c, a);
2302                 a.full = dfixed_const(16);
2303                 c.full = dfixed_div(c, a);
2304                 priority_a_mark = dfixed_trunc(c);
2305                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2306
2307                 a.full = dfixed_const(1000);
2308                 b.full = dfixed_const(mode->clock);
2309                 b.full = dfixed_div(b, a);
2310                 c.full = dfixed_const(latency_watermark_b);
2311                 c.full = dfixed_mul(c, b);
2312                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2313                 c.full = dfixed_div(c, a);
2314                 a.full = dfixed_const(16);
2315                 c.full = dfixed_div(c, a);
2316                 priority_b_mark = dfixed_trunc(c);
2317                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2318         }
2319
2320         /* select wm A */
2321         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2322         tmp = arb_control3;
2323         tmp &= ~LATENCY_WATERMARK_MASK(3);
2324         tmp |= LATENCY_WATERMARK_MASK(1);
2325         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2326         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2327                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2328                 LATENCY_HIGH_WATERMARK(line_time)));
2329         /* select wm B */
2330         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2331         tmp &= ~LATENCY_WATERMARK_MASK(3);
2332         tmp |= LATENCY_WATERMARK_MASK(2);
2333         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2334         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2335                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2336                 LATENCY_HIGH_WATERMARK(line_time)));
2337         /* restore original selection */
2338         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2339
2340         /* write the priority marks */
2341         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2342         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2343
2344         /* save values for DPM */
2345         radeon_crtc->line_time = line_time;
2346         radeon_crtc->wm_high = latency_watermark_a;
2347         radeon_crtc->wm_low = latency_watermark_b;
2348 }
2349
2350 /**
2351  * evergreen_bandwidth_update - update display watermarks callback.
2352  *
2353  * @rdev: radeon_device pointer
2354  *
2355  * Update the display watermarks based on the requested mode(s)
2356  * (evergreen+).
2357  */
2358 void evergreen_bandwidth_update(struct radeon_device *rdev)
2359 {
2360         struct drm_display_mode *mode0 = NULL;
2361         struct drm_display_mode *mode1 = NULL;
2362         u32 num_heads = 0, lb_size;
2363         int i;
2364
2365         if (!rdev->mode_info.mode_config_initialized)
2366                 return;
2367
2368         radeon_update_display_priority(rdev);
2369
2370         for (i = 0; i < rdev->num_crtc; i++) {
2371                 if (rdev->mode_info.crtcs[i]->base.enabled)
2372                         num_heads++;
2373         }
2374         for (i = 0; i < rdev->num_crtc; i += 2) {
2375                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2376                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2377                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2378                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2379                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2380                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2381         }
2382 }
2383
2384 /**
2385  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2386  *
2387  * @rdev: radeon_device pointer
2388  *
2389  * Wait for the MC (memory controller) to be idle.
2390  * (evergreen+).
2391  * Returns 0 if the MC is idle, -1 if not.
2392  */
2393 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2394 {
2395         unsigned i;
2396         u32 tmp;
2397
2398         for (i = 0; i < rdev->usec_timeout; i++) {
2399                 /* read MC_STATUS */
2400                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2401                 if (!tmp)
2402                         return 0;
2403                 udelay(1);
2404         }
2405         return -1;
2406 }
2407
2408 /*
2409  * GART
2410  */
2411 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2412 {
2413         unsigned i;
2414         u32 tmp;
2415
2416         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2417
2418         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2419         for (i = 0; i < rdev->usec_timeout; i++) {
2420                 /* read MC_STATUS */
2421                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2422                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2423                 if (tmp == 2) {
2424                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2425                         return;
2426                 }
2427                 if (tmp) {
2428                         return;
2429                 }
2430                 udelay(1);
2431         }
2432 }
2433
/**
 * evergreen_pcie_gart_enable - pin the GART table and enable VM context 0
 *
 * @rdev: radeon_device pointer
 *
 * Pins the page table in VRAM, programs the VM L2 cache and L1 TLB
 * controls, points VM context 0 at the GTT range and flushes the TLBs.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-populate the table with the pages bound before suspend/reset */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* IGP (fusion) parts use the FUS_* MD TLB register offsets */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these discrete chips have a fourth MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map the whole GTT range through VM context 0 (page-granular) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2492
/**
 * evergreen_pcie_gart_disable - disable VM page translation
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, reprograms the L2 cache and L1 TLBs
 * without their enable bits set, and unpins the GART table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (ENABLE_L2_CACHE intentionally not set here) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (ENABLE_L1_TLB not set -> TLBs left disabled) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2517
/**
 * evergreen_pcie_gart_fini - tear down the GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables translation, frees the page-table VRAM object and releases
 * the GART bookkeeping.  Order matters: translation must be off before
 * the table memory is freed.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2524
2525
/**
 * evergreen_agp_enable - configure the MC for AGP operation
 *
 * @rdev: radeon_device pointer
 *
 * Programs the VM L2 cache and L1 TLBs the same way as the GART path
 * but leaves both VM contexts disabled, so no page-table translation
 * is performed (AGP aperture handled directly by the MC).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* both VM contexts off: addresses are not translated */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2551
/**
 * evergreen_mc_stop - stop the displays and blackout the memory controller
 *
 * @rdev: radeon_device pointer
 * @save: display state, filled in here for evergreen_mc_resume()
 *
 * Disables VGA rendering, blanks and disables every active CRTC,
 * blacks out the MC and locks the double-buffered display registers
 * so the MC aperture can be reprogrammed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		/* save VGA state so evergreen_mc_resume() can restore it */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the crtc via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop the crtc issuing display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* marked disabled so the resume path skips this crtc */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2638
/**
 * evergreen_mc_resume - restore display state saved by evergreen_mc_stop
 *
 * @rdev: radeon_device pointer
 * @save: state previously filled in by evergreen_mc_stop()
 *
 * Re-points all CRTC surface addresses at the (possibly relocated)
 * VRAM base, unlocks the double-buffered display registers, lifts the
 * MC blackout, re-enables CPU framebuffer access and restores the
 * saved VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* clear MASTER_UPDATE_MODE low bits before unlocking */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* NOTE(review): this SETS BLANK_DATA_EN (leaves the
				 * crtc blanked) rather than clearing it — presumably
				 * the subsequent modeset unblanks; confirm intent */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2726
/**
 * evergreen_mc_program - program the memory controller address layout
 *
 * @rdev: radeon_device pointer
 *
 * Stops the displays, programs the system aperture, framebuffer and
 * AGP location registers from rdev->mc, then resumes the displays.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	/* zero 32 register groups at 0x2c14.. stride 0x18 — magic offsets
	 * carried over from earlier asics; TODO confirm against reg spec */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must span both VRAM and the AGP window */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* unmapped accesses go to the VRAM scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: top 16 bits = end>>24, bottom 16 bits = start>>24 */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: park the aperture (BOT > TOP disables it) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2803
2804 /*
2805  * CP.
2806  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Emits an INDIRECT_BUFFER packet so the CP fetches and executes the
 * IB, preceded by a write of the predicted read pointer to either the
 * rptr save register or the writeback buffer.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* high address bits plus a control flag in bit 18 */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	/* IB packet: byte-swap mode (big-endian), 32-bit-aligned address, length */
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2840
2841
/**
 * evergreen_cp_load_microcode - load PFP and ME microcode into the CP
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then streams the big-endian firmware words into the
 * PFP ucode RAM and the ME RAM, resetting the load addresses before
 * and after each upload.
 * Returns 0 on success, -EINVAL if either firmware image is missing.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload the prefetch parser (PFP) microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload the micro engine (ME) microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset load addresses so the CP starts from word 0 */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2873
/**
 * evergreen_cp_start - initialize the command processor
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, starts the micro engine, then emits
 * the default golden context (clear state) to the gfx ring.
 * Returns 0 on success, or the ring-lock error code on failure.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* ME_INITIALIZE: 5 payload dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* start the micro engine (same init value as the r600 path —
	 * presumably releases the CP halt; confirm against reg spec) */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus 19 dwords of framing packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2939
2940 static int evergreen_cp_resume(struct radeon_device *rdev)
2941 {
2942         struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2943         u32 tmp;
2944         u32 rb_bufsz;
2945         int r;
2946
2947         /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2948         WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2949                                  SOFT_RESET_PA |
2950                                  SOFT_RESET_SH |
2951                                  SOFT_RESET_VGT |
2952                                  SOFT_RESET_SPI |
2953                                  SOFT_RESET_SX));
2954         RREG32(GRBM_SOFT_RESET);
2955         mdelay(15);
2956         WREG32(GRBM_SOFT_RESET, 0);
2957         RREG32(GRBM_SOFT_RESET);
2958
2959         /* Set ring buffer size */
2960         rb_bufsz = order_base_2(ring->ring_size / 8);
2961         tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2962 #ifdef __BIG_ENDIAN
2963         tmp |= BUF_SWAP_32BIT;
2964 #endif
2965         WREG32(CP_RB_CNTL, tmp);
2966         WREG32(CP_SEM_WAIT_TIMER, 0x0);
2967         WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2968
2969         /* Set the write pointer delay */
2970         WREG32(CP_RB_WPTR_DELAY, 0);
2971
2972         /* Initialize the ring buffer's read and write pointers */
2973         WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2974         WREG32(CP_RB_RPTR_WR, 0);
2975         ring->wptr = 0;
2976         WREG32(CP_RB_WPTR, ring->wptr);
2977
2978         /* set the wb address whether it's enabled or not */
2979         WREG32(CP_RB_RPTR_ADDR,
2980                ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2981         WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2982         WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2983
2984         if (rdev->wb.enabled)
2985                 WREG32(SCRATCH_UMSK, 0xff);
2986         else {
2987                 tmp |= RB_NO_UPDATE;
2988                 WREG32(SCRATCH_UMSK, 0);
2989         }
2990
2991         mdelay(1);
2992         WREG32(CP_RB_CNTL, tmp);
2993
2994         WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2995         WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2996
2997         ring->rptr = RREG32(CP_RB_RPTR);
2998
2999         evergreen_cp_start(rdev);
3000         ring->ready = true;
3001         r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3002         if (r) {
3003                 ring->ready = false;
3004                 return r;
3005         }
3006         return 0;
3007 }
3008
3009 /*
3010  * Core functions
3011  */
3012 static void evergreen_gpu_init(struct radeon_device *rdev)
3013 {
3014         u32 gb_addr_config;
3015         u32 mc_shared_chmap, mc_arb_ramcfg;
3016         u32 sx_debug_1;
3017         u32 smx_dc_ctl0;
3018         u32 sq_config;
3019         u32 sq_lds_resource_mgmt;
3020         u32 sq_gpr_resource_mgmt_1;
3021         u32 sq_gpr_resource_mgmt_2;
3022         u32 sq_gpr_resource_mgmt_3;
3023         u32 sq_thread_resource_mgmt;
3024         u32 sq_thread_resource_mgmt_2;
3025         u32 sq_stack_resource_mgmt_1;
3026         u32 sq_stack_resource_mgmt_2;
3027         u32 sq_stack_resource_mgmt_3;
3028         u32 vgt_cache_invalidation;
3029         u32 hdp_host_path_cntl, tmp;
3030         u32 disabled_rb_mask;
3031         int i, j, num_shader_engines, ps_thread_count;
3032
3033         switch (rdev->family) {
3034         case CHIP_CYPRESS:
3035         case CHIP_HEMLOCK:
3036                 rdev->config.evergreen.num_ses = 2;
3037                 rdev->config.evergreen.max_pipes = 4;
3038                 rdev->config.evergreen.max_tile_pipes = 8;
3039                 rdev->config.evergreen.max_simds = 10;
3040                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3041                 rdev->config.evergreen.max_gprs = 256;
3042                 rdev->config.evergreen.max_threads = 248;
3043                 rdev->config.evergreen.max_gs_threads = 32;
3044                 rdev->config.evergreen.max_stack_entries = 512;
3045                 rdev->config.evergreen.sx_num_of_sets = 4;
3046                 rdev->config.evergreen.sx_max_export_size = 256;
3047                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3048                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3049                 rdev->config.evergreen.max_hw_contexts = 8;
3050                 rdev->config.evergreen.sq_num_cf_insts = 2;
3051
3052                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3053                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3054                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3055                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3056                 break;
3057         case CHIP_JUNIPER:
3058                 rdev->config.evergreen.num_ses = 1;
3059                 rdev->config.evergreen.max_pipes = 4;
3060                 rdev->config.evergreen.max_tile_pipes = 4;
3061                 rdev->config.evergreen.max_simds = 10;
3062                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3063                 rdev->config.evergreen.max_gprs = 256;
3064                 rdev->config.evergreen.max_threads = 248;
3065                 rdev->config.evergreen.max_gs_threads = 32;
3066                 rdev->config.evergreen.max_stack_entries = 512;
3067                 rdev->config.evergreen.sx_num_of_sets = 4;
3068                 rdev->config.evergreen.sx_max_export_size = 256;
3069                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3070                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3071                 rdev->config.evergreen.max_hw_contexts = 8;
3072                 rdev->config.evergreen.sq_num_cf_insts = 2;
3073
3074                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3075                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3076                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3077                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3078                 break;
3079         case CHIP_REDWOOD:
3080                 rdev->config.evergreen.num_ses = 1;
3081                 rdev->config.evergreen.max_pipes = 4;
3082                 rdev->config.evergreen.max_tile_pipes = 4;
3083                 rdev->config.evergreen.max_simds = 5;
3084                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3085                 rdev->config.evergreen.max_gprs = 256;
3086                 rdev->config.evergreen.max_threads = 248;
3087                 rdev->config.evergreen.max_gs_threads = 32;
3088                 rdev->config.evergreen.max_stack_entries = 256;
3089                 rdev->config.evergreen.sx_num_of_sets = 4;
3090                 rdev->config.evergreen.sx_max_export_size = 256;
3091                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3092                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3093                 rdev->config.evergreen.max_hw_contexts = 8;
3094                 rdev->config.evergreen.sq_num_cf_insts = 2;
3095
3096                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3097                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3098                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3099                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3100                 break;
3101         case CHIP_CEDAR:
3102         default:
3103                 rdev->config.evergreen.num_ses = 1;
3104                 rdev->config.evergreen.max_pipes = 2;
3105                 rdev->config.evergreen.max_tile_pipes = 2;
3106                 rdev->config.evergreen.max_simds = 2;
3107                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3108                 rdev->config.evergreen.max_gprs = 256;
3109                 rdev->config.evergreen.max_threads = 192;
3110                 rdev->config.evergreen.max_gs_threads = 16;
3111                 rdev->config.evergreen.max_stack_entries = 256;
3112                 rdev->config.evergreen.sx_num_of_sets = 4;
3113                 rdev->config.evergreen.sx_max_export_size = 128;
3114                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3115                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3116                 rdev->config.evergreen.max_hw_contexts = 4;
3117                 rdev->config.evergreen.sq_num_cf_insts = 1;
3118
3119                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3120                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3121                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3122                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3123                 break;
3124         case CHIP_PALM:
3125                 rdev->config.evergreen.num_ses = 1;
3126                 rdev->config.evergreen.max_pipes = 2;
3127                 rdev->config.evergreen.max_tile_pipes = 2;
3128                 rdev->config.evergreen.max_simds = 2;
3129                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3130                 rdev->config.evergreen.max_gprs = 256;
3131                 rdev->config.evergreen.max_threads = 192;
3132                 rdev->config.evergreen.max_gs_threads = 16;
3133                 rdev->config.evergreen.max_stack_entries = 256;
3134                 rdev->config.evergreen.sx_num_of_sets = 4;
3135                 rdev->config.evergreen.sx_max_export_size = 128;
3136                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3137                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3138                 rdev->config.evergreen.max_hw_contexts = 4;
3139                 rdev->config.evergreen.sq_num_cf_insts = 1;
3140
3141                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3142                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3143                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3144                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3145                 break;
3146         case CHIP_SUMO:
3147                 rdev->config.evergreen.num_ses = 1;
3148                 rdev->config.evergreen.max_pipes = 4;
3149                 rdev->config.evergreen.max_tile_pipes = 4;
3150                 if (rdev->pdev->device == 0x9648)
3151                         rdev->config.evergreen.max_simds = 3;
3152                 else if ((rdev->pdev->device == 0x9647) ||
3153                          (rdev->pdev->device == 0x964a))
3154                         rdev->config.evergreen.max_simds = 4;
3155                 else
3156                         rdev->config.evergreen.max_simds = 5;
3157                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3158                 rdev->config.evergreen.max_gprs = 256;
3159                 rdev->config.evergreen.max_threads = 248;
3160                 rdev->config.evergreen.max_gs_threads = 32;
3161                 rdev->config.evergreen.max_stack_entries = 256;
3162                 rdev->config.evergreen.sx_num_of_sets = 4;
3163                 rdev->config.evergreen.sx_max_export_size = 256;
3164                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3165                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3166                 rdev->config.evergreen.max_hw_contexts = 8;
3167                 rdev->config.evergreen.sq_num_cf_insts = 2;
3168
3169                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3170                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3171                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3172                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3173                 break;
3174         case CHIP_SUMO2:
3175                 rdev->config.evergreen.num_ses = 1;
3176                 rdev->config.evergreen.max_pipes = 4;
3177                 rdev->config.evergreen.max_tile_pipes = 4;
3178                 rdev->config.evergreen.max_simds = 2;
3179                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3180                 rdev->config.evergreen.max_gprs = 256;
3181                 rdev->config.evergreen.max_threads = 248;
3182                 rdev->config.evergreen.max_gs_threads = 32;
3183                 rdev->config.evergreen.max_stack_entries = 512;
3184                 rdev->config.evergreen.sx_num_of_sets = 4;
3185                 rdev->config.evergreen.sx_max_export_size = 256;
3186                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3187                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3188                 rdev->config.evergreen.max_hw_contexts = 4;
3189                 rdev->config.evergreen.sq_num_cf_insts = 2;
3190
3191                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3192                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3193                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3194                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3195                 break;
3196         case CHIP_BARTS:
3197                 rdev->config.evergreen.num_ses = 2;
3198                 rdev->config.evergreen.max_pipes = 4;
3199                 rdev->config.evergreen.max_tile_pipes = 8;
3200                 rdev->config.evergreen.max_simds = 7;
3201                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3202                 rdev->config.evergreen.max_gprs = 256;
3203                 rdev->config.evergreen.max_threads = 248;
3204                 rdev->config.evergreen.max_gs_threads = 32;
3205                 rdev->config.evergreen.max_stack_entries = 512;
3206                 rdev->config.evergreen.sx_num_of_sets = 4;
3207                 rdev->config.evergreen.sx_max_export_size = 256;
3208                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3209                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3210                 rdev->config.evergreen.max_hw_contexts = 8;
3211                 rdev->config.evergreen.sq_num_cf_insts = 2;
3212
3213                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3214                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3215                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3216                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3217                 break;
3218         case CHIP_TURKS:
3219                 rdev->config.evergreen.num_ses = 1;
3220                 rdev->config.evergreen.max_pipes = 4;
3221                 rdev->config.evergreen.max_tile_pipes = 4;
3222                 rdev->config.evergreen.max_simds = 6;
3223                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3224                 rdev->config.evergreen.max_gprs = 256;
3225                 rdev->config.evergreen.max_threads = 248;
3226                 rdev->config.evergreen.max_gs_threads = 32;
3227                 rdev->config.evergreen.max_stack_entries = 256;
3228                 rdev->config.evergreen.sx_num_of_sets = 4;
3229                 rdev->config.evergreen.sx_max_export_size = 256;
3230                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3231                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3232                 rdev->config.evergreen.max_hw_contexts = 8;
3233                 rdev->config.evergreen.sq_num_cf_insts = 2;
3234
3235                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3236                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3237                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3238                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3239                 break;
3240         case CHIP_CAICOS:
3241                 rdev->config.evergreen.num_ses = 1;
3242                 rdev->config.evergreen.max_pipes = 2;
3243                 rdev->config.evergreen.max_tile_pipes = 2;
3244                 rdev->config.evergreen.max_simds = 2;
3245                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3246                 rdev->config.evergreen.max_gprs = 256;
3247                 rdev->config.evergreen.max_threads = 192;
3248                 rdev->config.evergreen.max_gs_threads = 16;
3249                 rdev->config.evergreen.max_stack_entries = 256;
3250                 rdev->config.evergreen.sx_num_of_sets = 4;
3251                 rdev->config.evergreen.sx_max_export_size = 128;
3252                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3253                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3254                 rdev->config.evergreen.max_hw_contexts = 4;
3255                 rdev->config.evergreen.sq_num_cf_insts = 1;
3256
3257                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3258                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3259                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3260                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3261                 break;
3262         }
3263
3264         /* Initialize HDP */
3265         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3266                 WREG32((0x2c14 + j), 0x00000000);
3267                 WREG32((0x2c18 + j), 0x00000000);
3268                 WREG32((0x2c1c + j), 0x00000000);
3269                 WREG32((0x2c20 + j), 0x00000000);
3270                 WREG32((0x2c24 + j), 0x00000000);
3271         }
3272
3273         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3274
3275         evergreen_fix_pci_max_read_req_size(rdev);
3276
3277         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3278         if ((rdev->family == CHIP_PALM) ||
3279             (rdev->family == CHIP_SUMO) ||
3280             (rdev->family == CHIP_SUMO2))
3281                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3282         else
3283                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3284
3285         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3286          * not have bank info, so create a custom tiling dword.
3287          * bits 3:0   num_pipes
3288          * bits 7:4   num_banks
3289          * bits 11:8  group_size
3290          * bits 15:12 row_size
3291          */
3292         rdev->config.evergreen.tile_config = 0;
3293         switch (rdev->config.evergreen.max_tile_pipes) {
3294         case 1:
3295         default:
3296                 rdev->config.evergreen.tile_config |= (0 << 0);
3297                 break;
3298         case 2:
3299                 rdev->config.evergreen.tile_config |= (1 << 0);
3300                 break;
3301         case 4:
3302                 rdev->config.evergreen.tile_config |= (2 << 0);
3303                 break;
3304         case 8:
3305                 rdev->config.evergreen.tile_config |= (3 << 0);
3306                 break;
3307         }
3308         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3309         if (rdev->flags & RADEON_IS_IGP)
3310                 rdev->config.evergreen.tile_config |= 1 << 4;
3311         else {
3312                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3313                 case 0: /* four banks */
3314                         rdev->config.evergreen.tile_config |= 0 << 4;
3315                         break;
3316                 case 1: /* eight banks */
3317                         rdev->config.evergreen.tile_config |= 1 << 4;
3318                         break;
3319                 case 2: /* sixteen banks */
3320                 default:
3321                         rdev->config.evergreen.tile_config |= 2 << 4;
3322                         break;
3323                 }
3324         }
3325         rdev->config.evergreen.tile_config |= 0 << 8;
3326         rdev->config.evergreen.tile_config |=
3327                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3328
3329         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3330
3331         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3332                 u32 efuse_straps_4;
3333                 u32 efuse_straps_3;
3334
3335                 efuse_straps_4 = RREG32_RCU(0x204);
3336                 efuse_straps_3 = RREG32_RCU(0x203);
3337                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3338                       ((efuse_straps_3 & 0xf0000000) >> 28));
3339         } else {
3340                 tmp = 0;
3341                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3342                         u32 rb_disable_bitmap;
3343
3344                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3345                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3346                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3347                         tmp <<= 4;
3348                         tmp |= rb_disable_bitmap;
3349                 }
3350         }
3351         /* enabled rb are just the one not disabled :) */
3352         disabled_rb_mask = tmp;
3353         tmp = 0;
3354         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3355                 tmp |= (1 << i);
3356         /* if all the backends are disabled, fix it up here */
3357         if ((disabled_rb_mask & tmp) == tmp) {
3358                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3359                         disabled_rb_mask &= ~(1 << i);
3360         }
3361
3362         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3363         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3364
3365         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3366         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3367         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3368         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3369         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3370         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3371         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3372
3373         if ((rdev->config.evergreen.max_backends == 1) &&
3374             (rdev->flags & RADEON_IS_IGP)) {
3375                 if ((disabled_rb_mask & 3) == 1) {
3376                         /* RB0 disabled, RB1 enabled */
3377                         tmp = 0x11111111;
3378                 } else {
3379                         /* RB1 disabled, RB0 enabled */
3380                         tmp = 0x00000000;
3381                 }
3382         } else {
3383                 tmp = gb_addr_config & NUM_PIPES_MASK;
3384                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3385                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3386         }
3387         WREG32(GB_BACKEND_MAP, tmp);
3388
3389         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3390         WREG32(CGTS_TCC_DISABLE, 0);
3391         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3392         WREG32(CGTS_USER_TCC_DISABLE, 0);
3393
3394         /* set HW defaults for 3D engine */
3395         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3396                                      ROQ_IB2_START(0x2b)));
3397
3398         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3399
3400         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3401                              SYNC_GRADIENT |
3402                              SYNC_WALKER |
3403                              SYNC_ALIGNER));
3404
3405         sx_debug_1 = RREG32(SX_DEBUG_1);
3406         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3407         WREG32(SX_DEBUG_1, sx_debug_1);
3408
3409
3410         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3411         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3412         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3413         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3414
3415         if (rdev->family <= CHIP_SUMO2)
3416                 WREG32(SMX_SAR_CTL0, 0x00010000);
3417
3418         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3419                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3420                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3421
3422         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3423                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3424                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3425
3426         WREG32(VGT_NUM_INSTANCES, 1);
3427         WREG32(SPI_CONFIG_CNTL, 0);
3428         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3429         WREG32(CP_PERFMON_CNTL, 0);
3430
3431         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3432                                   FETCH_FIFO_HIWATER(0x4) |
3433                                   DONE_FIFO_HIWATER(0xe0) |
3434                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3435
3436         sq_config = RREG32(SQ_CONFIG);
3437         sq_config &= ~(PS_PRIO(3) |
3438                        VS_PRIO(3) |
3439                        GS_PRIO(3) |
3440                        ES_PRIO(3));
3441         sq_config |= (VC_ENABLE |
3442                       EXPORT_SRC_C |
3443                       PS_PRIO(0) |
3444                       VS_PRIO(1) |
3445                       GS_PRIO(2) |
3446                       ES_PRIO(3));
3447
3448         switch (rdev->family) {
3449         case CHIP_CEDAR:
3450         case CHIP_PALM:
3451         case CHIP_SUMO:
3452         case CHIP_SUMO2:
3453         case CHIP_CAICOS:
3454                 /* no vertex cache */
3455                 sq_config &= ~VC_ENABLE;
3456                 break;
3457         default:
3458                 break;
3459         }
3460
3461         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3462
3463         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3464         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3465         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3466         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3467         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3468         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3469         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3470
3471         switch (rdev->family) {
3472         case CHIP_CEDAR:
3473         case CHIP_PALM:
3474         case CHIP_SUMO:
3475         case CHIP_SUMO2:
3476                 ps_thread_count = 96;
3477                 break;
3478         default:
3479                 ps_thread_count = 128;
3480                 break;
3481         }
3482
3483         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3484         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3485         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3486         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3487         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3488         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3489
3490         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3491         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3492         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3493         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3494         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3495         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3496
3497         WREG32(SQ_CONFIG, sq_config);
3498         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3499         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3500         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3501         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3502         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3503         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3504         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3505         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3506         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3507         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3508
3509         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3510                                           FORCE_EOV_MAX_REZ_CNT(255)));
3511
3512         switch (rdev->family) {
3513         case CHIP_CEDAR:
3514         case CHIP_PALM:
3515         case CHIP_SUMO:
3516         case CHIP_SUMO2:
3517         case CHIP_CAICOS:
3518                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3519                 break;
3520         default:
3521                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3522                 break;
3523         }
3524         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3525         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3526
3527         WREG32(VGT_GS_VERTEX_REUSE, 16);
3528         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3529         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3530
3531         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3532         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3533
3534         WREG32(CB_PERF_CTR0_SEL_0, 0);
3535         WREG32(CB_PERF_CTR0_SEL_1, 0);
3536         WREG32(CB_PERF_CTR1_SEL_0, 0);
3537         WREG32(CB_PERF_CTR1_SEL_1, 0);
3538         WREG32(CB_PERF_CTR2_SEL_0, 0);
3539         WREG32(CB_PERF_CTR2_SEL_1, 0);
3540         WREG32(CB_PERF_CTR3_SEL_0, 0);
3541         WREG32(CB_PERF_CTR3_SEL_1, 0);
3542
3543         /* clear render buffer base addresses */
3544         WREG32(CB_COLOR0_BASE, 0);
3545         WREG32(CB_COLOR1_BASE, 0);
3546         WREG32(CB_COLOR2_BASE, 0);
3547         WREG32(CB_COLOR3_BASE, 0);
3548         WREG32(CB_COLOR4_BASE, 0);
3549         WREG32(CB_COLOR5_BASE, 0);
3550         WREG32(CB_COLOR6_BASE, 0);
3551         WREG32(CB_COLOR7_BASE, 0);
3552         WREG32(CB_COLOR8_BASE, 0);
3553         WREG32(CB_COLOR9_BASE, 0);
3554         WREG32(CB_COLOR10_BASE, 0);
3555         WREG32(CB_COLOR11_BASE, 0);
3556
3557         /* set the shader const cache sizes to 0 */
3558         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3559                 WREG32(i, 0);
3560         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3561                 WREG32(i, 0);
3562
3563         tmp = RREG32(HDP_MISC_CNTL);
3564         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3565         WREG32(HDP_MISC_CNTL, tmp);
3566
3567         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3568         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3569
3570         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3571
3572         udelay(50);
3573
3574 }
3575
/**
 * evergreen_mc_init - probe the memory controller and set up the GPU
 * address space
 *
 * @rdev: radeon_device pointer
 *
 * Reads the RAM configuration registers to determine VRAM channel size
 * and channel count, records the PCI aperture, reads the VRAM size, and
 * then lets the r7xx helper place VRAM/GTT in the GPU address space.
 * Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs (PALM/SUMO/SUMO2) expose the arbiter RAM config at a
	 * different register offset than discrete parts. */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* channel width in bits, decoded from the ramcfg bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels; the field is a log2-style encoding */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3634
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Prints the GRBM/SRBM/CP/DMA status registers via dev_info; used for
 * debugging around GPU soft resets.  On cayman+ a second DMA engine
 * exists at a 0x800 register offset, so its status is dumped too.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3662
3663 bool evergreen_is_display_hung(struct radeon_device *rdev)
3664 {
3665         u32 crtc_hung = 0;
3666         u32 crtc_status[6];
3667         u32 i, j, tmp;
3668
3669         for (i = 0; i < rdev->num_crtc; i++) {
3670                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3671                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3672                         crtc_hung |= (1 << i);
3673                 }
3674         }
3675
3676         for (j = 0; j < 10; j++) {
3677                 for (i = 0; i < rdev->num_crtc; i++) {
3678                         if (crtc_hung & (1 << i)) {
3679                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3680                                 if (tmp != crtc_status[i])
3681                                         crtc_hung &= ~(1 << i);
3682                         }
3683                 }
3684                 if (crtc_hung == 0)
3685                         return false;
3686                 udelay(100);
3687         }
3688
3689         return true;
3690 }
3691
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Inspects GRBM_STATUS, DMA_STATUS_REG, SRBM_STATUS/SRBM_STATUS2,
 * VM_L2_STATUS and the display engine and builds a RADEON_RESET_* mask
 * of blocks that appear busy/hung.  A zero return means no reset is
 * needed.  MC busy is deliberately masked out at the end since a busy
 * memory controller is usually just busy, not hung.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	/* any busy gfx pipeline block -> GFX reset */
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3760
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* mask of blocks to reset (0 = no-op)
 *
 * Halts the CP (and DMA if requested), stops memory-controller client
 * access, then translates the reset mask into GRBM_SOFT_RESET and
 * SRBM_SOFT_RESET bits which are pulsed (set, delay, clear) with
 * read-backs to post the writes.  Finally restores MC access.  Status
 * registers are dumped before and after for debugging.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* quiesce MC clients before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* map the RADEON_RESET_* mask onto hardware soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGPs have no on-card MC to reset */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back to post the write before delaying */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3874
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-weight fallback used when soft reset fails: halts the CP, DMA
 * and RLC, switches clocks to bypass mode, disables bus mastering and
 * MC access, then triggers a PCI config reset and spins until the ASIC
 * responds again (CONFIG_MEMSIZE reads something other than all-ones,
 * i.e. the register bus is alive).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3916
/**
 * evergreen_asic_reset - reset the ASIC if it is hung
 *
 * @rdev: radeon_device pointer
 *
 * Escalating reset: first tries a targeted soft reset of the blocks
 * reported hung; if anything is still hung afterwards (and the
 * radeon_hard_reset module option allows it) falls back to a full PCI
 * config reset.  The BIOS scratch "engine hung" flag is set while a
 * reset is in progress and cleared once the GPU checks out clean.
 * Always returns 0.
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
	u32 reset_mask;

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	/* try soft reset */
	evergreen_gpu_soft_reset(rdev, reset_mask);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	/* try pci config reset */
	if (reset_mask && radeon_hard_reset)
		evergreen_gpu_pci_config_reset(rdev);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}
3942
3943 /**
3944  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3945  *
3946  * @rdev: radeon_device pointer
3947  * @ring: radeon_ring structure holding ring information
3948  *
3949  * Check if the GFX engine is locked up.
3950  * Returns true if the engine appears to be locked up, false if not.
3951  */
3952 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3953 {
3954         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3955
3956         if (!(reset_mask & (RADEON_RESET_GFX |
3957                             RADEON_RESET_COMPUTE |
3958                             RADEON_RESET_CP))) {
3959                 radeon_ring_lockup_update(ring);
3960                 return false;
3961         }
3962         /* force CP activities */
3963         radeon_ring_force_activity(rdev, ring);
3964         return radeon_ring_test_lockup(rdev, ring);
3965 }
3966
3967 /*
3968  * RLC
3969  */
3970 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3971 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3972
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins, unreferences and NULLs the three RLC-related buffer objects
 * (save/restore block, clear state block, CP table) if they were
 * allocated by sumo_rlc_init().  Safe to call when any of them is NULL.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4013
4014 #define CP_ME_TABLE_SIZE    96
4015
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and populates up to three VRAM buffer objects used by
 * the RLC, depending on what the ASIC provides:
 *  - a save/restore register list (when rdev->rlc.reg_list is set),
 *    written in a per-generation format (flat dwords on SI+, packed
 *    register pairs on ON/LN/TN);
 *  - a clear-state buffer (when rdev->rlc.cs_data is set), laid out
 *    per generation (CIK/SI helpers, or hand-built header + extents
 *    for evergreen-class parts);
 *  - a CP power-gating table (when rdev->rlc.cp_table_size is set).
 * On any failure everything allocated so far is torn down via
 * sumo_rlc_fini() and the error is returned; returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the sr buffer
		 * (exact breakdown not visible here - TODO confirm) */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain list of register dwords */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* register offsets are stored as dword
				 * indices (byte offset >> 2), two per entry */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header before the csb data */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen-class: size = sum of all extent register
			 * counts plus 3 header dwords per list and 2 markers */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: 64-bit GPU address of the csb data + size */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen-class: header area first (one dword with
			 * the upper address bits, then 3 dwords per list),
			 * register extents packed after it */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* low 32 bits of this list's data address */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* length in bytes, flag 0x08000000
					 * (meaning not visible here - TODO confirm) */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block (CIK power gating) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4228
/**
 * evergreen_rlc_start - enable the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Turns the RLC on via RLC_CNTL; IGPs additionally enable GFX power
 * gating bits in the same register.
 */
static void evergreen_rlc_start(struct radeon_device *rdev)
{
	u32 mask = RLC_ENABLE;

	if (rdev->flags & RADEON_IS_IGP) {
		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
	}

	WREG32(RLC_CNTL, mask);
}
4239
/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs the per-family setup registers (including
 * the load-balancing/always-on SIMD config on ARUBA and the
 * save/restore + clear-state buffer addresses on IGPs), uploads the
 * big-endian RLC ucode blob word by word, and re-enables the RLC.
 * Returns -EINVAL if no RLC firmware is loaded, 0 on success.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only program the LB registers when all SIMDs in
			 * the SE are active - TODO confirm intent */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* buffers allocated by sumo_rlc_init(); addresses in
		 * 256-byte units (>> 8) */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* ucode size differs per family; blob is big-endian */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4306
4307 /* Interrupts */
4308
4309 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4310 {
4311         if (crtc >= rdev->num_crtc)
4312                 return 0;
4313         else
4314                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4315 }
4316
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Writes the disabled state directly into the CP, DMA, GRBM, CRTC,
 * page-flip, DAC and HPD interrupt control registers.  Used when the IH
 * ring is down so no stale enables remain.  HPD polarity bits are
 * preserved while the enable bits are cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
        u32 tmp;

        /* CP interrupt setup differs on Cayman+ (three rings) */
        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0,
                                         CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
                cayman_cp_int_cntl_setup(rdev, 1, 0);
                cayman_cp_int_cntl_setup(rdev, 2, 0);
                /* second DMA engine only exists on Cayman+ */
                tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                WREG32(CAYMAN_DMA1_CNTL, tmp);
        } else
                WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
        /* disable DMA trap interrupts */
        tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
        WREG32(DMA_CNTL, tmp);
        WREG32(GRBM_INT_CNTL, 0);
        /* mask vblank/vline interrupts on every populated CRTC */
        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* mask page-flip (GRPH) interrupts */
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* only one DAC on DCE5 */
        if (!ASIC_IS_DCE5(rdev))
                WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
        WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

        /* clear HPD interrupt enables but keep the polarity bits intact */
        tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD1_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD2_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD3_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD4_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD5_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4374
/**
 * evergreen_irq_set - program interrupt enables from the driver state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the enable masks for the CP rings, DMA engines, CRTC vblank,
 * hotplug (HPD), HDMI/AFMT audio and thermal interrupt sources from
 * rdev->irq, then writes them to the hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no IRQ handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
        u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
        u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
        u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
        u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
        u32 grbm_int_cntl = 0;
        u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
        u32 dma_cntl, dma_cntl1 = 0;
        u32 thermal_int = 0;

        if (!rdev->irq.installed) {
                WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
                return -EINVAL;
        }
        /* don't enable anything if the ih is disabled */
        if (!rdev->ih.enabled) {
                r600_disable_interrupts(rdev);
                /* force the active interrupt state to all disabled */
                evergreen_disable_interrupt_state(rdev);
                return 0;
        }

        /* start from current register values with the enable bits cleared */
        hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
        /* the thermal interrupt control register lives at a different
         * address on Aruba (TN) parts
         */
        if (rdev->family == CHIP_ARUBA)
                thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
                        ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
        else
                thermal_int = RREG32(CG_THERMAL_INT) &
                        ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

        afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

        dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

        if (rdev->family >= CHIP_CAYMAN) {
                /* enable CP interrupts on all rings */
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
                        cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
                        cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
                }
        } else {
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= RB_INT_ENABLE;
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
        }

        /* DMA engine trap interrupts */
        if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
                DRM_DEBUG("r600_irq_set: sw int dma\n");
                dma_cntl |= TRAP_ENABLE;
        }

        if (rdev->family >= CHIP_CAYMAN) {
                dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
                        DRM_DEBUG("r600_irq_set: sw int dma1\n");
                        dma_cntl1 |= TRAP_ENABLE;
                }
        }

        if (rdev->irq.dpm_thermal) {
                DRM_DEBUG("dpm thermal\n");
                thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
        }

        /* vblank interrupts: enabled for drm vblank or pending page flips */
        if (rdev->irq.crtc_vblank_int[0] ||
            atomic_read(&rdev->irq.pflip[0])) {
                DRM_DEBUG("evergreen_irq_set: vblank 0\n");
                crtc1 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[1] ||
            atomic_read(&rdev->irq.pflip[1])) {
                DRM_DEBUG("evergreen_irq_set: vblank 1\n");
                crtc2 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[2] ||
            atomic_read(&rdev->irq.pflip[2])) {
                DRM_DEBUG("evergreen_irq_set: vblank 2\n");
                crtc3 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[3] ||
            atomic_read(&rdev->irq.pflip[3])) {
                DRM_DEBUG("evergreen_irq_set: vblank 3\n");
                crtc4 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[4] ||
            atomic_read(&rdev->irq.pflip[4])) {
                DRM_DEBUG("evergreen_irq_set: vblank 4\n");
                crtc5 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[5] ||
            atomic_read(&rdev->irq.pflip[5])) {
                DRM_DEBUG("evergreen_irq_set: vblank 5\n");
                crtc6 |= VBLANK_INT_MASK;
        }
        /* hotplug detect interrupts */
        if (rdev->irq.hpd[0]) {
                DRM_DEBUG("evergreen_irq_set: hpd 1\n");
                hpd1 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[1]) {
                DRM_DEBUG("evergreen_irq_set: hpd 2\n");
                hpd2 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[2]) {
                DRM_DEBUG("evergreen_irq_set: hpd 3\n");
                hpd3 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[3]) {
                DRM_DEBUG("evergreen_irq_set: hpd 4\n");
                hpd4 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[4]) {
                DRM_DEBUG("evergreen_irq_set: hpd 5\n");
                hpd5 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[5]) {
                DRM_DEBUG("evergreen_irq_set: hpd 6\n");
                hpd6 |= DC_HPDx_INT_EN;
        }
        /* HDMI/audio format change interrupts */
        if (rdev->irq.afmt[0]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
                afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[1]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
                afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[2]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
                afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[3]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
                afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[4]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
                afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[5]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
                afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }

        /* now commit the assembled masks to the hardware */
        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
                cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
                cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
        } else
                WREG32(CP_INT_CNTL, cp_int_cntl);

        WREG32(DMA_CNTL, dma_cntl);

        if (rdev->family >= CHIP_CAYMAN)
                WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

        WREG32(GRBM_INT_CNTL, grbm_int_cntl);

        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
        if (rdev->num_crtc >= 4) {
                WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
                WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
                WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
        }

        /* page-flip interrupts are always unmasked */
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
               GRPH_PFLIP_INT_MASK);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
               GRPH_PFLIP_INT_MASK);
        if (rdev->num_crtc >= 4) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
                       GRPH_PFLIP_INT_MASK);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
                       GRPH_PFLIP_INT_MASK);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
                       GRPH_PFLIP_INT_MASK);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
                       GRPH_PFLIP_INT_MASK);
        }

        WREG32(DC_HPD1_INT_CONTROL, hpd1);
        WREG32(DC_HPD2_INT_CONTROL, hpd2);
        WREG32(DC_HPD3_INT_CONTROL, hpd3);
        WREG32(DC_HPD4_INT_CONTROL, hpd4);
        WREG32(DC_HPD5_INT_CONTROL, hpd5);
        WREG32(DC_HPD6_INT_CONTROL, hpd6);
        if (rdev->family == CHIP_ARUBA)
                WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
        else
                WREG32(CG_THERMAL_INT, thermal_int);

        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

        return 0;
}
4601
4602 static void evergreen_irq_ack(struct radeon_device *rdev)
4603 {
4604         u32 tmp;
4605
4606         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4607         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4608         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4609         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4610         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4611         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4612         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4613         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4614         if (rdev->num_crtc >= 4) {
4615                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4616                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4617         }
4618         if (rdev->num_crtc >= 6) {
4619                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4620                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4621         }
4622
4623         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4624         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4625         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4626         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4627         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4628         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4629
4630         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4631                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4632         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4633                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4634         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4635                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4636         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4637                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4638         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4639                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4640         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4641                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4642
4643         if (rdev->num_crtc >= 4) {
4644                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4645                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4646                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4647                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4648                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4649                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4650                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4651                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4652                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4653                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4654                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4655                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4656         }
4657
4658         if (rdev->num_crtc >= 6) {
4659                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4660                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4661                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4662                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4663                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4664                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4665                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4666                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4667                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4668                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4669                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4670                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4671         }
4672
4673         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4674                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4675                 tmp |= DC_HPDx_INT_ACK;
4676                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4677         }
4678         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4679                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4680                 tmp |= DC_HPDx_INT_ACK;
4681                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4682         }
4683         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4684                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4685                 tmp |= DC_HPDx_INT_ACK;
4686                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4687         }
4688         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4689                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4690                 tmp |= DC_HPDx_INT_ACK;
4691                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4692         }
4693         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4694                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4695                 tmp |= DC_HPDx_INT_ACK;
4696                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4697         }
4698         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4699                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4700                 tmp |= DC_HPDx_INT_ACK;
4701                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4702         }
4703         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4704                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4705                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4706                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4707         }
4708         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4709                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4710                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4711                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4712         }
4713         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4714                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4715                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4716                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4717         }
4718         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4719                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4720                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4721                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4722         }
4723         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4724                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4725                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4726                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4727         }
4728         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4729                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4730                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4731                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4732         }
4733 }
4734
/**
 * evergreen_irq_disable - disable interrupts and clear pending state
 *
 * @rdev: radeon_device pointer
 *
 * Turns interrupt delivery off, waits briefly for in-flight interrupts
 * to land, acks anything pending, then forces all interrupt enables off.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
        r600_disable_interrupts(rdev);
        /* Wait and acknowledge irq */
        mdelay(1);
        evergreen_irq_ack(rdev);
        evergreen_disable_interrupt_state(rdev);
}
4743
/**
 * evergreen_irq_suspend - shut down interrupt handling for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables and acks all interrupts, then stops the RLC.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
        evergreen_irq_disable(rdev);
        r600_rlc_stop(rdev);
}
4749
4750 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4751 {
4752         u32 wptr, tmp;
4753
4754         if (rdev->wb.enabled)
4755                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4756         else
4757                 wptr = RREG32(IH_RB_WPTR);
4758
4759         if (wptr & RB_OVERFLOW) {
4760                 /* When a ring buffer overflow happen start parsing interrupt
4761                  * from the last not overwritten vector (wptr + 16). Hopefully
4762                  * this should allow us to catchup.
4763                  */
4764                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4765                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4766                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4767                 tmp = RREG32(IH_RB_CNTL);
4768                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4769                 WREG32(IH_RB_CNTL, tmp);
4770                 wptr &= ~RB_OVERFLOW;
4771         }
4772         return (wptr & rdev->ih.ptr_mask);
4773 }
4774
4775 int evergreen_irq_process(struct radeon_device *rdev)
4776 {
4777         u32 wptr;
4778         u32 rptr;
4779         u32 src_id, src_data;
4780         u32 ring_index;
4781         bool queue_hotplug = false;
4782         bool queue_hdmi = false;
4783         bool queue_thermal = false;
4784         u32 status, addr;
4785
4786         if (!rdev->ih.enabled || rdev->shutdown)
4787                 return IRQ_NONE;
4788
4789         wptr = evergreen_get_ih_wptr(rdev);
4790
4791 restart_ih:
4792         /* is somebody else already processing irqs? */
4793         if (atomic_xchg(&rdev->ih.lock, 1))
4794                 return IRQ_NONE;
4795
4796         rptr = rdev->ih.rptr;
4797         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4798
4799         /* Order reading of wptr vs. reading of IH ring data */
4800         rmb();
4801
4802         /* display interrupts */
4803         evergreen_irq_ack(rdev);
4804
4805         while (rptr != wptr) {
4806                 /* wptr/rptr are in bytes! */
4807                 ring_index = rptr / 4;
4808                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4809                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4810
4811                 switch (src_id) {
4812                 case 1: /* D1 vblank/vline */
4813                         switch (src_data) {
4814                         case 0: /* D1 vblank */
4815                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4816                                         if (rdev->irq.crtc_vblank_int[0]) {
4817                                                 drm_handle_vblank(rdev->ddev, 0);
4818                                                 rdev->pm.vblank_sync = true;
4819                                                 wake_up(&rdev->irq.vblank_queue);
4820                                         }
4821                                         if (atomic_read(&rdev->irq.pflip[0]))
4822                                                 radeon_crtc_handle_flip(rdev, 0);
4823                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4824                                         DRM_DEBUG("IH: D1 vblank\n");
4825                                 }
4826                                 break;
4827                         case 1: /* D1 vline */
4828                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4829                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4830                                         DRM_DEBUG("IH: D1 vline\n");
4831                                 }
4832                                 break;
4833                         default:
4834                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4835                                 break;
4836                         }
4837                         break;
4838                 case 2: /* D2 vblank/vline */
4839                         switch (src_data) {
4840                         case 0: /* D2 vblank */
4841                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4842                                         if (rdev->irq.crtc_vblank_int[1]) {
4843                                                 drm_handle_vblank(rdev->ddev, 1);
4844                                                 rdev->pm.vblank_sync = true;
4845                                                 wake_up(&rdev->irq.vblank_queue);
4846                                         }
4847                                         if (atomic_read(&rdev->irq.pflip[1]))
4848                                                 radeon_crtc_handle_flip(rdev, 1);
4849                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4850                                         DRM_DEBUG("IH: D2 vblank\n");
4851                                 }
4852                                 break;
4853                         case 1: /* D2 vline */
4854                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4855                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4856                                         DRM_DEBUG("IH: D2 vline\n");
4857                                 }
4858                                 break;
4859                         default:
4860                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4861                                 break;
4862                         }
4863                         break;
4864                 case 3: /* D3 vblank/vline */
4865                         switch (src_data) {
4866                         case 0: /* D3 vblank */
4867                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4868                                         if (rdev->irq.crtc_vblank_int[2]) {
4869                                                 drm_handle_vblank(rdev->ddev, 2);
4870                                                 rdev->pm.vblank_sync = true;
4871                                                 wake_up(&rdev->irq.vblank_queue);
4872                                         }
4873                                         if (atomic_read(&rdev->irq.pflip[2]))
4874                                                 radeon_crtc_handle_flip(rdev, 2);
4875                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4876                                         DRM_DEBUG("IH: D3 vblank\n");
4877                                 }
4878                                 break;
4879                         case 1: /* D3 vline */
4880                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4881                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4882                                         DRM_DEBUG("IH: D3 vline\n");
4883                                 }
4884                                 break;
4885                         default:
4886                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4887                                 break;
4888                         }
4889                         break;
4890                 case 4: /* D4 vblank/vline */
4891                         switch (src_data) {
4892                         case 0: /* D4 vblank */
4893                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4894                                         if (rdev->irq.crtc_vblank_int[3]) {
4895                                                 drm_handle_vblank(rdev->ddev, 3);
4896                                                 rdev->pm.vblank_sync = true;
4897                                                 wake_up(&rdev->irq.vblank_queue);
4898                                         }
4899                                         if (atomic_read(&rdev->irq.pflip[3]))
4900                                                 radeon_crtc_handle_flip(rdev, 3);
4901                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4902                                         DRM_DEBUG("IH: D4 vblank\n");
4903                                 }
4904                                 break;
4905                         case 1: /* D4 vline */
4906                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4907                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4908                                         DRM_DEBUG("IH: D4 vline\n");
4909                                 }
4910                                 break;
4911                         default:
4912                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4913                                 break;
4914                         }
4915                         break;
4916                 case 5: /* D5 vblank/vline */
4917                         switch (src_data) {
4918                         case 0: /* D5 vblank */
4919                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4920                                         if (rdev->irq.crtc_vblank_int[4]) {
4921                                                 drm_handle_vblank(rdev->ddev, 4);
4922                                                 rdev->pm.vblank_sync = true;
4923                                                 wake_up(&rdev->irq.vblank_queue);
4924                                         }
4925                                         if (atomic_read(&rdev->irq.pflip[4]))
4926                                                 radeon_crtc_handle_flip(rdev, 4);
4927                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4928                                         DRM_DEBUG("IH: D5 vblank\n");
4929                                 }
4930                                 break;
4931                         case 1: /* D5 vline */
4932                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4933                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4934                                         DRM_DEBUG("IH: D5 vline\n");
4935                                 }
4936                                 break;
4937                         default:
4938                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4939                                 break;
4940                         }
4941                         break;
4942                 case 6: /* D6 vblank/vline */
4943                         switch (src_data) {
4944                         case 0: /* D6 vblank */
4945                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4946                                         if (rdev->irq.crtc_vblank_int[5]) {
4947                                                 drm_handle_vblank(rdev->ddev, 5);
4948                                                 rdev->pm.vblank_sync = true;
4949                                                 wake_up(&rdev->irq.vblank_queue);
4950                                         }
4951                                         if (atomic_read(&rdev->irq.pflip[5]))
4952                                                 radeon_crtc_handle_flip(rdev, 5);
4953                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4954                                         DRM_DEBUG("IH: D6 vblank\n");
4955                                 }
4956                                 break;
4957                         case 1: /* D6 vline */
4958                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4959                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4960                                         DRM_DEBUG("IH: D6 vline\n");
4961                                 }
4962                                 break;
4963                         default:
4964                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4965                                 break;
4966                         }
4967                         break;
4968                 case 8: /* D1 page flip */
4969                 case 10: /* D2 page flip */
4970                 case 12: /* D3 page flip */
4971                 case 14: /* D4 page flip */
4972                 case 16: /* D5 page flip */
4973                 case 18: /* D6 page flip */
4974                         DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4975                         radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4976                         break;
4977                 case 42: /* HPD hotplug */
4978                         switch (src_data) {
4979                         case 0:
4980                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4981                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4982                                         queue_hotplug = true;
4983                                         DRM_DEBUG("IH: HPD1\n");
4984                                 }
4985                                 break;
4986                         case 1:
4987                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4988                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4989                                         queue_hotplug = true;
4990                                         DRM_DEBUG("IH: HPD2\n");
4991                                 }
4992                                 break;
4993                         case 2:
4994                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4995                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4996                                         queue_hotplug = true;
4997                                         DRM_DEBUG("IH: HPD3\n");
4998                                 }
4999                                 break;
5000                         case 3:
5001                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
5002                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5003                                         queue_hotplug = true;
5004                                         DRM_DEBUG("IH: HPD4\n");
5005                                 }
5006                                 break;
5007                         case 4:
5008                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
5009                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5010                                         queue_hotplug = true;
5011                                         DRM_DEBUG("IH: HPD5\n");
5012                                 }
5013                                 break;
5014                         case 5:
5015                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5016                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5017                                         queue_hotplug = true;
5018                                         DRM_DEBUG("IH: HPD6\n");
5019                                 }
5020                                 break;
5021                         default:
5022                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5023                                 break;
5024                         }
5025                         break;
5026                 case 44: /* hdmi */
5027                         switch (src_data) {
5028                         case 0:
5029                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5030                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5031                                         queue_hdmi = true;
5032                                         DRM_DEBUG("IH: HDMI0\n");
5033                                 }
5034                                 break;
5035                         case 1:
5036                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5037                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5038                                         queue_hdmi = true;
5039                                         DRM_DEBUG("IH: HDMI1\n");
5040                                 }
5041                                 break;
5042                         case 2:
5043                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5044                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5045                                         queue_hdmi = true;
5046                                         DRM_DEBUG("IH: HDMI2\n");
5047                                 }
5048                                 break;
5049                         case 3:
5050                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5051                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5052                                         queue_hdmi = true;
5053                                         DRM_DEBUG("IH: HDMI3\n");
5054                                 }
5055                                 break;
5056                         case 4:
5057                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5058                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5059                                         queue_hdmi = true;
5060                                         DRM_DEBUG("IH: HDMI4\n");
5061                                 }
5062                                 break;
5063                         case 5:
5064                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5065                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5066                                         queue_hdmi = true;
5067                                         DRM_DEBUG("IH: HDMI5\n");
5068                                 }
5069                                 break;
5070                         default:
5071                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5072                                 break;
5073                         }
5074                 case 124: /* UVD */
5075                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5076                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5077                         break;
5078                 case 146:
5079                 case 147:
5080                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5081                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5082                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5083                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5084                                 addr);
5085                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5086                                 status);
5087                         cayman_vm_decode_fault(rdev, status, addr);
5088                         /* reset addr and status */
5089                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5090                         break;
5091                 case 176: /* CP_INT in ring buffer */
5092                 case 177: /* CP_INT in IB1 */
5093                 case 178: /* CP_INT in IB2 */
5094                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5095                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5096                         break;
5097                 case 181: /* CP EOP event */
5098                         DRM_DEBUG("IH: CP EOP\n");
5099                         if (rdev->family >= CHIP_CAYMAN) {
5100                                 switch (src_data) {
5101                                 case 0:
5102                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5103                                         break;
5104                                 case 1:
5105                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5106                                         break;
5107                                 case 2:
5108                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5109                                         break;
5110                                 }
5111                         } else
5112                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5113                         break;
5114                 case 224: /* DMA trap event */
5115                         DRM_DEBUG("IH: DMA trap\n");
5116                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5117                         break;
5118                 case 230: /* thermal low to high */
5119                         DRM_DEBUG("IH: thermal low to high\n");
5120                         rdev->pm.dpm.thermal.high_to_low = false;
5121                         queue_thermal = true;
5122                         break;
5123                 case 231: /* thermal high to low */
5124                         DRM_DEBUG("IH: thermal high to low\n");
5125                         rdev->pm.dpm.thermal.high_to_low = true;
5126                         queue_thermal = true;
5127                         break;
5128                 case 233: /* GUI IDLE */
5129                         DRM_DEBUG("IH: GUI idle\n");
5130                         break;
5131                 case 244: /* DMA trap event */
5132                         if (rdev->family >= CHIP_CAYMAN) {
5133                                 DRM_DEBUG("IH: DMA1 trap\n");
5134                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5135                         }
5136                         break;
5137                 default:
5138                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5139                         break;
5140                 }
5141
5142                 /* wptr/rptr are in bytes! */
5143                 rptr += 16;
5144                 rptr &= rdev->ih.ptr_mask;
5145         }
5146         if (queue_hotplug)
5147                 schedule_work(&rdev->hotplug_work);
5148         if (queue_hdmi)
5149                 schedule_work(&rdev->audio_work);
5150         if (queue_thermal && rdev->pm.dpm_enabled)
5151                 schedule_work(&rdev->pm.dpm.thermal.work);
5152         rdev->ih.rptr = rptr;
5153         WREG32(IH_RB_RPTR, rdev->ih.rptr);
5154         atomic_set(&rdev->ih.lock, 0);
5155
5156         /* make sure wptr hasn't changed while processing */
5157         wptr = evergreen_get_ih_wptr(rdev);
5158         if (wptr != rptr)
5159                 goto restart_ih;
5160
5161         return IRQ_HANDLED;
5162 }
5163
/**
 * evergreen_startup - program the hardware and start the rings
 * @rdev: radeon_device pointer
 *
 * Common bring-up path used by both evergreen_init() and
 * evergreen_resume(): programs the MC, enables GART (or AGP), loads
 * microcode, sets up write-back, fences and interrupts, and starts the
 * GFX, DMA and (optionally) UVD rings.  The ordering of the steps below
 * is significant (e.g. scratch before MC, fences before ring init).
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 parts need the MC ucode; skipped when dpm is enabled
	 * (presumably handled by the dpm code - NOTE(review): confirm)
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* set up the system aperture: AGP where available, PCIE GART otherwise */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failures are not fatal: on error the UVD ring is simply
	 * disabled below and startup continues without it.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* bring up the GFX and DMA rings */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above; skip it entirely */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5301
5302 int evergreen_resume(struct radeon_device *rdev)
5303 {
5304         int r;
5305
5306         /* reset the asic, the gfx blocks are often in a bad state
5307          * after the driver is unloaded or after a resume
5308          */
5309         if (radeon_asic_reset(rdev))
5310                 dev_warn(rdev->dev, "GPU reset failed !\n");
5311         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5312          * posting will perform necessary task to bring back GPU into good
5313          * shape.
5314          */
5315         /* post card */
5316         atom_asic_init(rdev->mode_info.atom_context);
5317
5318         /* init golden registers */
5319         evergreen_init_golden_registers(rdev);
5320
5321         if (rdev->pm.pm_method == PM_METHOD_DPM)
5322                 radeon_pm_resume(rdev);
5323
5324         rdev->accel_working = true;
5325         r = evergreen_startup(rdev);
5326         if (r) {
5327                 DRM_ERROR("evergreen startup failed on resume\n");
5328                 rdev->accel_working = false;
5329                 return r;
5330         }
5331
5332         return r;
5333
5334 }
5335
/**
 * evergreen_suspend - asic suspend callback
 * @rdev: radeon_device pointer
 *
 * Quiesces the hardware before suspend: suspends pm and audio, shuts
 * down UVD, stops the CP and DMA engines, disables interrupts, then
 * tears down write-back and the PCIE GART.  The call order mirrors the
 * reverse of evergreen_startup().  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5350
5351 /* Plan is to move initialization in that function and use
5352  * helper function so that radeon_device_init pretty much
5353  * do nothing more than calling asic specific function. This
5354  * should also allow to remove a bunch of callback function
5355  * like vram_info.
5356  */
/**
 * evergreen_init - asic specific driver and hardware init
 * @rdev: radeon_device pointer
 *
 * One-time driver init for evergreen class cards: reads and validates
 * the (ATOM) BIOS, resets and posts the card if needed, sets up clocks,
 * the fence driver, the memory controller, firmware, rings, and finally
 * calls evergreen_startup().  A startup failure only disables
 * acceleration (accel_working = false); it is not fatal to init.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* load CP/RLC (and on DCE5 also MC) microcode if not already loaded */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* set up the GFX (1MB) and DMA (64KB) ring buffers */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring if uvd init succeeded */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal: tear down the accel blocks
		 * and carry on with acceleration disabled
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5485
/**
 * evergreen_fini - asic specific driver and hardware teardown
 * @rdev: radeon_device pointer
 *
 * Reverse of evergreen_init(): stops the engines, releases all
 * driver-owned hardware resources and frees the cached BIOS image.
 * The teardown order is significant and mirrors init in reverse.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5510
5511 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5512 {
5513         u32 link_width_cntl, speed_cntl;
5514
5515         if (radeon_pcie_gen2 == 0)
5516                 return;
5517
5518         if (rdev->flags & RADEON_IS_IGP)
5519                 return;
5520
5521         if (!(rdev->flags & RADEON_IS_PCIE))
5522                 return;
5523
5524         /* x2 cards have a special sequence */
5525         if (ASIC_IS_X2(rdev))
5526                 return;
5527
5528         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5529                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5530                 return;
5531
5532         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5533         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5534                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5535                 return;
5536         }
5537
5538         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5539
5540         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5541             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5542
5543                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5544                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5545                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5546
5547                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5548                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5549                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5550
5551                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5552                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5553                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5554
5555                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5556                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5557                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5558
5559                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5560                 speed_cntl |= LC_GEN2_EN_STRAP;
5561                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5562
5563         } else {
5564                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5565                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5566                 if (1)
5567                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5568                 else
5569                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5570                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5571         }
5572 }
5573
/**
 * evergreen_program_aspm - configure PCIe ASPM for evergreen/NI parts
 * @rdev: radeon_device pointer
 *
 * Programs the PCIe link controller (PCIE_LC_*) and PIF PHY registers to
 * set up the low-power link states (L0s/L1 inactivity timers, PLL power
 * states while in L1, lane power states).  Registers are written only
 * when the computed value differs from the current one, to avoid
 * unnecessary hardware accesses.  Returns early if ASPM is disabled via
 * the radeon_aspm module parameter or the device is not on a PCIe bus.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* radeon_aspm == 0: ASPM disabled by module parameter */
	if (radeon_aspm == 0)
		return;

	/* ASPM only applies to PCIe devices */
	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* These families keep the L0s low-power state disabled; all others
	 * get an L0s inactivity timer programmed below. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: MULTI_PIF is cleared on fusion platforms and set
	 * otherwise, on both PHY blocks (PB0/PB1). */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* Build the new LC_CNTL value locally; it is committed to hardware
	 * only once at the end of the function, and only if it changed. */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* L0s inactivity timer: BARTS and newer (NI) use a different
		 * value than the older evergreen parts. */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timer, again with a per-family value */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* Allow the PHY PLLs to power down while the link is
			 * in L1: program the PLL power state for the OFF and
			 * TXS2 states on both lanes of both PHY blocks. */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				/* NI parts additionally tune the PLL ramp-up
				 * time used when exiting the low-power state. */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			/* Dynamic lane power state while the link is idle */
			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				/* NI parts: LS2 exit time on both PHY blocks */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	/* Commit the accumulated LC_CNTL changes in a single write */
	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}