2 * Copyright 2011 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
22 * Authors: Alex Deucher
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <linux/module.h>
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
33 #include "si_blit_shaders.h"
34 #include "clearstate_si.h"
35 #include "radeon_ucode.h"
/*
 * Firmware images required for the Southern Islands GPU families
 * (Tahiti, Pitcairn, Verde, Oland, Hainan).  One set of
 * pfp/me/ce/mc/mc2/rlc/smc blobs is declared per chip so that
 * userspace tooling (initramfs generators, etc.) can see the
 * dependency via the module info.
 */
38 MODULE_FIRMWARE("radeon/TAHITI_pfp.bin");
39 MODULE_FIRMWARE("radeon/TAHITI_me.bin");
40 MODULE_FIRMWARE("radeon/TAHITI_ce.bin");
41 MODULE_FIRMWARE("radeon/TAHITI_mc.bin");
42 MODULE_FIRMWARE("radeon/TAHITI_mc2.bin");
43 MODULE_FIRMWARE("radeon/TAHITI_rlc.bin");
44 MODULE_FIRMWARE("radeon/TAHITI_smc.bin");
45 MODULE_FIRMWARE("radeon/PITCAIRN_pfp.bin");
46 MODULE_FIRMWARE("radeon/PITCAIRN_me.bin");
47 MODULE_FIRMWARE("radeon/PITCAIRN_ce.bin");
48 MODULE_FIRMWARE("radeon/PITCAIRN_mc.bin");
49 MODULE_FIRMWARE("radeon/PITCAIRN_mc2.bin");
50 MODULE_FIRMWARE("radeon/PITCAIRN_rlc.bin");
51 MODULE_FIRMWARE("radeon/PITCAIRN_smc.bin");
52 MODULE_FIRMWARE("radeon/VERDE_pfp.bin");
53 MODULE_FIRMWARE("radeon/VERDE_me.bin");
54 MODULE_FIRMWARE("radeon/VERDE_ce.bin");
55 MODULE_FIRMWARE("radeon/VERDE_mc.bin");
56 MODULE_FIRMWARE("radeon/VERDE_mc2.bin");
57 MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
58 MODULE_FIRMWARE("radeon/VERDE_smc.bin");
59 MODULE_FIRMWARE("radeon/OLAND_pfp.bin");
60 MODULE_FIRMWARE("radeon/OLAND_me.bin");
61 MODULE_FIRMWARE("radeon/OLAND_ce.bin");
62 MODULE_FIRMWARE("radeon/OLAND_mc.bin");
63 MODULE_FIRMWARE("radeon/OLAND_mc2.bin");
64 MODULE_FIRMWARE("radeon/OLAND_rlc.bin");
65 MODULE_FIRMWARE("radeon/OLAND_smc.bin");
66 MODULE_FIRMWARE("radeon/HAINAN_pfp.bin");
67 MODULE_FIRMWARE("radeon/HAINAN_me.bin");
68 MODULE_FIRMWARE("radeon/HAINAN_ce.bin");
69 MODULE_FIRMWARE("radeon/HAINAN_mc.bin");
70 MODULE_FIRMWARE("radeon/HAINAN_mc2.bin");
71 MODULE_FIRMWARE("radeon/HAINAN_rlc.bin");
72 MODULE_FIRMWARE("radeon/HAINAN_smc.bin");
/* Forward declarations for static helpers defined later in this file. */
74 static void si_pcie_gen3_enable(struct radeon_device *rdev);
75 static void si_program_aspm(struct radeon_device *rdev);
/* Helpers implemented in other radeon ASIC files (sumo/r600/evergreen). */
76 extern void sumo_rlc_fini(struct radeon_device *rdev);
77 extern int sumo_rlc_init(struct radeon_device *rdev);
78 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
79 extern void r600_ih_ring_fini(struct radeon_device *rdev);
80 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
81 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
82 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
83 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
84 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
85 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
/* NOTE(review): the next prototype's parameter list continues on a
 * line that is not visible in this extraction (the embedded original
 * numbering jumps from 86 to 88) — restore from the full source. */
86 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
88 static void si_init_pg(struct radeon_device *rdev);
89 static void si_init_cg(struct radeon_device *rdev);
90 static void si_fini_pg(struct radeon_device *rdev);
91 static void si_fini_cg(struct radeon_device *rdev);
92 static void si_rlc_stop(struct radeon_device *rdev);
/*
 * RLC save/restore register list for Verde.  Each visible entry packs
 * a 16-bit selector in the high half and a dword register index
 * (byte offset >> 2) in the low half, per the (x << 16) | (y >> 2)
 * encoding below.
 * NOTE(review): the embedded original line numbers advance by two per
 * entry, so alternating lines (and the list's terminator/closing)
 * appear to be missing from this extraction — do not rely on this
 * table as-is; restore it from the full source.  Order is significant;
 * do not reorder entries.
 */
94 static const u32 verde_rlc_save_restore_register_list[] =
96 (0x8000 << 16) | (0x98f4 >> 2),
98 (0x8040 << 16) | (0x98f4 >> 2),
100 (0x8000 << 16) | (0xe80 >> 2),
102 (0x8040 << 16) | (0xe80 >> 2),
104 (0x8000 << 16) | (0x89bc >> 2),
106 (0x8040 << 16) | (0x89bc >> 2),
108 (0x8000 << 16) | (0x8c1c >> 2),
110 (0x8040 << 16) | (0x8c1c >> 2),
112 (0x9c00 << 16) | (0x98f0 >> 2),
114 (0x9c00 << 16) | (0xe7c >> 2),
116 (0x8000 << 16) | (0x9148 >> 2),
118 (0x8040 << 16) | (0x9148 >> 2),
120 (0x9c00 << 16) | (0x9150 >> 2),
122 (0x9c00 << 16) | (0x897c >> 2),
124 (0x9c00 << 16) | (0x8d8c >> 2),
126 (0x9c00 << 16) | (0xac54 >> 2),
129 (0x9c00 << 16) | (0x98f8 >> 2),
131 (0x9c00 << 16) | (0x9910 >> 2),
133 (0x9c00 << 16) | (0x9914 >> 2),
135 (0x9c00 << 16) | (0x9918 >> 2),
137 (0x9c00 << 16) | (0x991c >> 2),
139 (0x9c00 << 16) | (0x9920 >> 2),
141 (0x9c00 << 16) | (0x9924 >> 2),
143 (0x9c00 << 16) | (0x9928 >> 2),
145 (0x9c00 << 16) | (0x992c >> 2),
147 (0x9c00 << 16) | (0x9930 >> 2),
149 (0x9c00 << 16) | (0x9934 >> 2),
151 (0x9c00 << 16) | (0x9938 >> 2),
153 (0x9c00 << 16) | (0x993c >> 2),
155 (0x9c00 << 16) | (0x9940 >> 2),
157 (0x9c00 << 16) | (0x9944 >> 2),
159 (0x9c00 << 16) | (0x9948 >> 2),
161 (0x9c00 << 16) | (0x994c >> 2),
163 (0x9c00 << 16) | (0x9950 >> 2),
165 (0x9c00 << 16) | (0x9954 >> 2),
167 (0x9c00 << 16) | (0x9958 >> 2),
169 (0x9c00 << 16) | (0x995c >> 2),
171 (0x9c00 << 16) | (0x9960 >> 2),
173 (0x9c00 << 16) | (0x9964 >> 2),
175 (0x9c00 << 16) | (0x9968 >> 2),
177 (0x9c00 << 16) | (0x996c >> 2),
179 (0x9c00 << 16) | (0x9970 >> 2),
181 (0x9c00 << 16) | (0x9974 >> 2),
183 (0x9c00 << 16) | (0x9978 >> 2),
185 (0x9c00 << 16) | (0x997c >> 2),
187 (0x9c00 << 16) | (0x9980 >> 2),
189 (0x9c00 << 16) | (0x9984 >> 2),
191 (0x9c00 << 16) | (0x9988 >> 2),
193 (0x9c00 << 16) | (0x998c >> 2),
195 (0x9c00 << 16) | (0x8c00 >> 2),
197 (0x9c00 << 16) | (0x8c14 >> 2),
199 (0x9c00 << 16) | (0x8c04 >> 2),
201 (0x9c00 << 16) | (0x8c08 >> 2),
203 (0x8000 << 16) | (0x9b7c >> 2),
205 (0x8040 << 16) | (0x9b7c >> 2),
207 (0x8000 << 16) | (0xe84 >> 2),
209 (0x8040 << 16) | (0xe84 >> 2),
211 (0x8000 << 16) | (0x89c0 >> 2),
213 (0x8040 << 16) | (0x89c0 >> 2),
215 (0x8000 << 16) | (0x914c >> 2),
217 (0x8040 << 16) | (0x914c >> 2),
219 (0x8000 << 16) | (0x8c20 >> 2),
221 (0x8040 << 16) | (0x8c20 >> 2),
223 (0x8000 << 16) | (0x9354 >> 2),
225 (0x8040 << 16) | (0x9354 >> 2),
227 (0x9c00 << 16) | (0x9060 >> 2),
229 (0x9c00 << 16) | (0x9364 >> 2),
231 (0x9c00 << 16) | (0x9100 >> 2),
233 (0x9c00 << 16) | (0x913c >> 2),
235 (0x8000 << 16) | (0x90e0 >> 2),
237 (0x8000 << 16) | (0x90e4 >> 2),
239 (0x8000 << 16) | (0x90e8 >> 2),
241 (0x8040 << 16) | (0x90e0 >> 2),
243 (0x8040 << 16) | (0x90e4 >> 2),
245 (0x8040 << 16) | (0x90e8 >> 2),
247 (0x9c00 << 16) | (0x8bcc >> 2),
249 (0x9c00 << 16) | (0x8b24 >> 2),
251 (0x9c00 << 16) | (0x88c4 >> 2),
253 (0x9c00 << 16) | (0x8e50 >> 2),
255 (0x9c00 << 16) | (0x8c0c >> 2),
257 (0x9c00 << 16) | (0x8e58 >> 2),
259 (0x9c00 << 16) | (0x8e5c >> 2),
261 (0x9c00 << 16) | (0x9508 >> 2),
263 (0x9c00 << 16) | (0x950c >> 2),
265 (0x9c00 << 16) | (0x9494 >> 2),
267 (0x9c00 << 16) | (0xac0c >> 2),
269 (0x9c00 << 16) | (0xac10 >> 2),
271 (0x9c00 << 16) | (0xac14 >> 2),
273 (0x9c00 << 16) | (0xae00 >> 2),
275 (0x9c00 << 16) | (0xac08 >> 2),
277 (0x9c00 << 16) | (0x88d4 >> 2),
279 (0x9c00 << 16) | (0x88c8 >> 2),
281 (0x9c00 << 16) | (0x88cc >> 2),
283 (0x9c00 << 16) | (0x89b0 >> 2),
285 (0x9c00 << 16) | (0x8b10 >> 2),
287 (0x9c00 << 16) | (0x8a14 >> 2),
289 (0x9c00 << 16) | (0x9830 >> 2),
291 (0x9c00 << 16) | (0x9834 >> 2),
293 (0x9c00 << 16) | (0x9838 >> 2),
295 (0x9c00 << 16) | (0x9a10 >> 2),
297 (0x8000 << 16) | (0x9870 >> 2),
299 (0x8000 << 16) | (0x9874 >> 2),
301 (0x8001 << 16) | (0x9870 >> 2),
303 (0x8001 << 16) | (0x9874 >> 2),
305 (0x8040 << 16) | (0x9870 >> 2),
307 (0x8040 << 16) | (0x9874 >> 2),
309 (0x8041 << 16) | (0x9870 >> 2),
311 (0x8041 << 16) | (0x9874 >> 2),
/* Tahiti RLC "golden" register overrides.  Each row is three hex
 * constants — presumably (register offset, mask, value) triples;
 * NOTE(review): confirm against the table consumer.  Values and order
 * are hardware-specific; do not modify. */
316 static const u32 tahiti_golden_rlc_registers[] =
318 0xc424, 0xffffffff, 0x00601005,
319 0xc47c, 0xffffffff, 0x10104040,
320 0xc488, 0xffffffff, 0x0100000a,
321 0xc314, 0xffffffff, 0x00000800,
322 0xc30c, 0xffffffff, 0x800000f4,
323 0xf4a8, 0xffffffff, 0x00000000
/* Tahiti "golden" register settings.  Each row is three hex constants
 * — presumably (register offset, mask, value) triples applied at asic
 * init; NOTE(review): confirm against the table consumer.  Values are
 * hardware-specific; do not reorder. */
326 static const u32 tahiti_golden_registers[] =
328 0x9a10, 0x00010000, 0x00018208,
329 0x9830, 0xffffffff, 0x00000000,
330 0x9834, 0xf00fffff, 0x00000400,
331 0x9838, 0x0002021c, 0x00020200,
332 0xc78, 0x00000080, 0x00000000,
333 0xd030, 0x000300c0, 0x00800040,
334 0xd830, 0x000300c0, 0x00800040,
335 0x5bb0, 0x000000f0, 0x00000070,
336 0x5bc0, 0x00200000, 0x50100000,
337 0x7030, 0x31000311, 0x00000011,
338 0x277c, 0x00000003, 0x000007ff,
339 0x240c, 0x000007ff, 0x00000000,
340 0x8a14, 0xf000001f, 0x00000007,
341 0x8b24, 0xffffffff, 0x00ffffff,
342 0x8b10, 0x0000ff0f, 0x00000000,
343 0x28a4c, 0x07ffffff, 0x4e000000,
344 0x28350, 0x3f3f3fff, 0x2a00126a,
345 0x30, 0x000000ff, 0x0040,
346 0x34, 0x00000040, 0x00004040,
347 0x9100, 0x07ffffff, 0x03000000,
348 0x8e88, 0x01ff1f3f, 0x00000000,
349 0x8e84, 0x01ff1f3f, 0x00000000,
350 0x9060, 0x0000007f, 0x00000020,
351 0x9508, 0x00010000, 0x00010000,
352 0xac14, 0x00000200, 0x000002fb,
353 0xac10, 0xffffffff, 0x0000543b,
354 0xac0c, 0xffffffff, 0xa9210876,
355 0x88d0, 0xffffffff, 0x000fff40,
356 0x88d4, 0x0000001f, 0x00000010,
357 0x1410, 0x20000000, 0x20fffed8,
358 0x15c0, 0x000c0fc0, 0x000c0400
/* Second Tahiti golden-register table (single row, same presumed
 * (offset, mask, value) layout as the other golden tables —
 * NOTE(review): confirm against the consumer). */
361 static const u32 tahiti_golden_registers2[] =
363 0xc64, 0x00000001, 0x00000001
/* Pitcairn RLC "golden" register overrides — presumed
 * (offset, mask, value) triples; NOTE(review): confirm against the
 * consumer.  Do not reorder. */
366 static const u32 pitcairn_golden_rlc_registers[] =
368 0xc424, 0xffffffff, 0x00601004,
369 0xc47c, 0xffffffff, 0x10102020,
370 0xc488, 0xffffffff, 0x01000020,
371 0xc314, 0xffffffff, 0x00000800,
372 0xc30c, 0xffffffff, 0x800000a4
/* Pitcairn "golden" register settings — presumed
 * (offset, mask, value) triples; NOTE(review): confirm against the
 * consumer.  Hardware-specific; do not reorder. */
375 static const u32 pitcairn_golden_registers[] =
377 0x9a10, 0x00010000, 0x00018208,
378 0x9830, 0xffffffff, 0x00000000,
379 0x9834, 0xf00fffff, 0x00000400,
380 0x9838, 0x0002021c, 0x00020200,
381 0xc78, 0x00000080, 0x00000000,
382 0xd030, 0x000300c0, 0x00800040,
383 0xd830, 0x000300c0, 0x00800040,
384 0x5bb0, 0x000000f0, 0x00000070,
385 0x5bc0, 0x00200000, 0x50100000,
386 0x7030, 0x31000311, 0x00000011,
387 0x2ae4, 0x00073ffe, 0x000022a2,
388 0x240c, 0x000007ff, 0x00000000,
389 0x8a14, 0xf000001f, 0x00000007,
390 0x8b24, 0xffffffff, 0x00ffffff,
391 0x8b10, 0x0000ff0f, 0x00000000,
392 0x28a4c, 0x07ffffff, 0x4e000000,
393 0x28350, 0x3f3f3fff, 0x2a00126a,
394 0x30, 0x000000ff, 0x0040,
395 0x34, 0x00000040, 0x00004040,
396 0x9100, 0x07ffffff, 0x03000000,
397 0x9060, 0x0000007f, 0x00000020,
398 0x9508, 0x00010000, 0x00010000,
399 0xac14, 0x000003ff, 0x000000f7,
400 0xac10, 0xffffffff, 0x00000000,
401 0xac0c, 0xffffffff, 0x32761054,
402 0x88d4, 0x0000001f, 0x00000010,
403 0x15c0, 0x000c0fc0, 0x000c0400
/* Verde RLC "golden" register overrides — presumed
 * (offset, mask, value) triples; NOTE(review): confirm against the
 * consumer.  Do not reorder. */
406 static const u32 verde_golden_rlc_registers[] =
408 0xc424, 0xffffffff, 0x033f1005,
409 0xc47c, 0xffffffff, 0x10808020,
410 0xc488, 0xffffffff, 0x00800008,
411 0xc314, 0xffffffff, 0x00001000,
412 0xc30c, 0xffffffff, 0x80010014
/* Verde "golden" register settings — presumed (offset, mask, value)
 * triples; NOTE(review): confirm against the consumer.  Several rows
 * are deliberately repeated (e.g. 0xd030, 0x2ae4, 0x240c appear two
 * or three times); keep the duplicates and the order exactly as-is. */
415 static const u32 verde_golden_registers[] =
417 0x9a10, 0x00010000, 0x00018208,
418 0x9830, 0xffffffff, 0x00000000,
419 0x9834, 0xf00fffff, 0x00000400,
420 0x9838, 0x0002021c, 0x00020200,
421 0xc78, 0x00000080, 0x00000000,
422 0xd030, 0x000300c0, 0x00800040,
423 0xd030, 0x000300c0, 0x00800040,
424 0xd830, 0x000300c0, 0x00800040,
425 0xd830, 0x000300c0, 0x00800040,
426 0x5bb0, 0x000000f0, 0x00000070,
427 0x5bc0, 0x00200000, 0x50100000,
428 0x7030, 0x31000311, 0x00000011,
429 0x2ae4, 0x00073ffe, 0x000022a2,
430 0x2ae4, 0x00073ffe, 0x000022a2,
431 0x2ae4, 0x00073ffe, 0x000022a2,
432 0x240c, 0x000007ff, 0x00000000,
433 0x240c, 0x000007ff, 0x00000000,
434 0x240c, 0x000007ff, 0x00000000,
435 0x8a14, 0xf000001f, 0x00000007,
436 0x8a14, 0xf000001f, 0x00000007,
437 0x8a14, 0xf000001f, 0x00000007,
438 0x8b24, 0xffffffff, 0x00ffffff,
439 0x8b10, 0x0000ff0f, 0x00000000,
440 0x28a4c, 0x07ffffff, 0x4e000000,
441 0x28350, 0x3f3f3fff, 0x0000124a,
442 0x28350, 0x3f3f3fff, 0x0000124a,
443 0x28350, 0x3f3f3fff, 0x0000124a,
444 0x30, 0x000000ff, 0x0040,
445 0x34, 0x00000040, 0x00004040,
446 0x9100, 0x07ffffff, 0x03000000,
447 0x9100, 0x07ffffff, 0x03000000,
448 0x8e88, 0x01ff1f3f, 0x00000000,
449 0x8e88, 0x01ff1f3f, 0x00000000,
450 0x8e88, 0x01ff1f3f, 0x00000000,
451 0x8e84, 0x01ff1f3f, 0x00000000,
452 0x8e84, 0x01ff1f3f, 0x00000000,
453 0x8e84, 0x01ff1f3f, 0x00000000,
454 0x9060, 0x0000007f, 0x00000020,
455 0x9508, 0x00010000, 0x00010000,
456 0xac14, 0x000003ff, 0x00000003,
457 0xac14, 0x000003ff, 0x00000003,
458 0xac14, 0x000003ff, 0x00000003,
459 0xac10, 0xffffffff, 0x00000000,
460 0xac10, 0xffffffff, 0x00000000,
461 0xac10, 0xffffffff, 0x00000000,
462 0xac0c, 0xffffffff, 0x00001032,
463 0xac0c, 0xffffffff, 0x00001032,
464 0xac0c, 0xffffffff, 0x00001032,
465 0x88d4, 0x0000001f, 0x00000010,
466 0x88d4, 0x0000001f, 0x00000010,
467 0x88d4, 0x0000001f, 0x00000010,
468 0x15c0, 0x000c0fc0, 0x000c0400
/* Oland RLC "golden" register overrides — presumed
 * (offset, mask, value) triples; NOTE(review): confirm against the
 * consumer.  Do not reorder. */
471 static const u32 oland_golden_rlc_registers[] =
473 0xc424, 0xffffffff, 0x00601005,
474 0xc47c, 0xffffffff, 0x10104040,
475 0xc488, 0xffffffff, 0x0100000a,
476 0xc314, 0xffffffff, 0x00000800,
477 0xc30c, 0xffffffff, 0x800000f4
/* Oland "golden" register settings — presumed (offset, mask, value)
 * triples; NOTE(review): confirm against the consumer.
 * Hardware-specific; do not reorder. */
480 static const u32 oland_golden_registers[] =
482 0x9a10, 0x00010000, 0x00018208,
483 0x9830, 0xffffffff, 0x00000000,
484 0x9834, 0xf00fffff, 0x00000400,
485 0x9838, 0x0002021c, 0x00020200,
486 0xc78, 0x00000080, 0x00000000,
487 0xd030, 0x000300c0, 0x00800040,
488 0xd830, 0x000300c0, 0x00800040,
489 0x5bb0, 0x000000f0, 0x00000070,
490 0x5bc0, 0x00200000, 0x50100000,
491 0x7030, 0x31000311, 0x00000011,
492 0x2ae4, 0x00073ffe, 0x000022a2,
493 0x240c, 0x000007ff, 0x00000000,
494 0x8a14, 0xf000001f, 0x00000007,
495 0x8b24, 0xffffffff, 0x00ffffff,
496 0x8b10, 0x0000ff0f, 0x00000000,
497 0x28a4c, 0x07ffffff, 0x4e000000,
498 0x28350, 0x3f3f3fff, 0x00000082,
499 0x30, 0x000000ff, 0x0040,
500 0x34, 0x00000040, 0x00004040,
501 0x9100, 0x07ffffff, 0x03000000,
502 0x9060, 0x0000007f, 0x00000020,
503 0x9508, 0x00010000, 0x00010000,
504 0xac14, 0x000003ff, 0x000000f3,
505 0xac10, 0xffffffff, 0x00000000,
506 0xac0c, 0xffffffff, 0x00003210,
507 0x88d4, 0x0000001f, 0x00000010,
508 0x15c0, 0x000c0fc0, 0x000c0400
/* Hainan "golden" register settings — presumed (offset, mask, value)
 * triples; NOTE(review): confirm against the consumer.
 * Hardware-specific; do not reorder. */
511 static const u32 hainan_golden_registers[] =
513 0x9a10, 0x00010000, 0x00018208,
514 0x9830, 0xffffffff, 0x00000000,
515 0x9834, 0xf00fffff, 0x00000400,
516 0x9838, 0x0002021c, 0x00020200,
517 0xd0c0, 0xff000fff, 0x00000100,
518 0xd030, 0x000300c0, 0x00800040,
519 0xd8c0, 0xff000fff, 0x00000100,
520 0xd830, 0x000300c0, 0x00800040,
521 0x2ae4, 0x00073ffe, 0x000022a2,
522 0x240c, 0x000007ff, 0x00000000,
523 0x8a14, 0xf000001f, 0x00000007,
524 0x8b24, 0xffffffff, 0x00ffffff,
525 0x8b10, 0x0000ff0f, 0x00000000,
526 0x28a4c, 0x07ffffff, 0x4e000000,
527 0x28350, 0x3f3f3fff, 0x00000000,
528 0x30, 0x000000ff, 0x0040,
529 0x34, 0x00000040, 0x00004040,
530 0x9100, 0x03e00000, 0x03600000,
531 0x9060, 0x0000007f, 0x00000020,
532 0x9508, 0x00010000, 0x00010000,
533 0xac14, 0x000003ff, 0x000000f1,
534 0xac10, 0xffffffff, 0x00000000,
535 0xac0c, 0xffffffff, 0x00003210,
536 0x88d4, 0x0000001f, 0x00000010,
537 0x15c0, 0x000c0fc0, 0x000c0400
/* Second Hainan golden-register table (single row, same presumed
 * (offset, mask, value) layout — NOTE(review): confirm against the
 * consumer). */
540 static const u32 hainan_golden_registers2[] =
542 0x98f8, 0xffffffff, 0x02010001
/* Tahiti mgcg/cgcg init sequence — the name suggests medium- and
 * coarse-grain clock-gating setup; rows appear to be
 * (offset, mask, value) triples.  NOTE(review): confirm semantics
 * against the consumer.  This is an ordered programming sequence
 * (note 0x802c is written at both the start and later); do not
 * reorder. */
545 static const u32 tahiti_mgcg_cgcg_init[] =
547 0xc400, 0xffffffff, 0xfffffffc,
548 0x802c, 0xffffffff, 0xe0000000,
549 0x9a60, 0xffffffff, 0x00000100,
550 0x92a4, 0xffffffff, 0x00000100,
551 0xc164, 0xffffffff, 0x00000100,
552 0x9774, 0xffffffff, 0x00000100,
553 0x8984, 0xffffffff, 0x06000100,
554 0x8a18, 0xffffffff, 0x00000100,
555 0x92a0, 0xffffffff, 0x00000100,
556 0xc380, 0xffffffff, 0x00000100,
557 0x8b28, 0xffffffff, 0x00000100,
558 0x9144, 0xffffffff, 0x00000100,
559 0x8d88, 0xffffffff, 0x00000100,
560 0x8d8c, 0xffffffff, 0x00000100,
561 0x9030, 0xffffffff, 0x00000100,
562 0x9034, 0xffffffff, 0x00000100,
563 0x9038, 0xffffffff, 0x00000100,
564 0x903c, 0xffffffff, 0x00000100,
565 0xad80, 0xffffffff, 0x00000100,
566 0xac54, 0xffffffff, 0x00000100,
567 0x897c, 0xffffffff, 0x06000100,
568 0x9868, 0xffffffff, 0x00000100,
569 0x9510, 0xffffffff, 0x00000100,
570 0xaf04, 0xffffffff, 0x00000100,
571 0xae04, 0xffffffff, 0x00000100,
572 0x949c, 0xffffffff, 0x00000100,
573 0x802c, 0xffffffff, 0xe0000000,
574 0x9160, 0xffffffff, 0x00010000,
575 0x9164, 0xffffffff, 0x00030002,
576 0x9168, 0xffffffff, 0x00040007,
577 0x916c, 0xffffffff, 0x00060005,
578 0x9170, 0xffffffff, 0x00090008,
579 0x9174, 0xffffffff, 0x00020001,
580 0x9178, 0xffffffff, 0x00040003,
581 0x917c, 0xffffffff, 0x00000007,
582 0x9180, 0xffffffff, 0x00060005,
583 0x9184, 0xffffffff, 0x00090008,
584 0x9188, 0xffffffff, 0x00030002,
585 0x918c, 0xffffffff, 0x00050004,
586 0x9190, 0xffffffff, 0x00000008,
587 0x9194, 0xffffffff, 0x00070006,
588 0x9198, 0xffffffff, 0x000a0009,
589 0x919c, 0xffffffff, 0x00040003,
590 0x91a0, 0xffffffff, 0x00060005,
591 0x91a4, 0xffffffff, 0x00000009,
592 0x91a8, 0xffffffff, 0x00080007,
593 0x91ac, 0xffffffff, 0x000b000a,
594 0x91b0, 0xffffffff, 0x00050004,
595 0x91b4, 0xffffffff, 0x00070006,
596 0x91b8, 0xffffffff, 0x0008000b,
597 0x91bc, 0xffffffff, 0x000a0009,
598 0x91c0, 0xffffffff, 0x000d000c,
599 0x91c4, 0xffffffff, 0x00060005,
600 0x91c8, 0xffffffff, 0x00080007,
601 0x91cc, 0xffffffff, 0x0000000b,
602 0x91d0, 0xffffffff, 0x000a0009,
603 0x91d4, 0xffffffff, 0x000d000c,
604 0x91d8, 0xffffffff, 0x00070006,
605 0x91dc, 0xffffffff, 0x00090008,
606 0x91e0, 0xffffffff, 0x0000000c,
607 0x91e4, 0xffffffff, 0x000b000a,
608 0x91e8, 0xffffffff, 0x000e000d,
609 0x91ec, 0xffffffff, 0x00080007,
610 0x91f0, 0xffffffff, 0x000a0009,
611 0x91f4, 0xffffffff, 0x0000000d,
612 0x91f8, 0xffffffff, 0x000c000b,
613 0x91fc, 0xffffffff, 0x000f000e,
614 0x9200, 0xffffffff, 0x00090008,
615 0x9204, 0xffffffff, 0x000b000a,
616 0x9208, 0xffffffff, 0x000c000f,
617 0x920c, 0xffffffff, 0x000e000d,
618 0x9210, 0xffffffff, 0x00110010,
619 0x9214, 0xffffffff, 0x000a0009,
620 0x9218, 0xffffffff, 0x000c000b,
621 0x921c, 0xffffffff, 0x0000000f,
622 0x9220, 0xffffffff, 0x000e000d,
623 0x9224, 0xffffffff, 0x00110010,
624 0x9228, 0xffffffff, 0x000b000a,
625 0x922c, 0xffffffff, 0x000d000c,
626 0x9230, 0xffffffff, 0x00000010,
627 0x9234, 0xffffffff, 0x000f000e,
628 0x9238, 0xffffffff, 0x00120011,
629 0x923c, 0xffffffff, 0x000c000b,
630 0x9240, 0xffffffff, 0x000e000d,
631 0x9244, 0xffffffff, 0x00000011,
632 0x9248, 0xffffffff, 0x0010000f,
633 0x924c, 0xffffffff, 0x00130012,
634 0x9250, 0xffffffff, 0x000d000c,
635 0x9254, 0xffffffff, 0x000f000e,
636 0x9258, 0xffffffff, 0x00100013,
637 0x925c, 0xffffffff, 0x00120011,
638 0x9260, 0xffffffff, 0x00150014,
639 0x9264, 0xffffffff, 0x000e000d,
640 0x9268, 0xffffffff, 0x0010000f,
641 0x926c, 0xffffffff, 0x00000013,
642 0x9270, 0xffffffff, 0x00120011,
643 0x9274, 0xffffffff, 0x00150014,
644 0x9278, 0xffffffff, 0x000f000e,
645 0x927c, 0xffffffff, 0x00110010,
646 0x9280, 0xffffffff, 0x00000014,
647 0x9284, 0xffffffff, 0x00130012,
648 0x9288, 0xffffffff, 0x00160015,
649 0x928c, 0xffffffff, 0x0010000f,
650 0x9290, 0xffffffff, 0x00120011,
651 0x9294, 0xffffffff, 0x00000015,
652 0x9298, 0xffffffff, 0x00140013,
653 0x929c, 0xffffffff, 0x00170016,
654 0x9150, 0xffffffff, 0x96940200,
655 0x8708, 0xffffffff, 0x00900100,
656 0xc478, 0xffffffff, 0x00000080,
657 0xc404, 0xffffffff, 0x0020003f,
658 0x30, 0xffffffff, 0x0000001c,
659 0x34, 0x000f0000, 0x000f0000,
660 0x160c, 0xffffffff, 0x00000100,
661 0x1024, 0xffffffff, 0x00000100,
662 0x102c, 0x00000101, 0x00000000,
663 0x20a8, 0xffffffff, 0x00000104,
664 0x264c, 0x000c0000, 0x000c0000,
665 0x2648, 0x000c0000, 0x000c0000,
666 0x55e4, 0xff000fff, 0x00000100,
667 0x55e8, 0x00000001, 0x00000001,
668 0x2f50, 0x00000001, 0x00000001,
669 0x30cc, 0xc0000fff, 0x00000104,
670 0xc1e4, 0x00000001, 0x00000001,
671 0xd0c0, 0xfffffff0, 0x00000100,
672 0xd8c0, 0xfffffff0, 0x00000100
/* Pitcairn mgcg/cgcg init sequence — presumed clock-gating setup,
 * rows apparently (offset, mask, value) triples.  NOTE(review):
 * confirm semantics against the consumer.  Ordered programming
 * sequence; do not reorder. */
675 static const u32 pitcairn_mgcg_cgcg_init[] =
677 0xc400, 0xffffffff, 0xfffffffc,
678 0x802c, 0xffffffff, 0xe0000000,
679 0x9a60, 0xffffffff, 0x00000100,
680 0x92a4, 0xffffffff, 0x00000100,
681 0xc164, 0xffffffff, 0x00000100,
682 0x9774, 0xffffffff, 0x00000100,
683 0x8984, 0xffffffff, 0x06000100,
684 0x8a18, 0xffffffff, 0x00000100,
685 0x92a0, 0xffffffff, 0x00000100,
686 0xc380, 0xffffffff, 0x00000100,
687 0x8b28, 0xffffffff, 0x00000100,
688 0x9144, 0xffffffff, 0x00000100,
689 0x8d88, 0xffffffff, 0x00000100,
690 0x8d8c, 0xffffffff, 0x00000100,
691 0x9030, 0xffffffff, 0x00000100,
692 0x9034, 0xffffffff, 0x00000100,
693 0x9038, 0xffffffff, 0x00000100,
694 0x903c, 0xffffffff, 0x00000100,
695 0xad80, 0xffffffff, 0x00000100,
696 0xac54, 0xffffffff, 0x00000100,
697 0x897c, 0xffffffff, 0x06000100,
698 0x9868, 0xffffffff, 0x00000100,
699 0x9510, 0xffffffff, 0x00000100,
700 0xaf04, 0xffffffff, 0x00000100,
701 0xae04, 0xffffffff, 0x00000100,
702 0x949c, 0xffffffff, 0x00000100,
703 0x802c, 0xffffffff, 0xe0000000,
704 0x9160, 0xffffffff, 0x00010000,
705 0x9164, 0xffffffff, 0x00030002,
706 0x9168, 0xffffffff, 0x00040007,
707 0x916c, 0xffffffff, 0x00060005,
708 0x9170, 0xffffffff, 0x00090008,
709 0x9174, 0xffffffff, 0x00020001,
710 0x9178, 0xffffffff, 0x00040003,
711 0x917c, 0xffffffff, 0x00000007,
712 0x9180, 0xffffffff, 0x00060005,
713 0x9184, 0xffffffff, 0x00090008,
714 0x9188, 0xffffffff, 0x00030002,
715 0x918c, 0xffffffff, 0x00050004,
716 0x9190, 0xffffffff, 0x00000008,
717 0x9194, 0xffffffff, 0x00070006,
718 0x9198, 0xffffffff, 0x000a0009,
719 0x919c, 0xffffffff, 0x00040003,
720 0x91a0, 0xffffffff, 0x00060005,
721 0x91a4, 0xffffffff, 0x00000009,
722 0x91a8, 0xffffffff, 0x00080007,
723 0x91ac, 0xffffffff, 0x000b000a,
724 0x91b0, 0xffffffff, 0x00050004,
725 0x91b4, 0xffffffff, 0x00070006,
726 0x91b8, 0xffffffff, 0x0008000b,
727 0x91bc, 0xffffffff, 0x000a0009,
728 0x91c0, 0xffffffff, 0x000d000c,
729 0x9200, 0xffffffff, 0x00090008,
730 0x9204, 0xffffffff, 0x000b000a,
731 0x9208, 0xffffffff, 0x000c000f,
732 0x920c, 0xffffffff, 0x000e000d,
733 0x9210, 0xffffffff, 0x00110010,
734 0x9214, 0xffffffff, 0x000a0009,
735 0x9218, 0xffffffff, 0x000c000b,
736 0x921c, 0xffffffff, 0x0000000f,
737 0x9220, 0xffffffff, 0x000e000d,
738 0x9224, 0xffffffff, 0x00110010,
739 0x9228, 0xffffffff, 0x000b000a,
740 0x922c, 0xffffffff, 0x000d000c,
741 0x9230, 0xffffffff, 0x00000010,
742 0x9234, 0xffffffff, 0x000f000e,
743 0x9238, 0xffffffff, 0x00120011,
744 0x923c, 0xffffffff, 0x000c000b,
745 0x9240, 0xffffffff, 0x000e000d,
746 0x9244, 0xffffffff, 0x00000011,
747 0x9248, 0xffffffff, 0x0010000f,
748 0x924c, 0xffffffff, 0x00130012,
749 0x9250, 0xffffffff, 0x000d000c,
750 0x9254, 0xffffffff, 0x000f000e,
751 0x9258, 0xffffffff, 0x00100013,
752 0x925c, 0xffffffff, 0x00120011,
753 0x9260, 0xffffffff, 0x00150014,
754 0x9150, 0xffffffff, 0x96940200,
755 0x8708, 0xffffffff, 0x00900100,
756 0xc478, 0xffffffff, 0x00000080,
757 0xc404, 0xffffffff, 0x0020003f,
758 0x30, 0xffffffff, 0x0000001c,
759 0x34, 0x000f0000, 0x000f0000,
760 0x160c, 0xffffffff, 0x00000100,
761 0x1024, 0xffffffff, 0x00000100,
762 0x102c, 0x00000101, 0x00000000,
763 0x20a8, 0xffffffff, 0x00000104,
764 0x55e4, 0xff000fff, 0x00000100,
765 0x55e8, 0x00000001, 0x00000001,
766 0x2f50, 0x00000001, 0x00000001,
767 0x30cc, 0xc0000fff, 0x00000104,
768 0xc1e4, 0x00000001, 0x00000001,
769 0xd0c0, 0xfffffff0, 0x00000100,
770 0xd8c0, 0xfffffff0, 0x00000100
/* Verde mgcg/cgcg init sequence — presumed clock-gating setup, rows
 * apparently (offset, mask, value) triples.  NOTE(review): confirm
 * semantics against the consumer.  Ordered programming sequence; do
 * not reorder. */
773 static const u32 verde_mgcg_cgcg_init[] =
775 0xc400, 0xffffffff, 0xfffffffc,
776 0x802c, 0xffffffff, 0xe0000000,
777 0x9a60, 0xffffffff, 0x00000100,
778 0x92a4, 0xffffffff, 0x00000100,
779 0xc164, 0xffffffff, 0x00000100,
780 0x9774, 0xffffffff, 0x00000100,
781 0x8984, 0xffffffff, 0x06000100,
782 0x8a18, 0xffffffff, 0x00000100,
783 0x92a0, 0xffffffff, 0x00000100,
784 0xc380, 0xffffffff, 0x00000100,
785 0x8b28, 0xffffffff, 0x00000100,
786 0x9144, 0xffffffff, 0x00000100,
787 0x8d88, 0xffffffff, 0x00000100,
788 0x8d8c, 0xffffffff, 0x00000100,
789 0x9030, 0xffffffff, 0x00000100,
790 0x9034, 0xffffffff, 0x00000100,
791 0x9038, 0xffffffff, 0x00000100,
792 0x903c, 0xffffffff, 0x00000100,
793 0xad80, 0xffffffff, 0x00000100,
794 0xac54, 0xffffffff, 0x00000100,
795 0x897c, 0xffffffff, 0x06000100,
796 0x9868, 0xffffffff, 0x00000100,
797 0x9510, 0xffffffff, 0x00000100,
798 0xaf04, 0xffffffff, 0x00000100,
799 0xae04, 0xffffffff, 0x00000100,
800 0x949c, 0xffffffff, 0x00000100,
801 0x802c, 0xffffffff, 0xe0000000,
802 0x9160, 0xffffffff, 0x00010000,
803 0x9164, 0xffffffff, 0x00030002,
804 0x9168, 0xffffffff, 0x00040007,
805 0x916c, 0xffffffff, 0x00060005,
806 0x9170, 0xffffffff, 0x00090008,
807 0x9174, 0xffffffff, 0x00020001,
808 0x9178, 0xffffffff, 0x00040003,
809 0x917c, 0xffffffff, 0x00000007,
810 0x9180, 0xffffffff, 0x00060005,
811 0x9184, 0xffffffff, 0x00090008,
812 0x9188, 0xffffffff, 0x00030002,
813 0x918c, 0xffffffff, 0x00050004,
814 0x9190, 0xffffffff, 0x00000008,
815 0x9194, 0xffffffff, 0x00070006,
816 0x9198, 0xffffffff, 0x000a0009,
817 0x919c, 0xffffffff, 0x00040003,
818 0x91a0, 0xffffffff, 0x00060005,
819 0x91a4, 0xffffffff, 0x00000009,
820 0x91a8, 0xffffffff, 0x00080007,
821 0x91ac, 0xffffffff, 0x000b000a,
822 0x91b0, 0xffffffff, 0x00050004,
823 0x91b4, 0xffffffff, 0x00070006,
824 0x91b8, 0xffffffff, 0x0008000b,
825 0x91bc, 0xffffffff, 0x000a0009,
826 0x91c0, 0xffffffff, 0x000d000c,
827 0x9200, 0xffffffff, 0x00090008,
828 0x9204, 0xffffffff, 0x000b000a,
829 0x9208, 0xffffffff, 0x000c000f,
830 0x920c, 0xffffffff, 0x000e000d,
831 0x9210, 0xffffffff, 0x00110010,
832 0x9214, 0xffffffff, 0x000a0009,
833 0x9218, 0xffffffff, 0x000c000b,
834 0x921c, 0xffffffff, 0x0000000f,
835 0x9220, 0xffffffff, 0x000e000d,
836 0x9224, 0xffffffff, 0x00110010,
837 0x9228, 0xffffffff, 0x000b000a,
838 0x922c, 0xffffffff, 0x000d000c,
839 0x9230, 0xffffffff, 0x00000010,
840 0x9234, 0xffffffff, 0x000f000e,
841 0x9238, 0xffffffff, 0x00120011,
842 0x923c, 0xffffffff, 0x000c000b,
843 0x9240, 0xffffffff, 0x000e000d,
844 0x9244, 0xffffffff, 0x00000011,
845 0x9248, 0xffffffff, 0x0010000f,
846 0x924c, 0xffffffff, 0x00130012,
847 0x9250, 0xffffffff, 0x000d000c,
848 0x9254, 0xffffffff, 0x000f000e,
849 0x9258, 0xffffffff, 0x00100013,
850 0x925c, 0xffffffff, 0x00120011,
851 0x9260, 0xffffffff, 0x00150014,
852 0x9150, 0xffffffff, 0x96940200,
853 0x8708, 0xffffffff, 0x00900100,
854 0xc478, 0xffffffff, 0x00000080,
855 0xc404, 0xffffffff, 0x0020003f,
856 0x30, 0xffffffff, 0x0000001c,
857 0x34, 0x000f0000, 0x000f0000,
858 0x160c, 0xffffffff, 0x00000100,
859 0x1024, 0xffffffff, 0x00000100,
860 0x102c, 0x00000101, 0x00000000,
861 0x20a8, 0xffffffff, 0x00000104,
862 0x264c, 0x000c0000, 0x000c0000,
863 0x2648, 0x000c0000, 0x000c0000,
864 0x55e4, 0xff000fff, 0x00000100,
865 0x55e8, 0x00000001, 0x00000001,
866 0x2f50, 0x00000001, 0x00000001,
867 0x30cc, 0xc0000fff, 0x00000104,
868 0xc1e4, 0x00000001, 0x00000001,
869 0xd0c0, 0xfffffff0, 0x00000100,
870 0xd8c0, 0xfffffff0, 0x00000100
/* Oland mgcg/cgcg init sequence — presumed clock-gating setup, rows
 * apparently (offset, mask, value) triples.  NOTE(review): confirm
 * semantics against the consumer.  Ordered programming sequence; do
 * not reorder. */
873 static const u32 oland_mgcg_cgcg_init[] =
875 0xc400, 0xffffffff, 0xfffffffc,
876 0x802c, 0xffffffff, 0xe0000000,
877 0x9a60, 0xffffffff, 0x00000100,
878 0x92a4, 0xffffffff, 0x00000100,
879 0xc164, 0xffffffff, 0x00000100,
880 0x9774, 0xffffffff, 0x00000100,
881 0x8984, 0xffffffff, 0x06000100,
882 0x8a18, 0xffffffff, 0x00000100,
883 0x92a0, 0xffffffff, 0x00000100,
884 0xc380, 0xffffffff, 0x00000100,
885 0x8b28, 0xffffffff, 0x00000100,
886 0x9144, 0xffffffff, 0x00000100,
887 0x8d88, 0xffffffff, 0x00000100,
888 0x8d8c, 0xffffffff, 0x00000100,
889 0x9030, 0xffffffff, 0x00000100,
890 0x9034, 0xffffffff, 0x00000100,
891 0x9038, 0xffffffff, 0x00000100,
892 0x903c, 0xffffffff, 0x00000100,
893 0xad80, 0xffffffff, 0x00000100,
894 0xac54, 0xffffffff, 0x00000100,
895 0x897c, 0xffffffff, 0x06000100,
896 0x9868, 0xffffffff, 0x00000100,
897 0x9510, 0xffffffff, 0x00000100,
898 0xaf04, 0xffffffff, 0x00000100,
899 0xae04, 0xffffffff, 0x00000100,
900 0x949c, 0xffffffff, 0x00000100,
901 0x802c, 0xffffffff, 0xe0000000,
902 0x9160, 0xffffffff, 0x00010000,
903 0x9164, 0xffffffff, 0x00030002,
904 0x9168, 0xffffffff, 0x00040007,
905 0x916c, 0xffffffff, 0x00060005,
906 0x9170, 0xffffffff, 0x00090008,
907 0x9174, 0xffffffff, 0x00020001,
908 0x9178, 0xffffffff, 0x00040003,
909 0x917c, 0xffffffff, 0x00000007,
910 0x9180, 0xffffffff, 0x00060005,
911 0x9184, 0xffffffff, 0x00090008,
912 0x9188, 0xffffffff, 0x00030002,
913 0x918c, 0xffffffff, 0x00050004,
914 0x9190, 0xffffffff, 0x00000008,
915 0x9194, 0xffffffff, 0x00070006,
916 0x9198, 0xffffffff, 0x000a0009,
917 0x919c, 0xffffffff, 0x00040003,
918 0x91a0, 0xffffffff, 0x00060005,
919 0x91a4, 0xffffffff, 0x00000009,
920 0x91a8, 0xffffffff, 0x00080007,
921 0x91ac, 0xffffffff, 0x000b000a,
922 0x91b0, 0xffffffff, 0x00050004,
923 0x91b4, 0xffffffff, 0x00070006,
924 0x91b8, 0xffffffff, 0x0008000b,
925 0x91bc, 0xffffffff, 0x000a0009,
926 0x91c0, 0xffffffff, 0x000d000c,
927 0x91c4, 0xffffffff, 0x00060005,
928 0x91c8, 0xffffffff, 0x00080007,
929 0x91cc, 0xffffffff, 0x0000000b,
930 0x91d0, 0xffffffff, 0x000a0009,
931 0x91d4, 0xffffffff, 0x000d000c,
932 0x9150, 0xffffffff, 0x96940200,
933 0x8708, 0xffffffff, 0x00900100,
934 0xc478, 0xffffffff, 0x00000080,
935 0xc404, 0xffffffff, 0x0020003f,
936 0x30, 0xffffffff, 0x0000001c,
937 0x34, 0x000f0000, 0x000f0000,
938 0x160c, 0xffffffff, 0x00000100,
939 0x1024, 0xffffffff, 0x00000100,
940 0x102c, 0x00000101, 0x00000000,
941 0x20a8, 0xffffffff, 0x00000104,
942 0x264c, 0x000c0000, 0x000c0000,
943 0x2648, 0x000c0000, 0x000c0000,
944 0x55e4, 0xff000fff, 0x00000100,
945 0x55e8, 0x00000001, 0x00000001,
946 0x2f50, 0x00000001, 0x00000001,
947 0x30cc, 0xc0000fff, 0x00000104,
948 0xc1e4, 0x00000001, 0x00000001,
949 0xd0c0, 0xfffffff0, 0x00000100,
950 0xd8c0, 0xfffffff0, 0x00000100
/* Hainan mgcg/cgcg init sequence — presumed clock-gating setup, rows
 * apparently (offset, mask, value) triples.  NOTE(review): confirm
 * semantics against the consumer.  Differs slightly from the other
 * chips' tables (e.g. no 0x102c or 0x55e4/0x55e8 rows); ordered
 * programming sequence — do not reorder. */
953 static const u32 hainan_mgcg_cgcg_init[] =
955 0xc400, 0xffffffff, 0xfffffffc,
956 0x802c, 0xffffffff, 0xe0000000,
957 0x9a60, 0xffffffff, 0x00000100,
958 0x92a4, 0xffffffff, 0x00000100,
959 0xc164, 0xffffffff, 0x00000100,
960 0x9774, 0xffffffff, 0x00000100,
961 0x8984, 0xffffffff, 0x06000100,
962 0x8a18, 0xffffffff, 0x00000100,
963 0x92a0, 0xffffffff, 0x00000100,
964 0xc380, 0xffffffff, 0x00000100,
965 0x8b28, 0xffffffff, 0x00000100,
966 0x9144, 0xffffffff, 0x00000100,
967 0x8d88, 0xffffffff, 0x00000100,
968 0x8d8c, 0xffffffff, 0x00000100,
969 0x9030, 0xffffffff, 0x00000100,
970 0x9034, 0xffffffff, 0x00000100,
971 0x9038, 0xffffffff, 0x00000100,
972 0x903c, 0xffffffff, 0x00000100,
973 0xad80, 0xffffffff, 0x00000100,
974 0xac54, 0xffffffff, 0x00000100,
975 0x897c, 0xffffffff, 0x06000100,
976 0x9868, 0xffffffff, 0x00000100,
977 0x9510, 0xffffffff, 0x00000100,
978 0xaf04, 0xffffffff, 0x00000100,
979 0xae04, 0xffffffff, 0x00000100,
980 0x949c, 0xffffffff, 0x00000100,
981 0x802c, 0xffffffff, 0xe0000000,
982 0x9160, 0xffffffff, 0x00010000,
983 0x9164, 0xffffffff, 0x00030002,
984 0x9168, 0xffffffff, 0x00040007,
985 0x916c, 0xffffffff, 0x00060005,
986 0x9170, 0xffffffff, 0x00090008,
987 0x9174, 0xffffffff, 0x00020001,
988 0x9178, 0xffffffff, 0x00040003,
989 0x917c, 0xffffffff, 0x00000007,
990 0x9180, 0xffffffff, 0x00060005,
991 0x9184, 0xffffffff, 0x00090008,
992 0x9188, 0xffffffff, 0x00030002,
993 0x918c, 0xffffffff, 0x00050004,
994 0x9190, 0xffffffff, 0x00000008,
995 0x9194, 0xffffffff, 0x00070006,
996 0x9198, 0xffffffff, 0x000a0009,
997 0x919c, 0xffffffff, 0x00040003,
998 0x91a0, 0xffffffff, 0x00060005,
999 0x91a4, 0xffffffff, 0x00000009,
1000 0x91a8, 0xffffffff, 0x00080007,
1001 0x91ac, 0xffffffff, 0x000b000a,
1002 0x91b0, 0xffffffff, 0x00050004,
1003 0x91b4, 0xffffffff, 0x00070006,
1004 0x91b8, 0xffffffff, 0x0008000b,
1005 0x91bc, 0xffffffff, 0x000a0009,
1006 0x91c0, 0xffffffff, 0x000d000c,
1007 0x91c4, 0xffffffff, 0x00060005,
1008 0x91c8, 0xffffffff, 0x00080007,
1009 0x91cc, 0xffffffff, 0x0000000b,
1010 0x91d0, 0xffffffff, 0x000a0009,
1011 0x91d4, 0xffffffff, 0x000d000c,
1012 0x9150, 0xffffffff, 0x96940200,
1013 0x8708, 0xffffffff, 0x00900100,
1014 0xc478, 0xffffffff, 0x00000080,
1015 0xc404, 0xffffffff, 0x0020003f,
1016 0x30, 0xffffffff, 0x0000001c,
1017 0x34, 0x000f0000, 0x000f0000,
1018 0x160c, 0xffffffff, 0x00000100,
1019 0x1024, 0xffffffff, 0x00000100,
1020 0x20a8, 0xffffffff, 0x00000104,
1021 0x264c, 0x000c0000, 0x000c0000,
1022 0x2648, 0x000c0000, 0x000c0000,
1023 0x2f50, 0x00000001, 0x00000001,
1024 0x30cc, 0xc0000fff, 0x00000104,
1025 0xc1e4, 0x00000001, 0x00000001,
1026 0xd0c0, 0xfffffff0, 0x00000100,
1027 0xd8c0, 0xfffffff0, 0x00000100
/*
 * Cape Verde power-gating (PG) init sequence, consumed as
 * {register offset, mask, value} triples by
 * radeon_program_register_sequence() (see si_init_golden_registers()).
 * NOTE(review): this extract is missing the array's brace/terminator
 * lines; the data values below are left byte-identical.
 */
1030 static u32 verde_pg_init[] =
1032 0x353c, 0xffffffff, 0x40000,
1033 0x3538, 0xffffffff, 0x200010ff,
1034 0x353c, 0xffffffff, 0x0,
1035 0x353c, 0xffffffff, 0x0,
1036 0x353c, 0xffffffff, 0x0,
1037 0x353c, 0xffffffff, 0x0,
1038 0x353c, 0xffffffff, 0x0,
1039 0x353c, 0xffffffff, 0x7007,
1040 0x3538, 0xffffffff, 0x300010ff,
1041 0x353c, 0xffffffff, 0x0,
1042 0x353c, 0xffffffff, 0x0,
1043 0x353c, 0xffffffff, 0x0,
1044 0x353c, 0xffffffff, 0x0,
1045 0x353c, 0xffffffff, 0x0,
1046 0x353c, 0xffffffff, 0x400000,
1047 0x3538, 0xffffffff, 0x100010ff,
1048 0x353c, 0xffffffff, 0x0,
1049 0x353c, 0xffffffff, 0x0,
1050 0x353c, 0xffffffff, 0x0,
1051 0x353c, 0xffffffff, 0x0,
1052 0x353c, 0xffffffff, 0x0,
1053 0x353c, 0xffffffff, 0x120200,
1054 0x3538, 0xffffffff, 0x500010ff,
1055 0x353c, 0xffffffff, 0x0,
1056 0x353c, 0xffffffff, 0x0,
1057 0x353c, 0xffffffff, 0x0,
1058 0x353c, 0xffffffff, 0x0,
1059 0x353c, 0xffffffff, 0x0,
1060 0x353c, 0xffffffff, 0x1e1e16,
1061 0x3538, 0xffffffff, 0x600010ff,
1062 0x353c, 0xffffffff, 0x0,
1063 0x353c, 0xffffffff, 0x0,
1064 0x353c, 0xffffffff, 0x0,
1065 0x353c, 0xffffffff, 0x0,
1066 0x353c, 0xffffffff, 0x0,
1067 0x353c, 0xffffffff, 0x171f1e,
1068 0x3538, 0xffffffff, 0x700010ff,
1069 0x353c, 0xffffffff, 0x0,
1070 0x353c, 0xffffffff, 0x0,
1071 0x353c, 0xffffffff, 0x0,
1072 0x353c, 0xffffffff, 0x0,
1073 0x353c, 0xffffffff, 0x0,
1074 0x353c, 0xffffffff, 0x0,
1075 0x3538, 0xffffffff, 0x9ff,
1076 0x3500, 0xffffffff, 0x0,
1077 0x3504, 0xffffffff, 0x10000800,
1078 0x3504, 0xffffffff, 0xf,
1079 0x3504, 0xffffffff, 0xf,
1080 0x3500, 0xffffffff, 0x4,
1081 0x3504, 0xffffffff, 0x1000051e,
1082 0x3504, 0xffffffff, 0xffff,
1083 0x3504, 0xffffffff, 0xffff,
1084 0x3500, 0xffffffff, 0x8,
1085 0x3504, 0xffffffff, 0x80500,
1086 0x3500, 0xffffffff, 0x12,
1087 0x3504, 0xffffffff, 0x9050c,
1088 0x3500, 0xffffffff, 0x1d,
1089 0x3504, 0xffffffff, 0xb052c,
1090 0x3500, 0xffffffff, 0x2a,
1091 0x3504, 0xffffffff, 0x1053e,
1092 0x3500, 0xffffffff, 0x2d,
1093 0x3504, 0xffffffff, 0x10546,
1094 0x3500, 0xffffffff, 0x30,
1095 0x3504, 0xffffffff, 0xa054e,
1096 0x3500, 0xffffffff, 0x3c,
1097 0x3504, 0xffffffff, 0x1055f,
1098 0x3500, 0xffffffff, 0x3f,
1099 0x3504, 0xffffffff, 0x10567,
1100 0x3500, 0xffffffff, 0x42,
1101 0x3504, 0xffffffff, 0x1056f,
1102 0x3500, 0xffffffff, 0x45,
1103 0x3504, 0xffffffff, 0x10572,
1104 0x3500, 0xffffffff, 0x48,
1105 0x3504, 0xffffffff, 0x20575,
1106 0x3500, 0xffffffff, 0x4c,
1107 0x3504, 0xffffffff, 0x190801,
1108 0x3500, 0xffffffff, 0x67,
1109 0x3504, 0xffffffff, 0x1082a,
1110 0x3500, 0xffffffff, 0x6a,
1111 0x3504, 0xffffffff, 0x1b082d,
1112 0x3500, 0xffffffff, 0x87,
1113 0x3504, 0xffffffff, 0x310851,
1114 0x3500, 0xffffffff, 0xba,
1115 0x3504, 0xffffffff, 0x891,
1116 0x3500, 0xffffffff, 0xbc,
1117 0x3504, 0xffffffff, 0x893,
1118 0x3500, 0xffffffff, 0xbe,
1119 0x3504, 0xffffffff, 0x20895,
1120 0x3500, 0xffffffff, 0xc2,
1121 0x3504, 0xffffffff, 0x20899,
1122 0x3500, 0xffffffff, 0xc6,
1123 0x3504, 0xffffffff, 0x2089d,
1124 0x3500, 0xffffffff, 0xca,
1125 0x3504, 0xffffffff, 0x8a1,
1126 0x3500, 0xffffffff, 0xcc,
1127 0x3504, 0xffffffff, 0x8a3,
1128 0x3500, 0xffffffff, 0xce,
1129 0x3504, 0xffffffff, 0x308a5,
1130 0x3500, 0xffffffff, 0xd3,
1131 0x3504, 0xffffffff, 0x6d08cd,
1132 0x3500, 0xffffffff, 0x142,
1133 0x3504, 0xffffffff, 0x2000095a,
1134 0x3504, 0xffffffff, 0x1,
1135 0x3500, 0xffffffff, 0x144,
1136 0x3504, 0xffffffff, 0x301f095b,
1137 0x3500, 0xffffffff, 0x165,
1138 0x3504, 0xffffffff, 0xc094d,
1139 0x3500, 0xffffffff, 0x173,
1140 0x3504, 0xffffffff, 0xf096d,
1141 0x3500, 0xffffffff, 0x184,
1142 0x3504, 0xffffffff, 0x15097f,
1143 0x3500, 0xffffffff, 0x19b,
1144 0x3504, 0xffffffff, 0xc0998,
1145 0x3500, 0xffffffff, 0x1a9,
1146 0x3504, 0xffffffff, 0x409a7,
1147 0x3500, 0xffffffff, 0x1af,
1148 0x3504, 0xffffffff, 0xcdc,
1149 0x3500, 0xffffffff, 0x1b1,
1150 0x3504, 0xffffffff, 0x800,
1151 0x3508, 0xffffffff, 0x6c9b2000,
1152 0x3510, 0xfc00, 0x2000,
1153 0x3544, 0xffffffff, 0xfc0,
1154 0x28d4, 0x00000100, 0x100
/*
 * si_init_golden_registers - program the per-ASIC "golden" register
 * settings via radeon_program_register_sequence(): golden registers,
 * golden RLC registers, MG/CG clock-gating init tables, plus the extra
 * tables some families have (Tahiti registers2, Verde PG init, Hainan
 * registers2).  Dispatches on rdev->family.
 * NOTE(review): the switch's case labels/break statements are not
 * visible in this extract — confirm against the full file.
 */
1157 static void si_init_golden_registers(struct radeon_device *rdev)
1159 switch (rdev->family) {
/* Tahiti */
1161 radeon_program_register_sequence(rdev,
1162 tahiti_golden_registers,
1163 (const u32)ARRAY_SIZE(tahiti_golden_registers));
1164 radeon_program_register_sequence(rdev,
1165 tahiti_golden_rlc_registers,
1166 (const u32)ARRAY_SIZE(tahiti_golden_rlc_registers));
1167 radeon_program_register_sequence(rdev,
1168 tahiti_mgcg_cgcg_init,
1169 (const u32)ARRAY_SIZE(tahiti_mgcg_cgcg_init));
1170 radeon_program_register_sequence(rdev,
1171 tahiti_golden_registers2,
1172 (const u32)ARRAY_SIZE(tahiti_golden_registers2));
/* Pitcairn */
1175 radeon_program_register_sequence(rdev,
1176 pitcairn_golden_registers,
1177 (const u32)ARRAY_SIZE(pitcairn_golden_registers));
1178 radeon_program_register_sequence(rdev,
1179 pitcairn_golden_rlc_registers,
1180 (const u32)ARRAY_SIZE(pitcairn_golden_rlc_registers));
1181 radeon_program_register_sequence(rdev,
1182 pitcairn_mgcg_cgcg_init,
1183 (const u32)ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
/* Verde (includes the power-gating init sequence) */
1186 radeon_program_register_sequence(rdev,
1187 verde_golden_registers,
1188 (const u32)ARRAY_SIZE(verde_golden_registers));
1189 radeon_program_register_sequence(rdev,
1190 verde_golden_rlc_registers,
1191 (const u32)ARRAY_SIZE(verde_golden_rlc_registers));
1192 radeon_program_register_sequence(rdev,
1193 verde_mgcg_cgcg_init,
1194 (const u32)ARRAY_SIZE(verde_mgcg_cgcg_init));
1195 radeon_program_register_sequence(rdev,
/* NOTE(review): the verde_pg_init argument line is missing here in this extract */
1197 (const u32)ARRAY_SIZE(verde_pg_init));
/* Oland */
1200 radeon_program_register_sequence(rdev,
1201 oland_golden_registers,
1202 (const u32)ARRAY_SIZE(oland_golden_registers));
1203 radeon_program_register_sequence(rdev,
1204 oland_golden_rlc_registers,
1205 (const u32)ARRAY_SIZE(oland_golden_rlc_registers));
1206 radeon_program_register_sequence(rdev,
1207 oland_mgcg_cgcg_init,
1208 (const u32)ARRAY_SIZE(oland_mgcg_cgcg_init));
/* Hainan */
1211 radeon_program_register_sequence(rdev,
1212 hainan_golden_registers,
1213 (const u32)ARRAY_SIZE(hainan_golden_registers));
1214 radeon_program_register_sequence(rdev,
1215 hainan_golden_registers2,
1216 (const u32)ARRAY_SIZE(hainan_golden_registers2));
1217 radeon_program_register_sequence(rdev,
1218 hainan_mgcg_cgcg_init,
1219 (const u32)ARRAY_SIZE(hainan_mgcg_cgcg_init));
/*
 * PCIe reference bus clock (in 10 kHz units) and the TCLK derived from
 * it; presumably the fallback xclk when MUX_TCLK_TO_XCLK is set in
 * si_get_xclk() — TODO confirm (the return statement is not visible in
 * this extract).
 */
1226 #define PCIE_BUS_CLK 10000
1227 #define TCLK (PCIE_BUS_CLK / 10)
/**
1230 * si_get_xclk - get the xclk
1232 * @rdev: radeon_device pointer
1234 * Returns the reference clock used by the gfx engine
 */
/*
 * Start from the SPLL reference frequency; divide by 4 when the
 * XTALIN_DIVIDE bit is set.  The MUX_TCLK_TO_XCLK branch's return
 * value is not visible in this extract (presumably TCLK — confirm
 * against the full file).
 */
1237 u32 si_get_xclk(struct radeon_device *rdev)
1239 u32 reference_clock = rdev->clock.spll.reference_freq;
1242 tmp = RREG32(CG_CLKPIN_CNTL_2);
1243 if (tmp & MUX_TCLK_TO_XCLK)
1246 tmp = RREG32(CG_CLKPIN_CNTL);
1247 if (tmp & XTALIN_DIVIDE)
1248 return reference_clock / 4;
1250 return reference_clock;
1253 /* get temperature in millidegrees */
/*
 * Reads the CTF temperature field from CG_MULT_THERMAL_STATUS, keeps
 * the low 9 bits and scales degrees C to millidegrees.  The shift
 * amount and the negative/clamp handling lines are not visible in this
 * extract — confirm against the full file.
 */
1254 int si_get_temp(struct radeon_device *rdev)
1257 int actual_temp = 0;
1259 temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >>
1265 actual_temp = temp & 0x1ff;
/* degrees C -> millidegrees C */
1267 actual_temp = (actual_temp * 1000);
/* Number of {index, data} MC IO debug pairs; shared by all SI tables below. */
1272 #define TAHITI_IO_MC_REGS_SIZE 36
/*
 * Tahiti MC (memory controller) IO debug settings:
 * {MC_SEQ_IO_DEBUG_INDEX, MC_SEQ_IO_DEBUG_DATA} pairs written by
 * si_mc_load_microcode() before uploading the MC ucode.
 */
1274 static const u32 tahiti_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1275 {0x0000006f, 0x03044000},
1276 {0x00000070, 0x0480c018},
1277 {0x00000071, 0x00000040},
1278 {0x00000072, 0x01000000},
1279 {0x00000074, 0x000000ff},
1280 {0x00000075, 0x00143400},
1281 {0x00000076, 0x08ec0800},
1282 {0x00000077, 0x040000cc},
1283 {0x00000079, 0x00000000},
1284 {0x0000007a, 0x21000409},
1285 {0x0000007c, 0x00000000},
1286 {0x0000007d, 0xe8000000},
1287 {0x0000007e, 0x044408a8},
1288 {0x0000007f, 0x00000003},
1289 {0x00000080, 0x00000000},
1290 {0x00000081, 0x01000000},
1291 {0x00000082, 0x02000000},
1292 {0x00000083, 0x00000000},
1293 {0x00000084, 0xe3f3e4f4},
1294 {0x00000085, 0x00052024},
1295 {0x00000087, 0x00000000},
1296 {0x00000088, 0x66036603},
1297 {0x00000089, 0x01000000},
1298 {0x0000008b, 0x1c0a0000},
1299 {0x0000008c, 0xff010000},
1300 {0x0000008e, 0xffffefff},
1301 {0x0000008f, 0xfff3efff},
1302 {0x00000090, 0xfff3efbf},
1303 {0x00000094, 0x00101101},
1304 {0x00000095, 0x00000fff},
1305 {0x00000096, 0x00116fff},
1306 {0x00000097, 0x60010000},
1307 {0x00000098, 0x10010000},
1308 {0x00000099, 0x00006000},
1309 {0x0000009a, 0x00001000},
1310 {0x0000009f, 0x00a77400}
/*
 * Pitcairn MC IO debug {index, data} pairs; same layout as
 * tahiti_io_mc_regs, consumed by si_mc_load_microcode().
 */
1313 static const u32 pitcairn_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1314 {0x0000006f, 0x03044000},
1315 {0x00000070, 0x0480c018},
1316 {0x00000071, 0x00000040},
1317 {0x00000072, 0x01000000},
1318 {0x00000074, 0x000000ff},
1319 {0x00000075, 0x00143400},
1320 {0x00000076, 0x08ec0800},
1321 {0x00000077, 0x040000cc},
1322 {0x00000079, 0x00000000},
1323 {0x0000007a, 0x21000409},
1324 {0x0000007c, 0x00000000},
1325 {0x0000007d, 0xe8000000},
1326 {0x0000007e, 0x044408a8},
1327 {0x0000007f, 0x00000003},
1328 {0x00000080, 0x00000000},
1329 {0x00000081, 0x01000000},
1330 {0x00000082, 0x02000000},
1331 {0x00000083, 0x00000000},
1332 {0x00000084, 0xe3f3e4f4},
1333 {0x00000085, 0x00052024},
1334 {0x00000087, 0x00000000},
1335 {0x00000088, 0x66036603},
1336 {0x00000089, 0x01000000},
1337 {0x0000008b, 0x1c0a0000},
1338 {0x0000008c, 0xff010000},
1339 {0x0000008e, 0xffffefff},
1340 {0x0000008f, 0xfff3efff},
1341 {0x00000090, 0xfff3efbf},
1342 {0x00000094, 0x00101101},
1343 {0x00000095, 0x00000fff},
1344 {0x00000096, 0x00116fff},
1345 {0x00000097, 0x60010000},
1346 {0x00000098, 0x10010000},
1347 {0x00000099, 0x00006000},
1348 {0x0000009a, 0x00001000},
1349 {0x0000009f, 0x00a47400}
/*
 * Cape Verde MC IO debug {index, data} pairs; same layout as
 * tahiti_io_mc_regs, consumed by si_mc_load_microcode().
 */
1352 static const u32 verde_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1353 {0x0000006f, 0x03044000},
1354 {0x00000070, 0x0480c018},
1355 {0x00000071, 0x00000040},
1356 {0x00000072, 0x01000000},
1357 {0x00000074, 0x000000ff},
1358 {0x00000075, 0x00143400},
1359 {0x00000076, 0x08ec0800},
1360 {0x00000077, 0x040000cc},
1361 {0x00000079, 0x00000000},
1362 {0x0000007a, 0x21000409},
1363 {0x0000007c, 0x00000000},
1364 {0x0000007d, 0xe8000000},
1365 {0x0000007e, 0x044408a8},
1366 {0x0000007f, 0x00000003},
1367 {0x00000080, 0x00000000},
1368 {0x00000081, 0x01000000},
1369 {0x00000082, 0x02000000},
1370 {0x00000083, 0x00000000},
1371 {0x00000084, 0xe3f3e4f4},
1372 {0x00000085, 0x00052024},
1373 {0x00000087, 0x00000000},
1374 {0x00000088, 0x66036603},
1375 {0x00000089, 0x01000000},
1376 {0x0000008b, 0x1c0a0000},
1377 {0x0000008c, 0xff010000},
1378 {0x0000008e, 0xffffefff},
1379 {0x0000008f, 0xfff3efff},
1380 {0x00000090, 0xfff3efbf},
1381 {0x00000094, 0x00101101},
1382 {0x00000095, 0x00000fff},
1383 {0x00000096, 0x00116fff},
1384 {0x00000097, 0x60010000},
1385 {0x00000098, 0x10010000},
1386 {0x00000099, 0x00006000},
1387 {0x0000009a, 0x00001000},
1388 {0x0000009f, 0x00a37400}
/*
 * Oland MC IO debug {index, data} pairs; same layout as
 * tahiti_io_mc_regs, consumed by si_mc_load_microcode().
 */
1391 static const u32 oland_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1392 {0x0000006f, 0x03044000},
1393 {0x00000070, 0x0480c018},
1394 {0x00000071, 0x00000040},
1395 {0x00000072, 0x01000000},
1396 {0x00000074, 0x000000ff},
1397 {0x00000075, 0x00143400},
1398 {0x00000076, 0x08ec0800},
1399 {0x00000077, 0x040000cc},
1400 {0x00000079, 0x00000000},
1401 {0x0000007a, 0x21000409},
1402 {0x0000007c, 0x00000000},
1403 {0x0000007d, 0xe8000000},
1404 {0x0000007e, 0x044408a8},
1405 {0x0000007f, 0x00000003},
1406 {0x00000080, 0x00000000},
1407 {0x00000081, 0x01000000},
1408 {0x00000082, 0x02000000},
1409 {0x00000083, 0x00000000},
1410 {0x00000084, 0xe3f3e4f4},
1411 {0x00000085, 0x00052024},
1412 {0x00000087, 0x00000000},
1413 {0x00000088, 0x66036603},
1414 {0x00000089, 0x01000000},
1415 {0x0000008b, 0x1c0a0000},
1416 {0x0000008c, 0xff010000},
1417 {0x0000008e, 0xffffefff},
1418 {0x0000008f, 0xfff3efff},
1419 {0x00000090, 0xfff3efbf},
1420 {0x00000094, 0x00101101},
1421 {0x00000095, 0x00000fff},
1422 {0x00000096, 0x00116fff},
1423 {0x00000097, 0x60010000},
1424 {0x00000098, 0x10010000},
1425 {0x00000099, 0x00006000},
1426 {0x0000009a, 0x00001000},
1427 {0x0000009f, 0x00a17730}
/*
 * Hainan MC IO debug {index, data} pairs; same layout as
 * tahiti_io_mc_regs, consumed by si_mc_load_microcode().
 */
1430 static const u32 hainan_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1431 {0x0000006f, 0x03044000},
1432 {0x00000070, 0x0480c018},
1433 {0x00000071, 0x00000040},
1434 {0x00000072, 0x01000000},
1435 {0x00000074, 0x000000ff},
1436 {0x00000075, 0x00143400},
1437 {0x00000076, 0x08ec0800},
1438 {0x00000077, 0x040000cc},
1439 {0x00000079, 0x00000000},
1440 {0x0000007a, 0x21000409},
1441 {0x0000007c, 0x00000000},
1442 {0x0000007d, 0xe8000000},
1443 {0x0000007e, 0x044408a8},
1444 {0x0000007f, 0x00000003},
1445 {0x00000080, 0x00000000},
1446 {0x00000081, 0x01000000},
1447 {0x00000082, 0x02000000},
1448 {0x00000083, 0x00000000},
1449 {0x00000084, 0xe3f3e4f4},
1450 {0x00000085, 0x00052024},
1451 {0x00000087, 0x00000000},
1452 {0x00000088, 0x66036603},
1453 {0x00000089, 0x01000000},
1454 {0x0000008b, 0x1c0a0000},
1455 {0x0000008c, 0xff010000},
1456 {0x0000008e, 0xffffefff},
1457 {0x0000008f, 0xfff3efff},
1458 {0x00000090, 0xfff3efbf},
1459 {0x00000094, 0x00101101},
1460 {0x00000095, 0x00000fff},
1461 {0x00000096, 0x00116fff},
1462 {0x00000097, 0x60010000},
1463 {0x00000098, 0x10010000},
1464 {0x00000099, 0x00006000},
1465 {0x0000009a, 0x00001000},
1466 {0x0000009f, 0x00a07730}
/*
 * si_mc_load_microcode - upload the memory-controller (MC) ucode.
 *
 * Picks the per-family MC IO debug table, optionally blacks out the MC
 * while the sequencer is running, resets the sequencer, programs the
 * {index, data} pairs, streams the big-endian ucode words into
 * MC_SEQ_SUP_PGM, restarts the sequencer and polls (up to
 * rdev->usec_timeout iterations) for D0/D1 training completion.
 * NOTE(review): case labels, early-return-if-not-loaded, delay and
 * return lines are missing from this extract — confirm against the
 * full file.
 */
1470 int si_mc_load_microcode(struct radeon_device *rdev)
1472 const __be32 *fw_data;
1473 u32 running, blackout = 0;
1475 int i, regs_size, ucode_size;
/* ucode is an array of 32-bit words */
1480 ucode_size = rdev->mc_fw->size / 4;
1482 switch (rdev->family) {
1484 io_mc_regs = (u32 *)&tahiti_io_mc_regs;
1485 regs_size = TAHITI_IO_MC_REGS_SIZE;
1488 io_mc_regs = (u32 *)&pitcairn_io_mc_regs;
1489 regs_size = TAHITI_IO_MC_REGS_SIZE;
1493 io_mc_regs = (u32 *)&verde_io_mc_regs;
1494 regs_size = TAHITI_IO_MC_REGS_SIZE;
1497 io_mc_regs = (u32 *)&oland_io_mc_regs;
1498 regs_size = TAHITI_IO_MC_REGS_SIZE;
1501 io_mc_regs = (u32 *)&hainan_io_mc_regs;
1502 regs_size = TAHITI_IO_MC_REGS_SIZE;
1506 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK;
/* black out the MC while the sequencer is replaced */
1510 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1511 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1514 /* reset the engine and set to writable */
1515 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1516 WREG32(MC_SEQ_SUP_CNTL, 0x00000010);
1518 /* load mc io regs */
1519 for (i = 0; i < regs_size; i++) {
1520 WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
1521 WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
1523 /* load the MC ucode */
1524 fw_data = (const __be32 *)rdev->mc_fw->data;
1525 for (i = 0; i < ucode_size; i++)
1526 WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));
1528 /* put the engine back into the active state */
1529 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1530 WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
1531 WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
1533 /* wait for training to complete */
1534 for (i = 0; i < rdev->usec_timeout; i++) {
1535 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
1539 for (i = 0; i < rdev->usec_timeout; i++) {
1540 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1)
/* restore the previous blackout state */
1546 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout);
/*
 * si_init_microcode - request all SI firmware images from userspace.
 *
 * Resolves per-family file names and expected sizes, then loads (via
 * request_firmware()) the PFP, ME, CE, RLC, MC (preferring the newer
 * _mc2.bin, falling back to _mc.bin) and SMC images, validating each
 * against its expected length.  A missing/bogus SMC image is
 * non-fatal: it is released and rdev->smc_fw is cleared.  Any other
 * failure falls through to the error path at the bottom, which
 * releases every firmware pointer and NULLs it.
 * NOTE(review): case labels, "goto out" lines, some NULL assignments
 * and the return statements are missing from this extract — confirm
 * against the full file.
 */
1552 static int si_init_microcode(struct radeon_device *rdev)
1554 const char *chip_name;
1555 const char *rlc_chip_name;
1556 size_t pfp_req_size, me_req_size, ce_req_size, rlc_req_size, mc_req_size;
1557 size_t smc_req_size, mc2_req_size;
1563 switch (rdev->family) {
1565 chip_name = "TAHITI";
1566 rlc_chip_name = "TAHITI";
1567 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1568 me_req_size = SI_PM4_UCODE_SIZE * 4;
1569 ce_req_size = SI_CE_UCODE_SIZE * 4;
1570 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1571 mc_req_size = SI_MC_UCODE_SIZE * 4;
1572 mc2_req_size = TAHITI_MC_UCODE_SIZE * 4;
1573 smc_req_size = ALIGN(TAHITI_SMC_UCODE_SIZE, 4);
1576 chip_name = "PITCAIRN";
1577 rlc_chip_name = "PITCAIRN";
1578 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1579 me_req_size = SI_PM4_UCODE_SIZE * 4;
1580 ce_req_size = SI_CE_UCODE_SIZE * 4;
1581 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1582 mc_req_size = SI_MC_UCODE_SIZE * 4;
1583 mc2_req_size = PITCAIRN_MC_UCODE_SIZE * 4;
1584 smc_req_size = ALIGN(PITCAIRN_SMC_UCODE_SIZE, 4);
1587 chip_name = "VERDE";
1588 rlc_chip_name = "VERDE";
1589 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1590 me_req_size = SI_PM4_UCODE_SIZE * 4;
1591 ce_req_size = SI_CE_UCODE_SIZE * 4;
1592 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1593 mc_req_size = SI_MC_UCODE_SIZE * 4;
1594 mc2_req_size = VERDE_MC_UCODE_SIZE * 4;
1595 smc_req_size = ALIGN(VERDE_SMC_UCODE_SIZE, 4);
1598 chip_name = "OLAND";
1599 rlc_chip_name = "OLAND";
1600 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1601 me_req_size = SI_PM4_UCODE_SIZE * 4;
1602 ce_req_size = SI_CE_UCODE_SIZE * 4;
1603 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
/* Oland/Hainan have a single MC image size (no separate mc2) */
1604 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1605 smc_req_size = ALIGN(OLAND_SMC_UCODE_SIZE, 4);
1608 chip_name = "HAINAN";
1609 rlc_chip_name = "HAINAN";
1610 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1611 me_req_size = SI_PM4_UCODE_SIZE * 4;
1612 ce_req_size = SI_CE_UCODE_SIZE * 4;
1613 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1614 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1615 smc_req_size = ALIGN(HAINAN_SMC_UCODE_SIZE, 4);
1620 DRM_INFO("Loading %s Microcode\n", chip_name);
1622 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
1623 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
1626 if (rdev->pfp_fw->size != pfp_req_size) {
1628 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1629 rdev->pfp_fw->size, fw_name);
1634 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
1635 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
1638 if (rdev->me_fw->size != me_req_size) {
1640 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1641 rdev->me_fw->size, fw_name);
1645 snprintf(fw_name, sizeof(fw_name), "radeon/%s_ce.bin", chip_name);
1646 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
1649 if (rdev->ce_fw->size != ce_req_size) {
1651 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1652 rdev->ce_fw->size, fw_name);
1656 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
1657 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
1660 if (rdev->rlc_fw->size != rlc_req_size) {
1662 "si_rlc: Bogus length %zu in firmware \"%s\"\n",
1663 rdev->rlc_fw->size, fw_name);
/* try the newer mc2 image first, fall back to the original mc image */
1667 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc2.bin", chip_name);
1668 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1670 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc.bin", chip_name);
1671 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
/* either image size is acceptable */
1675 if ((rdev->mc_fw->size != mc_req_size) &&
1676 (rdev->mc_fw->size != mc2_req_size)) {
1678 "si_mc: Bogus length %zu in firmware \"%s\"\n",
1679 rdev->mc_fw->size, fw_name);
1682 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size);
/* SMC firmware is optional: failure only disables it, not the driver */
1684 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", chip_name);
1685 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
1688 "smc: error loading firmware \"%s\"\n",
1690 release_firmware(rdev->smc_fw);
1691 rdev->smc_fw = NULL;
1693 } else if (rdev->smc_fw->size != smc_req_size) {
1695 "si_smc: Bogus length %zu in firmware \"%s\"\n",
1696 rdev->smc_fw->size, fw_name);
/* error path: drop every firmware reference acquired above */
1704 "si_cp: Failed to load firmware \"%s\"\n",
1706 release_firmware(rdev->pfp_fw);
1707 rdev->pfp_fw = NULL;
1708 release_firmware(rdev->me_fw);
1710 release_firmware(rdev->ce_fw);
1712 release_firmware(rdev->rlc_fw);
1713 rdev->rlc_fw = NULL;
1714 release_firmware(rdev->mc_fw);
1716 release_firmware(rdev->smc_fw);
1717 rdev->smc_fw = NULL;
1722 /* watermark setup */
/*
 * dce6_line_buffer_adjust - partition a shared line buffer between the
 * two CRTCs of a pair and program the DMIF buffer allocation, then
 * poll for the allocation to complete.  Returns the line-buffer size
 * granted to this CRTC (0 when the controller is disabled).
 * NOTE(review): several branches (tmp/buffer_alloc selection, the
 * return expressions) are missing from this extract — confirm against
 * the full file.
 */
1723 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev,
1724 struct radeon_crtc *radeon_crtc,
1725 struct drm_display_mode *mode,
1726 struct drm_display_mode *other_mode)
1728 u32 tmp, buffer_alloc, i;
1729 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1732 * There are 3 line buffers, each one shared by 2 display controllers.
1733 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1734 * the display controllers. The paritioning is done via one of four
1735 * preset allocations specified in bits 21:20:
1737 * 2 - whole lb, other crtc must be disabled
1739 /* this can get tricky if we have two large displays on a paired group
1740 * of crtcs. Ideally for multiple large displays we'd assign them to
1741 * non-linked crtcs for maximum line buffer allocation.
1743 if (radeon_crtc->base.enabled && mode) {
1748 tmp = 2; /* whole */
1756 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset,
1757 DC_LB_MEMORY_CONFIG(tmp));
1759 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1760 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
/* wait (bounded by usec_timeout) for the DMIF allocation to latch */
1761 for (i = 0; i < rdev->usec_timeout; i++) {
1762 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1763 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1768 if (radeon_crtc->base.enabled && mode) {
1778 /* controller not enabled, so no lb used */
/*
 * si_get_number_of_dram_channels - decode the NOOFCHAN field of
 * MC_SHARED_CHMAP into a DRAM channel count.
 * NOTE(review): the case labels and return values are missing from
 * this extract — confirm against the full file.
 */
1782 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev)
1784 u32 tmp = RREG32(MC_SHARED_CHMAP);
1786 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/*
 * Input parameters for the DCE6 display watermark calculations
 * (dce6_dram_bandwidth() and friends below).
 */
1809 struct dce6_wm_params {
1810 u32 dram_channels; /* number of dram channels */
1811 u32 yclk; /* bandwidth per dram data pin in kHz */
1812 u32 sclk; /* engine clock in kHz */
1813 u32 disp_clk; /* display clock in kHz */
1814 u32 src_width; /* viewport width */
1815 u32 active_time; /* active display time in ns */
1816 u32 blank_time; /* blank time in ns */
1817 bool interlaced; /* mode is interlaced */
1818 fixed20_12 vsc; /* vertical scale ratio */
1819 u32 num_heads; /* number of active crtcs */
1820 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1821 u32 lb_size; /* line buffer allocated to pipe */
1822 u32 vtaps; /* vertical scaler taps */
/*
 * dce6_dram_bandwidth - raw DRAM bandwidth in 20.12 fixed point:
 * (yclk/1000) * (channels * 4 bytes) * 0.7 efficiency, truncated.
 */
1825 static u32 dce6_dram_bandwidth(struct dce6_wm_params *wm)
1827 /* Calculate raw DRAM Bandwidth */
1828 fixed20_12 dram_efficiency; /* 0.7 */
1829 fixed20_12 yclk, dram_channels, bandwidth;
1832 a.full = dfixed_const(1000);
1833 yclk.full = dfixed_const(wm->yclk);
1834 yclk.full = dfixed_div(yclk, a);
1835 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1836 a.full = dfixed_const(10);
1837 dram_efficiency.full = dfixed_const(7);
1838 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1839 bandwidth.full = dfixed_mul(dram_channels, yclk);
1840 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1842 return dfixed_trunc(bandwidth);
/*
 * dce6_dram_bandwidth_for_display - DRAM bandwidth reserved for the
 * display: same formula as dce6_dram_bandwidth() but with the
 * worst-case 0.3 display allocation instead of the 0.7 efficiency.
 */
1845 static u32 dce6_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1847 /* Calculate DRAM Bandwidth and the part allocated to display. */
1848 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1849 fixed20_12 yclk, dram_channels, bandwidth;
1852 a.full = dfixed_const(1000);
1853 yclk.full = dfixed_const(wm->yclk);
1854 yclk.full = dfixed_div(yclk, a);
1855 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1856 a.full = dfixed_const(10);
1857 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1858 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1859 bandwidth.full = dfixed_mul(dram_channels, yclk);
1860 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1862 return dfixed_trunc(bandwidth);
/*
 * dce6_data_return_bandwidth - display data-return bandwidth:
 * 32 bytes * (sclk/1000) * 0.8 return efficiency, truncated.
 */
1865 static u32 dce6_data_return_bandwidth(struct dce6_wm_params *wm)
1867 /* Calculate the display Data return Bandwidth */
1868 fixed20_12 return_efficiency; /* 0.8 */
1869 fixed20_12 sclk, bandwidth;
1872 a.full = dfixed_const(1000);
1873 sclk.full = dfixed_const(wm->sclk);
1874 sclk.full = dfixed_div(sclk, a);
1875 a.full = dfixed_const(10);
1876 return_efficiency.full = dfixed_const(8);
1877 return_efficiency.full = dfixed_div(return_efficiency, a);
1878 a.full = dfixed_const(32);
1879 bandwidth.full = dfixed_mul(a, sclk);
1880 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1882 return dfixed_trunc(bandwidth);
/*
 * dce6_get_dmif_bytes_per_request - DMIF request size in bytes.
 * NOTE(review): the return statement is not visible in this extract;
 * dce6_dmif_request_bandwidth() below halves and uses this value.
 */
1885 static u32 dce6_get_dmif_bytes_per_request(struct dce6_wm_params *wm)
/*
 * dce6_dmif_request_bandwidth - DMIF request bandwidth: the minimum of
 * (request_size/2 * disp_clk/1000) and (request_size * sclk/1000),
 * scaled by the 0.8 request efficiency.
 */
1890 static u32 dce6_dmif_request_bandwidth(struct dce6_wm_params *wm)
1892 /* Calculate the DMIF Request Bandwidth */
1893 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1894 fixed20_12 disp_clk, sclk, bandwidth;
1895 fixed20_12 a, b1, b2;
1898 a.full = dfixed_const(1000);
1899 disp_clk.full = dfixed_const(wm->disp_clk);
1900 disp_clk.full = dfixed_div(disp_clk, a);
1901 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm) / 2);
1902 b1.full = dfixed_mul(a, disp_clk);
1904 a.full = dfixed_const(1000);
1905 sclk.full = dfixed_const(wm->sclk);
1906 sclk.full = dfixed_div(sclk, a);
1907 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm));
1908 b2.full = dfixed_mul(a, sclk);
1910 a.full = dfixed_const(10);
1911 disp_clk_request_efficiency.full = dfixed_const(8);
1912 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
/* the slower of the display-clock and engine-clock limited rates */
1914 min_bandwidth = min(dfixed_trunc(b1), dfixed_trunc(b2));
1916 a.full = dfixed_const(min_bandwidth);
1917 bandwidth.full = dfixed_mul(a, disp_clk_request_efficiency);
1919 return dfixed_trunc(bandwidth);
/*
 * dce6_available_bandwidth - the tightest of the three bandwidth
 * limits (DRAM, data return, DMIF request).
 */
1922 static u32 dce6_available_bandwidth(struct dce6_wm_params *wm)
1924 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1925 u32 dram_bandwidth = dce6_dram_bandwidth(wm);
1926 u32 data_return_bandwidth = dce6_data_return_bandwidth(wm);
1927 u32 dmif_req_bandwidth = dce6_dmif_request_bandwidth(wm);
1929 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/*
 * dce6_average_bandwidth - average bandwidth the mode consumes:
 * src_width * bytes_per_pixel * vsc / line_time, with line_time =
 * (active_time + blank_time)/1000.
 */
1932 static u32 dce6_average_bandwidth(struct dce6_wm_params *wm)
1934 /* Calculate the display mode Average Bandwidth
1935 * DisplayMode should contain the source and destination dimensions,
1939 fixed20_12 line_time;
1940 fixed20_12 src_width;
1941 fixed20_12 bandwidth;
1944 a.full = dfixed_const(1000);
1945 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1946 line_time.full = dfixed_div(line_time, a);
1947 bpp.full = dfixed_const(wm->bytes_per_pixel);
1948 src_width.full = dfixed_const(wm->src_width);
1949 bandwidth.full = dfixed_mul(src_width, bpp);
1950 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1951 bandwidth.full = dfixed_div(bandwidth, line_time);
1953 return dfixed_trunc(bandwidth);
/*
 * dce6_latency_watermark - worst-case latency (ns) the line buffer
 * must hide: MC latency + other heads' data-return time + DC pipe
 * latency, extended by the line-fill shortfall when the line buffer
 * cannot be refilled within the active time.
 * NOTE(review): the num_heads==0 early return and the
 * line_fill_time < active_time return are missing from this extract.
 */
1956 static u32 dce6_latency_watermark(struct dce6_wm_params *wm)
1958 /* First calculate the latency in ns */
1959 u32 mc_latency = 2000; /* 2000 ns. */
1960 u32 available_bandwidth = dce6_available_bandwidth(wm);
1961 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
1962 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
1963 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
1964 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
1965 (wm->num_heads * cursor_line_pair_return_time);
1966 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
1967 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
1968 u32 tmp, dmif_size = 12288;
1971 if (wm->num_heads == 0)
/* 4 source lines per destination line when downscaling hard,
 * using >= 3 vertical taps, or interlaced at >= 2x; else 2 */
1974 a.full = dfixed_const(2);
1975 b.full = dfixed_const(1);
1976 if ((wm->vsc.full > a.full) ||
1977 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
1979 ((wm->vsc.full >= a.full) && wm->interlaced))
1980 max_src_lines_per_dst_line = 4;
1982 max_src_lines_per_dst_line = 2;
/* per-head share of the available bandwidth */
1984 a.full = dfixed_const(available_bandwidth);
1985 b.full = dfixed_const(wm->num_heads);
1986 a.full = dfixed_div(a, b);
1988 b.full = dfixed_const(mc_latency + 512);
1989 c.full = dfixed_const(wm->disp_clk);
1990 b.full = dfixed_div(b, c);
1992 c.full = dfixed_const(dmif_size);
1993 b.full = dfixed_div(c, b);
1995 tmp = min(dfixed_trunc(a), dfixed_trunc(b));
1997 b.full = dfixed_const(1000);
1998 c.full = dfixed_const(wm->disp_clk);
1999 b.full = dfixed_div(c, b);
2000 c.full = dfixed_const(wm->bytes_per_pixel);
2001 b.full = dfixed_mul(b, c);
/* line-buffer fill bandwidth is the lesser of the two limits */
2003 lb_fill_bw = min(tmp, dfixed_trunc(b));
2005 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2006 b.full = dfixed_const(1000);
2007 c.full = dfixed_const(lb_fill_bw);
2008 b.full = dfixed_div(c, b);
2009 a.full = dfixed_div(a, b);
2010 line_fill_time = dfixed_trunc(a);
2012 if (line_fill_time < wm->active_time)
2015 return latency + (line_fill_time - wm->active_time);
/*
 * True when this head's average bandwidth fits within its share of
 * the display-allocated DRAM bandwidth.
 * NOTE(review): the return true/false lines are not visible in this
 * extract.
 */
2019 static bool dce6_average_bandwidth_vs_dram_bandwidth_for_display(struct dce6_wm_params *wm)
2021 if (dce6_average_bandwidth(wm) <=
2022 (dce6_dram_bandwidth_for_display(wm) / wm->num_heads))
/*
 * True when this head's average bandwidth fits within its share of
 * the total available bandwidth.
 * NOTE(review): the return true/false lines are not visible in this
 * extract.
 */
2028 static bool dce6_average_bandwidth_vs_available_bandwidth(struct dce6_wm_params *wm)
2030 if (dce6_average_bandwidth(wm) <=
2031 (dce6_available_bandwidth(wm) / wm->num_heads))
/*
 * dce6_check_latency_hiding - can the line buffer hide the computed
 * latency?  Tolerates 1 buffered line when scaling down or when the
 * buffer holds no more lines than the scaler taps need, otherwise 2;
 * compares the latency watermark against the hiding budget.
 * NOTE(review): the return true/false lines are not visible in this
 * extract.
 */
2037 static bool dce6_check_latency_hiding(struct dce6_wm_params *wm)
2039 u32 lb_partitions = wm->lb_size / wm->src_width;
2040 u32 line_time = wm->active_time + wm->blank_time;
2041 u32 latency_tolerant_lines;
2045 a.full = dfixed_const(1);
2046 if (wm->vsc.full > a.full)
2047 latency_tolerant_lines = 1;
2049 if (lb_partitions <= (wm->vtaps + 1))
2050 latency_tolerant_lines = 1;
2052 latency_tolerant_lines = 2;
2055 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2057 if (dce6_latency_watermark(wm) <= latency_hiding)
2063 static void dce6_program_watermarks(struct radeon_device *rdev,
2064 struct radeon_crtc *radeon_crtc,
2065 u32 lb_size, u32 num_heads)
2067 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2068 struct dce6_wm_params wm_low, wm_high;
2072 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2073 u32 priority_a_mark = 0, priority_b_mark = 0;
2074 u32 priority_a_cnt = PRIORITY_OFF;
2075 u32 priority_b_cnt = PRIORITY_OFF;
2076 u32 tmp, arb_control3;
2079 if (radeon_crtc->base.enabled && num_heads && mode) {
2080 pixel_period = 1000000 / (u32)mode->clock;
2081 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2085 if (rdev->family == CHIP_ARUBA)
2086 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2088 dram_channels = si_get_number_of_dram_channels(rdev);
2090 /* watermark for high clocks */
2091 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2093 radeon_dpm_get_mclk(rdev, false) * 10;
2095 radeon_dpm_get_sclk(rdev, false) * 10;
2097 wm_high.yclk = rdev->pm.current_mclk * 10;
2098 wm_high.sclk = rdev->pm.current_sclk * 10;
2101 wm_high.disp_clk = mode->clock;
2102 wm_high.src_width = mode->crtc_hdisplay;
2103 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2104 wm_high.blank_time = line_time - wm_high.active_time;
2105 wm_high.interlaced = false;
2106 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2107 wm_high.interlaced = true;
2108 wm_high.vsc = radeon_crtc->vsc;
2110 if (radeon_crtc->rmx_type != RMX_OFF)
2112 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2113 wm_high.lb_size = lb_size;
2114 wm_high.dram_channels = dram_channels;
2115 wm_high.num_heads = num_heads;
2117 /* watermark for low clocks */
2118 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2120 radeon_dpm_get_mclk(rdev, true) * 10;
2122 radeon_dpm_get_sclk(rdev, true) * 10;
2124 wm_low.yclk = rdev->pm.current_mclk * 10;
2125 wm_low.sclk = rdev->pm.current_sclk * 10;
2128 wm_low.disp_clk = mode->clock;
2129 wm_low.src_width = mode->crtc_hdisplay;
2130 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2131 wm_low.blank_time = line_time - wm_low.active_time;
2132 wm_low.interlaced = false;
2133 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2134 wm_low.interlaced = true;
2135 wm_low.vsc = radeon_crtc->vsc;
2137 if (radeon_crtc->rmx_type != RMX_OFF)
2139 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2140 wm_low.lb_size = lb_size;
2141 wm_low.dram_channels = dram_channels;
2142 wm_low.num_heads = num_heads;
2144 /* set for high clocks */
2145 latency_watermark_a = min(dce6_latency_watermark(&wm_high), (u32)65535);
2146 /* set for low clocks */
2147 latency_watermark_b = min(dce6_latency_watermark(&wm_low), (u32)65535);
2149 /* possibly force display priority to high */
2150 /* should really do this at mode validation time... */
2151 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2152 !dce6_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2153 !dce6_check_latency_hiding(&wm_high) ||
2154 (rdev->disp_priority == 2)) {
2155 DRM_DEBUG_KMS("force priority to high\n");
2156 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2157 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2159 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2160 !dce6_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2161 !dce6_check_latency_hiding(&wm_low) ||
2162 (rdev->disp_priority == 2)) {
2163 DRM_DEBUG_KMS("force priority to high\n");
2164 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2165 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2168 a.full = dfixed_const(1000);
2169 b.full = dfixed_const(mode->clock);
2170 b.full = dfixed_div(b, a);
2171 c.full = dfixed_const(latency_watermark_a);
2172 c.full = dfixed_mul(c, b);
2173 c.full = dfixed_mul(c, radeon_crtc->hsc);
2174 c.full = dfixed_div(c, a);
2175 a.full = dfixed_const(16);
2176 c.full = dfixed_div(c, a);
2177 priority_a_mark = dfixed_trunc(c);
2178 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2180 a.full = dfixed_const(1000);
2181 b.full = dfixed_const(mode->clock);
2182 b.full = dfixed_div(b, a);
2183 c.full = dfixed_const(latency_watermark_b);
2184 c.full = dfixed_mul(c, b);
2185 c.full = dfixed_mul(c, radeon_crtc->hsc);
2186 c.full = dfixed_div(c, a);
2187 a.full = dfixed_const(16);
2188 c.full = dfixed_div(c, a);
2189 priority_b_mark = dfixed_trunc(c);
2190 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2194 arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2196 tmp &= ~LATENCY_WATERMARK_MASK(3);
2197 tmp |= LATENCY_WATERMARK_MASK(1);
2198 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2199 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2200 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2201 LATENCY_HIGH_WATERMARK(line_time)));
2203 tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2204 tmp &= ~LATENCY_WATERMARK_MASK(3);
2205 tmp |= LATENCY_WATERMARK_MASK(2);
2206 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2207 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2208 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2209 LATENCY_HIGH_WATERMARK(line_time)));
2210 /* restore original selection */
2211 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, arb_control3);
2213 /* write the priority marks */
2214 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2215 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2217 /* save values for DPM */
2218 radeon_crtc->line_time = line_time;
2219 radeon_crtc->wm_high = latency_watermark_a;
2220 radeon_crtc->wm_low = latency_watermark_b;
/* dce6_bandwidth_update - program display watermarks for all crtcs (DCE6)
 *
 * Walks the crtcs in pairs: for each pair the shared line buffer is split
 * between the two heads (dce6_line_buffer_adjust) and watermarks are then
 * programmed for each head (dce6_program_watermarks).
 *
 * NOTE(review): several lines appear elided in this capture (local
 * declarations such as the loop index, braces, the num_heads increment
 * in the first loop, and the early return when mode_config is not
 * initialized) -- confirm against the full file.
 */
2223 void dce6_bandwidth_update(struct radeon_device *rdev)
2225 struct drm_display_mode *mode0 = NULL;
2226 struct drm_display_mode *mode1 = NULL;
2227 u32 num_heads = 0, lb_size;
/* Nothing to do before modesetting is up. */
2230 if (!rdev->mode_info.mode_config_initialized)
2233 radeon_update_display_priority(rdev);
/* Count enabled heads; presumably num_heads++ for each enabled crtc
 * (increment elided in this capture). */
2235 for (i = 0; i < rdev->num_crtc; i++) {
2236 if (rdev->mode_info.crtcs[i]->base.enabled)
/* Process crtcs two at a time since each pair shares one line buffer. */
2239 for (i = 0; i < rdev->num_crtc; i += 2) {
2240 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2241 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2242 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2243 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2244 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2245 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
/* si_tiling_mode_table_init - program the GB_TILE_MODE register table
 *
 * Fills rdev->config.si.tile_mode_array[] and writes each entry to
 * GB_TILE_MODE0 + reg_offset*4.  Two hardware layouts are handled:
 * Tahiti/Pitcairn use PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) while
 * Verde/Oland/Hainan use PIPE_CONFIG(ADDR_SURF_P4_8x16) (except the
 * PRT entries, which stay P8).  The TILE_SPLIT for the "equal to row
 * size" entries is derived from mem_row_size_in_kb.
 *
 * NOTE(review): case labels, break statements, default entries and
 * closing braces appear elided in this capture of the file -- the
 * exact tile mode values below are a hardware contract and must not
 * be altered.
 */
2252 static void si_tiling_mode_table_init(struct radeon_device *rdev)
2254 const u32 num_tile_mode_states = 32;
2255 u32 reg_offset, gb_tile_moden, split_equal_to_row_size;
/* Map DRAM row size (KB) to the matching ADDR_SURF tile split value. */
2257 switch (rdev->config.si.mem_row_size_in_kb) {
2259 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_1KB;
2263 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_2KB;
2266 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_4KB;
/* 8-pipe asics (Tahiti/Pitcairn). */
2270 if ((rdev->family == CHIP_TAHITI) ||
2271 (rdev->family == CHIP_PITCAIRN)) {
2272 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2273 switch (reg_offset) {
2274 case 0: /* non-AA compressed depth or any compressed stencil */
2275 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2276 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2277 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2278 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2279 NUM_BANKS(ADDR_SURF_16_BANK) |
2280 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2281 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2282 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2284 case 1: /* 2xAA/4xAA compressed depth only */
2285 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2286 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2287 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2288 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2289 NUM_BANKS(ADDR_SURF_16_BANK) |
2290 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2291 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2292 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2294 case 2: /* 8xAA compressed depth only */
2295 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2296 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2297 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2298 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2299 NUM_BANKS(ADDR_SURF_16_BANK) |
2300 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2301 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2302 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2304 case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2305 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2306 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2307 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2308 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2309 NUM_BANKS(ADDR_SURF_16_BANK) |
2310 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2311 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2312 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2314 case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2315 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2316 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2317 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2318 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2319 NUM_BANKS(ADDR_SURF_16_BANK) |
2320 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2321 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2322 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2324 case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2325 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2326 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2327 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2328 TILE_SPLIT(split_equal_to_row_size) |
2329 NUM_BANKS(ADDR_SURF_16_BANK) |
2330 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2331 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2332 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2334 case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2335 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2336 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2337 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2338 TILE_SPLIT(split_equal_to_row_size) |
2339 NUM_BANKS(ADDR_SURF_16_BANK) |
2340 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2341 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2342 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2344 case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2345 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2346 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2347 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2348 TILE_SPLIT(split_equal_to_row_size) |
2349 NUM_BANKS(ADDR_SURF_16_BANK) |
2350 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2351 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2352 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2354 case 8: /* 1D and 1D Array Surfaces */
2355 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2356 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2357 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2358 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2359 NUM_BANKS(ADDR_SURF_16_BANK) |
2360 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2361 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2362 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2364 case 9: /* Displayable maps. */
2365 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2366 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2367 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2368 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2369 NUM_BANKS(ADDR_SURF_16_BANK) |
2370 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2371 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2372 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2374 case 10: /* Display 8bpp. */
2375 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2376 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2377 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2378 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2379 NUM_BANKS(ADDR_SURF_16_BANK) |
2380 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2381 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2382 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2384 case 11: /* Display 16bpp. */
2385 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2386 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2387 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2388 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2389 NUM_BANKS(ADDR_SURF_16_BANK) |
2390 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2391 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2392 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2394 case 12: /* Display 32bpp. */
2395 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2396 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2397 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2398 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2399 NUM_BANKS(ADDR_SURF_16_BANK) |
2400 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2401 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2402 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2404 case 13: /* Thin. */
2405 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2406 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2407 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2408 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2409 NUM_BANKS(ADDR_SURF_16_BANK) |
2410 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2411 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2412 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2414 case 14: /* Thin 8 bpp. */
2415 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2416 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2417 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2418 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2419 NUM_BANKS(ADDR_SURF_16_BANK) |
2420 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2421 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2422 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2424 case 15: /* Thin 16 bpp. */
2425 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2426 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2427 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2428 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2429 NUM_BANKS(ADDR_SURF_16_BANK) |
2430 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2431 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2432 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2434 case 16: /* Thin 32 bpp. */
2435 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2436 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2437 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2438 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2439 NUM_BANKS(ADDR_SURF_16_BANK) |
2440 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2441 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2442 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2444 case 17: /* Thin 64 bpp. */
2445 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2446 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2447 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2448 TILE_SPLIT(split_equal_to_row_size) |
2449 NUM_BANKS(ADDR_SURF_16_BANK) |
2450 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2451 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2452 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2454 case 21: /* 8 bpp PRT. */
2455 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2456 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2457 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2458 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2459 NUM_BANKS(ADDR_SURF_16_BANK) |
2460 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2461 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2462 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2464 case 22: /* 16 bpp PRT */
2465 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2466 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2467 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2468 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2469 NUM_BANKS(ADDR_SURF_16_BANK) |
2470 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2471 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2472 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2474 case 23: /* 32 bpp PRT */
2475 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2476 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2477 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2478 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2479 NUM_BANKS(ADDR_SURF_16_BANK) |
2480 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2481 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2482 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2484 case 24: /* 64 bpp PRT */
2485 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2486 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2487 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2488 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2489 NUM_BANKS(ADDR_SURF_16_BANK) |
2490 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2491 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2492 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2494 case 25: /* 128 bpp PRT */
2495 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2496 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2497 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2498 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2499 NUM_BANKS(ADDR_SURF_8_BANK) |
2500 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2501 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2502 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
/* Cache the value for userspace queries, then program the register. */
2508 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2509 WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
/* 4-pipe asics (Verde/Oland/Hainan). */
2511 } else if ((rdev->family == CHIP_VERDE) ||
2512 (rdev->family == CHIP_OLAND) ||
2513 (rdev->family == CHIP_HAINAN)) {
2514 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2515 switch (reg_offset) {
2516 case 0: /* non-AA compressed depth or any compressed stencil */
2517 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2518 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2519 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2520 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2521 NUM_BANKS(ADDR_SURF_16_BANK) |
2522 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2523 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2524 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2526 case 1: /* 2xAA/4xAA compressed depth only */
2527 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2528 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2529 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2530 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2531 NUM_BANKS(ADDR_SURF_16_BANK) |
2532 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2533 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2534 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2536 case 2: /* 8xAA compressed depth only */
2537 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2538 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2539 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2540 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2541 NUM_BANKS(ADDR_SURF_16_BANK) |
2542 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2543 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2544 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2546 case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2547 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2548 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2549 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2550 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2551 NUM_BANKS(ADDR_SURF_16_BANK) |
2552 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2553 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2554 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2556 case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2557 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2558 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2559 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2560 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2561 NUM_BANKS(ADDR_SURF_16_BANK) |
2562 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2563 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2564 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2566 case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2567 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2568 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2569 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2570 TILE_SPLIT(split_equal_to_row_size) |
2571 NUM_BANKS(ADDR_SURF_16_BANK) |
2572 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2573 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2574 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2576 case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2577 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2578 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2579 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2580 TILE_SPLIT(split_equal_to_row_size) |
2581 NUM_BANKS(ADDR_SURF_16_BANK) |
2582 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2583 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2584 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2586 case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2587 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2588 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2589 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2590 TILE_SPLIT(split_equal_to_row_size) |
2591 NUM_BANKS(ADDR_SURF_16_BANK) |
2592 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2593 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2594 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2596 case 8: /* 1D and 1D Array Surfaces */
2597 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2598 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2599 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2600 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2601 NUM_BANKS(ADDR_SURF_16_BANK) |
2602 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2603 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2604 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2606 case 9: /* Displayable maps. */
2607 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2608 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2609 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2610 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2611 NUM_BANKS(ADDR_SURF_16_BANK) |
2612 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2613 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2614 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2616 case 10: /* Display 8bpp. */
2617 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2618 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2619 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2620 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2621 NUM_BANKS(ADDR_SURF_16_BANK) |
2622 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2623 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2624 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2626 case 11: /* Display 16bpp. */
2627 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2628 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2629 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2630 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2631 NUM_BANKS(ADDR_SURF_16_BANK) |
2632 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2633 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2634 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2636 case 12: /* Display 32bpp. */
2637 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2638 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2639 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2640 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2641 NUM_BANKS(ADDR_SURF_16_BANK) |
2642 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2643 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2644 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2646 case 13: /* Thin. */
2647 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2648 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2649 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2650 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2651 NUM_BANKS(ADDR_SURF_16_BANK) |
2652 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2653 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2654 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2656 case 14: /* Thin 8 bpp. */
2657 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2658 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2659 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2660 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2661 NUM_BANKS(ADDR_SURF_16_BANK) |
2662 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2663 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2664 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2666 case 15: /* Thin 16 bpp. */
2667 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2668 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2669 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2670 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2671 NUM_BANKS(ADDR_SURF_16_BANK) |
2672 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2673 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2674 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2676 case 16: /* Thin 32 bpp. */
2677 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2678 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2679 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2680 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2681 NUM_BANKS(ADDR_SURF_16_BANK) |
2682 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2683 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2684 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2686 case 17: /* Thin 64 bpp. */
2687 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2688 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2689 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2690 TILE_SPLIT(split_equal_to_row_size) |
2691 NUM_BANKS(ADDR_SURF_16_BANK) |
2692 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2693 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2694 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2696 case 21: /* 8 bpp PRT. */
2697 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2698 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2699 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2700 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2701 NUM_BANKS(ADDR_SURF_16_BANK) |
2702 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2703 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2704 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2706 case 22: /* 16 bpp PRT */
2707 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2708 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2709 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2710 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2711 NUM_BANKS(ADDR_SURF_16_BANK) |
2712 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2713 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2714 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2716 case 23: /* 32 bpp PRT */
2717 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2718 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2719 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2720 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2721 NUM_BANKS(ADDR_SURF_16_BANK) |
2722 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2723 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2724 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2726 case 24: /* 64 bpp PRT */
2727 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2728 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2729 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2730 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2731 NUM_BANKS(ADDR_SURF_16_BANK) |
2732 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2733 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2734 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2736 case 25: /* 128 bpp PRT */
2737 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2738 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2739 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2740 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2741 NUM_BANKS(ADDR_SURF_8_BANK) |
2742 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2743 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2744 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
/* Cache the value for userspace queries, then program the register. */
2750 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2751 WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
/* Unhandled family: nothing is programmed. */
2754 DRM_ERROR("unknown asic: 0x%x\n", rdev->family);
/* si_select_se_sh - steer subsequent register accesses via GRBM_GFX_INDEX
 *
 * Selects a specific shader engine (se_num) and shader array (sh_num),
 * or broadcasts when either index is 0xffffffff.  Instance writes are
 * always broadcast.  NOTE(review): the final else branch and braces
 * appear elided in this capture -- confirm against the full file.
 */
2757 static void si_select_se_sh(struct radeon_device *rdev,
2758 u32 se_num, u32 sh_num)
2760 u32 data = INSTANCE_BROADCAST_WRITES;
/* 0xffffffff means "broadcast to all" for that index. */
2762 if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
2763 data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
2764 else if (se_num == 0xffffffff)
2765 data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
2766 else if (sh_num == 0xffffffff)
2767 data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
2769 data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
2770 WREG32(GRBM_GFX_INDEX, data);
/* si_create_bitmask - build a mask with the low bit_width bits.
 * NOTE(review): the loop body, local declarations and return appear
 * elided in this capture; presumably each iteration ORs in one bit --
 * confirm against the full file.
 */
2773 static u32 si_create_bitmask(u32 bit_width)
2777 for (i = 0; i < bit_width; i++) {
/* si_get_cu_enabled - return a bitmask of active compute units
 *
 * Combines the hardware (CC_GC_SHADER_ARRAY_CONFIG) and user
 * (GC_USER_SHADER_ARRAY_CONFIG) inactive-CU fields for the currently
 * selected SE/SH, then inverts and limits the result to cu_per_sh bits:
 * a set bit means the CU is enabled.
 */
2784 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh)
2788 data = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
2790 data &= INACTIVE_CUS_MASK;
2793 data |= RREG32(GC_USER_SHADER_ARRAY_CONFIG);
2795 data >>= INACTIVE_CUS_SHIFT;
2797 mask = si_create_bitmask(cu_per_sh);
/* Inactive bits inverted = active CUs, clipped to the per-SH width. */
2799 return ~data & mask;
/* si_setup_spi - program SPI_STATIC_THREAD_MGMT_3 per shader array
 *
 * For every SE/SH pair, selects it via si_select_se_sh, reads the
 * active-CU mask and updates SPI_STATIC_THREAD_MGMT_3 accordingly,
 * then restores broadcast selection.  NOTE(review): the per-CU mask
 * computation inside the k-loop appears elided in this capture.
 */
2802 static void si_setup_spi(struct radeon_device *rdev,
2803 u32 se_num, u32 sh_per_se,
2807 u32 data, mask, active_cu;
2809 for (i = 0; i < se_num; i++) {
2810 for (j = 0; j < sh_per_se; j++) {
2811 si_select_se_sh(rdev, i, j);
2812 data = RREG32(SPI_STATIC_THREAD_MGMT_3);
2813 active_cu = si_get_cu_enabled(rdev, cu_per_sh);
/* Scan up to 16 CU bits; write back when an active CU is found. */
2816 for (k = 0; k < 16; k++) {
2818 if (active_cu & mask) {
2820 WREG32(SPI_STATIC_THREAD_MGMT_3, data);
/* Restore broadcast so later register writes hit all SEs/SHs. */
2826 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
/* si_get_rb_disabled - return the disabled render-backend bitmask
 *
 * Combines the hardware (CC_RB_BACKEND_DISABLE) and user
 * (GC_USER_RB_BACKEND_DISABLE) disable fields for the currently
 * selected SE/SH, shifted down and limited to the per-SH RB count
 * (max_rb_num_per_se / sh_per_se bits).  NOTE(review): the final
 * "return data & mask;" appears elided in this capture.
 */
2829 static u32 si_get_rb_disabled(struct radeon_device *rdev,
2830 u32 max_rb_num_per_se,
2835 data = RREG32(CC_RB_BACKEND_DISABLE);
2837 data &= BACKEND_DISABLE_MASK;
2840 data |= RREG32(GC_USER_RB_BACKEND_DISABLE);
2842 data >>= BACKEND_DISABLE_SHIFT;
2844 mask = si_create_bitmask(max_rb_num_per_se / sh_per_se);
/* si_setup_rb - compute enabled render backends and program raster config
 *
 * Gathers the per-SE/SH disabled-RB masks into disabled_rbs, derives
 * the global enabled_rbs mask (cached in
 * rdev->config.si.backend_enable_mask for userspace queries), then
 * programs PA_SC_RASTER_CONFIG per SE based on which RBs in each pair
 * are enabled.  NOTE(review): the switch case labels/breaks, the mask
 * shift in the accumulation loop, and the enabled_rbs >>= consumption
 * appear elided in this capture -- confirm against the full file.
 */
2849 static void si_setup_rb(struct radeon_device *rdev,
2850 u32 se_num, u32 sh_per_se,
2851 u32 max_rb_num_per_se)
2855 u32 disabled_rbs = 0;
2856 u32 enabled_rbs = 0;
/* Collect each shader array's disabled-RB bits into one global mask. */
2858 for (i = 0; i < se_num; i++) {
2859 for (j = 0; j < sh_per_se; j++) {
2860 si_select_se_sh(rdev, i, j);
2861 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
2862 disabled_rbs |= data << ((i * sh_per_se + j) * TAHITI_RB_BITMAP_WIDTH_PER_SH);
2865 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
/* Invert: any RB not disabled is enabled. */
2868 for (i = 0; i < max_rb_num_per_se * se_num; i++) {
2869 if (!(disabled_rbs & mask))
2870 enabled_rbs |= mask;
2874 rdev->config.si.backend_enable_mask = enabled_rbs;
/* Program the RB mapping for each SE from its two enabled-RB bits. */
2876 for (i = 0; i < se_num; i++) {
2877 si_select_se_sh(rdev, i, 0xffffffff);
2879 for (j = 0; j < sh_per_se; j++) {
2880 switch (enabled_rbs & 3) {
2882 data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
2885 data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
2889 data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
2894 WREG32(PA_SC_RASTER_CONFIG, data);
2896 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2899 static void si_gpu_init(struct radeon_device *rdev)
2901 u32 gb_addr_config = 0;
2902 u32 mc_shared_chmap, mc_arb_ramcfg;
2904 u32 hdp_host_path_cntl;
2908 switch (rdev->family) {
2910 rdev->config.si.max_shader_engines = 2;
2911 rdev->config.si.max_tile_pipes = 12;
2912 rdev->config.si.max_cu_per_sh = 8;
2913 rdev->config.si.max_sh_per_se = 2;
2914 rdev->config.si.max_backends_per_se = 4;
2915 rdev->config.si.max_texture_channel_caches = 12;
2916 rdev->config.si.max_gprs = 256;
2917 rdev->config.si.max_gs_threads = 32;
2918 rdev->config.si.max_hw_contexts = 8;
2920 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2921 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2922 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2923 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2924 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2927 rdev->config.si.max_shader_engines = 2;
2928 rdev->config.si.max_tile_pipes = 8;
2929 rdev->config.si.max_cu_per_sh = 5;
2930 rdev->config.si.max_sh_per_se = 2;
2931 rdev->config.si.max_backends_per_se = 4;
2932 rdev->config.si.max_texture_channel_caches = 8;
2933 rdev->config.si.max_gprs = 256;
2934 rdev->config.si.max_gs_threads = 32;
2935 rdev->config.si.max_hw_contexts = 8;
2937 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2938 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2939 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2940 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2941 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2945 rdev->config.si.max_shader_engines = 1;
2946 rdev->config.si.max_tile_pipes = 4;
2947 rdev->config.si.max_cu_per_sh = 5;
2948 rdev->config.si.max_sh_per_se = 2;
2949 rdev->config.si.max_backends_per_se = 4;
2950 rdev->config.si.max_texture_channel_caches = 4;
2951 rdev->config.si.max_gprs = 256;
2952 rdev->config.si.max_gs_threads = 32;
2953 rdev->config.si.max_hw_contexts = 8;
2955 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2956 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2957 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2958 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2959 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2962 rdev->config.si.max_shader_engines = 1;
2963 rdev->config.si.max_tile_pipes = 4;
2964 rdev->config.si.max_cu_per_sh = 6;
2965 rdev->config.si.max_sh_per_se = 1;
2966 rdev->config.si.max_backends_per_se = 2;
2967 rdev->config.si.max_texture_channel_caches = 4;
2968 rdev->config.si.max_gprs = 256;
2969 rdev->config.si.max_gs_threads = 16;
2970 rdev->config.si.max_hw_contexts = 8;
2972 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2973 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2974 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2975 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2976 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2979 rdev->config.si.max_shader_engines = 1;
2980 rdev->config.si.max_tile_pipes = 4;
2981 rdev->config.si.max_cu_per_sh = 5;
2982 rdev->config.si.max_sh_per_se = 1;
2983 rdev->config.si.max_backends_per_se = 1;
2984 rdev->config.si.max_texture_channel_caches = 2;
2985 rdev->config.si.max_gprs = 256;
2986 rdev->config.si.max_gs_threads = 16;
2987 rdev->config.si.max_hw_contexts = 8;
2989 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2990 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2991 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2992 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2993 gb_addr_config = HAINAN_GB_ADDR_CONFIG_GOLDEN;
2997 /* Initialize HDP */
2998 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2999 WREG32((0x2c14 + j), 0x00000000);
3000 WREG32((0x2c18 + j), 0x00000000);
3001 WREG32((0x2c1c + j), 0x00000000);
3002 WREG32((0x2c20 + j), 0x00000000);
3003 WREG32((0x2c24 + j), 0x00000000);
3006 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3008 evergreen_fix_pci_max_read_req_size(rdev);
3010 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
3012 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3013 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3015 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes;
3016 rdev->config.si.mem_max_burst_length_bytes = 256;
3017 tmp = (mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT;
3018 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
3019 if (rdev->config.si.mem_row_size_in_kb > 4)
3020 rdev->config.si.mem_row_size_in_kb = 4;
3021 /* XXX use MC settings? */
3022 rdev->config.si.shader_engine_tile_size = 32;
3023 rdev->config.si.num_gpus = 1;
3024 rdev->config.si.multi_gpu_tile_size = 64;
3026 /* fix up row size */
3027 gb_addr_config &= ~ROW_SIZE_MASK;
3028 switch (rdev->config.si.mem_row_size_in_kb) {
3031 gb_addr_config |= ROW_SIZE(0);
3034 gb_addr_config |= ROW_SIZE(1);
3037 gb_addr_config |= ROW_SIZE(2);
3041 /* setup tiling info dword. gb_addr_config is not adequate since it does
3042 * not have bank info, so create a custom tiling dword.
3043 * bits 3:0 num_pipes
3044 * bits 7:4 num_banks
3045 * bits 11:8 group_size
3046 * bits 15:12 row_size
3048 rdev->config.si.tile_config = 0;
3049 switch (rdev->config.si.num_tile_pipes) {
3051 rdev->config.si.tile_config |= (0 << 0);
3054 rdev->config.si.tile_config |= (1 << 0);
3057 rdev->config.si.tile_config |= (2 << 0);
3061 /* XXX what about 12? */
3062 rdev->config.si.tile_config |= (3 << 0);
3065 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3066 case 0: /* four banks */
3067 rdev->config.si.tile_config |= 0 << 4;
3069 case 1: /* eight banks */
3070 rdev->config.si.tile_config |= 1 << 4;
3072 case 2: /* sixteen banks */
3074 rdev->config.si.tile_config |= 2 << 4;
3077 rdev->config.si.tile_config |=
3078 ((gb_addr_config & PIPE_INTERLEAVE_SIZE_MASK) >> PIPE_INTERLEAVE_SIZE_SHIFT) << 8;
3079 rdev->config.si.tile_config |=
3080 ((gb_addr_config & ROW_SIZE_MASK) >> ROW_SIZE_SHIFT) << 12;
3082 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3083 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3084 WREG32(DMIF_ADDR_CALC, gb_addr_config);
3085 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3086 WREG32(DMA_TILING_CONFIG + DMA0_REGISTER_OFFSET, gb_addr_config);
3087 WREG32(DMA_TILING_CONFIG + DMA1_REGISTER_OFFSET, gb_addr_config);
3088 if (rdev->has_uvd) {
3089 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3090 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3091 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3094 si_tiling_mode_table_init(rdev);
3096 si_setup_rb(rdev, rdev->config.si.max_shader_engines,
3097 rdev->config.si.max_sh_per_se,
3098 rdev->config.si.max_backends_per_se);
3100 si_setup_spi(rdev, rdev->config.si.max_shader_engines,
3101 rdev->config.si.max_sh_per_se,
3102 rdev->config.si.max_cu_per_sh);
3105 /* set HW defaults for 3D engine */
3106 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3107 ROQ_IB2_START(0x2b)));
3108 WREG32(CP_MEQ_THRESHOLDS, MEQ1_START(0x30) | MEQ2_START(0x60));
3110 sx_debug_1 = RREG32(SX_DEBUG_1);
3111 WREG32(SX_DEBUG_1, sx_debug_1);
3113 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3115 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) |
3116 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) |
3117 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) |
3118 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size)));
3120 WREG32(VGT_NUM_INSTANCES, 1);
3122 WREG32(CP_PERFMON_CNTL, 0);
3124 WREG32(SQ_CONFIG, 0);
3126 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3127 FORCE_EOV_MAX_REZ_CNT(255)));
3129 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC) |
3130 AUTO_INVLD_EN(ES_AND_GS_AUTO));
3132 WREG32(VGT_GS_VERTEX_REUSE, 16);
3133 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3135 WREG32(CB_PERFCOUNTER0_SELECT0, 0);
3136 WREG32(CB_PERFCOUNTER0_SELECT1, 0);
3137 WREG32(CB_PERFCOUNTER1_SELECT0, 0);
3138 WREG32(CB_PERFCOUNTER1_SELECT1, 0);
3139 WREG32(CB_PERFCOUNTER2_SELECT0, 0);
3140 WREG32(CB_PERFCOUNTER2_SELECT1, 0);
3141 WREG32(CB_PERFCOUNTER3_SELECT0, 0);
3142 WREG32(CB_PERFCOUNTER3_SELECT1, 0);
3144 tmp = RREG32(HDP_MISC_CNTL);
3145 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3146 WREG32(HDP_MISC_CNTL, tmp);
3148 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3149 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3151 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3157 * GPU scratch registers helper functions.
3159 static void si_scratch_init(struct radeon_device *rdev)
3163 rdev->scratch.num_reg = 7;
3164 rdev->scratch.reg_base = SCRATCH_REG0;
3165 for (i = 0; i < rdev->scratch.num_reg; i++) {
3166 rdev->scratch.free[i] = true;
3167 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3171 void si_fence_ring_emit(struct radeon_device *rdev,
3172 struct radeon_fence *fence)
3174 struct radeon_ring *ring = &rdev->ring[fence->ring];
3175 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3177 /* flush read cache over gart */
3178 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3179 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3180 radeon_ring_write(ring, 0);
3181 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3182 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3183 PACKET3_TC_ACTION_ENA |
3184 PACKET3_SH_KCACHE_ACTION_ENA |
3185 PACKET3_SH_ICACHE_ACTION_ENA);
3186 radeon_ring_write(ring, 0xFFFFFFFF);
3187 radeon_ring_write(ring, 0);
3188 radeon_ring_write(ring, 10); /* poll interval */
3189 /* EVENT_WRITE_EOP - flush caches, send int */
3190 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
3191 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
3192 radeon_ring_write(ring, addr & 0xffffffff);
3193 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
3194 radeon_ring_write(ring, fence->seq);
3195 radeon_ring_write(ring, 0);
3201 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3203 struct radeon_ring *ring = &rdev->ring[ib->ring];
3206 if (ib->is_const_ib) {
3207 /* set switch buffer packet before const IB */
3208 radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
3209 radeon_ring_write(ring, 0);
3211 header = PACKET3(PACKET3_INDIRECT_BUFFER_CONST, 2);
3214 if (ring->rptr_save_reg) {
3215 next_rptr = ring->wptr + 3 + 4 + 8;
3216 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3217 radeon_ring_write(ring, ((ring->rptr_save_reg -
3218 PACKET3_SET_CONFIG_REG_START) >> 2));
3219 radeon_ring_write(ring, next_rptr);
3220 } else if (rdev->wb.enabled) {
3221 next_rptr = ring->wptr + 5 + 4 + 8;
3222 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
3223 radeon_ring_write(ring, (1 << 8));
3224 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3225 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xffffffff);
3226 radeon_ring_write(ring, next_rptr);
3229 header = PACKET3(PACKET3_INDIRECT_BUFFER, 2);
3232 radeon_ring_write(ring, header);
3233 radeon_ring_write(ring,
3237 (ib->gpu_addr & 0xFFFFFFFC));
3238 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
3239 radeon_ring_write(ring, ib->length_dw |
3240 (ib->vm ? (ib->vm->id << 24) : 0));
3242 if (!ib->is_const_ib) {
3243 /* flush read cache over gart for this vmid */
3244 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3245 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3246 radeon_ring_write(ring, ib->vm ? ib->vm->id : 0);
3247 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3248 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3249 PACKET3_TC_ACTION_ENA |
3250 PACKET3_SH_KCACHE_ACTION_ENA |
3251 PACKET3_SH_ICACHE_ACTION_ENA);
3252 radeon_ring_write(ring, 0xFFFFFFFF);
3253 radeon_ring_write(ring, 0);
3254 radeon_ring_write(ring, 10); /* poll interval */
3261 static void si_cp_enable(struct radeon_device *rdev, bool enable)
3264 WREG32(CP_ME_CNTL, 0);
3266 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3267 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3268 WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT));
3269 WREG32(SCRATCH_UMSK, 0);
3270 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3271 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3272 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3277 static int si_cp_load_microcode(struct radeon_device *rdev)
3279 const __be32 *fw_data;
3282 if (!rdev->me_fw || !rdev->pfp_fw)
3285 si_cp_enable(rdev, false);
3288 fw_data = (const __be32 *)rdev->pfp_fw->data;
3289 WREG32(CP_PFP_UCODE_ADDR, 0);
3290 for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
3291 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3292 WREG32(CP_PFP_UCODE_ADDR, 0);
3295 fw_data = (const __be32 *)rdev->ce_fw->data;
3296 WREG32(CP_CE_UCODE_ADDR, 0);
3297 for (i = 0; i < SI_CE_UCODE_SIZE; i++)
3298 WREG32(CP_CE_UCODE_DATA, be32_to_cpup(fw_data++));
3299 WREG32(CP_CE_UCODE_ADDR, 0);
3302 fw_data = (const __be32 *)rdev->me_fw->data;
3303 WREG32(CP_ME_RAM_WADDR, 0);
3304 for (i = 0; i < SI_PM4_UCODE_SIZE; i++)
3305 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3306 WREG32(CP_ME_RAM_WADDR, 0);
3308 WREG32(CP_PFP_UCODE_ADDR, 0);
3309 WREG32(CP_CE_UCODE_ADDR, 0);
3310 WREG32(CP_ME_RAM_WADDR, 0);
3311 WREG32(CP_ME_RAM_RADDR, 0);
3315 static int si_cp_start(struct radeon_device *rdev)
3317 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3320 r = radeon_ring_lock(rdev, ring, 7 + 4);
3322 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3326 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3327 radeon_ring_write(ring, 0x1);
3328 radeon_ring_write(ring, 0x0);
3329 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
3330 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3331 radeon_ring_write(ring, 0);
3332 radeon_ring_write(ring, 0);
3334 /* init the CE partitions */
3335 radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
3336 radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
3337 radeon_ring_write(ring, 0xc000);
3338 radeon_ring_write(ring, 0xe000);
3339 radeon_ring_unlock_commit(rdev, ring);
3341 si_cp_enable(rdev, true);
3343 r = radeon_ring_lock(rdev, ring, si_default_size + 10);
3345 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3349 /* setup clear context state */
3350 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3351 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3353 for (i = 0; i < si_default_size; i++)
3354 radeon_ring_write(ring, si_default_state[i]);
3356 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3357 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3359 /* set clear context state */
3360 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3361 radeon_ring_write(ring, 0);
3363 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
3364 radeon_ring_write(ring, 0x00000316);
3365 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3366 radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3368 radeon_ring_unlock_commit(rdev, ring);
3370 for (i = RADEON_RING_TYPE_GFX_INDEX; i <= CAYMAN_RING_TYPE_CP2_INDEX; ++i) {
3371 ring = &rdev->ring[i];
3372 r = radeon_ring_lock(rdev, ring, 2);
3374 /* clear the compute context state */
3375 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
3376 radeon_ring_write(ring, 0);
3378 radeon_ring_unlock_commit(rdev, ring);
3384 static void si_cp_fini(struct radeon_device *rdev)
3386 struct radeon_ring *ring;
3387 si_cp_enable(rdev, false);
3389 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3390 radeon_ring_fini(rdev, ring);
3391 radeon_scratch_free(rdev, ring->rptr_save_reg);
3393 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3394 radeon_ring_fini(rdev, ring);
3395 radeon_scratch_free(rdev, ring->rptr_save_reg);
3397 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3398 radeon_ring_fini(rdev, ring);
3399 radeon_scratch_free(rdev, ring->rptr_save_reg);
3402 static int si_cp_resume(struct radeon_device *rdev)
3404 struct radeon_ring *ring;
3409 si_enable_gui_idle_interrupt(rdev, false);
3411 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3412 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3414 /* Set the write pointer delay */
3415 WREG32(CP_RB_WPTR_DELAY, 0);
3417 WREG32(CP_DEBUG, 0);
3418 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3420 /* ring 0 - compute and gfx */
3421 /* Set ring buffer size */
3422 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3423 rb_bufsz = order_base_2(ring->ring_size / 8);
3424 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3426 tmp |= BUF_SWAP_32BIT;
3428 WREG32(CP_RB0_CNTL, tmp);
3430 /* Initialize the ring buffer's read and write pointers */
3431 WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
3433 WREG32(CP_RB0_WPTR, ring->wptr);
3435 /* set the wb address whether it's enabled or not */
3436 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
3437 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3439 if (rdev->wb.enabled)
3440 WREG32(SCRATCH_UMSK, 0xff);
3442 tmp |= RB_NO_UPDATE;
3443 WREG32(SCRATCH_UMSK, 0);
3447 WREG32(CP_RB0_CNTL, tmp);
3449 WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
3451 ring->rptr = RREG32(CP_RB0_RPTR);
3453 /* ring1 - compute only */
3454 /* Set ring buffer size */
3455 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3456 rb_bufsz = order_base_2(ring->ring_size / 8);
3457 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3459 tmp |= BUF_SWAP_32BIT;
3461 WREG32(CP_RB1_CNTL, tmp);
3463 /* Initialize the ring buffer's read and write pointers */
3464 WREG32(CP_RB1_CNTL, tmp | RB_RPTR_WR_ENA);
3466 WREG32(CP_RB1_WPTR, ring->wptr);
3468 /* set the wb address whether it's enabled or not */
3469 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);
3470 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);
3473 WREG32(CP_RB1_CNTL, tmp);
3475 WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);
3477 ring->rptr = RREG32(CP_RB1_RPTR);
3479 /* ring2 - compute only */
3480 /* Set ring buffer size */
3481 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3482 rb_bufsz = order_base_2(ring->ring_size / 8);
3483 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3485 tmp |= BUF_SWAP_32BIT;
3487 WREG32(CP_RB2_CNTL, tmp);
3489 /* Initialize the ring buffer's read and write pointers */
3490 WREG32(CP_RB2_CNTL, tmp | RB_RPTR_WR_ENA);
3492 WREG32(CP_RB2_WPTR, ring->wptr);
3494 /* set the wb address whether it's enabled or not */
3495 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC);
3496 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF);
3499 WREG32(CP_RB2_CNTL, tmp);
3501 WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);
3503 ring->rptr = RREG32(CP_RB2_RPTR);
3505 /* start the rings */
3507 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
3508 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
3509 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
3510 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
3512 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3513 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3514 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3517 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
3519 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3521 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
3523 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3526 si_enable_gui_idle_interrupt(rdev, true);
3528 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3529 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
3534 u32 si_gpu_check_soft_reset(struct radeon_device *rdev)
3540 tmp = RREG32(GRBM_STATUS);
3541 if (tmp & (PA_BUSY | SC_BUSY |
3542 BCI_BUSY | SX_BUSY |
3543 TA_BUSY | VGT_BUSY |
3545 GDS_BUSY | SPI_BUSY |
3546 IA_BUSY | IA_BUSY_NO_DMA))
3547 reset_mask |= RADEON_RESET_GFX;
3549 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3550 CP_BUSY | CP_COHERENCY_BUSY))
3551 reset_mask |= RADEON_RESET_CP;
3553 if (tmp & GRBM_EE_BUSY)
3554 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3557 tmp = RREG32(GRBM_STATUS2);
3558 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3559 reset_mask |= RADEON_RESET_RLC;
3561 /* DMA_STATUS_REG 0 */
3562 tmp = RREG32(DMA_STATUS_REG + DMA0_REGISTER_OFFSET);
3563 if (!(tmp & DMA_IDLE))
3564 reset_mask |= RADEON_RESET_DMA;
3566 /* DMA_STATUS_REG 1 */
3567 tmp = RREG32(DMA_STATUS_REG + DMA1_REGISTER_OFFSET);
3568 if (!(tmp & DMA_IDLE))
3569 reset_mask |= RADEON_RESET_DMA1;
3572 tmp = RREG32(SRBM_STATUS2);
3574 reset_mask |= RADEON_RESET_DMA;
3576 if (tmp & DMA1_BUSY)
3577 reset_mask |= RADEON_RESET_DMA1;
3580 tmp = RREG32(SRBM_STATUS);
3583 reset_mask |= RADEON_RESET_IH;
3586 reset_mask |= RADEON_RESET_SEM;
3588 if (tmp & GRBM_RQ_PENDING)
3589 reset_mask |= RADEON_RESET_GRBM;
3592 reset_mask |= RADEON_RESET_VMC;
3594 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3595 MCC_BUSY | MCD_BUSY))
3596 reset_mask |= RADEON_RESET_MC;
3598 if (evergreen_is_display_hung(rdev))
3599 reset_mask |= RADEON_RESET_DISPLAY;
3602 tmp = RREG32(VM_L2_STATUS);
3604 reset_mask |= RADEON_RESET_VMC;
3606 /* Skip MC reset as it's mostly likely not hung, just busy */
3607 if (reset_mask & RADEON_RESET_MC) {
3608 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3609 reset_mask &= ~RADEON_RESET_MC;
3615 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3617 struct evergreen_mc_save save;
3618 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3621 if (reset_mask == 0)
3624 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3626 evergreen_print_gpu_status_regs(rdev);
3627 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
3628 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
3629 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3630 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
3639 /* Disable CP parsing/prefetching */
3640 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3642 if (reset_mask & RADEON_RESET_DMA) {
3644 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3645 tmp &= ~DMA_RB_ENABLE;
3646 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3648 if (reset_mask & RADEON_RESET_DMA1) {
3650 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3651 tmp &= ~DMA_RB_ENABLE;
3652 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3657 evergreen_mc_stop(rdev, &save);
3658 if (evergreen_mc_wait_for_idle(rdev)) {
3659 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3662 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE | RADEON_RESET_CP)) {
3663 grbm_soft_reset = SOFT_RESET_CB |
3677 if (reset_mask & RADEON_RESET_CP) {
3678 grbm_soft_reset |= SOFT_RESET_CP | SOFT_RESET_VGT;
3680 srbm_soft_reset |= SOFT_RESET_GRBM;
3683 if (reset_mask & RADEON_RESET_DMA)
3684 srbm_soft_reset |= SOFT_RESET_DMA;
3686 if (reset_mask & RADEON_RESET_DMA1)
3687 srbm_soft_reset |= SOFT_RESET_DMA1;
3689 if (reset_mask & RADEON_RESET_DISPLAY)
3690 srbm_soft_reset |= SOFT_RESET_DC;
3692 if (reset_mask & RADEON_RESET_RLC)
3693 grbm_soft_reset |= SOFT_RESET_RLC;
3695 if (reset_mask & RADEON_RESET_SEM)
3696 srbm_soft_reset |= SOFT_RESET_SEM;
3698 if (reset_mask & RADEON_RESET_IH)
3699 srbm_soft_reset |= SOFT_RESET_IH;
3701 if (reset_mask & RADEON_RESET_GRBM)
3702 srbm_soft_reset |= SOFT_RESET_GRBM;
3704 if (reset_mask & RADEON_RESET_VMC)
3705 srbm_soft_reset |= SOFT_RESET_VMC;
3707 if (reset_mask & RADEON_RESET_MC)
3708 srbm_soft_reset |= SOFT_RESET_MC;
3710 if (grbm_soft_reset) {
3711 tmp = RREG32(GRBM_SOFT_RESET);
3712 tmp |= grbm_soft_reset;
3713 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3714 WREG32(GRBM_SOFT_RESET, tmp);
3715 tmp = RREG32(GRBM_SOFT_RESET);
3719 tmp &= ~grbm_soft_reset;
3720 WREG32(GRBM_SOFT_RESET, tmp);
3721 tmp = RREG32(GRBM_SOFT_RESET);
3724 if (srbm_soft_reset) {
3725 tmp = RREG32(SRBM_SOFT_RESET);
3726 tmp |= srbm_soft_reset;
3727 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3728 WREG32(SRBM_SOFT_RESET, tmp);
3729 tmp = RREG32(SRBM_SOFT_RESET);
3733 tmp &= ~srbm_soft_reset;
3734 WREG32(SRBM_SOFT_RESET, tmp);
3735 tmp = RREG32(SRBM_SOFT_RESET);
3738 /* Wait a little for things to settle down */
3741 evergreen_mc_resume(rdev, &save);
3744 evergreen_print_gpu_status_regs(rdev);
3747 static void si_set_clk_bypass_mode(struct radeon_device *rdev)
3751 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3752 tmp |= SPLL_BYPASS_EN;
3753 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3755 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3756 tmp |= SPLL_CTLREQ_CHG;
3757 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
3759 for (i = 0; i < rdev->usec_timeout; i++) {
3760 if (RREG32(SPLL_STATUS) & SPLL_CHG_STATUS)
3765 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3766 tmp &= ~(SPLL_CTLREQ_CHG | SCLK_MUX_UPDATE);
3767 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
3769 tmp = RREG32(MPLL_CNTL_MODE);
3770 tmp &= ~MPLL_MCLK_SEL;
3771 WREG32(MPLL_CNTL_MODE, tmp);
3774 static void si_spll_powerdown(struct radeon_device *rdev)
3778 tmp = RREG32(SPLL_CNTL_MODE);
3779 tmp |= SPLL_SW_DIR_CONTROL;
3780 WREG32(SPLL_CNTL_MODE, tmp);
3782 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3784 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3786 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3788 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3790 tmp = RREG32(SPLL_CNTL_MODE);
3791 tmp &= ~SPLL_SW_DIR_CONTROL;
3792 WREG32(SPLL_CNTL_MODE, tmp);
3795 static void si_gpu_pci_config_reset(struct radeon_device *rdev)
3797 struct evergreen_mc_save save;
3800 dev_info(rdev->dev, "GPU pci config reset\n");
3808 /* Disable CP parsing/prefetching */
3809 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3811 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3812 tmp &= ~DMA_RB_ENABLE;
3813 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3815 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3816 tmp &= ~DMA_RB_ENABLE;
3817 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3818 /* XXX other engines? */
3820 /* halt the rlc, disable cp internal ints */
3825 /* disable mem access */
3826 evergreen_mc_stop(rdev, &save);
3827 if (evergreen_mc_wait_for_idle(rdev)) {
3828 dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
3831 /* set mclk/sclk to bypass */
3832 si_set_clk_bypass_mode(rdev);
3833 /* powerdown spll */
3834 si_spll_powerdown(rdev);
3836 pci_clear_master(rdev->pdev);
3838 radeon_pci_config_reset(rdev);
3839 /* wait for asic to come out of reset */
3840 for (i = 0; i < rdev->usec_timeout; i++) {
3841 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
3847 int si_asic_reset(struct radeon_device *rdev)
3851 reset_mask = si_gpu_check_soft_reset(rdev);
3854 r600_set_bios_scratch_engine_hung(rdev, true);
3856 /* try soft reset */
3857 si_gpu_soft_reset(rdev, reset_mask);
3859 reset_mask = si_gpu_check_soft_reset(rdev);
3861 /* try pci config reset */
3862 if (reset_mask && radeon_hard_reset)
3863 si_gpu_pci_config_reset(rdev);
3865 reset_mask = si_gpu_check_soft_reset(rdev);
3868 r600_set_bios_scratch_engine_hung(rdev, false);
3874 * si_gfx_is_lockup - Check if the GFX engine is locked up
3876 * @rdev: radeon_device pointer
3877 * @ring: radeon_ring structure holding ring information
3879 * Check if the GFX engine is locked up.
3880 * Returns true if the engine appears to be locked up, false if not.
3882 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3884 u32 reset_mask = si_gpu_check_soft_reset(rdev);
3886 if (!(reset_mask & (RADEON_RESET_GFX |
3887 RADEON_RESET_COMPUTE |
3888 RADEON_RESET_CP))) {
3889 radeon_ring_lockup_update(ring);
3892 /* force CP activities */
3893 radeon_ring_force_activity(rdev, ring);
3894 return radeon_ring_test_lockup(rdev, ring);
3898 static void si_mc_program(struct radeon_device *rdev)
3900 struct evergreen_mc_save save;
3904 /* Initialize HDP */
3905 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3906 WREG32((0x2c14 + j), 0x00000000);
3907 WREG32((0x2c18 + j), 0x00000000);
3908 WREG32((0x2c1c + j), 0x00000000);
3909 WREG32((0x2c20 + j), 0x00000000);
3910 WREG32((0x2c24 + j), 0x00000000);
3912 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
3914 evergreen_mc_stop(rdev, &save);
3915 if (radeon_mc_wait_for_idle(rdev)) {
3916 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3918 if (!ASIC_IS_NODCE(rdev))
3919 /* Lockout access through VGA aperture*/
3920 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
3921 /* Update configuration */
3922 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
3923 rdev->mc.vram_start >> 12);
3924 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
3925 rdev->mc.vram_end >> 12);
3926 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR,
3927 rdev->vram_scratch.gpu_addr >> 12);
3928 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3929 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3930 WREG32(MC_VM_FB_LOCATION, tmp);
3931 /* XXX double check these! */
3932 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3933 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3934 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3935 WREG32(MC_VM_AGP_BASE, 0);
3936 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3937 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3938 if (radeon_mc_wait_for_idle(rdev)) {
3939 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3941 evergreen_mc_resume(rdev, &save);
3942 if (!ASIC_IS_NODCE(rdev)) {
3943 /* we need to own VRAM, so turn off the VGA renderer here
3944 * to stop it overwriting our objects */
3945 rv515_vga_render_disable(rdev);
3949 void si_vram_gtt_location(struct radeon_device *rdev,
3950 struct radeon_mc *mc)
3952 if (mc->mc_vram_size > 0xFFC0000000ULL) {
3953 /* leave room for at least 1024M GTT */
3954 dev_warn(rdev->dev, "limiting VRAM\n");
3955 mc->real_vram_size = 0xFFC0000000ULL;
3956 mc->mc_vram_size = 0xFFC0000000ULL;
3958 radeon_vram_location(rdev, &rdev->mc, 0);
3959 rdev->mc.gtt_base_align = 0;
3960 radeon_gtt_location(rdev, mc);
3963 static int si_mc_init(struct radeon_device *rdev)
3966 int chansize, numchan;
3968 /* Get VRAM informations */
3969 rdev->mc.vram_is_ddr = true;
3970 tmp = RREG32(MC_ARB_RAMCFG);
3971 if (tmp & CHANSIZE_OVERRIDE) {
3973 } else if (tmp & CHANSIZE_MASK) {
3978 tmp = RREG32(MC_SHARED_CHMAP);
3979 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
4009 rdev->mc.vram_width = numchan * chansize;
4010 /* Could aper size report 0 ? */
4011 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
4012 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
4013 /* size in MB on si */
4014 tmp = RREG32(CONFIG_MEMSIZE);
4015 /* some boards may have garbage in the upper 16 bits */
4016 if (tmp & 0xffff0000) {
4017 DRM_INFO("Probable bad vram size: 0x%08x\n", tmp);
4021 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
4022 rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
4023 rdev->mc.visible_vram_size = rdev->mc.aper_size;
4024 si_vram_gtt_location(rdev, &rdev->mc);
4025 radeon_update_bandwidth_info(rdev);
4033 void si_pcie_gart_tlb_flush(struct radeon_device *rdev)
4035 /* flush hdp cache */
4036 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
4038 /* bits 0-15 are the VM contexts0-15 */
4039 WREG32(VM_INVALIDATE_REQUEST, 1);
4042 static int si_pcie_gart_enable(struct radeon_device *rdev)
4046 if (rdev->gart.robj == NULL) {
4047 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
4050 r = radeon_gart_table_vram_pin(rdev);
4053 radeon_gart_restore(rdev);
4054 /* Setup TLB control */
4055 WREG32(MC_VM_MX_L1_TLB_CNTL,
4058 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4059 ENABLE_ADVANCED_DRIVER_MODEL |
4060 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4061 /* Setup L2 cache */
4062 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE |
4063 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4064 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4065 EFFECTIVE_L2_QUEUE_SIZE(7) |
4066 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4067 WREG32(VM_L2_CNTL2, INVALIDATE_ALL_L1_TLBS | INVALIDATE_L2_CACHE);
4068 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4069 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4070 /* setup context0 */
4071 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
4072 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
4073 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
4074 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
4075 (u32)(rdev->dummy_page.addr >> 12));
4076 WREG32(VM_CONTEXT0_CNTL2, 0);
4077 WREG32(VM_CONTEXT0_CNTL, (ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
4078 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT));
4084 /* empty context1-15 */
4085 /* set vm size, must be a multiple of 4 */
4086 WREG32(VM_CONTEXT1_PAGE_TABLE_START_ADDR, 0);
4087 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn);
4088 /* Assign the pt base to something valid for now; the pts used for
4089 * the VMs are determined by the application and setup and assigned
4090 * on the fly in the vm part of radeon_gart.c
4092 for (i = 1; i < 16; i++) {
4094 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (i << 2),
4095 rdev->gart.table_addr >> 12);
4097 WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2),
4098 rdev->gart.table_addr >> 12);
4101 /* enable context1-15 */
4102 WREG32(VM_CONTEXT1_PROTECTION_FAULT_DEFAULT_ADDR,
4103 (u32)(rdev->dummy_page.addr >> 12));
4104 WREG32(VM_CONTEXT1_CNTL2, 4);
4105 WREG32(VM_CONTEXT1_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(1) |
4106 RANGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4107 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4108 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4109 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4110 PDE0_PROTECTION_FAULT_ENABLE_INTERRUPT |
4111 PDE0_PROTECTION_FAULT_ENABLE_DEFAULT |
4112 VALID_PROTECTION_FAULT_ENABLE_INTERRUPT |
4113 VALID_PROTECTION_FAULT_ENABLE_DEFAULT |
4114 READ_PROTECTION_FAULT_ENABLE_INTERRUPT |
4115 READ_PROTECTION_FAULT_ENABLE_DEFAULT |
4116 WRITE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4117 WRITE_PROTECTION_FAULT_ENABLE_DEFAULT);
4119 si_pcie_gart_tlb_flush(rdev);
4120 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
4121 (unsigned)(rdev->mc.gtt_size >> 20),
4122 (unsigned long long)rdev->gart.table_addr);
4123 rdev->gart.ready = true;
4127 static void si_pcie_gart_disable(struct radeon_device *rdev)
4129 /* Disable all tables */
4130 WREG32(VM_CONTEXT0_CNTL, 0);
4131 WREG32(VM_CONTEXT1_CNTL, 0);
4132 /* Setup TLB control */
4133 WREG32(MC_VM_MX_L1_TLB_CNTL, SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4134 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4135 /* Setup L2 cache */
4136 WREG32(VM_L2_CNTL, ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4137 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4138 EFFECTIVE_L2_QUEUE_SIZE(7) |
4139 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4140 WREG32(VM_L2_CNTL2, 0);
4141 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4142 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4143 radeon_gart_table_vram_unpin(rdev);
4146 static void si_pcie_gart_fini(struct radeon_device *rdev)
4148 si_pcie_gart_disable(rdev);
4149 radeon_gart_table_vram_free(rdev);
4150 radeon_gart_fini(rdev);
4154 static bool si_vm_reg_valid(u32 reg)
4156 /* context regs are fine */
4160 /* check config regs */
4162 case GRBM_GFX_INDEX:
4163 case CP_STRMOUT_CNTL:
4164 case VGT_VTX_VECT_EJECT_REG:
4165 case VGT_CACHE_INVALIDATION:
4166 case VGT_ESGS_RING_SIZE:
4167 case VGT_GSVS_RING_SIZE:
4168 case VGT_GS_VERTEX_REUSE:
4169 case VGT_PRIMITIVE_TYPE:
4170 case VGT_INDEX_TYPE:
4171 case VGT_NUM_INDICES:
4172 case VGT_NUM_INSTANCES:
4173 case VGT_TF_RING_SIZE:
4174 case VGT_HS_OFFCHIP_PARAM:
4175 case VGT_TF_MEMORY_BASE:
4177 case PA_SU_LINE_STIPPLE_VALUE:
4178 case PA_SC_LINE_STIPPLE_STATE:
4181 case SPI_STATIC_THREAD_MGMT_1:
4182 case SPI_STATIC_THREAD_MGMT_2:
4183 case SPI_STATIC_THREAD_MGMT_3:
4184 case SPI_PS_MAX_WAVE_ID:
4185 case SPI_CONFIG_CNTL:
4186 case SPI_CONFIG_CNTL_1:
4190 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
4195 static int si_vm_packet3_ce_check(struct radeon_device *rdev,
4196 u32 *ib, struct radeon_cs_packet *pkt)
4198 switch (pkt->opcode) {
4200 case PACKET3_SET_BASE:
4201 case PACKET3_SET_CE_DE_COUNTERS:
4202 case PACKET3_LOAD_CONST_RAM:
4203 case PACKET3_WRITE_CONST_RAM:
4204 case PACKET3_WRITE_CONST_RAM_OFFSET:
4205 case PACKET3_DUMP_CONST_RAM:
4206 case PACKET3_INCREMENT_CE_COUNTER:
4207 case PACKET3_WAIT_ON_DE_COUNTER:
4208 case PACKET3_CE_WRITE:
4211 DRM_ERROR("Invalid CE packet3: 0x%x\n", pkt->opcode);
4217 static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)
4219 u32 start_reg, reg, i;
4220 u32 command = ib[idx + 4];
4221 u32 info = ib[idx + 1];
4222 u32 idx_value = ib[idx];
4223 if (command & PACKET3_CP_DMA_CMD_SAS) {
4224 /* src address space is register */
4225 if (((info & 0x60000000) >> 29) == 0) {
4226 start_reg = idx_value << 2;
4227 if (command & PACKET3_CP_DMA_CMD_SAIC) {
4229 if (!si_vm_reg_valid(reg)) {
4230 DRM_ERROR("CP DMA Bad SRC register\n");
4234 for (i = 0; i < (command & 0x1fffff); i++) {
4235 reg = start_reg + (4 * i);
4236 if (!si_vm_reg_valid(reg)) {
4237 DRM_ERROR("CP DMA Bad SRC register\n");
4244 if (command & PACKET3_CP_DMA_CMD_DAS) {
4245 /* dst address space is register */
4246 if (((info & 0x00300000) >> 20) == 0) {
4247 start_reg = ib[idx + 2];
4248 if (command & PACKET3_CP_DMA_CMD_DAIC) {
4250 if (!si_vm_reg_valid(reg)) {
4251 DRM_ERROR("CP DMA Bad DST register\n");
4255 for (i = 0; i < (command & 0x1fffff); i++) {
4256 reg = start_reg + (4 * i);
4257 if (!si_vm_reg_valid(reg)) {
4258 DRM_ERROR("CP DMA Bad DST register\n");
/*
 * Validate a PACKET3 on the GFX ring of a VM IB.  Most draw/state opcodes
 * are allowed outright; packets that can write registers (COPY_DATA,
 * WRITE_DATA, COND_WRITE, COPY_DW, SET_CONFIG_REG, CP_DMA) have their
 * target registers checked via si_vm_reg_valid().
 * NOTE(review): listing is elided — break/return lines missing; verify against full source.
 */
4268 static int si_vm_packet3_gfx_check(struct radeon_device *rdev,
4269 u32 *ib, struct radeon_cs_packet *pkt)
4272 u32 idx = pkt->idx + 1;
4273 u32 idx_value = ib[idx];
4274 u32 start_reg, end_reg, reg, i;
4276 switch (pkt->opcode) {
/* opcodes accepted without inspection */
4278 case PACKET3_SET_BASE:
4279 case PACKET3_CLEAR_STATE:
4280 case PACKET3_INDEX_BUFFER_SIZE:
4281 case PACKET3_DISPATCH_DIRECT:
4282 case PACKET3_DISPATCH_INDIRECT:
4283 case PACKET3_ALLOC_GDS:
4284 case PACKET3_WRITE_GDS_RAM:
4285 case PACKET3_ATOMIC_GDS:
4286 case PACKET3_ATOMIC:
4287 case PACKET3_OCCLUSION_QUERY:
4288 case PACKET3_SET_PREDICATION:
4289 case PACKET3_COND_EXEC:
4290 case PACKET3_PRED_EXEC:
4291 case PACKET3_DRAW_INDIRECT:
4292 case PACKET3_DRAW_INDEX_INDIRECT:
4293 case PACKET3_INDEX_BASE:
4294 case PACKET3_DRAW_INDEX_2:
4295 case PACKET3_CONTEXT_CONTROL:
4296 case PACKET3_INDEX_TYPE:
4297 case PACKET3_DRAW_INDIRECT_MULTI:
4298 case PACKET3_DRAW_INDEX_AUTO:
4299 case PACKET3_DRAW_INDEX_IMMD:
4300 case PACKET3_NUM_INSTANCES:
4301 case PACKET3_DRAW_INDEX_MULTI_AUTO:
4302 case PACKET3_STRMOUT_BUFFER_UPDATE:
4303 case PACKET3_DRAW_INDEX_OFFSET_2:
4304 case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
4305 case PACKET3_DRAW_INDEX_INDIRECT_MULTI:
4306 case PACKET3_MPEG_INDEX:
4307 case PACKET3_WAIT_REG_MEM:
4308 case PACKET3_MEM_WRITE:
4309 case PACKET3_PFP_SYNC_ME:
4310 case PACKET3_SURFACE_SYNC:
4311 case PACKET3_EVENT_WRITE:
4312 case PACKET3_EVENT_WRITE_EOP:
4313 case PACKET3_EVENT_WRITE_EOS:
4314 case PACKET3_SET_CONTEXT_REG:
4315 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4316 case PACKET3_SET_SH_REG:
4317 case PACKET3_SET_SH_REG_OFFSET:
4318 case PACKET3_INCREMENT_DE_COUNTER:
4319 case PACKET3_WAIT_ON_CE_COUNTER:
4320 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4321 case PACKET3_ME_WRITE:
4323 case PACKET3_COPY_DATA:
/* dst-sel field 0 == register destination: validate it */
4324 if ((idx_value & 0xf00) == 0) {
4325 reg = ib[idx + 3] * 4;
4326 if (!si_vm_reg_valid(reg))
4330 case PACKET3_WRITE_DATA:
4331 if ((idx_value & 0xf00) == 0) {
4332 start_reg = ib[idx + 1] * 4;
/* bit 16 set: single register; clear: pkt->count - 2 consecutive regs */
4333 if (idx_value & 0x10000) {
4334 if (!si_vm_reg_valid(start_reg))
4337 for (i = 0; i < (pkt->count - 2); i++) {
4338 reg = start_reg + (4 * i);
4339 if (!si_vm_reg_valid(reg))
4345 case PACKET3_COND_WRITE:
/* bit 8: write destination is a register */
4346 if (idx_value & 0x100) {
4347 reg = ib[idx + 5] * 4;
4348 if (!si_vm_reg_valid(reg))
4352 case PACKET3_COPY_DW:
/* bit 1: destination is a register */
4353 if (idx_value & 0x2) {
4354 reg = ib[idx + 3] * 4;
4355 if (!si_vm_reg_valid(reg))
4359 case PACKET3_SET_CONFIG_REG:
/* range-check the whole config-reg window, then each register */
4360 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
4361 end_reg = 4 * pkt->count + start_reg - 4;
4362 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
4363 (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
4364 (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
4365 DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
4368 for (i = 0; i < pkt->count; i++) {
4369 reg = start_reg + (4 * i);
4370 if (!si_vm_reg_valid(reg))
4374 case PACKET3_CP_DMA:
4375 r = si_vm_packet3_cp_dma_check(ib, idx);
4380 DRM_ERROR("Invalid GFX packet3: 0x%x\n", pkt->opcode);
/*
 * Validate a PACKET3 on a compute ring of a VM IB.  Same scheme as the GFX
 * checker but with a smaller whitelist (no draw packets) and no
 * SET_CONFIG_REG case.
 * NOTE(review): listing is elided — break/return lines missing; verify against full source.
 */
4386 static int si_vm_packet3_compute_check(struct radeon_device *rdev,
4387 u32 *ib, struct radeon_cs_packet *pkt)
4390 u32 idx = pkt->idx + 1;
4391 u32 idx_value = ib[idx];
4392 u32 start_reg, reg, i;
4394 switch (pkt->opcode) {
/* opcodes accepted without inspection */
4396 case PACKET3_SET_BASE:
4397 case PACKET3_CLEAR_STATE:
4398 case PACKET3_DISPATCH_DIRECT:
4399 case PACKET3_DISPATCH_INDIRECT:
4400 case PACKET3_ALLOC_GDS:
4401 case PACKET3_WRITE_GDS_RAM:
4402 case PACKET3_ATOMIC_GDS:
4403 case PACKET3_ATOMIC:
4404 case PACKET3_OCCLUSION_QUERY:
4405 case PACKET3_SET_PREDICATION:
4406 case PACKET3_COND_EXEC:
4407 case PACKET3_PRED_EXEC:
4408 case PACKET3_CONTEXT_CONTROL:
4409 case PACKET3_STRMOUT_BUFFER_UPDATE:
4410 case PACKET3_WAIT_REG_MEM:
4411 case PACKET3_MEM_WRITE:
4412 case PACKET3_PFP_SYNC_ME:
4413 case PACKET3_SURFACE_SYNC:
4414 case PACKET3_EVENT_WRITE:
4415 case PACKET3_EVENT_WRITE_EOP:
4416 case PACKET3_EVENT_WRITE_EOS:
4417 case PACKET3_SET_CONTEXT_REG:
4418 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4419 case PACKET3_SET_SH_REG:
4420 case PACKET3_SET_SH_REG_OFFSET:
4421 case PACKET3_INCREMENT_DE_COUNTER:
4422 case PACKET3_WAIT_ON_CE_COUNTER:
4423 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4424 case PACKET3_ME_WRITE:
4426 case PACKET3_COPY_DATA:
/* dst-sel field 0 == register destination: validate it */
4427 if ((idx_value & 0xf00) == 0) {
4428 reg = ib[idx + 3] * 4;
4429 if (!si_vm_reg_valid(reg))
4433 case PACKET3_WRITE_DATA:
4434 if ((idx_value & 0xf00) == 0) {
4435 start_reg = ib[idx + 1] * 4;
/* bit 16 set: single register; clear: pkt->count - 2 consecutive regs */
4436 if (idx_value & 0x10000) {
4437 if (!si_vm_reg_valid(start_reg))
4440 for (i = 0; i < (pkt->count - 2); i++) {
4441 reg = start_reg + (4 * i);
4442 if (!si_vm_reg_valid(reg))
4448 case PACKET3_COND_WRITE:
4449 if (idx_value & 0x100) {
4450 reg = ib[idx + 5] * 4;
4451 if (!si_vm_reg_valid(reg))
4455 case PACKET3_COPY_DW:
4456 if (idx_value & 0x2) {
4457 reg = ib[idx + 3] * 4;
4458 if (!si_vm_reg_valid(reg))
4462 case PACKET3_CP_DMA:
4463 r = si_vm_packet3_cp_dma_check(ib, idx);
4468 DRM_ERROR("Invalid Compute packet3: 0x%x\n", pkt->opcode);
/*
 * Walk an indirect buffer dword by dword and dispatch each packet to the
 * appropriate checker: CE checker for const IBs, GFX or compute checker by
 * ring index otherwise.  Type-0 packets and unknown types are rejected.
 * NOTE(review): listing is elided — idx init, switch headers, and error
 * handling lines are missing; verify against full source.
 */
4474 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
4478 struct radeon_cs_packet pkt;
4482 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
4483 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
4486 case RADEON_PACKET_TYPE0:
4487 dev_err(rdev->dev, "Packet0 not allowed!\n");
4490 case RADEON_PACKET_TYPE2:
4493 case RADEON_PACKET_TYPE3:
4494 pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
4495 if (ib->is_const_ib)
4496 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
4499 case RADEON_RING_TYPE_GFX_INDEX:
4500 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
4502 case CAYMAN_RING_TYPE_CP1_INDEX:
4503 case CAYMAN_RING_TYPE_CP2_INDEX:
4504 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
4507 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
/* advance past header + payload dwords of this packet */
4512 idx += pkt.count + 2;
4515 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type);
4521 } while (idx < ib->length_dw);
/*
 * VM manager init for SI: 16 hardware VM contexts, no extra VRAM page
 * base offset.  (Return statement elided in this listing.)
 */
4529 int si_vm_init(struct radeon_device *rdev)
4532 rdev->vm_manager.nvm = 16;
4533 /* base offset of vram pages */
4534 rdev->vm_manager.vram_base_offset = 0;
/* VM manager teardown — body elided in this listing (appears to be empty/no-op). */
4539 void si_vm_fini(struct radeon_device *rdev)
4544 * si_vm_decode_fault - print human readable fault info
4546 * @rdev: radeon_device pointer
4547 * @status: VM_CONTEXT1_PROTECTION_FAULT_STATUS register value
4548 * @addr: VM_CONTEXT1_PROTECTION_FAULT_ADDR register value
4550 * Print human readable fault information (SI).
4552 static void si_vm_decode_fault(struct radeon_device *rdev,
4553 u32 status, u32 addr)
/* decode the packed fault-status fields */
4555 u32 mc_id = (status & MEMORY_CLIENT_ID_MASK) >> MEMORY_CLIENT_ID_SHIFT;
4556 u32 vmid = (status & FAULT_VMID_MASK) >> FAULT_VMID_SHIFT;
4557 u32 protections = (status & PROTECTIONS_MASK) >> PROTECTIONS_SHIFT;
/* NOTE(review): a large per-family mc_id -> client-name table (lines
 * ~4561-4800) is elided from this listing; only the Tahiti branch head
 * and the final printk are visible. */
4560 if (rdev->family == CHIP_TAHITI) {
4801 printk("VM fault (0x%02x, vmid %d) at page %u, %s from %s (%d)\n",
4802 protections, vmid, addr,
4803 (status & MEMORY_CLIENT_RW_MASK) ? "write" : "read",
/*
 * Emit PM4 commands on the given ring to (1) point the VM context's page
 * directory at vm->pd_gpu_addr, (2) flush the HDP cache, (3) invalidate
 * the TLB for this VM id, and (4) sync PFP to ME.
 * NOTE(review): listing is elided — the early-out for vm == NULL and the
 * vm->id < 8 conditional are among the missing lines; verify against full source.
 */
4807 void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
4809 struct radeon_ring *ring = &rdev->ring[ridx];
4814 /* write new base address */
4815 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4816 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
4817 WRITE_DATA_DST_SEL(0)));
/* contexts 0-7 and 8-15 live in two separate register banks */
4820 radeon_ring_write(ring,
4821 (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2);
4823 radeon_ring_write(ring,
4824 (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm->id - 8) << 2)) >> 2);
4826 radeon_ring_write(ring, 0);
4827 radeon_ring_write(ring, vm->pd_gpu_addr >> 12);
4829 /* flush hdp cache */
4830 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4831 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4832 WRITE_DATA_DST_SEL(0)));
4833 radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
4834 radeon_ring_write(ring, 0);
4835 radeon_ring_write(ring, 0x1);
4837 /* bits 0-15 are the VM contexts0-15 */
4838 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4839 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4840 WRITE_DATA_DST_SEL(0)));
4841 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
4842 radeon_ring_write(ring, 0);
4843 radeon_ring_write(ring, 1 << vm->id);
4845 /* sync PFP to ME, otherwise we might get invalid PFP reads */
4846 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
4847 radeon_ring_write(ring, 0x0);
4851 * Power and clock gating
/*
 * Poll both RLC serdes master-busy registers until they read 0 or the
 * usec timeout expires (break/udelay lines elided in this listing).
 */
4853 static void si_wait_for_rlc_serdes(struct radeon_device *rdev)
4857 for (i = 0; i < rdev->usec_timeout; i++) {
4858 if (RREG32(RLC_SERDES_MASTER_BUSY_0) == 0)
4863 for (i = 0; i < rdev->usec_timeout; i++) {
4864 if (RREG32(RLC_SERDES_MASTER_BUSY_1) == 0)
/*
 * Enable/disable the GUI idle (context busy/empty) interrupts on CP ring 0.
 * On disable it also reads a GFX register and waits for RLC_STAT to report
 * clock+power status, presumably so GFX settles before gating — TODO confirm
 * against full source (the enable/disable branch lines are elided here).
 */
4870 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
4873 u32 tmp = RREG32(CP_INT_CNTL_RING0);
4878 tmp |= (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4880 tmp &= ~(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4881 WREG32(CP_INT_CNTL_RING0, tmp);
4884 /* read a gfx register */
4885 tmp = RREG32(DB_DEPTH_INFO);
4887 mask = RLC_BUSY_STATUS | GFX_POWER_STATUS | GFX_CLOCK_STATUS | GFX_LS_STATUS;
4888 for (i = 0; i < rdev->usec_timeout; i++) {
4889 if ((RREG32(RLC_STAT) & mask) == (GFX_CLOCK_STATUS | GFX_POWER_STATUS))
/*
 * Configure UVD dynamic clock mode: program divider/delay fields in
 * UVD_CGC_CTRL and the secondary control in UVD_CGC_CTRL2 (the sw_mode
 * else-branch lines are elided in this listing).
 */
4896 static void si_set_uvd_dcm(struct radeon_device *rdev,
4901 tmp = RREG32(UVD_CGC_CTRL);
4902 tmp &= ~(CLK_OD_MASK | CG_DT_MASK);
4903 tmp |= DCM | CG_DT(1) | CLK_OD(4);
4907 tmp2 = DYN_OR_EN | DYN_RR_EN | G_DIV_ID(7);
4913 WREG32(UVD_CGC_CTRL, tmp);
4914 WREG32_UVD_CTX(UVD_CGC_CTRL2, tmp2);
/*
 * Initialize UVD internal clock gating: disable DCM and tweak
 * UVD_CGC_CTRL (the bit manipulation between read and write is elided
 * in this listing; hw_mode gating of the block is also not visible).
 */
4917 void si_init_uvd_internal_cg(struct radeon_device *rdev)
4919 bool hw_mode = true;
4922 si_set_uvd_dcm(rdev, false);
4924 u32 tmp = RREG32(UVD_CGC_CTRL);
4926 WREG32(UVD_CGC_CTRL, tmp);
/*
 * Stop the RLC if it is running and wait for its serdes to go idle.
 * Returns the original RLC_CNTL value so the caller can restore it via
 * si_update_rlc() (return line elided in this listing).
 */
4930 static u32 si_halt_rlc(struct radeon_device *rdev)
4934 orig = data = RREG32(RLC_CNTL);
4936 if (data & RLC_ENABLE) {
4937 data &= ~RLC_ENABLE;
4938 WREG32(RLC_CNTL, data);
4940 si_wait_for_rlc_serdes(rdev);
/*
 * Restore RLC_CNTL to the value saved by si_halt_rlc() — only writes if
 * the current value differs (the comparison line is elided in this listing).
 */
4946 static void si_update_rlc(struct radeon_device *rdev, u32 rlc)
4950 tmp = RREG32(RLC_CNTL);
4952 WREG32(RLC_CNTL, rlc);
/*
 * Enable/disable DMA power gating via DMA_PG, gated on the
 * RADEON_PG_SUPPORT_SDMA flag; only writes when the value changed
 * (the orig != data comparison is elided in this listing).
 */
4955 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable)
4959 orig = data = RREG32(DMA_PG);
4960 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA))
4961 data |= PG_CNTL_ENABLE;
4963 data &= ~PG_CNTL_ENABLE;
4965 WREG32(DMA_PG, data);
/*
 * Program the DMA power-gating state machine: one config write followed
 * by five zero writes to DMA_PGFSM_WRITE (magic sequence per hardware
 * programming requirements — exact meaning not visible here).
 */
4968 static void si_init_dma_pg(struct radeon_device *rdev)
4972 WREG32(DMA_PGFSM_WRITE, 0x00002000);
4973 WREG32(DMA_PGFSM_CONFIG, 0x100010ff);
4975 for (tmp = 0; tmp < 5; tmp++)
4976 WREG32(DMA_PGFSM_WRITE, 0);
/*
 * Enable/disable GFX coarse-grain power gating: set RLC thresholds,
 * GFX_PG_ENABLE in RLC_PG_CNTL, and auto-PG control, then touch
 * DB_RENDER_CONTROL.  The disable branch and the AUTO_PG bit edits are
 * elided in this listing; verify against full source.
 */
4979 static void si_enable_gfx_cgpg(struct radeon_device *rdev,
4984 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
4985 tmp = RLC_PUD(0x10) | RLC_PDD(0x10) | RLC_TTPD(0x10) | RLC_MSD(0x10);
4986 WREG32(RLC_TTOP_D, tmp);
4988 tmp = RREG32(RLC_PG_CNTL);
4989 tmp |= GFX_PG_ENABLE;
4990 WREG32(RLC_PG_CNTL, tmp);
4992 tmp = RREG32(RLC_AUTO_PG_CTRL);
4994 WREG32(RLC_AUTO_PG_CTRL, tmp);
4996 tmp = RREG32(RLC_AUTO_PG_CTRL);
4998 WREG32(RLC_AUTO_PG_CTRL, tmp);
5000 tmp = RREG32(DB_RENDER_CONTROL);
/*
 * One-time GFX power-gating setup: program the RLC save/restore and
 * clear-state buffer GPU addresses and the auto-PG idle threshold
 * (GRBM_REG_SGIT = 0x700).  Intermediate bit edits of RLC_PG_CNTL are
 * elided in this listing.
 */
5004 static void si_init_gfx_cgpg(struct radeon_device *rdev)
5008 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5010 tmp = RREG32(RLC_PG_CNTL);
5012 WREG32(RLC_PG_CNTL, tmp);
5014 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
5016 tmp = RREG32(RLC_AUTO_PG_CTRL);
5018 tmp &= ~GRBM_REG_SGIT_MASK;
5019 tmp |= GRBM_REG_SGIT(0x700);
5020 tmp &= ~PG_AFTER_GRBM_REG_ST_MASK;
5021 WREG32(RLC_AUTO_PG_CTRL, tmp);
/*
 * Return the bitmap of active compute units for shader engine @se /
 * shader array @sh: disabled CUs come from the CC/GC_USER shader-array
 * config registers; the result is the inverse masked to max_cu_per_sh.
 * The tmp |= tmp1 / shift and mask-building lines are elided in this listing.
 */
5024 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
5026 u32 mask = 0, tmp, tmp1;
5029 si_select_se_sh(rdev, se, sh);
5030 tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
5031 tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
/* restore broadcast to all SEs/SHs */
5032 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5039 for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) {
5044 return (~tmp) & mask;
/*
 * Build the always-on CU mask across all shader engines/arrays, write it
 * to RLC_PG_AO_CU_MASK, and program RLC_MAX_PG_CU with the number of
 * active CUs.  The per-iteration counter/cu_bitmap bookkeeping between
 * the visible lines is elided in this listing.
 */
5047 static void si_init_ao_cu_mask(struct radeon_device *rdev)
5049 u32 i, j, k, active_cu_number = 0;
5050 u32 mask, counter, cu_bitmap;
5053 for (i = 0; i < rdev->config.si.max_shader_engines; i++) {
5054 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) {
5058 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) {
5059 if (si_get_cu_active_bitmap(rdev, i, j) & mask) {
5067 active_cu_number += counter;
/* pack each SH's bitmap into its byte lane of the AO mask */
5068 tmp |= (cu_bitmap << (i * 16 + j * 8));
5072 WREG32(RLC_PG_AO_CU_MASK, tmp);
5074 tmp = RREG32(RLC_MAX_PG_CU);
5075 tmp &= ~MAX_PU_CU_MASK;
5076 tmp |= MAX_PU_CU(active_cu_number);
5077 WREG32(RLC_MAX_PG_CU, tmp);
/*
 * Enable/disable coarse-grain clock gating (CGCG/CGLS).  Enable path:
 * halt the RLC, broadcast a serdes write, wait for serdes idle, restore
 * the RLC, then set CGCG_EN|CGLS_EN.  Disable path: mask GUI-idle irq,
 * flush with dummy CB_CGTT_SCLK_CTRL reads, clear the enable bits.
 * The final orig != data guard before the write is elided in this listing.
 */
5080 static void si_enable_cgcg(struct radeon_device *rdev,
5083 u32 data, orig, tmp;
5085 orig = data = RREG32(RLC_CGCG_CGLS_CTRL);
5087 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
5088 si_enable_gui_idle_interrupt(rdev, true);
5090 WREG32(RLC_GCPM_GENERAL_3, 0x00000080);
5092 tmp = si_halt_rlc(rdev);
/* broadcast the serdes command to all instances */
5094 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5095 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5096 WREG32(RLC_SERDES_WR_CTRL, 0x00b000ff);
5098 si_wait_for_rlc_serdes(rdev);
5100 si_update_rlc(rdev, tmp);
5102 WREG32(RLC_SERDES_WR_CTRL, 0x007000ff);
5104 data |= CGCG_EN | CGLS_EN;
5106 si_enable_gui_idle_interrupt(rdev, false);
/* dummy reads to flush outstanding CB activity before gating */
5108 RREG32(CB_CGTT_SCLK_CTRL);
5109 RREG32(CB_CGTT_SCLK_CTRL);
5110 RREG32(CB_CGTT_SCLK_CTRL);
5111 RREG32(CB_CGTT_SCLK_CTRL);
5113 data &= ~(CGCG_EN | CGLS_EN);
5117 WREG32(RLC_CGCG_CGLS_CTRL, data);
/*
 * Enable/disable medium-grain clock gating.  Enable: clear overrides in
 * CGTS_SM_CTRL_REG, optionally turn on CP memory light sleep, clear the
 * RLC MGCG override, then halt RLC + serdes broadcast (0x00d000ff).
 * Disable: reverse, with serdes command 0x00e000ff.  The orig != data
 * guards and some bit edits are elided in this listing.
 */
5120 static void si_enable_mgcg(struct radeon_device *rdev,
5123 u32 data, orig, tmp = 0;
5125 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
5126 orig = data = RREG32(CGTS_SM_CTRL_REG);
5129 WREG32(CGTS_SM_CTRL_REG, data);
5131 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
5132 orig = data = RREG32(CP_MEM_SLP_CNTL);
5133 data |= CP_MEM_LS_EN;
5135 WREG32(CP_MEM_SLP_CNTL, data);
5138 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5141 WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5143 tmp = si_halt_rlc(rdev);
5145 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5146 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5147 WREG32(RLC_SERDES_WR_CTRL, 0x00d000ff);
5149 si_update_rlc(rdev, tmp);
/* disable path: set overrides, drop CP mem light sleep, serdes 0x00e000ff */
5151 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5154 WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5156 data = RREG32(CP_MEM_SLP_CNTL);
5157 if (data & CP_MEM_LS_EN) {
5158 data &= ~CP_MEM_LS_EN;
5159 WREG32(CP_MEM_SLP_CNTL, data);
5161 orig = data = RREG32(CGTS_SM_CTRL_REG);
5162 data |= LS_OVERRIDE | OVERRIDE;
5164 WREG32(CGTS_SM_CTRL_REG, data);
5166 tmp = si_halt_rlc(rdev);
5168 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5169 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5170 WREG32(RLC_SERDES_WR_CTRL, 0x00e000ff);
5172 si_update_rlc(rdev, tmp);
/*
 * Enable/disable UVD medium-grain clock gating via UVD_CGC_MEM_CTRL,
 * UVD_CGC_CTRL and the SMC CG_CGTT_LOCAL registers (0 = gated,
 * 0xffffffff = ungated).  The bit edits between read and write of each
 * register, and the orig != data guards, are elided in this listing.
 */
5176 static void si_enable_uvd_mgcg(struct radeon_device *rdev,
5179 u32 orig, data, tmp;
5181 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
5182 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5184 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5186 orig = data = RREG32(UVD_CGC_CTRL);
5189 WREG32(UVD_CGC_CTRL, data);
5191 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0);
5192 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0);
/* disable path */
5194 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5196 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5198 orig = data = RREG32(UVD_CGC_CTRL);
5201 WREG32(UVD_CGC_CTRL, data);
5203 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0xffffffff);
5204 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0xffffffff);
/* Memory-controller clock-gating register list shared by si_enable_mc_ls()
 * and si_enable_mc_mgcg(); the array entries are elided in this listing. */
5208 static const u32 mc_cg_registers[] =
/*
 * Toggle MC_LS_ENABLE in every memory-controller CG register, gated on
 * RADEON_CG_SUPPORT_MC_LS; only writes when the value changed (the
 * orig != data comparison is elided in this listing).
 */
5221 static void si_enable_mc_ls(struct radeon_device *rdev,
5227 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5228 orig = data = RREG32(mc_cg_registers[i]);
5229 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
5230 data |= MC_LS_ENABLE;
5232 data &= ~MC_LS_ENABLE;
5234 WREG32(mc_cg_registers[i], data);
/*
 * Toggle MC_CG_ENABLE in every memory-controller CG register, gated on
 * RADEON_CG_SUPPORT_MC_MGCG; mirrors si_enable_mc_ls() (the orig != data
 * comparison is elided in this listing).
 */
5238 static void si_enable_mc_mgcg(struct radeon_device *rdev,
5244 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5245 orig = data = RREG32(mc_cg_registers[i]);
5246 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
5247 data |= MC_CG_ENABLE;
5249 data &= ~MC_CG_ENABLE;
5251 WREG32(mc_cg_registers[i], data);
/*
 * Enable/disable DMA engine medium-grain clock gating for both DMA
 * instances: clear/set MEM_POWER_OVERRIDE in DMA_POWER_CNTL and program
 * DMA_CLK_CTRL.  The i == 0/1 offset selection and the bit edit on the
 * disable-path clock value are elided in this listing.
 */
5255 static void si_enable_dma_mgcg(struct radeon_device *rdev,
5258 u32 orig, data, offset;
5261 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
5262 for (i = 0; i < 2; i++) {
5264 offset = DMA0_REGISTER_OFFSET;
5266 offset = DMA1_REGISTER_OFFSET;
5267 orig = data = RREG32(DMA_POWER_CNTL + offset);
5268 data &= ~MEM_POWER_OVERRIDE;
5270 WREG32(DMA_POWER_CNTL + offset, data);
5271 WREG32(DMA_CLK_CTRL + offset, 0x00000100);
/* disable path */
5274 for (i = 0; i < 2; i++) {
5276 offset = DMA0_REGISTER_OFFSET;
5278 offset = DMA1_REGISTER_OFFSET;
5279 orig = data = RREG32(DMA_POWER_CNTL + offset);
5280 data |= MEM_POWER_OVERRIDE;
5282 WREG32(DMA_POWER_CNTL + offset, data);
5284 orig = data = RREG32(DMA_CLK_CTRL + offset);
5287 WREG32(DMA_CLK_CTRL + offset, data);
/*
 * Toggle BIF (bus interface) memory light sleep bits in PCIE_CNTL2,
 * gated on RADEON_CG_SUPPORT_BIF_LS (the orig != data comparison before
 * the write is elided in this listing).
 */
5292 static void si_enable_bif_mgls(struct radeon_device *rdev,
5297 orig = data = RREG32_PCIE(PCIE_CNTL2);
5299 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
5300 data |= SLV_MEM_LS_EN | MST_MEM_LS_EN |
5301 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN;
5303 data &= ~(SLV_MEM_LS_EN | MST_MEM_LS_EN |
5304 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN);
5307 WREG32_PCIE(PCIE_CNTL2, data);
/*
 * Toggle HDP medium-grain clock gating: CLOCK_GATING_DIS cleared when
 * enabling, set when disabling (note inverted polarity of the bit; the
 * orig != data guard is elided in this listing).
 */
5310 static void si_enable_hdp_mgcg(struct radeon_device *rdev,
5315 orig = data = RREG32(HDP_HOST_PATH_CNTL);
5317 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
5318 data &= ~CLOCK_GATING_DIS;
5320 data |= CLOCK_GATING_DIS;
5323 WREG32(HDP_HOST_PATH_CNTL, data);
/*
 * Toggle HDP memory light sleep via HDP_LS_ENABLE in HDP_MEM_POWER_LS,
 * gated on RADEON_CG_SUPPORT_HDP_LS (the orig != data guard is elided
 * in this listing).
 */
5326 static void si_enable_hdp_ls(struct radeon_device *rdev,
5331 orig = data = RREG32(HDP_MEM_POWER_LS);
5333 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
5334 data |= HDP_LS_ENABLE;
5336 data &= ~HDP_LS_ENABLE;
5339 WREG32(HDP_MEM_POWER_LS, data);
/*
 * Central clock-gating dispatcher: for each block bit set in @block,
 * enable or disable that engine's clock gating.  For GFX, MGCG must be
 * enabled before CGCG and disabled after it ("order matters").
 */
5342 static void si_update_cg(struct radeon_device *rdev,
5343 u32 block, bool enable)
5345 if (block & RADEON_CG_BLOCK_GFX) {
5346 si_enable_gui_idle_interrupt(rdev, false);
5347 /* order matters! */
5349 si_enable_mgcg(rdev, true);
5350 si_enable_cgcg(rdev, true);
5352 si_enable_cgcg(rdev, false);
5353 si_enable_mgcg(rdev, false);
5355 si_enable_gui_idle_interrupt(rdev, true);
5358 if (block & RADEON_CG_BLOCK_MC) {
5359 si_enable_mc_mgcg(rdev, enable);
5360 si_enable_mc_ls(rdev, enable);
5363 if (block & RADEON_CG_BLOCK_SDMA) {
5364 si_enable_dma_mgcg(rdev, enable);
5367 if (block & RADEON_CG_BLOCK_BIF) {
5368 si_enable_bif_mgls(rdev, enable);
5371 if (block & RADEON_CG_BLOCK_UVD) {
/* UVD gating only applies when the ASIC actually has a UVD block */
5372 if (rdev->has_uvd) {
5373 si_enable_uvd_mgcg(rdev, enable);
5377 if (block & RADEON_CG_BLOCK_HDP) {
5378 si_enable_hdp_mgcg(rdev, enable);
5379 si_enable_hdp_ls(rdev, enable);
/*
 * Turn on clock gating for all supported blocks at init, plus the UVD
 * internal gating when UVD is present.
 */
5385 static void si_init_cg(struct radeon_device *rdev)
5385 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
5386 RADEON_CG_BLOCK_MC |
5387 RADEON_CG_BLOCK_SDMA |
5388 RADEON_CG_BLOCK_BIF |
5389 RADEON_CG_BLOCK_HDP), true);
5390 if (rdev->has_uvd) {
5391 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true);
5392 si_init_uvd_internal_cg(rdev);
/*
 * Turn off clock gating at teardown — UVD first, then the remaining
 * blocks (reverse of si_init_cg()).
 */
5396 static void si_fini_cg(struct radeon_device *rdev)
5398 if (rdev->has_uvd) {
5399 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false);
5401 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
5402 RADEON_CG_BLOCK_MC |
5403 RADEON_CG_BLOCK_SDMA |
5404 RADEON_CG_BLOCK_BIF |
5405 RADEON_CG_BLOCK_HDP), false);
/*
 * Compute the dword size of the clear-state buffer built by
 * si_get_csb_buffer(): preamble + context control + every SECT_CONTEXT
 * extent (2 header dwords + reg_count each) + pa_sc_raster_config +
 * end-of-clear-state + clear-state packet.  The count accumulator lines
 * between the visible comments are elided in this listing.
 */
5408 u32 si_get_csb_size(struct radeon_device *rdev)
5411 const struct cs_section_def *sect = NULL;
5412 const struct cs_extent_def *ext = NULL;
5414 if (rdev->rlc.cs_data == NULL)
5417 /* begin clear state */
5419 /* context control state */
5422 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
5423 for (ext = sect->section; ext->extent != NULL; ++ext) {
5424 if (sect->id == SECT_CONTEXT)
5425 count += 2 + ext->reg_count;
5430 /* pa_sc_raster_config */
5432 /* end clear state */
/*
 * Fill @buffer (little-endian dwords) with the clear-state PM4 stream:
 * PREAMBLE begin, CONTEXT_CONTROL, all SECT_CONTEXT register extents,
 * a per-family PA_SC_RASTER_CONFIG value, PREAMBLE end, CLEAR_STATE.
 * Must stay in sync with si_get_csb_size().  The family case labels for
 * the raster-config switch are elided in this listing — only the values
 * per branch are visible.
 */
5440 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
5443 const struct cs_section_def *sect = NULL;
5444 const struct cs_extent_def *ext = NULL;
5446 if (rdev->rlc.cs_data == NULL)
5451 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
5452 buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
5454 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CONTEXT_CONTROL, 1));
5455 buffer[count++] = cpu_to_le32(0x80000000);
5456 buffer[count++] = cpu_to_le32(0x80000000);
5458 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
5459 for (ext = sect->section; ext->extent != NULL; ++ext) {
5460 if (sect->id == SECT_CONTEXT) {
5462 cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, ext->reg_count));
/* context regs are programmed relative to the 0xa000 context base */
5463 buffer[count++] = cpu_to_le32(ext->reg_index - 0xa000);
5464 for (i = 0; i < ext->reg_count; i++)
5465 buffer[count++] = cpu_to_le32(ext->extent[i]);
5472 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, 1));
5473 buffer[count++] = cpu_to_le32(PA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
5474 switch (rdev->family) {
5477 buffer[count++] = cpu_to_le32(0x2a00126a);
5480 buffer[count++] = cpu_to_le32(0x0000124a);
5483 buffer[count++] = cpu_to_le32(0x00000082);
5486 buffer[count++] = cpu_to_le32(0x00000000);
5489 buffer[count++] = cpu_to_le32(0x00000000);
5493 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
5494 buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_END_CLEAR_STATE);
5496 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CLEAR_STATE, 0));
5497 buffer[count++] = cpu_to_le32(0);
/*
 * Power-gating init: set up SDMA PG and GFX coarse-grain PG when the
 * corresponding pg_flags are set, then enable them; otherwise just
 * program the RLC save/restore and clear-state buffer addresses.  The
 * else branches pairing the two paths are elided in this listing.
 */
5502 static void si_init_pg(struct radeon_device *rdev)
5503 if (rdev->pg_flags) {
5504 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) {
5505 si_init_dma_pg(rdev);
5506 si_init_ao_cu_mask(rdev);
5507 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
5508 si_init_gfx_cgpg(rdev);
5510 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5511 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
5513 si_enable_dma_pg(rdev, true);
5514 si_enable_gfx_cgpg(rdev, true);
5516 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5517 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
/* Power-gating teardown: disable SDMA and GFX power gating if PG was in use. */
5521 static void si_fini_pg(struct radeon_device *rdev)
5523 if (rdev->pg_flags) {
5524 si_enable_dma_pg(rdev, false);
5525 si_enable_gfx_cgpg(rdev, false);
/*
 * Pulse SOFT_RESET_RLC in GRBM_SOFT_RESET to reset the RLC (the udelay
 * between assert and de-assert is elided in this listing).
 */
5532 void si_rlc_reset(struct radeon_device *rdev)
5534 u32 tmp = RREG32(GRBM_SOFT_RESET);
5536 tmp |= SOFT_RESET_RLC;
5537 WREG32(GRBM_SOFT_RESET, tmp);
5539 tmp &= ~SOFT_RESET_RLC;
5540 WREG32(GRBM_SOFT_RESET, tmp);
/* Stop the RLC: clear RLC_CNTL, mask GUI-idle irqs, wait for serdes idle. */
5544 static void si_rlc_stop(struct radeon_device *rdev)
5546 WREG32(RLC_CNTL, 0);
5548 si_enable_gui_idle_interrupt(rdev, false);
5550 si_wait_for_rlc_serdes(rdev);
/* Start the RLC and re-enable the GUI-idle interrupt. */
5553 static void si_rlc_start(struct radeon_device *rdev)
5555 WREG32(RLC_CNTL, RLC_ENABLE);
5557 si_enable_gui_idle_interrupt(rdev, true);
/*
 * Load-balanced power workaround is supported only on DDR3 boards:
 * MC_SEQ_MISC0 bits 31:28 == 0xB identify DDR3 (return lines elided).
 */
5562 static bool si_lbpw_supported(struct radeon_device *rdev)
5566 /* Enable LBPW only for DDR3 */
5567 tmp = RREG32(MC_SEQ_MISC0);
5568 if ((tmp & 0xF0000000) == 0xB0000000)
/*
 * Toggle LOAD_BALANCE_ENABLE in RLC_LB_CNTL; when enabling, broadcast
 * the SPI load-balance CU mask to all SEs/SHs.
 */
5573 static void si_enable_lbpw(struct radeon_device *rdev, bool enable)
5577 tmp = RREG32(RLC_LB_CNTL);
5579 tmp |= LOAD_BALANCE_ENABLE;
5581 tmp &= ~LOAD_BALANCE_ENABLE;
5582 WREG32(RLC_LB_CNTL, tmp);
5585 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5586 WREG32(SPI_LB_CU_MASK, 0x00ff);
/*
 * Bring up the RLC: reset it, zero its load-balancing and MC state,
 * upload the big-endian RLC microcode word by word, then enable LBPW if
 * supported.  The firmware-present check, si_rlc_stop/reset calls and
 * final si_rlc_start/return are elided in this listing.
 */
5590 static int si_rlc_resume(struct radeon_device *rdev)
5593 const __be32 *fw_data;
5606 WREG32(RLC_RL_BASE, 0);
5607 WREG32(RLC_RL_SIZE, 0);
5608 WREG32(RLC_LB_CNTL, 0);
5609 WREG32(RLC_LB_CNTR_MAX, 0xffffffff);
5610 WREG32(RLC_LB_CNTR_INIT, 0);
5611 WREG32(RLC_LB_INIT_CU_MASK, 0xffffffff);
5613 WREG32(RLC_MC_CNTL, 0);
5614 WREG32(RLC_UCODE_CNTL, 0);
/* firmware words are stored big-endian; convert while streaming them in */
5616 fw_data = (const __be32 *)rdev->rlc_fw->data;
5617 for (i = 0; i < SI_RLC_UCODE_SIZE; i++) {
5618 WREG32(RLC_UCODE_ADDR, i);
5619 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
5621 WREG32(RLC_UCODE_ADDR, 0);
5623 si_enable_lbpw(rdev, si_lbpw_supported(rdev));
/*
 * Enable the IH (interrupt handler): set ENABLE_INTR in IH_CNTL and
 * IH_RB_ENABLE in the ring-buffer control, and record the state.
 */
5630 static void si_enable_interrupts(struct radeon_device *rdev)
5632 u32 ih_cntl = RREG32(IH_CNTL);
5633 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5635 ih_cntl |= ENABLE_INTR;
5636 ih_rb_cntl |= IH_RB_ENABLE;
5637 WREG32(IH_CNTL, ih_cntl);
5638 WREG32(IH_RB_CNTL, ih_rb_cntl);
5639 rdev->ih.enabled = true;
/*
 * Disable the IH: clear the ring-buffer and global enable bits, reset
 * rptr/wptr to 0, and record the state.
 */
5642 static void si_disable_interrupts(struct radeon_device *rdev)
5644 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5645 u32 ih_cntl = RREG32(IH_CNTL);
5647 ih_rb_cntl &= ~IH_RB_ENABLE;
5648 ih_cntl &= ~ENABLE_INTR;
5649 WREG32(IH_RB_CNTL, ih_rb_cntl);
5650 WREG32(IH_CNTL, ih_cntl);
5651 /* set rptr, wptr to 0 */
5652 WREG32(IH_RB_RPTR, 0);
5653 WREG32(IH_RB_WPTR, 0);
5654 rdev->ih.enabled = false;
/*
 * Force every interrupt source into its disabled state: CP rings, both
 * DMA engines, GRBM, per-CRTC vblank and pageflip masks (guarded by
 * num_crtc), and — on ASICs with display — DAC autodetect and the six
 * HPD pins (keeping only the HPD polarity bit).
 */
5658 static void si_disable_interrupt_state(struct radeon_device *rdev)
5662 tmp = RREG32(CP_INT_CNTL_RING0) &
5663 (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
5664 WREG32(CP_INT_CNTL_RING0, tmp);
5665 WREG32(CP_INT_CNTL_RING1, 0);
5666 WREG32(CP_INT_CNTL_RING2, 0);
5667 tmp = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5668 WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, tmp);
5669 tmp = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5670 WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, tmp);
5671 WREG32(GRBM_INT_CNTL, 0);
5672 if (rdev->num_crtc >= 2) {
5673 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5674 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5676 if (rdev->num_crtc >= 4) {
5677 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5678 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5680 if (rdev->num_crtc >= 6) {
5681 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5682 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5685 if (rdev->num_crtc >= 2) {
5686 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5687 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5689 if (rdev->num_crtc >= 4) {
5690 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5691 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5693 if (rdev->num_crtc >= 6) {
5694 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5695 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5698 if (!ASIC_IS_NODCE(rdev)) {
5699 WREG32(DAC_AUTODETECT_INT_CONTROL, 0);
/* preserve only each pin's polarity setting while masking the irq */
5701 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5702 WREG32(DC_HPD1_INT_CONTROL, tmp);
5703 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5704 WREG32(DC_HPD2_INT_CONTROL, tmp);
5705 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5706 WREG32(DC_HPD3_INT_CONTROL, tmp);
5707 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5708 WREG32(DC_HPD4_INT_CONTROL, tmp);
5709 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5710 WREG32(DC_HPD5_INT_CONTROL, tmp);
5711 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5712 WREG32(DC_HPD6_INT_CONTROL, tmp);
/*
 * One-time IH setup: allocate the IH ring, bring up the RLC, program the
 * interrupt control and ring-buffer registers (size, writeback address,
 * overflow handling), set IH_CNTL defaults, force all sources disabled,
 * enable PCI bus mastering, then enable the IH.  Some error-return and
 * ih_cntl/ih_rb_cntl composition lines are elided in this listing.
 */
5716 static int si_irq_init(struct radeon_device *rdev)
5720 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
5723 ret = r600_ih_ring_alloc(rdev);
5728 si_disable_interrupts(rdev);
5731 ret = si_rlc_resume(rdev);
/* RLC bring-up failed: tear the IH ring back down */
5733 r600_ih_ring_fini(rdev);
5737 /* setup interrupt control */
5738 /* set dummy read address to ring address */
5739 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
5740 interrupt_cntl = RREG32(INTERRUPT_CNTL);
5741 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
5742 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
5744 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
5745 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
5746 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
5747 WREG32(INTERRUPT_CNTL, interrupt_cntl);
5749 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
5750 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
5752 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
5753 IH_WPTR_OVERFLOW_CLEAR |
5756 if (rdev->wb.enabled)
5757 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
5759 /* set the writeback address whether it's enabled or not */
5760 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
5761 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
5763 WREG32(IH_RB_CNTL, ih_rb_cntl);
5765 /* set rptr, wptr to 0 */
5766 WREG32(IH_RB_RPTR, 0);
5767 WREG32(IH_RB_WPTR, 0);
5769 /* Default settings for IH_CNTL (disabled at first) */
5770 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
5771 /* RPTR_REARM only works if msi's are enabled */
5772 if (rdev->msi_enabled)
5773 ih_cntl |= RPTR_REARM;
5774 WREG32(IH_CNTL, ih_cntl);
5776 /* force the active interrupt state to all disabled */
5777 si_disable_interrupt_state(rdev);
5779 pci_set_master(rdev->pdev);
5782 si_enable_interrupts(rdev);
/*
 * Program the full interrupt-enable state from rdev->irq: CP rings 0-2,
 * both DMA engines, six CRTC vblank/pageflip sources, six HPD pins and
 * the thermal interrupt.  Bails out (and fully disables) if the IH is
 * not up.  The cp_int_cntl declaration and the return lines are elided
 * in this listing.
 */
5787 int si_irq_set(struct radeon_device *rdev)
5790 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
5791 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
5792 u32 hpd1 = 0, hpd2 = 0, hpd3 = 0, hpd4 = 0, hpd5 = 0, hpd6 = 0;
5793 u32 grbm_int_cntl = 0;
5794 u32 dma_cntl, dma_cntl1;
5795 u32 thermal_int = 0;
5797 if (!rdev->irq.installed) {
5798 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
5801 /* don't enable anything if the ih is disabled */
5802 if (!rdev->ih.enabled) {
5803 si_disable_interrupts(rdev);
5804 /* force the active interrupt state to all disabled */
5805 si_disable_interrupt_state(rdev);
/* start from current hw state with the per-source enable bits cleared */
5809 cp_int_cntl = RREG32(CP_INT_CNTL_RING0) &
5810 (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
5812 if (!ASIC_IS_NODCE(rdev)) {
5813 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
5814 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
5815 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
5816 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
5817 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
5818 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
5821 dma_cntl = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5822 dma_cntl1 = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5824 thermal_int = RREG32(CG_THERMAL_INT) &
5825 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
5827 /* enable CP interrupts on all rings */
5828 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
5829 DRM_DEBUG("si_irq_set: sw int gfx\n");
5830 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
5832 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
5833 DRM_DEBUG("si_irq_set: sw int cp1\n");
5834 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
5836 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
5837 DRM_DEBUG("si_irq_set: sw int cp2\n");
5838 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
5840 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
5841 DRM_DEBUG("si_irq_set: sw int dma\n");
5842 dma_cntl |= TRAP_ENABLE;
5845 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
5846 DRM_DEBUG("si_irq_set: sw int dma1\n");
5847 dma_cntl1 |= TRAP_ENABLE;
/* vblank sources: requested either by core vblank or a pending pageflip */
5849 if (rdev->irq.crtc_vblank_int[0] ||
5850 atomic_read(&rdev->irq.pflip[0])) {
5851 DRM_DEBUG("si_irq_set: vblank 0\n");
5852 crtc1 |= VBLANK_INT_MASK;
5854 if (rdev->irq.crtc_vblank_int[1] ||
5855 atomic_read(&rdev->irq.pflip[1])) {
5856 DRM_DEBUG("si_irq_set: vblank 1\n");
5857 crtc2 |= VBLANK_INT_MASK;
5859 if (rdev->irq.crtc_vblank_int[2] ||
5860 atomic_read(&rdev->irq.pflip[2])) {
5861 DRM_DEBUG("si_irq_set: vblank 2\n");
5862 crtc3 |= VBLANK_INT_MASK;
5864 if (rdev->irq.crtc_vblank_int[3] ||
5865 atomic_read(&rdev->irq.pflip[3])) {
5866 DRM_DEBUG("si_irq_set: vblank 3\n");
5867 crtc4 |= VBLANK_INT_MASK;
5869 if (rdev->irq.crtc_vblank_int[4] ||
5870 atomic_read(&rdev->irq.pflip[4])) {
5871 DRM_DEBUG("si_irq_set: vblank 4\n");
5872 crtc5 |= VBLANK_INT_MASK;
5874 if (rdev->irq.crtc_vblank_int[5] ||
5875 atomic_read(&rdev->irq.pflip[5])) {
5876 DRM_DEBUG("si_irq_set: vblank 5\n");
5877 crtc6 |= VBLANK_INT_MASK;
5879 if (rdev->irq.hpd[0]) {
5880 DRM_DEBUG("si_irq_set: hpd 1\n");
5881 hpd1 |= DC_HPDx_INT_EN;
5883 if (rdev->irq.hpd[1]) {
5884 DRM_DEBUG("si_irq_set: hpd 2\n");
5885 hpd2 |= DC_HPDx_INT_EN;
5887 if (rdev->irq.hpd[2]) {
5888 DRM_DEBUG("si_irq_set: hpd 3\n");
5889 hpd3 |= DC_HPDx_INT_EN;
5891 if (rdev->irq.hpd[3]) {
5892 DRM_DEBUG("si_irq_set: hpd 4\n");
5893 hpd4 |= DC_HPDx_INT_EN;
5895 if (rdev->irq.hpd[4]) {
5896 DRM_DEBUG("si_irq_set: hpd 5\n");
5897 hpd5 |= DC_HPDx_INT_EN;
5899 if (rdev->irq.hpd[5]) {
5900 DRM_DEBUG("si_irq_set: hpd 6\n");
5901 hpd6 |= DC_HPDx_INT_EN;
/* commit the assembled values to hardware */
5904 WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
5905 WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
5906 WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);
5908 WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, dma_cntl);
5909 WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, dma_cntl1);
5911 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
5913 if (rdev->irq.dpm_thermal) {
5914 DRM_DEBUG("dpm thermal\n");
5915 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
5918 if (rdev->num_crtc >= 2) {
5919 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
5920 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
5922 if (rdev->num_crtc >= 4) {
5923 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
5924 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
5926 if (rdev->num_crtc >= 6) {
5927 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
5928 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
5931 if (rdev->num_crtc >= 2) {
5932 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
5933 GRPH_PFLIP_INT_MASK);
5934 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
5935 GRPH_PFLIP_INT_MASK);
5937 if (rdev->num_crtc >= 4) {
5938 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
5939 GRPH_PFLIP_INT_MASK);
5940 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
5941 GRPH_PFLIP_INT_MASK);
5943 if (rdev->num_crtc >= 6) {
5944 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
5945 GRPH_PFLIP_INT_MASK);
5946 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
5947 GRPH_PFLIP_INT_MASK);
5950 if (!ASIC_IS_NODCE(rdev)) {
5951 WREG32(DC_HPD1_INT_CONTROL, hpd1);
5952 WREG32(DC_HPD2_INT_CONTROL, hpd2);
5953 WREG32(DC_HPD3_INT_CONTROL, hpd3);
5954 WREG32(DC_HPD4_INT_CONTROL, hpd4);
5955 WREG32(DC_HPD5_INT_CONTROL, hpd5);
5956 WREG32(DC_HPD6_INT_CONTROL, hpd6);
5959 WREG32(CG_THERMAL_INT, thermal_int);
5964 static inline void si_irq_ack(struct radeon_device *rdev)
5968 if (ASIC_IS_NODCE(rdev))
5971 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
5972 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
5973 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
5974 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
5975 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
5976 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
5977 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
5978 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
5979 if (rdev->num_crtc >= 4) {
5980 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
5981 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
5983 if (rdev->num_crtc >= 6) {
5984 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
5985 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
5988 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
5989 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5990 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
5991 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5992 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
5993 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
5994 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
5995 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
5996 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
5997 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
5998 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
5999 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
6001 if (rdev->num_crtc >= 4) {
6002 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
6003 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6004 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
6005 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6006 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
6007 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
6008 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
6009 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
6010 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
6011 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
6012 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
6013 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
6016 if (rdev->num_crtc >= 6) {
6017 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
6018 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6019 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
6020 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6021 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
6022 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
6023 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
6024 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
6025 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
6026 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
6027 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
6028 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
6031 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6032 tmp = RREG32(DC_HPD1_INT_CONTROL);
6033 tmp |= DC_HPDx_INT_ACK;
6034 WREG32(DC_HPD1_INT_CONTROL, tmp);
6036 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6037 tmp = RREG32(DC_HPD2_INT_CONTROL);
6038 tmp |= DC_HPDx_INT_ACK;
6039 WREG32(DC_HPD2_INT_CONTROL, tmp);
6041 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6042 tmp = RREG32(DC_HPD3_INT_CONTROL);
6043 tmp |= DC_HPDx_INT_ACK;
6044 WREG32(DC_HPD3_INT_CONTROL, tmp);
6046 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6047 tmp = RREG32(DC_HPD4_INT_CONTROL);
6048 tmp |= DC_HPDx_INT_ACK;
6049 WREG32(DC_HPD4_INT_CONTROL, tmp);
6051 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6052 tmp = RREG32(DC_HPD5_INT_CONTROL);
6053 tmp |= DC_HPDx_INT_ACK;
6054 WREG32(DC_HPD5_INT_CONTROL, tmp);
6056 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6057 tmp = RREG32(DC_HPD5_INT_CONTROL);
6058 tmp |= DC_HPDx_INT_ACK;
6059 WREG32(DC_HPD6_INT_CONTROL, tmp);
/* Fully quiesce the interrupt controller: stop interrupt generation,
 * acknowledge anything already latched, then program the masked
 * default interrupt state.  Used on error paths and suspend.
 */
6063 static void si_irq_disable(struct radeon_device *rdev)
6065 si_disable_interrupts(rdev);
6066 /* Wait and acknowledge irq */
6069 si_disable_interrupt_state(rdev);
/* Suspend-time interrupt teardown: just disables/acks interrupts;
 * the IH ring itself is kept allocated for resume.
 */
6072 static void si_irq_suspend(struct radeon_device *rdev)
6074 si_irq_disable(rdev);
/* Final interrupt teardown: disable interrupts and free the IH ring
 * buffer (driver unload path).
 */
6078 static void si_irq_fini(struct radeon_device *rdev)
6080 si_irq_suspend(rdev);
6081 r600_ih_ring_fini(rdev);
6084 static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
6088 if (rdev->wb.enabled)
6089 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
6091 wptr = RREG32(IH_RB_WPTR);
6093 if (wptr & RB_OVERFLOW) {
6094 /* When a ring buffer overflow happen start parsing interrupt
6095 * from the last not overwritten vector (wptr + 16). Hopefully
6096 * this should allow us to catchup.
6098 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
6099 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
6100 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
6101 tmp = RREG32(IH_RB_CNTL);
6102 tmp |= IH_WPTR_OVERFLOW_CLEAR;
6103 WREG32(IH_RB_CNTL, tmp);
6104 wptr &= ~RB_OVERFLOW;
6106 return (wptr & rdev->ih.ptr_mask);
6110 * Each IV ring entry is 128 bits:
6111 * [7:0] - interrupt source id
6113 * [59:32] - interrupt source data
6114 * [63:60] - reserved
6117 * [127:80] - reserved
/* Main IH bottom half: drains the IH ring, dispatching each vector by
 * source id (display, hotplug, fences, VM faults, thermal), then
 * updates the read pointer and re-checks wptr for late arrivals.
 * Returns IRQ_HANDLED/IRQ_NONE semantics via int.
 */
6119 int si_irq_process(struct radeon_device *rdev)
6123 u32 src_id, src_data, ring_id;
6125 bool queue_hotplug = false;
6126 bool queue_thermal = false;
/* bail early when the IH ring is disabled or the device is going away */
6129 if (!rdev->ih.enabled || rdev->shutdown)
6132 wptr = si_get_ih_wptr(rdev);
6135 /* is somebody else already processing irqs? */
6136 if (atomic_xchg(&rdev->ih.lock, 1))
6139 rptr = rdev->ih.rptr;
6140 DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
6142 /* Order reading of wptr vs. reading of IH ring data */
6145 /* display interrupts */
/* drain every 16-byte vector between rptr and wptr */
6148 while (rptr != wptr) {
6149 /* wptr/rptr are in bytes! */
6150 ring_index = rptr / 4;
6151 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
6152 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
6153 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
/* src_id 1-6: per-CRTC vblank (src_data 0) / vline (src_data 1) */
6156 case 1: /* D1 vblank/vline */
6158 case 0: /* D1 vblank */
6159 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
6160 if (rdev->irq.crtc_vblank_int[0]) {
6161 drm_handle_vblank(rdev->ddev, 0);
6162 rdev->pm.vblank_sync = true;
6163 wake_up(&rdev->irq.vblank_queue);
6165 if (atomic_read(&rdev->irq.pflip[0]))
6166 radeon_crtc_handle_flip(rdev, 0);
6167 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
6168 DRM_DEBUG("IH: D1 vblank\n");
6171 case 1: /* D1 vline */
6172 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
6173 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
6174 DRM_DEBUG("IH: D1 vline\n");
6178 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6182 case 2: /* D2 vblank/vline */
6184 case 0: /* D2 vblank */
6185 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
6186 if (rdev->irq.crtc_vblank_int[1]) {
6187 drm_handle_vblank(rdev->ddev, 1);
6188 rdev->pm.vblank_sync = true;
6189 wake_up(&rdev->irq.vblank_queue);
6191 if (atomic_read(&rdev->irq.pflip[1]))
6192 radeon_crtc_handle_flip(rdev, 1);
6193 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
6194 DRM_DEBUG("IH: D2 vblank\n");
6197 case 1: /* D2 vline */
6198 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
6199 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
6200 DRM_DEBUG("IH: D2 vline\n");
6204 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6208 case 3: /* D3 vblank/vline */
6210 case 0: /* D3 vblank */
6211 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
6212 if (rdev->irq.crtc_vblank_int[2]) {
6213 drm_handle_vblank(rdev->ddev, 2);
6214 rdev->pm.vblank_sync = true;
6215 wake_up(&rdev->irq.vblank_queue);
6217 if (atomic_read(&rdev->irq.pflip[2]))
6218 radeon_crtc_handle_flip(rdev, 2);
6219 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
6220 DRM_DEBUG("IH: D3 vblank\n");
6223 case 1: /* D3 vline */
6224 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
6225 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
6226 DRM_DEBUG("IH: D3 vline\n");
6230 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6234 case 4: /* D4 vblank/vline */
6236 case 0: /* D4 vblank */
6237 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
6238 if (rdev->irq.crtc_vblank_int[3]) {
6239 drm_handle_vblank(rdev->ddev, 3);
6240 rdev->pm.vblank_sync = true;
6241 wake_up(&rdev->irq.vblank_queue);
6243 if (atomic_read(&rdev->irq.pflip[3]))
6244 radeon_crtc_handle_flip(rdev, 3);
6245 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
6246 DRM_DEBUG("IH: D4 vblank\n");
6249 case 1: /* D4 vline */
6250 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
6251 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
6252 DRM_DEBUG("IH: D4 vline\n");
6256 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6260 case 5: /* D5 vblank/vline */
6262 case 0: /* D5 vblank */
6263 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
6264 if (rdev->irq.crtc_vblank_int[4]) {
6265 drm_handle_vblank(rdev->ddev, 4);
6266 rdev->pm.vblank_sync = true;
6267 wake_up(&rdev->irq.vblank_queue);
6269 if (atomic_read(&rdev->irq.pflip[4]))
6270 radeon_crtc_handle_flip(rdev, 4);
6271 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
6272 DRM_DEBUG("IH: D5 vblank\n");
6275 case 1: /* D5 vline */
6276 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
6277 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
6278 DRM_DEBUG("IH: D5 vline\n");
6282 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6286 case 6: /* D6 vblank/vline */
6288 case 0: /* D6 vblank */
6289 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
6290 if (rdev->irq.crtc_vblank_int[5]) {
6291 drm_handle_vblank(rdev->ddev, 5);
6292 rdev->pm.vblank_sync = true;
6293 wake_up(&rdev->irq.vblank_queue);
6295 if (atomic_read(&rdev->irq.pflip[5]))
6296 radeon_crtc_handle_flip(rdev, 5);
6297 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
6298 DRM_DEBUG("IH: D6 vblank\n");
6301 case 1: /* D6 vline */
6302 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
6303 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
6304 DRM_DEBUG("IH: D6 vline\n");
6308 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* src_ids 8..18 even: page-flip completion for CRTC (src_id - 8) / 2 */
6312 case 8: /* D1 page flip */
6313 case 10: /* D2 page flip */
6314 case 12: /* D3 page flip */
6315 case 14: /* D4 page flip */
6316 case 16: /* D5 page flip */
6317 case 18: /* D6 page flip */
6318 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
6319 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
/* src_id 42: HPD hotplug, src_data selects the HPD pin */
6321 case 42: /* HPD hotplug */
6324 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6325 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
6326 queue_hotplug = true;
6327 DRM_DEBUG("IH: HPD1\n");
6331 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6332 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
6333 queue_hotplug = true;
6334 DRM_DEBUG("IH: HPD2\n");
6338 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6339 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
6340 queue_hotplug = true;
6341 DRM_DEBUG("IH: HPD3\n");
6345 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6346 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
6347 queue_hotplug = true;
6348 DRM_DEBUG("IH: HPD4\n");
6352 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6353 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
6354 queue_hotplug = true;
6355 DRM_DEBUG("IH: HPD5\n");
6359 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6360 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
6361 queue_hotplug = true;
6362 DRM_DEBUG("IH: HPD6\n");
6366 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* UVD interrupt -> UVD ring fence processing */
6371 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
6372 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
/* VM protection fault: dump fault address/status then reset them */
6376 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
6377 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
6378 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
6379 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
6381 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
6383 si_vm_decode_fault(rdev, status, addr);
6384 /* reset addr and status */
6385 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
/* CP ring interrupts: fence completion per ring */
6387 case 176: /* RINGID0 CP_INT */
6388 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6390 case 177: /* RINGID1 CP_INT */
6391 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6393 case 178: /* RINGID2 CP_INT */
6394 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6396 case 181: /* CP EOP event */
6397 DRM_DEBUG("IH: CP EOP\n");
6400 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6403 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6406 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6410 case 224: /* DMA trap event */
6411 DRM_DEBUG("IH: DMA trap\n");
6412 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
/* thermal events just set a direction flag and defer to the dpm worker */
6414 case 230: /* thermal low to high */
6415 DRM_DEBUG("IH: thermal low to high\n");
6416 rdev->pm.dpm.thermal.high_to_low = false;
6417 queue_thermal = true;
6419 case 231: /* thermal high to low */
6420 DRM_DEBUG("IH: thermal high to low\n");
6421 rdev->pm.dpm.thermal.high_to_low = true;
6422 queue_thermal = true;
6424 case 233: /* GUI IDLE */
6425 DRM_DEBUG("IH: GUI idle\n");
6427 case 244: /* DMA trap event */
6428 DRM_DEBUG("IH: DMA1 trap\n");
6429 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6432 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6436 /* wptr/rptr are in bytes! */
6438 rptr &= rdev->ih.ptr_mask;
/* hand deferred work to process context, then publish the new rptr */
6441 schedule_work(&rdev->hotplug_work);
6442 if (queue_thermal && rdev->pm.dpm_enabled)
6443 schedule_work(&rdev->pm.dpm.thermal.work);
6444 rdev->ih.rptr = rptr;
6445 WREG32(IH_RB_RPTR, rdev->ih.rptr);
6446 atomic_set(&rdev->ih.lock, 0);
6448 /* make sure wptr hasn't changed while processing */
6449 wptr = si_get_ih_wptr(rdev);
6457  * startup/shutdown callbacks
/* Bring the ASIC fully up: MC, GART, RLC, writeback, fence drivers,
 * IRQs, CP/DMA/UVD rings, IB pool, VM manager and audio — in that
 * order.  The sequence is order-sensitive; each step depends on the
 * previous ones.  Returns 0 on success or a negative error code.
 */
6459 static int si_startup(struct radeon_device *rdev)
6461 struct radeon_ring *ring;
6464 /* enable pcie gen2/3 link */
6465 si_pcie_gen3_enable(rdev);
6467 si_program_aspm(rdev);
6469 /* scratch needs to be initialized before MC */
6470 r = r600_vram_scratch_init(rdev);
6474 si_mc_program(rdev);
/* when dpm is enabled the MC ucode was already loaded by dpm init */
6476 if (!rdev->pm.dpm_enabled) {
6477 r = si_mc_load_microcode(rdev);
6479 DRM_ERROR("Failed to load MC firmware!\n");
6484 r = si_pcie_gart_enable(rdev);
6489 /* allocate rlc buffers */
6490 if (rdev->family == CHIP_VERDE) {
6491 rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
6492 rdev->rlc.reg_list_size =
6493 (u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
6495 rdev->rlc.cs_data = si_cs_data;
6496 r = sumo_rlc_init(rdev);
6498 DRM_ERROR("Failed to init rlc BOs!\n");
6502 /* allocate wb buffer */
6503 r = radeon_wb_init(rdev);
/* start fence driver for each ring (2 GFX-family CPs, 2 DMAs, UVD) */
6507 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
6509 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6513 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6515 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6519 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6521 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6525 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
6527 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6531 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6533 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6537 if (rdev->has_uvd) {
6538 r = uvd_v2_2_resume(rdev);
6540 r = radeon_fence_driver_start_ring(rdev,
6541 R600_RING_TYPE_UVD_INDEX);
6543 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
/* UVD failure is non-fatal: zero the ring size so it is skipped below */
6546 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
6550 if (!rdev->irq.installed) {
6551 r = radeon_irq_kms_init(rdev);
6556 r = si_irq_init(rdev);
6558 DRM_ERROR("radeon: IH init failed (%d).\n", r);
6559 radeon_irq_kms_fini(rdev);
/* initialize the hardware rings now that fences and IRQs are live */
6564 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6565 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
6570 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6571 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
6576 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6577 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
6582 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6583 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
6584 DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6588 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6589 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
6590 DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6594 r = si_cp_load_microcode(rdev);
6597 r = si_cp_resume(rdev);
6601 r = cayman_dma_resume(rdev);
6605 if (rdev->has_uvd) {
6606 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6607 if (ring->ring_size) {
6608 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
6611 r = uvd_v1_0_init(rdev);
6613 DRM_ERROR("radeon: failed initializing UVD (%d).\n", r);
6617 r = radeon_ib_pool_init(rdev);
6619 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
6623 r = radeon_vm_manager_init(rdev);
6625 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
6629 r = dce6_audio_init(rdev);
/* Resume callback: re-post the card via atombios, restore golden
 * registers, resume power management, then run the full startup
 * sequence.  accel_working is cleared again if startup fails.
 */
6636 int si_resume(struct radeon_device *rdev)
6640 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
6641 * posting will perform necessary task to bring back GPU into good
6645 atom_asic_init(rdev->mode_info.atom_context);
6647 /* init golden registers */
6648 si_init_golden_registers(rdev);
6650 if (rdev->pm.pm_method == PM_METHOD_DPM)
6651 radeon_pm_resume(rdev);
6653 rdev->accel_working = true;
6654 r = si_startup(rdev);
6656 DRM_ERROR("si startup failed on resume\n");
6657 rdev->accel_working = false;
/* Suspend callback: quiesce engines in reverse dependency order —
 * pm/audio/vm first, then CP, DMA and UVD, finally IRQs, writeback
 * and GART.
 */
6665 int si_suspend(struct radeon_device *rdev)
6667 radeon_pm_suspend(rdev);
6668 dce6_audio_fini(rdev);
6669 radeon_vm_manager_fini(rdev);
6670 si_cp_enable(rdev, false);
6671 cayman_dma_stop(rdev);
6672 if (rdev->has_uvd) {
6673 uvd_v1_0_fini(rdev);
6674 radeon_uvd_suspend(rdev);
6678 si_irq_suspend(rdev);
6679 radeon_wb_disable(rdev);
6680 si_pcie_gart_disable(rdev);
6684 /* Plan is to move initialization in that function and use
6685 * helper function so that radeon_device_init pretty much
6686 * do nothing more than calling asic specific function. This
6687 * should also allow to remove a bunch of callback function
/* One-time driver init: BIOS discovery/posting, clocks, fences, MC,
 * memory manager, microcode, pm, software ring setup, IH ring and
 * GART init, then a first si_startup().  On startup failure,
 * acceleration is torn down but modesetting continues.
 */
6690 int si_init(struct radeon_device *rdev)
6692 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6696 if (!radeon_get_bios(rdev)) {
6697 if (ASIC_IS_AVIVO(rdev))
6700 /* Must be an ATOMBIOS */
6701 if (!rdev->is_atom_bios) {
6702 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
6705 r = radeon_atombios_init(rdev);
6709 /* Post card if necessary */
6710 if (!radeon_card_posted(rdev)) {
6712 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
6715 DRM_INFO("GPU not posted. posting now...\n");
6716 atom_asic_init(rdev->mode_info.atom_context);
6718 /* init golden registers */
6719 si_init_golden_registers(rdev);
6720 /* Initialize scratch registers */
6721 si_scratch_init(rdev);
6722 /* Initialize surface registers */
6723 radeon_surface_init(rdev);
6724 /* Initialize clocks */
6725 radeon_get_clock_info(rdev->ddev);
6728 r = radeon_fence_driver_init(rdev);
6732 /* initialize memory controller */
6733 r = si_mc_init(rdev);
6736 /* Memory manager */
6737 r = radeon_bo_init(rdev);
/* load GFX/MC/RLC microcode once, if any image is still missing */
6741 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
6742 !rdev->rlc_fw || !rdev->mc_fw) {
6743 r = si_init_microcode(rdev);
6745 DRM_ERROR("Failed to load firmware!\n");
6750 /* Initialize power management */
6751 radeon_pm_init(rdev);
/* software-side ring setup: 3 CP rings, 2 DMA rings, optional UVD */
6753 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6754 ring->ring_obj = NULL;
6755 r600_ring_init(rdev, ring, 1024 * 1024);
6757 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6758 ring->ring_obj = NULL;
6759 r600_ring_init(rdev, ring, 1024 * 1024);
6761 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6762 ring->ring_obj = NULL;
6763 r600_ring_init(rdev, ring, 1024 * 1024);
6765 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6766 ring->ring_obj = NULL;
6767 r600_ring_init(rdev, ring, 64 * 1024);
6769 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6770 ring->ring_obj = NULL;
6771 r600_ring_init(rdev, ring, 64 * 1024);
6773 if (rdev->has_uvd) {
6774 r = radeon_uvd_init(rdev);
6776 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6777 ring->ring_obj = NULL;
6778 r600_ring_init(rdev, ring, 4096);
6782 rdev->ih.ring_obj = NULL;
6783 r600_ih_ring_init(rdev, 64 * 1024);
6785 r = r600_pcie_gart_init(rdev);
6789 rdev->accel_working = true;
6790 r = si_startup(rdev);
/* startup failed: tear down acceleration but keep the device usable */
6792 dev_err(rdev->dev, "disabling GPU acceleration\n");
6794 cayman_dma_fini(rdev);
6796 sumo_rlc_fini(rdev);
6797 radeon_wb_fini(rdev);
6798 radeon_ib_pool_fini(rdev);
6799 radeon_vm_manager_fini(rdev);
6800 radeon_irq_kms_fini(rdev);
6801 si_pcie_gart_fini(rdev);
6802 rdev->accel_working = false;
6805 /* Don't start up if the MC ucode is missing.
6806 * The default clocks and voltages before the MC ucode
6807 * is loaded are not suffient for advanced operations.
6810 DRM_ERROR("radeon: MC ucode required for NI+.\n");
/* Driver-unload teardown: undo everything si_init()/si_startup() set
 * up, in reverse order of initialization.
 */
6817 void si_fini(struct radeon_device *rdev)
6819 radeon_pm_fini(rdev);
6821 cayman_dma_fini(rdev);
6825 sumo_rlc_fini(rdev);
6826 radeon_wb_fini(rdev);
6827 radeon_vm_manager_fini(rdev);
6828 radeon_ib_pool_fini(rdev);
6829 radeon_irq_kms_fini(rdev);
6830 if (rdev->has_uvd) {
6831 uvd_v1_0_fini(rdev);
6832 radeon_uvd_fini(rdev);
6834 si_pcie_gart_fini(rdev);
6835 r600_vram_scratch_fini(rdev);
6836 radeon_gem_fini(rdev);
6837 radeon_fence_driver_fini(rdev);
6838 radeon_bo_fini(rdev);
6839 radeon_atombios_fini(rdev);
6845 * si_get_gpu_clock_counter - return GPU clock counter snapshot
6847 * @rdev: radeon_device pointer
6849 * Fetches a GPU clock counter snapshot (SI).
6850 * Returns the 64 bit clock counter snapshot.
6852 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
/* serialized under gpu_clock_mutex: the capture trigger and the
 * two 32-bit reads must not interleave with another caller */
6856 mutex_lock(&rdev->gpu_clock_mutex);
6857 WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
/* assemble 64-bit value from the latched LSB/MSB register pair */
6858 clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
6859 ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
6860 mutex_unlock(&rdev->gpu_clock_mutex);
/* Program the UVD PLL (UPLL) for the requested vclk/dclk (in 10 kHz
 * units).  vclk == 0 or dclk == 0 parks the PLL in bypass/sleep.
 * The register sequence follows the hardware programming order:
 * bypass -> reset -> dividers -> settle -> relock -> switch source.
 * Returns 0 on success or a negative error code.
 */
6864 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
6866 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
6869 /* bypass vclk and dclk with bclk */
6870 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6871 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
6872 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
6874 /* put PLL in bypass mode */
6875 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
6877 if (!vclk || !dclk) {
6878 /* keep the Bypass mode, put PLL to sleep */
6879 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback and post dividers for the requested clocks */
6883 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
6884 16384, 0x03FFFFFF, 0, 128, 5,
6885 &fb_div, &vclk_div, &dclk_div);
6889 /* set RESET_ANTI_MUX to 0 */
6890 WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);
6892 /* set VCO_MODE to 1 */
6893 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
6895 /* toggle UPLL_SLEEP to 1 then back to 0 */
6896 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
6897 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
6899 /* deassert UPLL_RESET */
6900 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
6904 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6908 /* assert UPLL_RESET again */
6909 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
6911 /* disable spread spectrum. */
6912 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
6914 /* set feedback divider */
6915 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
6917 /* set ref divider to 0 */
6918 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* ISPARE9 selection depends on the feedback divider magnitude */
6920 if (fb_div < 307200)
6921 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
6923 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
6925 /* set PDIV_A and PDIV_B */
6926 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6927 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
6928 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
6930 /* give the PLL some time to settle */
6933 /* deassert PLL_RESET */
6934 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
6938 /* switch from bypass mode to normal mode */
6939 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
6941 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6945 /* switch VCLK and DCLK selection */
6946 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6947 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
6948 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/* Attempt to train the PCIe link up to gen2/gen3 speeds.  Skipped on
 * IGPs, non-PCIe parts, or when disabled via radeon.pcie_gen2=0.
 * For gen3 the function runs the spec-mandated re-equalization dance
 * on both the bridge and the GPU before forcing the speed change.
 * The LNKCTL2 bit layout ((1 << 4) | (7 << 9)) covers the Enter
 * Compliance and Transmit Margin fields — TODO confirm against the
 * PCIe spec when touching this.
 */
6955 static void si_pcie_gen3_enable(struct radeon_device *rdev)
6957 struct pci_dev *root = rdev->pdev->bus->self;
6958 int bridge_pos, gpu_pos;
6959 u32 speed_cntl, mask, current_data_rate;
6963 if (radeon_pcie_gen2 == 0)
6966 if (rdev->flags & RADEON_IS_IGP)
6969 if (!(rdev->flags & RADEON_IS_PCIE))
6972 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
6976 if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
/* current_data_rate: 0 = gen1, 1 = gen2, 2 = gen3 */
6979 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6980 current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
6981 LC_CURRENT_DATA_RATE_SHIFT;
6982 if (mask & DRM_PCIE_SPEED_80) {
6983 if (current_data_rate == 2) {
6984 DRM_INFO("PCIE gen 3 link speeds already enabled\n");
6987 DRM_INFO("enabling PCIE gen 3 link speeds, disable with radeon.pcie_gen2=0\n");
6988 } else if (mask & DRM_PCIE_SPEED_50) {
6989 if (current_data_rate == 1) {
6990 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
6993 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* need PCIe capability offsets on both the root port and the GPU */
6996 bridge_pos = pci_pcie_cap(root);
7000 gpu_pos = pci_pcie_cap(rdev->pdev);
7004 if (mask & DRM_PCIE_SPEED_80) {
7005 /* re-try equalization if gen3 is not already enabled */
7006 if (current_data_rate != 2) {
7007 u16 bridge_cfg, gpu_cfg;
7008 u16 bridge_cfg2, gpu_cfg2;
7009 u32 max_lw, current_lw, tmp;
/* save LNKCTL on both ends, then set HAWD for the retrain */
7011 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
7012 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
7014 tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
7015 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
7017 tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
7018 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
7020 tmp = RREG32_PCIE(PCIE_LC_STATUS1);
7021 max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
7022 current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;
/* renegotiate to the widest detected link width if narrower now */
7024 if (current_lw < max_lw) {
7025 tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
7026 if (tmp & LC_RENEGOTIATION_SUPPORT) {
7027 tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
7028 tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
7029 tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
7030 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
/* equalization retry loop */
7034 for (i = 0; i < 10; i++) {
7036 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16);
7037 if (tmp16 & PCI_EXP_DEVSTA_TRPND)
7040 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
7041 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
7043 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &bridge_cfg2);
7044 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2);
7046 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7047 tmp |= LC_SET_QUIESCE;
7048 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
7050 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7052 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
/* restore the saved HAWD bits on both ends */
7057 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &tmp16);
7058 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
7059 tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
7060 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
7062 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16);
7063 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
7064 tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
7065 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
/* restore the saved LNKCTL2 fields on both ends */
7068 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &tmp16);
7069 tmp16 &= ~((1 << 4) | (7 << 9));
7070 tmp16 |= (bridge_cfg2 & ((1 << 4) | (7 << 9)));
7071 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, tmp16);
7073 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
7074 tmp16 &= ~((1 << 4) | (7 << 9));
7075 tmp16 |= (gpu_cfg2 & ((1 << 4) | (7 << 9)));
7076 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
7078 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7079 tmp &= ~LC_SET_QUIESCE;
7080 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
7085 /* set the link speed */
7086 speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
7087 speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
7088 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* program the target link speed in LNKCTL2 (1=gen1, 2=gen2, 3=gen3) */
7090 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
7092 if (mask & DRM_PCIE_SPEED_80)
7093 tmp16 |= 3; /* gen3 */
7094 else if (mask & DRM_PCIE_SPEED_50)
7095 tmp16 |= 2; /* gen2 */
7097 tmp16 |= 1; /* gen1 */
7098 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
7100 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
7101 speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
7102 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* wait for the hardware to complete the speed change */
7104 for (i = 0; i < rdev->usec_timeout; i++) {
7105 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
7106 if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
7112 static void si_program_aspm(struct radeon_device *rdev)
7115 bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
7116 bool disable_clkreq = false;
7118 if (radeon_aspm == 0)
7121 if (!(rdev->flags & RADEON_IS_PCIE))
7124 orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
7125 data &= ~LC_XMIT_N_FTS_MASK;
7126 data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
7128 WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);
7130 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
7131 data |= LC_GO_TO_RECOVERY;
7133 WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);
7135 orig = data = RREG32_PCIE(PCIE_P_CNTL);
7136 data |= P_IGNORE_EDB_ERR;
7138 WREG32_PCIE(PCIE_P_CNTL, data);
7140 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
7141 data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
7142 data |= LC_PMI_TO_L1_DIS;
7144 data |= LC_L0S_INACTIVITY(7);
7147 data |= LC_L1_INACTIVITY(7);
7148 data &= ~LC_PMI_TO_L1_DIS;
7150 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
7152 if (!disable_plloff_in_l1) {
7153 bool clk_req_support;
7155 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
7156 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
7157 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
7159 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
7161 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
7162 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
7163 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
7165 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
7167 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
7168 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
7169 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
7171 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
7173 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
7174 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
7175 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
7177 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
7179 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
7180 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
7181 data &= ~PLL_RAMP_UP_TIME_0_MASK;
7183 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
7185 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
7186 data &= ~PLL_RAMP_UP_TIME_1_MASK;
7188 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
7190 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2);
7191 data &= ~PLL_RAMP_UP_TIME_2_MASK;
7193 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2, data);
7195 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3);
7196 data &= ~PLL_RAMP_UP_TIME_3_MASK;
7198 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3, data);
7200 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
7201 data &= ~PLL_RAMP_UP_TIME_0_MASK;
7203 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
7205 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
7206 data &= ~PLL_RAMP_UP_TIME_1_MASK;
7208 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
7210 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2);
7211 data &= ~PLL_RAMP_UP_TIME_2_MASK;
7213 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2, data);
7215 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3);
7216 data &= ~PLL_RAMP_UP_TIME_3_MASK;
7218 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3, data);
7220 orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
7221 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
7222 data |= LC_DYN_LANES_PWR_STATE(3);
7224 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
7226 orig = data = RREG32_PIF_PHY0(PB0_PIF_CNTL);
7227 data &= ~LS2_EXIT_TIME_MASK;
7228 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
7229 data |= LS2_EXIT_TIME(5);
7231 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
7233 orig = data = RREG32_PIF_PHY1(PB1_PIF_CNTL);
7234 data &= ~LS2_EXIT_TIME_MASK;
7235 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
7236 data |= LS2_EXIT_TIME(5);
7238 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
7240 if (!disable_clkreq) {
7241 struct pci_dev *root = rdev->pdev->bus->self;
7244 clk_req_support = false;
7245 pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
7246 if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
7247 clk_req_support = true;
7249 clk_req_support = false;
7252 if (clk_req_support) {
7253 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
7254 data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
7256 WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);
7258 orig = data = RREG32(THM_CLK_CNTL);
7259 data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
7260 data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
7262 WREG32(THM_CLK_CNTL, data);
7264 orig = data = RREG32(MISC_CLK_CNTL);
7265 data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
7266 data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
7268 WREG32(MISC_CLK_CNTL, data);
7270 orig = data = RREG32(CG_CLKPIN_CNTL);
7271 data &= ~BCLK_AS_XCLK;
7273 WREG32(CG_CLKPIN_CNTL, data);
7275 orig = data = RREG32(CG_CLKPIN_CNTL_2);
7276 data &= ~FORCE_BIF_REFCLK_EN;
7278 WREG32(CG_CLKPIN_CNTL_2, data);
7280 orig = data = RREG32(MPLL_BYPASSCLK_SEL);
7281 data &= ~MPLL_CLKOUT_SEL_MASK;
7282 data |= MPLL_CLKOUT_SEL(4);
7284 WREG32(MPLL_BYPASSCLK_SEL, data);
7286 orig = data = RREG32(SPLL_CNTL_MODE);
7287 data &= ~SPLL_REFCLK_SEL_MASK;
7289 WREG32(SPLL_CNTL_MODE, data);
7294 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
7297 orig = data = RREG32_PCIE(PCIE_CNTL2);
7298 data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
7300 WREG32_PCIE(PCIE_CNTL2, data);
7303 data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
7304 if((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
7305 data = RREG32_PCIE(PCIE_LC_STATUS1);
7306 if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
7307 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
7308 data &= ~LC_L0S_INACTIVITY_MASK;
7310 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);