/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <linux/module.h>
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
33 #include "si_blit_shaders.h"
34 #include "clearstate_si.h"
35 #include "radeon_ucode.h"
38 MODULE_FIRMWARE("radeon/TAHITI_pfp.bin");
39 MODULE_FIRMWARE("radeon/TAHITI_me.bin");
40 MODULE_FIRMWARE("radeon/TAHITI_ce.bin");
41 MODULE_FIRMWARE("radeon/TAHITI_mc.bin");
42 MODULE_FIRMWARE("radeon/TAHITI_mc2.bin");
43 MODULE_FIRMWARE("radeon/TAHITI_rlc.bin");
44 MODULE_FIRMWARE("radeon/TAHITI_smc.bin");
45 MODULE_FIRMWARE("radeon/PITCAIRN_pfp.bin");
46 MODULE_FIRMWARE("radeon/PITCAIRN_me.bin");
47 MODULE_FIRMWARE("radeon/PITCAIRN_ce.bin");
48 MODULE_FIRMWARE("radeon/PITCAIRN_mc.bin");
49 MODULE_FIRMWARE("radeon/PITCAIRN_mc2.bin");
50 MODULE_FIRMWARE("radeon/PITCAIRN_rlc.bin");
51 MODULE_FIRMWARE("radeon/PITCAIRN_smc.bin");
52 MODULE_FIRMWARE("radeon/VERDE_pfp.bin");
53 MODULE_FIRMWARE("radeon/VERDE_me.bin");
54 MODULE_FIRMWARE("radeon/VERDE_ce.bin");
55 MODULE_FIRMWARE("radeon/VERDE_mc.bin");
56 MODULE_FIRMWARE("radeon/VERDE_mc2.bin");
57 MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
58 MODULE_FIRMWARE("radeon/VERDE_smc.bin");
59 MODULE_FIRMWARE("radeon/OLAND_pfp.bin");
60 MODULE_FIRMWARE("radeon/OLAND_me.bin");
61 MODULE_FIRMWARE("radeon/OLAND_ce.bin");
62 MODULE_FIRMWARE("radeon/OLAND_mc.bin");
63 MODULE_FIRMWARE("radeon/OLAND_mc2.bin");
64 MODULE_FIRMWARE("radeon/OLAND_rlc.bin");
65 MODULE_FIRMWARE("radeon/OLAND_smc.bin");
66 MODULE_FIRMWARE("radeon/HAINAN_pfp.bin");
67 MODULE_FIRMWARE("radeon/HAINAN_me.bin");
68 MODULE_FIRMWARE("radeon/HAINAN_ce.bin");
69 MODULE_FIRMWARE("radeon/HAINAN_mc.bin");
70 MODULE_FIRMWARE("radeon/HAINAN_mc2.bin");
71 MODULE_FIRMWARE("radeon/HAINAN_rlc.bin");
72 MODULE_FIRMWARE("radeon/HAINAN_smc.bin");
74 static void si_pcie_gen3_enable(struct radeon_device *rdev);
75 static void si_program_aspm(struct radeon_device *rdev);
76 extern void sumo_rlc_fini(struct radeon_device *rdev);
77 extern int sumo_rlc_init(struct radeon_device *rdev);
78 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
79 extern void r600_ih_ring_fini(struct radeon_device *rdev);
80 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
81 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
82 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
83 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
84 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
85 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
86 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
88 static void si_init_pg(struct radeon_device *rdev);
89 static void si_init_cg(struct radeon_device *rdev);
90 static void si_fini_pg(struct radeon_device *rdev);
91 static void si_fini_cg(struct radeon_device *rdev);
92 static void si_rlc_stop(struct radeon_device *rdev);
94 static const u32 verde_rlc_save_restore_register_list[] =
96 (0x8000 << 16) | (0x98f4 >> 2),
98 (0x8040 << 16) | (0x98f4 >> 2),
100 (0x8000 << 16) | (0xe80 >> 2),
102 (0x8040 << 16) | (0xe80 >> 2),
104 (0x8000 << 16) | (0x89bc >> 2),
106 (0x8040 << 16) | (0x89bc >> 2),
108 (0x8000 << 16) | (0x8c1c >> 2),
110 (0x8040 << 16) | (0x8c1c >> 2),
112 (0x9c00 << 16) | (0x98f0 >> 2),
114 (0x9c00 << 16) | (0xe7c >> 2),
116 (0x8000 << 16) | (0x9148 >> 2),
118 (0x8040 << 16) | (0x9148 >> 2),
120 (0x9c00 << 16) | (0x9150 >> 2),
122 (0x9c00 << 16) | (0x897c >> 2),
124 (0x9c00 << 16) | (0x8d8c >> 2),
126 (0x9c00 << 16) | (0xac54 >> 2),
129 (0x9c00 << 16) | (0x98f8 >> 2),
131 (0x9c00 << 16) | (0x9910 >> 2),
133 (0x9c00 << 16) | (0x9914 >> 2),
135 (0x9c00 << 16) | (0x9918 >> 2),
137 (0x9c00 << 16) | (0x991c >> 2),
139 (0x9c00 << 16) | (0x9920 >> 2),
141 (0x9c00 << 16) | (0x9924 >> 2),
143 (0x9c00 << 16) | (0x9928 >> 2),
145 (0x9c00 << 16) | (0x992c >> 2),
147 (0x9c00 << 16) | (0x9930 >> 2),
149 (0x9c00 << 16) | (0x9934 >> 2),
151 (0x9c00 << 16) | (0x9938 >> 2),
153 (0x9c00 << 16) | (0x993c >> 2),
155 (0x9c00 << 16) | (0x9940 >> 2),
157 (0x9c00 << 16) | (0x9944 >> 2),
159 (0x9c00 << 16) | (0x9948 >> 2),
161 (0x9c00 << 16) | (0x994c >> 2),
163 (0x9c00 << 16) | (0x9950 >> 2),
165 (0x9c00 << 16) | (0x9954 >> 2),
167 (0x9c00 << 16) | (0x9958 >> 2),
169 (0x9c00 << 16) | (0x995c >> 2),
171 (0x9c00 << 16) | (0x9960 >> 2),
173 (0x9c00 << 16) | (0x9964 >> 2),
175 (0x9c00 << 16) | (0x9968 >> 2),
177 (0x9c00 << 16) | (0x996c >> 2),
179 (0x9c00 << 16) | (0x9970 >> 2),
181 (0x9c00 << 16) | (0x9974 >> 2),
183 (0x9c00 << 16) | (0x9978 >> 2),
185 (0x9c00 << 16) | (0x997c >> 2),
187 (0x9c00 << 16) | (0x9980 >> 2),
189 (0x9c00 << 16) | (0x9984 >> 2),
191 (0x9c00 << 16) | (0x9988 >> 2),
193 (0x9c00 << 16) | (0x998c >> 2),
195 (0x9c00 << 16) | (0x8c00 >> 2),
197 (0x9c00 << 16) | (0x8c14 >> 2),
199 (0x9c00 << 16) | (0x8c04 >> 2),
201 (0x9c00 << 16) | (0x8c08 >> 2),
203 (0x8000 << 16) | (0x9b7c >> 2),
205 (0x8040 << 16) | (0x9b7c >> 2),
207 (0x8000 << 16) | (0xe84 >> 2),
209 (0x8040 << 16) | (0xe84 >> 2),
211 (0x8000 << 16) | (0x89c0 >> 2),
213 (0x8040 << 16) | (0x89c0 >> 2),
215 (0x8000 << 16) | (0x914c >> 2),
217 (0x8040 << 16) | (0x914c >> 2),
219 (0x8000 << 16) | (0x8c20 >> 2),
221 (0x8040 << 16) | (0x8c20 >> 2),
223 (0x8000 << 16) | (0x9354 >> 2),
225 (0x8040 << 16) | (0x9354 >> 2),
227 (0x9c00 << 16) | (0x9060 >> 2),
229 (0x9c00 << 16) | (0x9364 >> 2),
231 (0x9c00 << 16) | (0x9100 >> 2),
233 (0x9c00 << 16) | (0x913c >> 2),
235 (0x8000 << 16) | (0x90e0 >> 2),
237 (0x8000 << 16) | (0x90e4 >> 2),
239 (0x8000 << 16) | (0x90e8 >> 2),
241 (0x8040 << 16) | (0x90e0 >> 2),
243 (0x8040 << 16) | (0x90e4 >> 2),
245 (0x8040 << 16) | (0x90e8 >> 2),
247 (0x9c00 << 16) | (0x8bcc >> 2),
249 (0x9c00 << 16) | (0x8b24 >> 2),
251 (0x9c00 << 16) | (0x88c4 >> 2),
253 (0x9c00 << 16) | (0x8e50 >> 2),
255 (0x9c00 << 16) | (0x8c0c >> 2),
257 (0x9c00 << 16) | (0x8e58 >> 2),
259 (0x9c00 << 16) | (0x8e5c >> 2),
261 (0x9c00 << 16) | (0x9508 >> 2),
263 (0x9c00 << 16) | (0x950c >> 2),
265 (0x9c00 << 16) | (0x9494 >> 2),
267 (0x9c00 << 16) | (0xac0c >> 2),
269 (0x9c00 << 16) | (0xac10 >> 2),
271 (0x9c00 << 16) | (0xac14 >> 2),
273 (0x9c00 << 16) | (0xae00 >> 2),
275 (0x9c00 << 16) | (0xac08 >> 2),
277 (0x9c00 << 16) | (0x88d4 >> 2),
279 (0x9c00 << 16) | (0x88c8 >> 2),
281 (0x9c00 << 16) | (0x88cc >> 2),
283 (0x9c00 << 16) | (0x89b0 >> 2),
285 (0x9c00 << 16) | (0x8b10 >> 2),
287 (0x9c00 << 16) | (0x8a14 >> 2),
289 (0x9c00 << 16) | (0x9830 >> 2),
291 (0x9c00 << 16) | (0x9834 >> 2),
293 (0x9c00 << 16) | (0x9838 >> 2),
295 (0x9c00 << 16) | (0x9a10 >> 2),
297 (0x8000 << 16) | (0x9870 >> 2),
299 (0x8000 << 16) | (0x9874 >> 2),
301 (0x8001 << 16) | (0x9870 >> 2),
303 (0x8001 << 16) | (0x9874 >> 2),
305 (0x8040 << 16) | (0x9870 >> 2),
307 (0x8040 << 16) | (0x9874 >> 2),
309 (0x8041 << 16) | (0x9870 >> 2),
311 (0x8041 << 16) | (0x9874 >> 2),
316 static const u32 tahiti_golden_rlc_registers[] =
318 0xc424, 0xffffffff, 0x00601005,
319 0xc47c, 0xffffffff, 0x10104040,
320 0xc488, 0xffffffff, 0x0100000a,
321 0xc314, 0xffffffff, 0x00000800,
322 0xc30c, 0xffffffff, 0x800000f4,
323 0xf4a8, 0xffffffff, 0x00000000
326 static const u32 tahiti_golden_registers[] =
328 0x9a10, 0x00010000, 0x00018208,
329 0x9830, 0xffffffff, 0x00000000,
330 0x9834, 0xf00fffff, 0x00000400,
331 0x9838, 0x0002021c, 0x00020200,
332 0xc78, 0x00000080, 0x00000000,
333 0xd030, 0x000300c0, 0x00800040,
334 0xd830, 0x000300c0, 0x00800040,
335 0x5bb0, 0x000000f0, 0x00000070,
336 0x5bc0, 0x00200000, 0x50100000,
337 0x7030, 0x31000311, 0x00000011,
338 0x277c, 0x00000003, 0x000007ff,
339 0x240c, 0x000007ff, 0x00000000,
340 0x8a14, 0xf000001f, 0x00000007,
341 0x8b24, 0xffffffff, 0x00ffffff,
342 0x8b10, 0x0000ff0f, 0x00000000,
343 0x28a4c, 0x07ffffff, 0x4e000000,
344 0x28350, 0x3f3f3fff, 0x2a00126a,
345 0x30, 0x000000ff, 0x0040,
346 0x34, 0x00000040, 0x00004040,
347 0x9100, 0x07ffffff, 0x03000000,
348 0x8e88, 0x01ff1f3f, 0x00000000,
349 0x8e84, 0x01ff1f3f, 0x00000000,
350 0x9060, 0x0000007f, 0x00000020,
351 0x9508, 0x00010000, 0x00010000,
352 0xac14, 0x00000200, 0x000002fb,
353 0xac10, 0xffffffff, 0x0000543b,
354 0xac0c, 0xffffffff, 0xa9210876,
355 0x88d0, 0xffffffff, 0x000fff40,
356 0x88d4, 0x0000001f, 0x00000010,
357 0x1410, 0x20000000, 0x20fffed8,
358 0x15c0, 0x000c0fc0, 0x000c0400
361 static const u32 tahiti_golden_registers2[] =
363 0xc64, 0x00000001, 0x00000001
366 static const u32 pitcairn_golden_rlc_registers[] =
368 0xc424, 0xffffffff, 0x00601004,
369 0xc47c, 0xffffffff, 0x10102020,
370 0xc488, 0xffffffff, 0x01000020,
371 0xc314, 0xffffffff, 0x00000800,
372 0xc30c, 0xffffffff, 0x800000a4
375 static const u32 pitcairn_golden_registers[] =
377 0x9a10, 0x00010000, 0x00018208,
378 0x9830, 0xffffffff, 0x00000000,
379 0x9834, 0xf00fffff, 0x00000400,
380 0x9838, 0x0002021c, 0x00020200,
381 0xc78, 0x00000080, 0x00000000,
382 0xd030, 0x000300c0, 0x00800040,
383 0xd830, 0x000300c0, 0x00800040,
384 0x5bb0, 0x000000f0, 0x00000070,
385 0x5bc0, 0x00200000, 0x50100000,
386 0x7030, 0x31000311, 0x00000011,
387 0x2ae4, 0x00073ffe, 0x000022a2,
388 0x240c, 0x000007ff, 0x00000000,
389 0x8a14, 0xf000001f, 0x00000007,
390 0x8b24, 0xffffffff, 0x00ffffff,
391 0x8b10, 0x0000ff0f, 0x00000000,
392 0x28a4c, 0x07ffffff, 0x4e000000,
393 0x28350, 0x3f3f3fff, 0x2a00126a,
394 0x30, 0x000000ff, 0x0040,
395 0x34, 0x00000040, 0x00004040,
396 0x9100, 0x07ffffff, 0x03000000,
397 0x9060, 0x0000007f, 0x00000020,
398 0x9508, 0x00010000, 0x00010000,
399 0xac14, 0x000003ff, 0x000000f7,
400 0xac10, 0xffffffff, 0x00000000,
401 0xac0c, 0xffffffff, 0x32761054,
402 0x88d4, 0x0000001f, 0x00000010,
403 0x15c0, 0x000c0fc0, 0x000c0400
406 static const u32 verde_golden_rlc_registers[] =
408 0xc424, 0xffffffff, 0x033f1005,
409 0xc47c, 0xffffffff, 0x10808020,
410 0xc488, 0xffffffff, 0x00800008,
411 0xc314, 0xffffffff, 0x00001000,
412 0xc30c, 0xffffffff, 0x80010014
415 static const u32 verde_golden_registers[] =
417 0x9a10, 0x00010000, 0x00018208,
418 0x9830, 0xffffffff, 0x00000000,
419 0x9834, 0xf00fffff, 0x00000400,
420 0x9838, 0x0002021c, 0x00020200,
421 0xc78, 0x00000080, 0x00000000,
422 0xd030, 0x000300c0, 0x00800040,
423 0xd030, 0x000300c0, 0x00800040,
424 0xd830, 0x000300c0, 0x00800040,
425 0xd830, 0x000300c0, 0x00800040,
426 0x5bb0, 0x000000f0, 0x00000070,
427 0x5bc0, 0x00200000, 0x50100000,
428 0x7030, 0x31000311, 0x00000011,
429 0x2ae4, 0x00073ffe, 0x000022a2,
430 0x2ae4, 0x00073ffe, 0x000022a2,
431 0x2ae4, 0x00073ffe, 0x000022a2,
432 0x240c, 0x000007ff, 0x00000000,
433 0x240c, 0x000007ff, 0x00000000,
434 0x240c, 0x000007ff, 0x00000000,
435 0x8a14, 0xf000001f, 0x00000007,
436 0x8a14, 0xf000001f, 0x00000007,
437 0x8a14, 0xf000001f, 0x00000007,
438 0x8b24, 0xffffffff, 0x00ffffff,
439 0x8b10, 0x0000ff0f, 0x00000000,
440 0x28a4c, 0x07ffffff, 0x4e000000,
441 0x28350, 0x3f3f3fff, 0x0000124a,
442 0x28350, 0x3f3f3fff, 0x0000124a,
443 0x28350, 0x3f3f3fff, 0x0000124a,
444 0x30, 0x000000ff, 0x0040,
445 0x34, 0x00000040, 0x00004040,
446 0x9100, 0x07ffffff, 0x03000000,
447 0x9100, 0x07ffffff, 0x03000000,
448 0x8e88, 0x01ff1f3f, 0x00000000,
449 0x8e88, 0x01ff1f3f, 0x00000000,
450 0x8e88, 0x01ff1f3f, 0x00000000,
451 0x8e84, 0x01ff1f3f, 0x00000000,
452 0x8e84, 0x01ff1f3f, 0x00000000,
453 0x8e84, 0x01ff1f3f, 0x00000000,
454 0x9060, 0x0000007f, 0x00000020,
455 0x9508, 0x00010000, 0x00010000,
456 0xac14, 0x000003ff, 0x00000003,
457 0xac14, 0x000003ff, 0x00000003,
458 0xac14, 0x000003ff, 0x00000003,
459 0xac10, 0xffffffff, 0x00000000,
460 0xac10, 0xffffffff, 0x00000000,
461 0xac10, 0xffffffff, 0x00000000,
462 0xac0c, 0xffffffff, 0x00001032,
463 0xac0c, 0xffffffff, 0x00001032,
464 0xac0c, 0xffffffff, 0x00001032,
465 0x88d4, 0x0000001f, 0x00000010,
466 0x88d4, 0x0000001f, 0x00000010,
467 0x88d4, 0x0000001f, 0x00000010,
468 0x15c0, 0x000c0fc0, 0x000c0400
471 static const u32 oland_golden_rlc_registers[] =
473 0xc424, 0xffffffff, 0x00601005,
474 0xc47c, 0xffffffff, 0x10104040,
475 0xc488, 0xffffffff, 0x0100000a,
476 0xc314, 0xffffffff, 0x00000800,
477 0xc30c, 0xffffffff, 0x800000f4
480 static const u32 oland_golden_registers[] =
482 0x9a10, 0x00010000, 0x00018208,
483 0x9830, 0xffffffff, 0x00000000,
484 0x9834, 0xf00fffff, 0x00000400,
485 0x9838, 0x0002021c, 0x00020200,
486 0xc78, 0x00000080, 0x00000000,
487 0xd030, 0x000300c0, 0x00800040,
488 0xd830, 0x000300c0, 0x00800040,
489 0x5bb0, 0x000000f0, 0x00000070,
490 0x5bc0, 0x00200000, 0x50100000,
491 0x7030, 0x31000311, 0x00000011,
492 0x2ae4, 0x00073ffe, 0x000022a2,
493 0x240c, 0x000007ff, 0x00000000,
494 0x8a14, 0xf000001f, 0x00000007,
495 0x8b24, 0xffffffff, 0x00ffffff,
496 0x8b10, 0x0000ff0f, 0x00000000,
497 0x28a4c, 0x07ffffff, 0x4e000000,
498 0x28350, 0x3f3f3fff, 0x00000082,
499 0x30, 0x000000ff, 0x0040,
500 0x34, 0x00000040, 0x00004040,
501 0x9100, 0x07ffffff, 0x03000000,
502 0x9060, 0x0000007f, 0x00000020,
503 0x9508, 0x00010000, 0x00010000,
504 0xac14, 0x000003ff, 0x000000f3,
505 0xac10, 0xffffffff, 0x00000000,
506 0xac0c, 0xffffffff, 0x00003210,
507 0x88d4, 0x0000001f, 0x00000010,
508 0x15c0, 0x000c0fc0, 0x000c0400
511 static const u32 hainan_golden_registers[] =
513 0x9a10, 0x00010000, 0x00018208,
514 0x9830, 0xffffffff, 0x00000000,
515 0x9834, 0xf00fffff, 0x00000400,
516 0x9838, 0x0002021c, 0x00020200,
517 0xd0c0, 0xff000fff, 0x00000100,
518 0xd030, 0x000300c0, 0x00800040,
519 0xd8c0, 0xff000fff, 0x00000100,
520 0xd830, 0x000300c0, 0x00800040,
521 0x2ae4, 0x00073ffe, 0x000022a2,
522 0x240c, 0x000007ff, 0x00000000,
523 0x8a14, 0xf000001f, 0x00000007,
524 0x8b24, 0xffffffff, 0x00ffffff,
525 0x8b10, 0x0000ff0f, 0x00000000,
526 0x28a4c, 0x07ffffff, 0x4e000000,
527 0x28350, 0x3f3f3fff, 0x00000000,
528 0x30, 0x000000ff, 0x0040,
529 0x34, 0x00000040, 0x00004040,
530 0x9100, 0x03e00000, 0x03600000,
531 0x9060, 0x0000007f, 0x00000020,
532 0x9508, 0x00010000, 0x00010000,
533 0xac14, 0x000003ff, 0x000000f1,
534 0xac10, 0xffffffff, 0x00000000,
535 0xac0c, 0xffffffff, 0x00003210,
536 0x88d4, 0x0000001f, 0x00000010,
537 0x15c0, 0x000c0fc0, 0x000c0400
540 static const u32 hainan_golden_registers2[] =
542 0x98f8, 0xffffffff, 0x02010001
545 static const u32 tahiti_mgcg_cgcg_init[] =
547 0xc400, 0xffffffff, 0xfffffffc,
548 0x802c, 0xffffffff, 0xe0000000,
549 0x9a60, 0xffffffff, 0x00000100,
550 0x92a4, 0xffffffff, 0x00000100,
551 0xc164, 0xffffffff, 0x00000100,
552 0x9774, 0xffffffff, 0x00000100,
553 0x8984, 0xffffffff, 0x06000100,
554 0x8a18, 0xffffffff, 0x00000100,
555 0x92a0, 0xffffffff, 0x00000100,
556 0xc380, 0xffffffff, 0x00000100,
557 0x8b28, 0xffffffff, 0x00000100,
558 0x9144, 0xffffffff, 0x00000100,
559 0x8d88, 0xffffffff, 0x00000100,
560 0x8d8c, 0xffffffff, 0x00000100,
561 0x9030, 0xffffffff, 0x00000100,
562 0x9034, 0xffffffff, 0x00000100,
563 0x9038, 0xffffffff, 0x00000100,
564 0x903c, 0xffffffff, 0x00000100,
565 0xad80, 0xffffffff, 0x00000100,
566 0xac54, 0xffffffff, 0x00000100,
567 0x897c, 0xffffffff, 0x06000100,
568 0x9868, 0xffffffff, 0x00000100,
569 0x9510, 0xffffffff, 0x00000100,
570 0xaf04, 0xffffffff, 0x00000100,
571 0xae04, 0xffffffff, 0x00000100,
572 0x949c, 0xffffffff, 0x00000100,
573 0x802c, 0xffffffff, 0xe0000000,
574 0x9160, 0xffffffff, 0x00010000,
575 0x9164, 0xffffffff, 0x00030002,
576 0x9168, 0xffffffff, 0x00040007,
577 0x916c, 0xffffffff, 0x00060005,
578 0x9170, 0xffffffff, 0x00090008,
579 0x9174, 0xffffffff, 0x00020001,
580 0x9178, 0xffffffff, 0x00040003,
581 0x917c, 0xffffffff, 0x00000007,
582 0x9180, 0xffffffff, 0x00060005,
583 0x9184, 0xffffffff, 0x00090008,
584 0x9188, 0xffffffff, 0x00030002,
585 0x918c, 0xffffffff, 0x00050004,
586 0x9190, 0xffffffff, 0x00000008,
587 0x9194, 0xffffffff, 0x00070006,
588 0x9198, 0xffffffff, 0x000a0009,
589 0x919c, 0xffffffff, 0x00040003,
590 0x91a0, 0xffffffff, 0x00060005,
591 0x91a4, 0xffffffff, 0x00000009,
592 0x91a8, 0xffffffff, 0x00080007,
593 0x91ac, 0xffffffff, 0x000b000a,
594 0x91b0, 0xffffffff, 0x00050004,
595 0x91b4, 0xffffffff, 0x00070006,
596 0x91b8, 0xffffffff, 0x0008000b,
597 0x91bc, 0xffffffff, 0x000a0009,
598 0x91c0, 0xffffffff, 0x000d000c,
599 0x91c4, 0xffffffff, 0x00060005,
600 0x91c8, 0xffffffff, 0x00080007,
601 0x91cc, 0xffffffff, 0x0000000b,
602 0x91d0, 0xffffffff, 0x000a0009,
603 0x91d4, 0xffffffff, 0x000d000c,
604 0x91d8, 0xffffffff, 0x00070006,
605 0x91dc, 0xffffffff, 0x00090008,
606 0x91e0, 0xffffffff, 0x0000000c,
607 0x91e4, 0xffffffff, 0x000b000a,
608 0x91e8, 0xffffffff, 0x000e000d,
609 0x91ec, 0xffffffff, 0x00080007,
610 0x91f0, 0xffffffff, 0x000a0009,
611 0x91f4, 0xffffffff, 0x0000000d,
612 0x91f8, 0xffffffff, 0x000c000b,
613 0x91fc, 0xffffffff, 0x000f000e,
614 0x9200, 0xffffffff, 0x00090008,
615 0x9204, 0xffffffff, 0x000b000a,
616 0x9208, 0xffffffff, 0x000c000f,
617 0x920c, 0xffffffff, 0x000e000d,
618 0x9210, 0xffffffff, 0x00110010,
619 0x9214, 0xffffffff, 0x000a0009,
620 0x9218, 0xffffffff, 0x000c000b,
621 0x921c, 0xffffffff, 0x0000000f,
622 0x9220, 0xffffffff, 0x000e000d,
623 0x9224, 0xffffffff, 0x00110010,
624 0x9228, 0xffffffff, 0x000b000a,
625 0x922c, 0xffffffff, 0x000d000c,
626 0x9230, 0xffffffff, 0x00000010,
627 0x9234, 0xffffffff, 0x000f000e,
628 0x9238, 0xffffffff, 0x00120011,
629 0x923c, 0xffffffff, 0x000c000b,
630 0x9240, 0xffffffff, 0x000e000d,
631 0x9244, 0xffffffff, 0x00000011,
632 0x9248, 0xffffffff, 0x0010000f,
633 0x924c, 0xffffffff, 0x00130012,
634 0x9250, 0xffffffff, 0x000d000c,
635 0x9254, 0xffffffff, 0x000f000e,
636 0x9258, 0xffffffff, 0x00100013,
637 0x925c, 0xffffffff, 0x00120011,
638 0x9260, 0xffffffff, 0x00150014,
639 0x9264, 0xffffffff, 0x000e000d,
640 0x9268, 0xffffffff, 0x0010000f,
641 0x926c, 0xffffffff, 0x00000013,
642 0x9270, 0xffffffff, 0x00120011,
643 0x9274, 0xffffffff, 0x00150014,
644 0x9278, 0xffffffff, 0x000f000e,
645 0x927c, 0xffffffff, 0x00110010,
646 0x9280, 0xffffffff, 0x00000014,
647 0x9284, 0xffffffff, 0x00130012,
648 0x9288, 0xffffffff, 0x00160015,
649 0x928c, 0xffffffff, 0x0010000f,
650 0x9290, 0xffffffff, 0x00120011,
651 0x9294, 0xffffffff, 0x00000015,
652 0x9298, 0xffffffff, 0x00140013,
653 0x929c, 0xffffffff, 0x00170016,
654 0x9150, 0xffffffff, 0x96940200,
655 0x8708, 0xffffffff, 0x00900100,
656 0xc478, 0xffffffff, 0x00000080,
657 0xc404, 0xffffffff, 0x0020003f,
658 0x30, 0xffffffff, 0x0000001c,
659 0x34, 0x000f0000, 0x000f0000,
660 0x160c, 0xffffffff, 0x00000100,
661 0x1024, 0xffffffff, 0x00000100,
662 0x102c, 0x00000101, 0x00000000,
663 0x20a8, 0xffffffff, 0x00000104,
664 0x264c, 0x000c0000, 0x000c0000,
665 0x2648, 0x000c0000, 0x000c0000,
666 0x55e4, 0xff000fff, 0x00000100,
667 0x55e8, 0x00000001, 0x00000001,
668 0x2f50, 0x00000001, 0x00000001,
669 0x30cc, 0xc0000fff, 0x00000104,
670 0xc1e4, 0x00000001, 0x00000001,
671 0xd0c0, 0xfffffff0, 0x00000100,
672 0xd8c0, 0xfffffff0, 0x00000100
675 static const u32 pitcairn_mgcg_cgcg_init[] =
677 0xc400, 0xffffffff, 0xfffffffc,
678 0x802c, 0xffffffff, 0xe0000000,
679 0x9a60, 0xffffffff, 0x00000100,
680 0x92a4, 0xffffffff, 0x00000100,
681 0xc164, 0xffffffff, 0x00000100,
682 0x9774, 0xffffffff, 0x00000100,
683 0x8984, 0xffffffff, 0x06000100,
684 0x8a18, 0xffffffff, 0x00000100,
685 0x92a0, 0xffffffff, 0x00000100,
686 0xc380, 0xffffffff, 0x00000100,
687 0x8b28, 0xffffffff, 0x00000100,
688 0x9144, 0xffffffff, 0x00000100,
689 0x8d88, 0xffffffff, 0x00000100,
690 0x8d8c, 0xffffffff, 0x00000100,
691 0x9030, 0xffffffff, 0x00000100,
692 0x9034, 0xffffffff, 0x00000100,
693 0x9038, 0xffffffff, 0x00000100,
694 0x903c, 0xffffffff, 0x00000100,
695 0xad80, 0xffffffff, 0x00000100,
696 0xac54, 0xffffffff, 0x00000100,
697 0x897c, 0xffffffff, 0x06000100,
698 0x9868, 0xffffffff, 0x00000100,
699 0x9510, 0xffffffff, 0x00000100,
700 0xaf04, 0xffffffff, 0x00000100,
701 0xae04, 0xffffffff, 0x00000100,
702 0x949c, 0xffffffff, 0x00000100,
703 0x802c, 0xffffffff, 0xe0000000,
704 0x9160, 0xffffffff, 0x00010000,
705 0x9164, 0xffffffff, 0x00030002,
706 0x9168, 0xffffffff, 0x00040007,
707 0x916c, 0xffffffff, 0x00060005,
708 0x9170, 0xffffffff, 0x00090008,
709 0x9174, 0xffffffff, 0x00020001,
710 0x9178, 0xffffffff, 0x00040003,
711 0x917c, 0xffffffff, 0x00000007,
712 0x9180, 0xffffffff, 0x00060005,
713 0x9184, 0xffffffff, 0x00090008,
714 0x9188, 0xffffffff, 0x00030002,
715 0x918c, 0xffffffff, 0x00050004,
716 0x9190, 0xffffffff, 0x00000008,
717 0x9194, 0xffffffff, 0x00070006,
718 0x9198, 0xffffffff, 0x000a0009,
719 0x919c, 0xffffffff, 0x00040003,
720 0x91a0, 0xffffffff, 0x00060005,
721 0x91a4, 0xffffffff, 0x00000009,
722 0x91a8, 0xffffffff, 0x00080007,
723 0x91ac, 0xffffffff, 0x000b000a,
724 0x91b0, 0xffffffff, 0x00050004,
725 0x91b4, 0xffffffff, 0x00070006,
726 0x91b8, 0xffffffff, 0x0008000b,
727 0x91bc, 0xffffffff, 0x000a0009,
728 0x91c0, 0xffffffff, 0x000d000c,
729 0x9200, 0xffffffff, 0x00090008,
730 0x9204, 0xffffffff, 0x000b000a,
731 0x9208, 0xffffffff, 0x000c000f,
732 0x920c, 0xffffffff, 0x000e000d,
733 0x9210, 0xffffffff, 0x00110010,
734 0x9214, 0xffffffff, 0x000a0009,
735 0x9218, 0xffffffff, 0x000c000b,
736 0x921c, 0xffffffff, 0x0000000f,
737 0x9220, 0xffffffff, 0x000e000d,
738 0x9224, 0xffffffff, 0x00110010,
739 0x9228, 0xffffffff, 0x000b000a,
740 0x922c, 0xffffffff, 0x000d000c,
741 0x9230, 0xffffffff, 0x00000010,
742 0x9234, 0xffffffff, 0x000f000e,
743 0x9238, 0xffffffff, 0x00120011,
744 0x923c, 0xffffffff, 0x000c000b,
745 0x9240, 0xffffffff, 0x000e000d,
746 0x9244, 0xffffffff, 0x00000011,
747 0x9248, 0xffffffff, 0x0010000f,
748 0x924c, 0xffffffff, 0x00130012,
749 0x9250, 0xffffffff, 0x000d000c,
750 0x9254, 0xffffffff, 0x000f000e,
751 0x9258, 0xffffffff, 0x00100013,
752 0x925c, 0xffffffff, 0x00120011,
753 0x9260, 0xffffffff, 0x00150014,
754 0x9150, 0xffffffff, 0x96940200,
755 0x8708, 0xffffffff, 0x00900100,
756 0xc478, 0xffffffff, 0x00000080,
757 0xc404, 0xffffffff, 0x0020003f,
758 0x30, 0xffffffff, 0x0000001c,
759 0x34, 0x000f0000, 0x000f0000,
760 0x160c, 0xffffffff, 0x00000100,
761 0x1024, 0xffffffff, 0x00000100,
762 0x102c, 0x00000101, 0x00000000,
763 0x20a8, 0xffffffff, 0x00000104,
764 0x55e4, 0xff000fff, 0x00000100,
765 0x55e8, 0x00000001, 0x00000001,
766 0x2f50, 0x00000001, 0x00000001,
767 0x30cc, 0xc0000fff, 0x00000104,
768 0xc1e4, 0x00000001, 0x00000001,
769 0xd0c0, 0xfffffff0, 0x00000100,
770 0xd8c0, 0xfffffff0, 0x00000100
773 static const u32 verde_mgcg_cgcg_init[] =
775 0xc400, 0xffffffff, 0xfffffffc,
776 0x802c, 0xffffffff, 0xe0000000,
777 0x9a60, 0xffffffff, 0x00000100,
778 0x92a4, 0xffffffff, 0x00000100,
779 0xc164, 0xffffffff, 0x00000100,
780 0x9774, 0xffffffff, 0x00000100,
781 0x8984, 0xffffffff, 0x06000100,
782 0x8a18, 0xffffffff, 0x00000100,
783 0x92a0, 0xffffffff, 0x00000100,
784 0xc380, 0xffffffff, 0x00000100,
785 0x8b28, 0xffffffff, 0x00000100,
786 0x9144, 0xffffffff, 0x00000100,
787 0x8d88, 0xffffffff, 0x00000100,
788 0x8d8c, 0xffffffff, 0x00000100,
789 0x9030, 0xffffffff, 0x00000100,
790 0x9034, 0xffffffff, 0x00000100,
791 0x9038, 0xffffffff, 0x00000100,
792 0x903c, 0xffffffff, 0x00000100,
793 0xad80, 0xffffffff, 0x00000100,
794 0xac54, 0xffffffff, 0x00000100,
795 0x897c, 0xffffffff, 0x06000100,
796 0x9868, 0xffffffff, 0x00000100,
797 0x9510, 0xffffffff, 0x00000100,
798 0xaf04, 0xffffffff, 0x00000100,
799 0xae04, 0xffffffff, 0x00000100,
800 0x949c, 0xffffffff, 0x00000100,
801 0x802c, 0xffffffff, 0xe0000000,
802 0x9160, 0xffffffff, 0x00010000,
803 0x9164, 0xffffffff, 0x00030002,
804 0x9168, 0xffffffff, 0x00040007,
805 0x916c, 0xffffffff, 0x00060005,
806 0x9170, 0xffffffff, 0x00090008,
807 0x9174, 0xffffffff, 0x00020001,
808 0x9178, 0xffffffff, 0x00040003,
809 0x917c, 0xffffffff, 0x00000007,
810 0x9180, 0xffffffff, 0x00060005,
811 0x9184, 0xffffffff, 0x00090008,
812 0x9188, 0xffffffff, 0x00030002,
813 0x918c, 0xffffffff, 0x00050004,
814 0x9190, 0xffffffff, 0x00000008,
815 0x9194, 0xffffffff, 0x00070006,
816 0x9198, 0xffffffff, 0x000a0009,
817 0x919c, 0xffffffff, 0x00040003,
818 0x91a0, 0xffffffff, 0x00060005,
819 0x91a4, 0xffffffff, 0x00000009,
820 0x91a8, 0xffffffff, 0x00080007,
821 0x91ac, 0xffffffff, 0x000b000a,
822 0x91b0, 0xffffffff, 0x00050004,
823 0x91b4, 0xffffffff, 0x00070006,
824 0x91b8, 0xffffffff, 0x0008000b,
825 0x91bc, 0xffffffff, 0x000a0009,
826 0x91c0, 0xffffffff, 0x000d000c,
827 0x9200, 0xffffffff, 0x00090008,
828 0x9204, 0xffffffff, 0x000b000a,
829 0x9208, 0xffffffff, 0x000c000f,
830 0x920c, 0xffffffff, 0x000e000d,
831 0x9210, 0xffffffff, 0x00110010,
832 0x9214, 0xffffffff, 0x000a0009,
833 0x9218, 0xffffffff, 0x000c000b,
834 0x921c, 0xffffffff, 0x0000000f,
835 0x9220, 0xffffffff, 0x000e000d,
836 0x9224, 0xffffffff, 0x00110010,
837 0x9228, 0xffffffff, 0x000b000a,
838 0x922c, 0xffffffff, 0x000d000c,
839 0x9230, 0xffffffff, 0x00000010,
840 0x9234, 0xffffffff, 0x000f000e,
841 0x9238, 0xffffffff, 0x00120011,
842 0x923c, 0xffffffff, 0x000c000b,
843 0x9240, 0xffffffff, 0x000e000d,
844 0x9244, 0xffffffff, 0x00000011,
845 0x9248, 0xffffffff, 0x0010000f,
846 0x924c, 0xffffffff, 0x00130012,
847 0x9250, 0xffffffff, 0x000d000c,
848 0x9254, 0xffffffff, 0x000f000e,
849 0x9258, 0xffffffff, 0x00100013,
850 0x925c, 0xffffffff, 0x00120011,
851 0x9260, 0xffffffff, 0x00150014,
852 0x9150, 0xffffffff, 0x96940200,
853 0x8708, 0xffffffff, 0x00900100,
854 0xc478, 0xffffffff, 0x00000080,
855 0xc404, 0xffffffff, 0x0020003f,
856 0x30, 0xffffffff, 0x0000001c,
857 0x34, 0x000f0000, 0x000f0000,
858 0x160c, 0xffffffff, 0x00000100,
859 0x1024, 0xffffffff, 0x00000100,
860 0x102c, 0x00000101, 0x00000000,
861 0x20a8, 0xffffffff, 0x00000104,
862 0x264c, 0x000c0000, 0x000c0000,
863 0x2648, 0x000c0000, 0x000c0000,
864 0x55e4, 0xff000fff, 0x00000100,
865 0x55e8, 0x00000001, 0x00000001,
866 0x2f50, 0x00000001, 0x00000001,
867 0x30cc, 0xc0000fff, 0x00000104,
868 0xc1e4, 0x00000001, 0x00000001,
869 0xd0c0, 0xfffffff0, 0x00000100,
870 0xd8c0, 0xfffffff0, 0x00000100
873 static const u32 oland_mgcg_cgcg_init[] =
875 0xc400, 0xffffffff, 0xfffffffc,
876 0x802c, 0xffffffff, 0xe0000000,
877 0x9a60, 0xffffffff, 0x00000100,
878 0x92a4, 0xffffffff, 0x00000100,
879 0xc164, 0xffffffff, 0x00000100,
880 0x9774, 0xffffffff, 0x00000100,
881 0x8984, 0xffffffff, 0x06000100,
882 0x8a18, 0xffffffff, 0x00000100,
883 0x92a0, 0xffffffff, 0x00000100,
884 0xc380, 0xffffffff, 0x00000100,
885 0x8b28, 0xffffffff, 0x00000100,
886 0x9144, 0xffffffff, 0x00000100,
887 0x8d88, 0xffffffff, 0x00000100,
888 0x8d8c, 0xffffffff, 0x00000100,
889 0x9030, 0xffffffff, 0x00000100,
890 0x9034, 0xffffffff, 0x00000100,
891 0x9038, 0xffffffff, 0x00000100,
892 0x903c, 0xffffffff, 0x00000100,
893 0xad80, 0xffffffff, 0x00000100,
894 0xac54, 0xffffffff, 0x00000100,
895 0x897c, 0xffffffff, 0x06000100,
896 0x9868, 0xffffffff, 0x00000100,
897 0x9510, 0xffffffff, 0x00000100,
898 0xaf04, 0xffffffff, 0x00000100,
899 0xae04, 0xffffffff, 0x00000100,
900 0x949c, 0xffffffff, 0x00000100,
901 0x802c, 0xffffffff, 0xe0000000,
902 0x9160, 0xffffffff, 0x00010000,
903 0x9164, 0xffffffff, 0x00030002,
904 0x9168, 0xffffffff, 0x00040007,
905 0x916c, 0xffffffff, 0x00060005,
906 0x9170, 0xffffffff, 0x00090008,
907 0x9174, 0xffffffff, 0x00020001,
908 0x9178, 0xffffffff, 0x00040003,
909 0x917c, 0xffffffff, 0x00000007,
910 0x9180, 0xffffffff, 0x00060005,
911 0x9184, 0xffffffff, 0x00090008,
912 0x9188, 0xffffffff, 0x00030002,
913 0x918c, 0xffffffff, 0x00050004,
914 0x9190, 0xffffffff, 0x00000008,
915 0x9194, 0xffffffff, 0x00070006,
916 0x9198, 0xffffffff, 0x000a0009,
917 0x919c, 0xffffffff, 0x00040003,
918 0x91a0, 0xffffffff, 0x00060005,
919 0x91a4, 0xffffffff, 0x00000009,
920 0x91a8, 0xffffffff, 0x00080007,
921 0x91ac, 0xffffffff, 0x000b000a,
922 0x91b0, 0xffffffff, 0x00050004,
923 0x91b4, 0xffffffff, 0x00070006,
924 0x91b8, 0xffffffff, 0x0008000b,
925 0x91bc, 0xffffffff, 0x000a0009,
926 0x91c0, 0xffffffff, 0x000d000c,
927 0x91c4, 0xffffffff, 0x00060005,
928 0x91c8, 0xffffffff, 0x00080007,
929 0x91cc, 0xffffffff, 0x0000000b,
930 0x91d0, 0xffffffff, 0x000a0009,
931 0x91d4, 0xffffffff, 0x000d000c,
932 0x9150, 0xffffffff, 0x96940200,
933 0x8708, 0xffffffff, 0x00900100,
934 0xc478, 0xffffffff, 0x00000080,
935 0xc404, 0xffffffff, 0x0020003f,
936 0x30, 0xffffffff, 0x0000001c,
937 0x34, 0x000f0000, 0x000f0000,
938 0x160c, 0xffffffff, 0x00000100,
939 0x1024, 0xffffffff, 0x00000100,
940 0x102c, 0x00000101, 0x00000000,
941 0x20a8, 0xffffffff, 0x00000104,
942 0x264c, 0x000c0000, 0x000c0000,
943 0x2648, 0x000c0000, 0x000c0000,
944 0x55e4, 0xff000fff, 0x00000100,
945 0x55e8, 0x00000001, 0x00000001,
946 0x2f50, 0x00000001, 0x00000001,
947 0x30cc, 0xc0000fff, 0x00000104,
948 0xc1e4, 0x00000001, 0x00000001,
949 0xd0c0, 0xfffffff0, 0x00000100,
950 0xd8c0, 0xfffffff0, 0x00000100
953 static const u32 hainan_mgcg_cgcg_init[] =
955 0xc400, 0xffffffff, 0xfffffffc,
956 0x802c, 0xffffffff, 0xe0000000,
957 0x9a60, 0xffffffff, 0x00000100,
958 0x92a4, 0xffffffff, 0x00000100,
959 0xc164, 0xffffffff, 0x00000100,
960 0x9774, 0xffffffff, 0x00000100,
961 0x8984, 0xffffffff, 0x06000100,
962 0x8a18, 0xffffffff, 0x00000100,
963 0x92a0, 0xffffffff, 0x00000100,
964 0xc380, 0xffffffff, 0x00000100,
965 0x8b28, 0xffffffff, 0x00000100,
966 0x9144, 0xffffffff, 0x00000100,
967 0x8d88, 0xffffffff, 0x00000100,
968 0x8d8c, 0xffffffff, 0x00000100,
969 0x9030, 0xffffffff, 0x00000100,
970 0x9034, 0xffffffff, 0x00000100,
971 0x9038, 0xffffffff, 0x00000100,
972 0x903c, 0xffffffff, 0x00000100,
973 0xad80, 0xffffffff, 0x00000100,
974 0xac54, 0xffffffff, 0x00000100,
975 0x897c, 0xffffffff, 0x06000100,
976 0x9868, 0xffffffff, 0x00000100,
977 0x9510, 0xffffffff, 0x00000100,
978 0xaf04, 0xffffffff, 0x00000100,
979 0xae04, 0xffffffff, 0x00000100,
980 0x949c, 0xffffffff, 0x00000100,
981 0x802c, 0xffffffff, 0xe0000000,
982 0x9160, 0xffffffff, 0x00010000,
983 0x9164, 0xffffffff, 0x00030002,
984 0x9168, 0xffffffff, 0x00040007,
985 0x916c, 0xffffffff, 0x00060005,
986 0x9170, 0xffffffff, 0x00090008,
987 0x9174, 0xffffffff, 0x00020001,
988 0x9178, 0xffffffff, 0x00040003,
989 0x917c, 0xffffffff, 0x00000007,
990 0x9180, 0xffffffff, 0x00060005,
991 0x9184, 0xffffffff, 0x00090008,
992 0x9188, 0xffffffff, 0x00030002,
993 0x918c, 0xffffffff, 0x00050004,
994 0x9190, 0xffffffff, 0x00000008,
995 0x9194, 0xffffffff, 0x00070006,
996 0x9198, 0xffffffff, 0x000a0009,
997 0x919c, 0xffffffff, 0x00040003,
998 0x91a0, 0xffffffff, 0x00060005,
999 0x91a4, 0xffffffff, 0x00000009,
1000 0x91a8, 0xffffffff, 0x00080007,
1001 0x91ac, 0xffffffff, 0x000b000a,
1002 0x91b0, 0xffffffff, 0x00050004,
1003 0x91b4, 0xffffffff, 0x00070006,
1004 0x91b8, 0xffffffff, 0x0008000b,
1005 0x91bc, 0xffffffff, 0x000a0009,
1006 0x91c0, 0xffffffff, 0x000d000c,
1007 0x91c4, 0xffffffff, 0x00060005,
1008 0x91c8, 0xffffffff, 0x00080007,
1009 0x91cc, 0xffffffff, 0x0000000b,
1010 0x91d0, 0xffffffff, 0x000a0009,
1011 0x91d4, 0xffffffff, 0x000d000c,
1012 0x9150, 0xffffffff, 0x96940200,
1013 0x8708, 0xffffffff, 0x00900100,
1014 0xc478, 0xffffffff, 0x00000080,
1015 0xc404, 0xffffffff, 0x0020003f,
1016 0x30, 0xffffffff, 0x0000001c,
1017 0x34, 0x000f0000, 0x000f0000,
1018 0x160c, 0xffffffff, 0x00000100,
1019 0x1024, 0xffffffff, 0x00000100,
1020 0x20a8, 0xffffffff, 0x00000104,
1021 0x264c, 0x000c0000, 0x000c0000,
1022 0x2648, 0x000c0000, 0x000c0000,
1023 0x2f50, 0x00000001, 0x00000001,
1024 0x30cc, 0xc0000fff, 0x00000104,
1025 0xc1e4, 0x00000001, 0x00000001,
1026 0xd0c0, 0xfffffff0, 0x00000100,
1027 0xd8c0, 0xfffffff0, 0x00000100
1030 static u32 verde_pg_init[] =
1032 0x353c, 0xffffffff, 0x40000,
1033 0x3538, 0xffffffff, 0x200010ff,
1034 0x353c, 0xffffffff, 0x0,
1035 0x353c, 0xffffffff, 0x0,
1036 0x353c, 0xffffffff, 0x0,
1037 0x353c, 0xffffffff, 0x0,
1038 0x353c, 0xffffffff, 0x0,
1039 0x353c, 0xffffffff, 0x7007,
1040 0x3538, 0xffffffff, 0x300010ff,
1041 0x353c, 0xffffffff, 0x0,
1042 0x353c, 0xffffffff, 0x0,
1043 0x353c, 0xffffffff, 0x0,
1044 0x353c, 0xffffffff, 0x0,
1045 0x353c, 0xffffffff, 0x0,
1046 0x353c, 0xffffffff, 0x400000,
1047 0x3538, 0xffffffff, 0x100010ff,
1048 0x353c, 0xffffffff, 0x0,
1049 0x353c, 0xffffffff, 0x0,
1050 0x353c, 0xffffffff, 0x0,
1051 0x353c, 0xffffffff, 0x0,
1052 0x353c, 0xffffffff, 0x0,
1053 0x353c, 0xffffffff, 0x120200,
1054 0x3538, 0xffffffff, 0x500010ff,
1055 0x353c, 0xffffffff, 0x0,
1056 0x353c, 0xffffffff, 0x0,
1057 0x353c, 0xffffffff, 0x0,
1058 0x353c, 0xffffffff, 0x0,
1059 0x353c, 0xffffffff, 0x0,
1060 0x353c, 0xffffffff, 0x1e1e16,
1061 0x3538, 0xffffffff, 0x600010ff,
1062 0x353c, 0xffffffff, 0x0,
1063 0x353c, 0xffffffff, 0x0,
1064 0x353c, 0xffffffff, 0x0,
1065 0x353c, 0xffffffff, 0x0,
1066 0x353c, 0xffffffff, 0x0,
1067 0x353c, 0xffffffff, 0x171f1e,
1068 0x3538, 0xffffffff, 0x700010ff,
1069 0x353c, 0xffffffff, 0x0,
1070 0x353c, 0xffffffff, 0x0,
1071 0x353c, 0xffffffff, 0x0,
1072 0x353c, 0xffffffff, 0x0,
1073 0x353c, 0xffffffff, 0x0,
1074 0x353c, 0xffffffff, 0x0,
1075 0x3538, 0xffffffff, 0x9ff,
1076 0x3500, 0xffffffff, 0x0,
1077 0x3504, 0xffffffff, 0x10000800,
1078 0x3504, 0xffffffff, 0xf,
1079 0x3504, 0xffffffff, 0xf,
1080 0x3500, 0xffffffff, 0x4,
1081 0x3504, 0xffffffff, 0x1000051e,
1082 0x3504, 0xffffffff, 0xffff,
1083 0x3504, 0xffffffff, 0xffff,
1084 0x3500, 0xffffffff, 0x8,
1085 0x3504, 0xffffffff, 0x80500,
1086 0x3500, 0xffffffff, 0x12,
1087 0x3504, 0xffffffff, 0x9050c,
1088 0x3500, 0xffffffff, 0x1d,
1089 0x3504, 0xffffffff, 0xb052c,
1090 0x3500, 0xffffffff, 0x2a,
1091 0x3504, 0xffffffff, 0x1053e,
1092 0x3500, 0xffffffff, 0x2d,
1093 0x3504, 0xffffffff, 0x10546,
1094 0x3500, 0xffffffff, 0x30,
1095 0x3504, 0xffffffff, 0xa054e,
1096 0x3500, 0xffffffff, 0x3c,
1097 0x3504, 0xffffffff, 0x1055f,
1098 0x3500, 0xffffffff, 0x3f,
1099 0x3504, 0xffffffff, 0x10567,
1100 0x3500, 0xffffffff, 0x42,
1101 0x3504, 0xffffffff, 0x1056f,
1102 0x3500, 0xffffffff, 0x45,
1103 0x3504, 0xffffffff, 0x10572,
1104 0x3500, 0xffffffff, 0x48,
1105 0x3504, 0xffffffff, 0x20575,
1106 0x3500, 0xffffffff, 0x4c,
1107 0x3504, 0xffffffff, 0x190801,
1108 0x3500, 0xffffffff, 0x67,
1109 0x3504, 0xffffffff, 0x1082a,
1110 0x3500, 0xffffffff, 0x6a,
1111 0x3504, 0xffffffff, 0x1b082d,
1112 0x3500, 0xffffffff, 0x87,
1113 0x3504, 0xffffffff, 0x310851,
1114 0x3500, 0xffffffff, 0xba,
1115 0x3504, 0xffffffff, 0x891,
1116 0x3500, 0xffffffff, 0xbc,
1117 0x3504, 0xffffffff, 0x893,
1118 0x3500, 0xffffffff, 0xbe,
1119 0x3504, 0xffffffff, 0x20895,
1120 0x3500, 0xffffffff, 0xc2,
1121 0x3504, 0xffffffff, 0x20899,
1122 0x3500, 0xffffffff, 0xc6,
1123 0x3504, 0xffffffff, 0x2089d,
1124 0x3500, 0xffffffff, 0xca,
1125 0x3504, 0xffffffff, 0x8a1,
1126 0x3500, 0xffffffff, 0xcc,
1127 0x3504, 0xffffffff, 0x8a3,
1128 0x3500, 0xffffffff, 0xce,
1129 0x3504, 0xffffffff, 0x308a5,
1130 0x3500, 0xffffffff, 0xd3,
1131 0x3504, 0xffffffff, 0x6d08cd,
1132 0x3500, 0xffffffff, 0x142,
1133 0x3504, 0xffffffff, 0x2000095a,
1134 0x3504, 0xffffffff, 0x1,
1135 0x3500, 0xffffffff, 0x144,
1136 0x3504, 0xffffffff, 0x301f095b,
1137 0x3500, 0xffffffff, 0x165,
1138 0x3504, 0xffffffff, 0xc094d,
1139 0x3500, 0xffffffff, 0x173,
1140 0x3504, 0xffffffff, 0xf096d,
1141 0x3500, 0xffffffff, 0x184,
1142 0x3504, 0xffffffff, 0x15097f,
1143 0x3500, 0xffffffff, 0x19b,
1144 0x3504, 0xffffffff, 0xc0998,
1145 0x3500, 0xffffffff, 0x1a9,
1146 0x3504, 0xffffffff, 0x409a7,
1147 0x3500, 0xffffffff, 0x1af,
1148 0x3504, 0xffffffff, 0xcdc,
1149 0x3500, 0xffffffff, 0x1b1,
1150 0x3504, 0xffffffff, 0x800,
1151 0x3508, 0xffffffff, 0x6c9b2000,
1152 0x3510, 0xfc00, 0x2000,
1153 0x3544, 0xffffffff, 0xfc0,
1154 0x28d4, 0x00000100, 0x100
1157 static void si_init_golden_registers(struct radeon_device *rdev)
1159 switch (rdev->family) {
1161 radeon_program_register_sequence(rdev,
1162 tahiti_golden_registers,
1163 (const u32)ARRAY_SIZE(tahiti_golden_registers));
1164 radeon_program_register_sequence(rdev,
1165 tahiti_golden_rlc_registers,
1166 (const u32)ARRAY_SIZE(tahiti_golden_rlc_registers));
1167 radeon_program_register_sequence(rdev,
1168 tahiti_mgcg_cgcg_init,
1169 (const u32)ARRAY_SIZE(tahiti_mgcg_cgcg_init));
1170 radeon_program_register_sequence(rdev,
1171 tahiti_golden_registers2,
1172 (const u32)ARRAY_SIZE(tahiti_golden_registers2));
1175 radeon_program_register_sequence(rdev,
1176 pitcairn_golden_registers,
1177 (const u32)ARRAY_SIZE(pitcairn_golden_registers));
1178 radeon_program_register_sequence(rdev,
1179 pitcairn_golden_rlc_registers,
1180 (const u32)ARRAY_SIZE(pitcairn_golden_rlc_registers));
1181 radeon_program_register_sequence(rdev,
1182 pitcairn_mgcg_cgcg_init,
1183 (const u32)ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
1186 radeon_program_register_sequence(rdev,
1187 verde_golden_registers,
1188 (const u32)ARRAY_SIZE(verde_golden_registers));
1189 radeon_program_register_sequence(rdev,
1190 verde_golden_rlc_registers,
1191 (const u32)ARRAY_SIZE(verde_golden_rlc_registers));
1192 radeon_program_register_sequence(rdev,
1193 verde_mgcg_cgcg_init,
1194 (const u32)ARRAY_SIZE(verde_mgcg_cgcg_init));
1195 radeon_program_register_sequence(rdev,
1197 (const u32)ARRAY_SIZE(verde_pg_init));
1200 radeon_program_register_sequence(rdev,
1201 oland_golden_registers,
1202 (const u32)ARRAY_SIZE(oland_golden_registers));
1203 radeon_program_register_sequence(rdev,
1204 oland_golden_rlc_registers,
1205 (const u32)ARRAY_SIZE(oland_golden_rlc_registers));
1206 radeon_program_register_sequence(rdev,
1207 oland_mgcg_cgcg_init,
1208 (const u32)ARRAY_SIZE(oland_mgcg_cgcg_init));
1211 radeon_program_register_sequence(rdev,
1212 hainan_golden_registers,
1213 (const u32)ARRAY_SIZE(hainan_golden_registers));
1214 radeon_program_register_sequence(rdev,
1215 hainan_golden_registers2,
1216 (const u32)ARRAY_SIZE(hainan_golden_registers2));
1217 radeon_program_register_sequence(rdev,
1218 hainan_mgcg_cgcg_init,
1219 (const u32)ARRAY_SIZE(hainan_mgcg_cgcg_init));
/* PCIe bus clock and derived test clock (TCLK), used by si_get_xclk()
 * when the xclk mux selects TCLK.  Units match spll reference_freq
 * (10 kHz units -- TODO confirm against radeon_clocks setup). */
#define PCIE_BUS_CLK                10000
#define TCLK                        (PCIE_BUS_CLK / 10)
1230 * si_get_xclk - get the xclk
1232 * @rdev: radeon_device pointer
1234 * Returns the reference clock used by the gfx engine
1237 u32 si_get_xclk(struct radeon_device *rdev)
1239 u32 reference_clock = rdev->clock.spll.reference_freq;
1242 tmp = RREG32(CG_CLKPIN_CNTL_2);
1243 if (tmp & MUX_TCLK_TO_XCLK)
1246 tmp = RREG32(CG_CLKPIN_CNTL);
1247 if (tmp & XTALIN_DIVIDE)
1248 return reference_clock / 4;
1250 return reference_clock;
1253 /* get temperature in millidegrees */
1254 int si_get_temp(struct radeon_device *rdev)
1257 int actual_temp = 0;
1259 temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >>
1265 actual_temp = temp & 0x1ff;
1267 actual_temp = (actual_temp * 1000);
1272 #define TAHITI_IO_MC_REGS_SIZE 36
1274 static const u32 tahiti_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1275 {0x0000006f, 0x03044000},
1276 {0x00000070, 0x0480c018},
1277 {0x00000071, 0x00000040},
1278 {0x00000072, 0x01000000},
1279 {0x00000074, 0x000000ff},
1280 {0x00000075, 0x00143400},
1281 {0x00000076, 0x08ec0800},
1282 {0x00000077, 0x040000cc},
1283 {0x00000079, 0x00000000},
1284 {0x0000007a, 0x21000409},
1285 {0x0000007c, 0x00000000},
1286 {0x0000007d, 0xe8000000},
1287 {0x0000007e, 0x044408a8},
1288 {0x0000007f, 0x00000003},
1289 {0x00000080, 0x00000000},
1290 {0x00000081, 0x01000000},
1291 {0x00000082, 0x02000000},
1292 {0x00000083, 0x00000000},
1293 {0x00000084, 0xe3f3e4f4},
1294 {0x00000085, 0x00052024},
1295 {0x00000087, 0x00000000},
1296 {0x00000088, 0x66036603},
1297 {0x00000089, 0x01000000},
1298 {0x0000008b, 0x1c0a0000},
1299 {0x0000008c, 0xff010000},
1300 {0x0000008e, 0xffffefff},
1301 {0x0000008f, 0xfff3efff},
1302 {0x00000090, 0xfff3efbf},
1303 {0x00000094, 0x00101101},
1304 {0x00000095, 0x00000fff},
1305 {0x00000096, 0x00116fff},
1306 {0x00000097, 0x60010000},
1307 {0x00000098, 0x10010000},
1308 {0x00000099, 0x00006000},
1309 {0x0000009a, 0x00001000},
1310 {0x0000009f, 0x00a77400}
1313 static const u32 pitcairn_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1314 {0x0000006f, 0x03044000},
1315 {0x00000070, 0x0480c018},
1316 {0x00000071, 0x00000040},
1317 {0x00000072, 0x01000000},
1318 {0x00000074, 0x000000ff},
1319 {0x00000075, 0x00143400},
1320 {0x00000076, 0x08ec0800},
1321 {0x00000077, 0x040000cc},
1322 {0x00000079, 0x00000000},
1323 {0x0000007a, 0x21000409},
1324 {0x0000007c, 0x00000000},
1325 {0x0000007d, 0xe8000000},
1326 {0x0000007e, 0x044408a8},
1327 {0x0000007f, 0x00000003},
1328 {0x00000080, 0x00000000},
1329 {0x00000081, 0x01000000},
1330 {0x00000082, 0x02000000},
1331 {0x00000083, 0x00000000},
1332 {0x00000084, 0xe3f3e4f4},
1333 {0x00000085, 0x00052024},
1334 {0x00000087, 0x00000000},
1335 {0x00000088, 0x66036603},
1336 {0x00000089, 0x01000000},
1337 {0x0000008b, 0x1c0a0000},
1338 {0x0000008c, 0xff010000},
1339 {0x0000008e, 0xffffefff},
1340 {0x0000008f, 0xfff3efff},
1341 {0x00000090, 0xfff3efbf},
1342 {0x00000094, 0x00101101},
1343 {0x00000095, 0x00000fff},
1344 {0x00000096, 0x00116fff},
1345 {0x00000097, 0x60010000},
1346 {0x00000098, 0x10010000},
1347 {0x00000099, 0x00006000},
1348 {0x0000009a, 0x00001000},
1349 {0x0000009f, 0x00a47400}
1352 static const u32 verde_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1353 {0x0000006f, 0x03044000},
1354 {0x00000070, 0x0480c018},
1355 {0x00000071, 0x00000040},
1356 {0x00000072, 0x01000000},
1357 {0x00000074, 0x000000ff},
1358 {0x00000075, 0x00143400},
1359 {0x00000076, 0x08ec0800},
1360 {0x00000077, 0x040000cc},
1361 {0x00000079, 0x00000000},
1362 {0x0000007a, 0x21000409},
1363 {0x0000007c, 0x00000000},
1364 {0x0000007d, 0xe8000000},
1365 {0x0000007e, 0x044408a8},
1366 {0x0000007f, 0x00000003},
1367 {0x00000080, 0x00000000},
1368 {0x00000081, 0x01000000},
1369 {0x00000082, 0x02000000},
1370 {0x00000083, 0x00000000},
1371 {0x00000084, 0xe3f3e4f4},
1372 {0x00000085, 0x00052024},
1373 {0x00000087, 0x00000000},
1374 {0x00000088, 0x66036603},
1375 {0x00000089, 0x01000000},
1376 {0x0000008b, 0x1c0a0000},
1377 {0x0000008c, 0xff010000},
1378 {0x0000008e, 0xffffefff},
1379 {0x0000008f, 0xfff3efff},
1380 {0x00000090, 0xfff3efbf},
1381 {0x00000094, 0x00101101},
1382 {0x00000095, 0x00000fff},
1383 {0x00000096, 0x00116fff},
1384 {0x00000097, 0x60010000},
1385 {0x00000098, 0x10010000},
1386 {0x00000099, 0x00006000},
1387 {0x0000009a, 0x00001000},
1388 {0x0000009f, 0x00a37400}
1391 static const u32 oland_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1392 {0x0000006f, 0x03044000},
1393 {0x00000070, 0x0480c018},
1394 {0x00000071, 0x00000040},
1395 {0x00000072, 0x01000000},
1396 {0x00000074, 0x000000ff},
1397 {0x00000075, 0x00143400},
1398 {0x00000076, 0x08ec0800},
1399 {0x00000077, 0x040000cc},
1400 {0x00000079, 0x00000000},
1401 {0x0000007a, 0x21000409},
1402 {0x0000007c, 0x00000000},
1403 {0x0000007d, 0xe8000000},
1404 {0x0000007e, 0x044408a8},
1405 {0x0000007f, 0x00000003},
1406 {0x00000080, 0x00000000},
1407 {0x00000081, 0x01000000},
1408 {0x00000082, 0x02000000},
1409 {0x00000083, 0x00000000},
1410 {0x00000084, 0xe3f3e4f4},
1411 {0x00000085, 0x00052024},
1412 {0x00000087, 0x00000000},
1413 {0x00000088, 0x66036603},
1414 {0x00000089, 0x01000000},
1415 {0x0000008b, 0x1c0a0000},
1416 {0x0000008c, 0xff010000},
1417 {0x0000008e, 0xffffefff},
1418 {0x0000008f, 0xfff3efff},
1419 {0x00000090, 0xfff3efbf},
1420 {0x00000094, 0x00101101},
1421 {0x00000095, 0x00000fff},
1422 {0x00000096, 0x00116fff},
1423 {0x00000097, 0x60010000},
1424 {0x00000098, 0x10010000},
1425 {0x00000099, 0x00006000},
1426 {0x0000009a, 0x00001000},
1427 {0x0000009f, 0x00a17730}
1430 static const u32 hainan_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1431 {0x0000006f, 0x03044000},
1432 {0x00000070, 0x0480c018},
1433 {0x00000071, 0x00000040},
1434 {0x00000072, 0x01000000},
1435 {0x00000074, 0x000000ff},
1436 {0x00000075, 0x00143400},
1437 {0x00000076, 0x08ec0800},
1438 {0x00000077, 0x040000cc},
1439 {0x00000079, 0x00000000},
1440 {0x0000007a, 0x21000409},
1441 {0x0000007c, 0x00000000},
1442 {0x0000007d, 0xe8000000},
1443 {0x0000007e, 0x044408a8},
1444 {0x0000007f, 0x00000003},
1445 {0x00000080, 0x00000000},
1446 {0x00000081, 0x01000000},
1447 {0x00000082, 0x02000000},
1448 {0x00000083, 0x00000000},
1449 {0x00000084, 0xe3f3e4f4},
1450 {0x00000085, 0x00052024},
1451 {0x00000087, 0x00000000},
1452 {0x00000088, 0x66036603},
1453 {0x00000089, 0x01000000},
1454 {0x0000008b, 0x1c0a0000},
1455 {0x0000008c, 0xff010000},
1456 {0x0000008e, 0xffffefff},
1457 {0x0000008f, 0xfff3efff},
1458 {0x00000090, 0xfff3efbf},
1459 {0x00000094, 0x00101101},
1460 {0x00000095, 0x00000fff},
1461 {0x00000096, 0x00116fff},
1462 {0x00000097, 0x60010000},
1463 {0x00000098, 0x10010000},
1464 {0x00000099, 0x00006000},
1465 {0x0000009a, 0x00001000},
1466 {0x0000009f, 0x00a07730}
1470 int si_mc_load_microcode(struct radeon_device *rdev)
1472 const __be32 *fw_data;
1473 u32 running, blackout = 0;
1475 int i, regs_size, ucode_size;
1480 ucode_size = rdev->mc_fw->size / 4;
1482 switch (rdev->family) {
1484 io_mc_regs = (u32 *)&tahiti_io_mc_regs;
1485 regs_size = TAHITI_IO_MC_REGS_SIZE;
1488 io_mc_regs = (u32 *)&pitcairn_io_mc_regs;
1489 regs_size = TAHITI_IO_MC_REGS_SIZE;
1493 io_mc_regs = (u32 *)&verde_io_mc_regs;
1494 regs_size = TAHITI_IO_MC_REGS_SIZE;
1497 io_mc_regs = (u32 *)&oland_io_mc_regs;
1498 regs_size = TAHITI_IO_MC_REGS_SIZE;
1501 io_mc_regs = (u32 *)&hainan_io_mc_regs;
1502 regs_size = TAHITI_IO_MC_REGS_SIZE;
1506 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK;
1510 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1511 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1514 /* reset the engine and set to writable */
1515 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1516 WREG32(MC_SEQ_SUP_CNTL, 0x00000010);
1518 /* load mc io regs */
1519 for (i = 0; i < regs_size; i++) {
1520 WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
1521 WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
1523 /* load the MC ucode */
1524 fw_data = (const __be32 *)rdev->mc_fw->data;
1525 for (i = 0; i < ucode_size; i++)
1526 WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));
1528 /* put the engine back into the active state */
1529 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1530 WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
1531 WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
1533 /* wait for training to complete */
1534 for (i = 0; i < rdev->usec_timeout; i++) {
1535 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
1539 for (i = 0; i < rdev->usec_timeout; i++) {
1540 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1)
1546 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout);
1552 static int si_init_microcode(struct radeon_device *rdev)
1554 const char *chip_name;
1555 const char *rlc_chip_name;
1556 size_t pfp_req_size, me_req_size, ce_req_size, rlc_req_size, mc_req_size;
1557 size_t smc_req_size, mc2_req_size;
1563 switch (rdev->family) {
1565 chip_name = "TAHITI";
1566 rlc_chip_name = "TAHITI";
1567 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1568 me_req_size = SI_PM4_UCODE_SIZE * 4;
1569 ce_req_size = SI_CE_UCODE_SIZE * 4;
1570 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1571 mc_req_size = SI_MC_UCODE_SIZE * 4;
1572 mc2_req_size = TAHITI_MC_UCODE_SIZE * 4;
1573 smc_req_size = ALIGN(TAHITI_SMC_UCODE_SIZE, 4);
1576 chip_name = "PITCAIRN";
1577 rlc_chip_name = "PITCAIRN";
1578 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1579 me_req_size = SI_PM4_UCODE_SIZE * 4;
1580 ce_req_size = SI_CE_UCODE_SIZE * 4;
1581 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1582 mc_req_size = SI_MC_UCODE_SIZE * 4;
1583 mc2_req_size = PITCAIRN_MC_UCODE_SIZE * 4;
1584 smc_req_size = ALIGN(PITCAIRN_SMC_UCODE_SIZE, 4);
1587 chip_name = "VERDE";
1588 rlc_chip_name = "VERDE";
1589 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1590 me_req_size = SI_PM4_UCODE_SIZE * 4;
1591 ce_req_size = SI_CE_UCODE_SIZE * 4;
1592 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1593 mc_req_size = SI_MC_UCODE_SIZE * 4;
1594 mc2_req_size = VERDE_MC_UCODE_SIZE * 4;
1595 smc_req_size = ALIGN(VERDE_SMC_UCODE_SIZE, 4);
1598 chip_name = "OLAND";
1599 rlc_chip_name = "OLAND";
1600 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1601 me_req_size = SI_PM4_UCODE_SIZE * 4;
1602 ce_req_size = SI_CE_UCODE_SIZE * 4;
1603 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1604 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1605 smc_req_size = ALIGN(OLAND_SMC_UCODE_SIZE, 4);
1608 chip_name = "HAINAN";
1609 rlc_chip_name = "HAINAN";
1610 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1611 me_req_size = SI_PM4_UCODE_SIZE * 4;
1612 ce_req_size = SI_CE_UCODE_SIZE * 4;
1613 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1614 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1615 smc_req_size = ALIGN(HAINAN_SMC_UCODE_SIZE, 4);
1620 DRM_INFO("Loading %s Microcode\n", chip_name);
1622 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
1623 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
1626 if (rdev->pfp_fw->size != pfp_req_size) {
1628 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1629 rdev->pfp_fw->size, fw_name);
1634 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
1635 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
1638 if (rdev->me_fw->size != me_req_size) {
1640 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1641 rdev->me_fw->size, fw_name);
1645 snprintf(fw_name, sizeof(fw_name), "radeon/%s_ce.bin", chip_name);
1646 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
1649 if (rdev->ce_fw->size != ce_req_size) {
1651 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1652 rdev->ce_fw->size, fw_name);
1656 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
1657 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
1660 if (rdev->rlc_fw->size != rlc_req_size) {
1662 "si_rlc: Bogus length %zu in firmware \"%s\"\n",
1663 rdev->rlc_fw->size, fw_name);
1667 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc2.bin", chip_name);
1668 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1670 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc.bin", chip_name);
1671 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1675 if ((rdev->mc_fw->size != mc_req_size) &&
1676 (rdev->mc_fw->size != mc2_req_size)) {
1678 "si_mc: Bogus length %zu in firmware \"%s\"\n",
1679 rdev->mc_fw->size, fw_name);
1682 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size);
1684 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", chip_name);
1685 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
1688 "smc: error loading firmware \"%s\"\n",
1690 release_firmware(rdev->smc_fw);
1691 rdev->smc_fw = NULL;
1693 } else if (rdev->smc_fw->size != smc_req_size) {
1695 "si_smc: Bogus length %zu in firmware \"%s\"\n",
1696 rdev->smc_fw->size, fw_name);
1704 "si_cp: Failed to load firmware \"%s\"\n",
1706 release_firmware(rdev->pfp_fw);
1707 rdev->pfp_fw = NULL;
1708 release_firmware(rdev->me_fw);
1710 release_firmware(rdev->ce_fw);
1712 release_firmware(rdev->rlc_fw);
1713 rdev->rlc_fw = NULL;
1714 release_firmware(rdev->mc_fw);
1716 release_firmware(rdev->smc_fw);
1717 rdev->smc_fw = NULL;
1722 /* watermark setup */
1723 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev,
1724 struct radeon_crtc *radeon_crtc,
1725 struct drm_display_mode *mode,
1726 struct drm_display_mode *other_mode)
1728 u32 tmp, buffer_alloc, i;
1729 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1732 * There are 3 line buffers, each one shared by 2 display controllers.
1733 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1734 * the display controllers. The paritioning is done via one of four
1735 * preset allocations specified in bits 21:20:
1737 * 2 - whole lb, other crtc must be disabled
1739 /* this can get tricky if we have two large displays on a paired group
1740 * of crtcs. Ideally for multiple large displays we'd assign them to
1741 * non-linked crtcs for maximum line buffer allocation.
1743 if (radeon_crtc->base.enabled && mode) {
1748 tmp = 2; /* whole */
1756 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset,
1757 DC_LB_MEMORY_CONFIG(tmp));
1759 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1760 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1761 for (i = 0; i < rdev->usec_timeout; i++) {
1762 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1763 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1768 if (radeon_crtc->base.enabled && mode) {
1778 /* controller not enabled, so no lb used */
1782 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev)
1784 u32 tmp = RREG32(MC_SHARED_CHMAP);
1786 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1809 struct dce6_wm_params {
1810 u32 dram_channels; /* number of dram channels */
1811 u32 yclk; /* bandwidth per dram data pin in kHz */
1812 u32 sclk; /* engine clock in kHz */
1813 u32 disp_clk; /* display clock in kHz */
1814 u32 src_width; /* viewport width */
1815 u32 active_time; /* active display time in ns */
1816 u32 blank_time; /* blank time in ns */
1817 bool interlaced; /* mode is interlaced */
1818 fixed20_12 vsc; /* vertical scale ratio */
1819 u32 num_heads; /* number of active crtcs */
1820 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1821 u32 lb_size; /* line buffer allocated to pipe */
1822 u32 vtaps; /* vertical scaler taps */
1825 static u32 dce6_dram_bandwidth(struct dce6_wm_params *wm)
1827 /* Calculate raw DRAM Bandwidth */
1828 fixed20_12 dram_efficiency; /* 0.7 */
1829 fixed20_12 yclk, dram_channels, bandwidth;
1832 a.full = dfixed_const(1000);
1833 yclk.full = dfixed_const(wm->yclk);
1834 yclk.full = dfixed_div(yclk, a);
1835 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1836 a.full = dfixed_const(10);
1837 dram_efficiency.full = dfixed_const(7);
1838 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1839 bandwidth.full = dfixed_mul(dram_channels, yclk);
1840 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1842 return dfixed_trunc(bandwidth);
1845 static u32 dce6_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1847 /* Calculate DRAM Bandwidth and the part allocated to display. */
1848 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1849 fixed20_12 yclk, dram_channels, bandwidth;
1852 a.full = dfixed_const(1000);
1853 yclk.full = dfixed_const(wm->yclk);
1854 yclk.full = dfixed_div(yclk, a);
1855 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1856 a.full = dfixed_const(10);
1857 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1858 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1859 bandwidth.full = dfixed_mul(dram_channels, yclk);
1860 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1862 return dfixed_trunc(bandwidth);
1865 static u32 dce6_data_return_bandwidth(struct dce6_wm_params *wm)
1867 /* Calculate the display Data return Bandwidth */
1868 fixed20_12 return_efficiency; /* 0.8 */
1869 fixed20_12 sclk, bandwidth;
1872 a.full = dfixed_const(1000);
1873 sclk.full = dfixed_const(wm->sclk);
1874 sclk.full = dfixed_div(sclk, a);
1875 a.full = dfixed_const(10);
1876 return_efficiency.full = dfixed_const(8);
1877 return_efficiency.full = dfixed_div(return_efficiency, a);
1878 a.full = dfixed_const(32);
1879 bandwidth.full = dfixed_mul(a, sclk);
1880 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1882 return dfixed_trunc(bandwidth);
1885 static u32 dce6_get_dmif_bytes_per_request(struct dce6_wm_params *wm)
1890 static u32 dce6_dmif_request_bandwidth(struct dce6_wm_params *wm)
1892 /* Calculate the DMIF Request Bandwidth */
1893 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1894 fixed20_12 disp_clk, sclk, bandwidth;
1895 fixed20_12 a, b1, b2;
1898 a.full = dfixed_const(1000);
1899 disp_clk.full = dfixed_const(wm->disp_clk);
1900 disp_clk.full = dfixed_div(disp_clk, a);
1901 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm) / 2);
1902 b1.full = dfixed_mul(a, disp_clk);
1904 a.full = dfixed_const(1000);
1905 sclk.full = dfixed_const(wm->sclk);
1906 sclk.full = dfixed_div(sclk, a);
1907 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm));
1908 b2.full = dfixed_mul(a, sclk);
1910 a.full = dfixed_const(10);
1911 disp_clk_request_efficiency.full = dfixed_const(8);
1912 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1914 min_bandwidth = min(dfixed_trunc(b1), dfixed_trunc(b2));
1916 a.full = dfixed_const(min_bandwidth);
1917 bandwidth.full = dfixed_mul(a, disp_clk_request_efficiency);
1919 return dfixed_trunc(bandwidth);
1922 static u32 dce6_available_bandwidth(struct dce6_wm_params *wm)
1924 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1925 u32 dram_bandwidth = dce6_dram_bandwidth(wm);
1926 u32 data_return_bandwidth = dce6_data_return_bandwidth(wm);
1927 u32 dmif_req_bandwidth = dce6_dmif_request_bandwidth(wm);
1929 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1932 static u32 dce6_average_bandwidth(struct dce6_wm_params *wm)
1934 /* Calculate the display mode Average Bandwidth
1935 * DisplayMode should contain the source and destination dimensions,
1939 fixed20_12 line_time;
1940 fixed20_12 src_width;
1941 fixed20_12 bandwidth;
1944 a.full = dfixed_const(1000);
1945 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1946 line_time.full = dfixed_div(line_time, a);
1947 bpp.full = dfixed_const(wm->bytes_per_pixel);
1948 src_width.full = dfixed_const(wm->src_width);
1949 bandwidth.full = dfixed_mul(src_width, bpp);
1950 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1951 bandwidth.full = dfixed_div(bandwidth, line_time);
1953 return dfixed_trunc(bandwidth);
1956 static u32 dce6_latency_watermark(struct dce6_wm_params *wm)
1958 /* First calcualte the latency in ns */
1959 u32 mc_latency = 2000; /* 2000 ns. */
1960 u32 available_bandwidth = dce6_available_bandwidth(wm);
1961 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
1962 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
1963 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
1964 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
1965 (wm->num_heads * cursor_line_pair_return_time);
1966 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
1967 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
1968 u32 tmp, dmif_size = 12288;
1971 if (wm->num_heads == 0)
1974 a.full = dfixed_const(2);
1975 b.full = dfixed_const(1);
1976 if ((wm->vsc.full > a.full) ||
1977 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
1979 ((wm->vsc.full >= a.full) && wm->interlaced))
1980 max_src_lines_per_dst_line = 4;
1982 max_src_lines_per_dst_line = 2;
1984 a.full = dfixed_const(available_bandwidth);
1985 b.full = dfixed_const(wm->num_heads);
1986 a.full = dfixed_div(a, b);
1988 b.full = dfixed_const(mc_latency + 512);
1989 c.full = dfixed_const(wm->disp_clk);
1990 b.full = dfixed_div(b, c);
1992 c.full = dfixed_const(dmif_size);
1993 b.full = dfixed_div(c, b);
1995 tmp = min(dfixed_trunc(a), dfixed_trunc(b));
1997 b.full = dfixed_const(1000);
1998 c.full = dfixed_const(wm->disp_clk);
1999 b.full = dfixed_div(c, b);
2000 c.full = dfixed_const(wm->bytes_per_pixel);
2001 b.full = dfixed_mul(b, c);
2003 lb_fill_bw = min(tmp, dfixed_trunc(b));
2005 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2006 b.full = dfixed_const(1000);
2007 c.full = dfixed_const(lb_fill_bw);
2008 b.full = dfixed_div(c, b);
2009 a.full = dfixed_div(a, b);
2010 line_fill_time = dfixed_trunc(a);
2012 if (line_fill_time < wm->active_time)
2015 return latency + (line_fill_time - wm->active_time);
2019 static bool dce6_average_bandwidth_vs_dram_bandwidth_for_display(struct dce6_wm_params *wm)
2021 if (dce6_average_bandwidth(wm) <=
2022 (dce6_dram_bandwidth_for_display(wm) / wm->num_heads))
2028 static bool dce6_average_bandwidth_vs_available_bandwidth(struct dce6_wm_params *wm)
2030 if (dce6_average_bandwidth(wm) <=
2031 (dce6_available_bandwidth(wm) / wm->num_heads))
2037 static bool dce6_check_latency_hiding(struct dce6_wm_params *wm)
2039 u32 lb_partitions = wm->lb_size / wm->src_width;
2040 u32 line_time = wm->active_time + wm->blank_time;
2041 u32 latency_tolerant_lines;
2045 a.full = dfixed_const(1);
2046 if (wm->vsc.full > a.full)
2047 latency_tolerant_lines = 1;
2049 if (lb_partitions <= (wm->vtaps + 1))
2050 latency_tolerant_lines = 1;
2052 latency_tolerant_lines = 2;
2055 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2057 if (dce6_latency_watermark(wm) <= latency_hiding)
2063 static void dce6_program_watermarks(struct radeon_device *rdev,
2064 struct radeon_crtc *radeon_crtc,
2065 u32 lb_size, u32 num_heads)
2067 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2068 struct dce6_wm_params wm_low, wm_high;
2072 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2073 u32 priority_a_mark = 0, priority_b_mark = 0;
2074 u32 priority_a_cnt = PRIORITY_OFF;
2075 u32 priority_b_cnt = PRIORITY_OFF;
2076 u32 tmp, arb_control3;
2079 if (radeon_crtc->base.enabled && num_heads && mode) {
2080 pixel_period = 1000000 / (u32)mode->clock;
2081 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2085 if (rdev->family == CHIP_ARUBA)
2086 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2088 dram_channels = si_get_number_of_dram_channels(rdev);
2090 /* watermark for high clocks */
2091 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2093 radeon_dpm_get_mclk(rdev, false) * 10;
2095 radeon_dpm_get_sclk(rdev, false) * 10;
2097 wm_high.yclk = rdev->pm.current_mclk * 10;
2098 wm_high.sclk = rdev->pm.current_sclk * 10;
2101 wm_high.disp_clk = mode->clock;
2102 wm_high.src_width = mode->crtc_hdisplay;
2103 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2104 wm_high.blank_time = line_time - wm_high.active_time;
2105 wm_high.interlaced = false;
2106 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2107 wm_high.interlaced = true;
2108 wm_high.vsc = radeon_crtc->vsc;
2110 if (radeon_crtc->rmx_type != RMX_OFF)
2112 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2113 wm_high.lb_size = lb_size;
2114 wm_high.dram_channels = dram_channels;
2115 wm_high.num_heads = num_heads;
2117 /* watermark for low clocks */
2118 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2120 radeon_dpm_get_mclk(rdev, true) * 10;
2122 radeon_dpm_get_sclk(rdev, true) * 10;
2124 wm_low.yclk = rdev->pm.current_mclk * 10;
2125 wm_low.sclk = rdev->pm.current_sclk * 10;
2128 wm_low.disp_clk = mode->clock;
2129 wm_low.src_width = mode->crtc_hdisplay;
2130 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2131 wm_low.blank_time = line_time - wm_low.active_time;
2132 wm_low.interlaced = false;
2133 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2134 wm_low.interlaced = true;
2135 wm_low.vsc = radeon_crtc->vsc;
2137 if (radeon_crtc->rmx_type != RMX_OFF)
2139 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2140 wm_low.lb_size = lb_size;
2141 wm_low.dram_channels = dram_channels;
2142 wm_low.num_heads = num_heads;
2144 /* set for high clocks */
2145 latency_watermark_a = min(dce6_latency_watermark(&wm_high), (u32)65535);
2146 /* set for low clocks */
2147 latency_watermark_b = min(dce6_latency_watermark(&wm_low), (u32)65535);
2149 /* possibly force display priority to high */
2150 /* should really do this at mode validation time... */
2151 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2152 !dce6_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2153 !dce6_check_latency_hiding(&wm_high) ||
2154 (rdev->disp_priority == 2)) {
2155 DRM_DEBUG_KMS("force priority to high\n");
2156 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2157 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2159 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2160 !dce6_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2161 !dce6_check_latency_hiding(&wm_low) ||
2162 (rdev->disp_priority == 2)) {
2163 DRM_DEBUG_KMS("force priority to high\n");
2164 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2165 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2168 a.full = dfixed_const(1000);
2169 b.full = dfixed_const(mode->clock);
2170 b.full = dfixed_div(b, a);
2171 c.full = dfixed_const(latency_watermark_a);
2172 c.full = dfixed_mul(c, b);
2173 c.full = dfixed_mul(c, radeon_crtc->hsc);
2174 c.full = dfixed_div(c, a);
2175 a.full = dfixed_const(16);
2176 c.full = dfixed_div(c, a);
2177 priority_a_mark = dfixed_trunc(c);
2178 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2180 a.full = dfixed_const(1000);
2181 b.full = dfixed_const(mode->clock);
2182 b.full = dfixed_div(b, a);
2183 c.full = dfixed_const(latency_watermark_b);
2184 c.full = dfixed_mul(c, b);
2185 c.full = dfixed_mul(c, radeon_crtc->hsc);
2186 c.full = dfixed_div(c, a);
2187 a.full = dfixed_const(16);
2188 c.full = dfixed_div(c, a);
2189 priority_b_mark = dfixed_trunc(c);
2190 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2194 arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2196 tmp &= ~LATENCY_WATERMARK_MASK(3);
2197 tmp |= LATENCY_WATERMARK_MASK(1);
2198 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2199 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2200 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2201 LATENCY_HIGH_WATERMARK(line_time)));
2203 tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2204 tmp &= ~LATENCY_WATERMARK_MASK(3);
2205 tmp |= LATENCY_WATERMARK_MASK(2);
2206 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2207 WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2208 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2209 LATENCY_HIGH_WATERMARK(line_time)));
2210 /* restore original selection */
2211 WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, arb_control3);
2213 /* write the priority marks */
2214 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2215 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2217 /* save values for DPM */
2218 radeon_crtc->line_time = line_time;
2219 radeon_crtc->wm_high = latency_watermark_a;
2220 radeon_crtc->wm_low = latency_watermark_b;
/*
 * dce6_bandwidth_update - reprogram display watermarks for every CRTC.
 *
 * @rdev: radeon device handle
 *
 * Counts enabled display heads, then walks the CRTCs in pairs (each pair
 * shares a line buffer on DCE6), splitting the line buffer between the two
 * modes of a pair and programming watermarks for each CRTC accordingly.
 * NOTE(review): extraction has dropped some lines (braces, num_heads
 * increment) between the visible statements.
 */
2223 void dce6_bandwidth_update(struct radeon_device *rdev)
2225 struct drm_display_mode *mode0 = NULL;
2226 struct drm_display_mode *mode1 = NULL;
2227 u32 num_heads = 0, lb_size;
/* refresh global display priority policy before computing watermarks */
2230 radeon_update_display_priority(rdev);
/* first pass: count the enabled heads (increment not visible in extract) */
2232 for (i = 0; i < rdev->num_crtc; i++) {
2233 if (rdev->mode_info.crtcs[i]->base.enabled)
/* second pass: CRTCs i and i+1 share a line buffer, so adjust the split
 * for both modes and program each CRTC's watermarks with its share */
2236 for (i = 0; i < rdev->num_crtc; i += 2) {
2237 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2238 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2239 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2240 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2241 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2242 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
/*
 * si_tiling_mode_table_init - program the GB_TILE_MODE register table.
 *
 * @rdev: radeon device handle
 *
 * Fills the 32-entry hardware tiling-mode table with per-surface-type
 * encodings (array mode, micro tile mode, pipe config, tile split, bank
 * geometry, macro tile aspect).  Two table variants exist: one for the
 * 8-pipe parts (Tahiti/Pitcairn, PIPE_CONFIG P8_32x32_8x16) and one for
 * the 4-pipe parts (Verde/Oland/Hainan, PIPE_CONFIG P4_8x16).  Each
 * computed entry is cached in rdev->config.si.tile_mode_array[] for the
 * CS checker and written to GB_TILE_MODE0 + 4*index.
 * NOTE(review): extraction has dropped the break statements and several
 * braces between the visible case bodies.
 */
2249 static void si_tiling_mode_table_init(struct radeon_device *rdev)
2251 const u32 num_tile_mode_states = 32;
2252 u32 reg_offset, gb_tile_moden, split_equal_to_row_size;
/* map the DRAM row size to the matching TILE_SPLIT encoding; used by the
 * "split equal to row size" entries below */
2254 switch (rdev->config.si.mem_row_size_in_kb) {
2256 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_1KB;
2260 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_2KB;
2263 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_4KB;
/* 8-pipe asics: Tahiti and Pitcairn */
2267 if ((rdev->family == CHIP_TAHITI) ||
2268 (rdev->family == CHIP_PITCAIRN)) {
2269 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2270 switch (reg_offset) {
2271 case 0: /* non-AA compressed depth or any compressed stencil */
2272 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2273 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2274 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2275 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2276 NUM_BANKS(ADDR_SURF_16_BANK) |
2277 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2278 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2279 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2281 case 1: /* 2xAA/4xAA compressed depth only */
2282 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2283 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2284 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2285 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2286 NUM_BANKS(ADDR_SURF_16_BANK) |
2287 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2288 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2289 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2291 case 2: /* 8xAA compressed depth only */
2292 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2293 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2294 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2295 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2296 NUM_BANKS(ADDR_SURF_16_BANK) |
2297 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2298 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2299 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2301 case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2302 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2303 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2304 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2305 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2306 NUM_BANKS(ADDR_SURF_16_BANK) |
2307 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2308 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2309 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2311 case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2312 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2313 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2314 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2315 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2316 NUM_BANKS(ADDR_SURF_16_BANK) |
2317 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2318 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2319 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2321 case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2322 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2323 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2324 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2325 TILE_SPLIT(split_equal_to_row_size) |
2326 NUM_BANKS(ADDR_SURF_16_BANK) |
2327 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2328 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2329 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2331 case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2332 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2333 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2334 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2335 TILE_SPLIT(split_equal_to_row_size) |
2336 NUM_BANKS(ADDR_SURF_16_BANK) |
2337 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2338 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2339 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2341 case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2342 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2343 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2344 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2345 TILE_SPLIT(split_equal_to_row_size) |
2346 NUM_BANKS(ADDR_SURF_16_BANK) |
2347 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2348 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2349 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2351 case 8: /* 1D and 1D Array Surfaces */
2352 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2353 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2354 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2355 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2356 NUM_BANKS(ADDR_SURF_16_BANK) |
2357 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2358 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2359 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2361 case 9: /* Displayable maps. */
2362 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2363 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2364 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2365 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2366 NUM_BANKS(ADDR_SURF_16_BANK) |
2367 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2368 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2369 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2371 case 10: /* Display 8bpp. */
2372 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2373 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2374 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2375 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2376 NUM_BANKS(ADDR_SURF_16_BANK) |
2377 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2378 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2379 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2381 case 11: /* Display 16bpp. */
2382 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2383 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2384 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2385 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2386 NUM_BANKS(ADDR_SURF_16_BANK) |
2387 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2388 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2389 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2391 case 12: /* Display 32bpp. */
2392 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2393 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2394 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2395 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2396 NUM_BANKS(ADDR_SURF_16_BANK) |
2397 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2398 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2399 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2401 case 13: /* Thin. */
2402 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2403 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2404 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2405 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2406 NUM_BANKS(ADDR_SURF_16_BANK) |
2407 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2408 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2409 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2411 case 14: /* Thin 8 bpp. */
2412 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2413 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2414 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2415 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2416 NUM_BANKS(ADDR_SURF_16_BANK) |
2417 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2418 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2419 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2421 case 15: /* Thin 16 bpp. */
2422 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2423 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2424 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2425 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2426 NUM_BANKS(ADDR_SURF_16_BANK) |
2427 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2428 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2429 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2431 case 16: /* Thin 32 bpp. */
2432 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2433 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2434 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2435 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2436 NUM_BANKS(ADDR_SURF_16_BANK) |
2437 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2438 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2439 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2441 case 17: /* Thin 64 bpp. */
2442 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2443 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2444 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2445 TILE_SPLIT(split_equal_to_row_size) |
2446 NUM_BANKS(ADDR_SURF_16_BANK) |
2447 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2448 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2449 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2451 case 21: /* 8 bpp PRT. */
2452 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2453 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2454 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2455 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2456 NUM_BANKS(ADDR_SURF_16_BANK) |
2457 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2458 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2459 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2461 case 22: /* 16 bpp PRT */
2462 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2463 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2464 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2465 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2466 NUM_BANKS(ADDR_SURF_16_BANK) |
2467 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2468 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2469 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2471 case 23: /* 32 bpp PRT */
2472 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2473 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2474 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2475 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2476 NUM_BANKS(ADDR_SURF_16_BANK) |
2477 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2478 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2479 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2481 case 24: /* 64 bpp PRT */
2482 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2483 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2484 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2485 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2486 NUM_BANKS(ADDR_SURF_16_BANK) |
2487 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2488 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2489 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2491 case 25: /* 128 bpp PRT */
2492 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2493 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2494 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2495 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2496 NUM_BANKS(ADDR_SURF_8_BANK) |
2497 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2498 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2499 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
/* cache the entry for the CS checker, then program the hw register */
2505 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2506 WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
/* 4-pipe asics: Verde, Oland and Hainan use the P4_8x16 pipe config */
2508 } else if ((rdev->family == CHIP_VERDE) ||
2509 (rdev->family == CHIP_OLAND) ||
2510 (rdev->family == CHIP_HAINAN)) {
2511 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2512 switch (reg_offset) {
2513 case 0: /* non-AA compressed depth or any compressed stencil */
2514 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2515 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2516 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2517 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2518 NUM_BANKS(ADDR_SURF_16_BANK) |
2519 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2520 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2521 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2523 case 1: /* 2xAA/4xAA compressed depth only */
2524 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2525 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2526 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2527 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2528 NUM_BANKS(ADDR_SURF_16_BANK) |
2529 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2530 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2531 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2533 case 2: /* 8xAA compressed depth only */
2534 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2535 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2536 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2537 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2538 NUM_BANKS(ADDR_SURF_16_BANK) |
2539 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2540 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2541 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2543 case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2544 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2545 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2546 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2547 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2548 NUM_BANKS(ADDR_SURF_16_BANK) |
2549 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2550 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2551 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2553 case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2554 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2555 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2556 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2557 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2558 NUM_BANKS(ADDR_SURF_16_BANK) |
2559 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2560 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2561 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2563 case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2564 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2565 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2566 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2567 TILE_SPLIT(split_equal_to_row_size) |
2568 NUM_BANKS(ADDR_SURF_16_BANK) |
2569 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2570 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2571 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2573 case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2574 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2575 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2576 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2577 TILE_SPLIT(split_equal_to_row_size) |
2578 NUM_BANKS(ADDR_SURF_16_BANK) |
2579 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2580 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2581 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2583 case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2584 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2585 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2586 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2587 TILE_SPLIT(split_equal_to_row_size) |
2588 NUM_BANKS(ADDR_SURF_16_BANK) |
2589 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2590 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2591 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2593 case 8: /* 1D and 1D Array Surfaces */
2594 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2595 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2596 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2597 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2598 NUM_BANKS(ADDR_SURF_16_BANK) |
2599 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2600 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2601 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2603 case 9: /* Displayable maps. */
2604 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2605 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2606 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2607 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2608 NUM_BANKS(ADDR_SURF_16_BANK) |
2609 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2610 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2611 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2613 case 10: /* Display 8bpp. */
2614 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2615 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2616 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2617 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2618 NUM_BANKS(ADDR_SURF_16_BANK) |
2619 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2620 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2621 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2623 case 11: /* Display 16bpp. */
2624 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2625 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2626 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2627 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2628 NUM_BANKS(ADDR_SURF_16_BANK) |
2629 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2630 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2631 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2633 case 12: /* Display 32bpp. */
2634 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2635 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2636 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2637 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2638 NUM_BANKS(ADDR_SURF_16_BANK) |
2639 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2640 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2641 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2643 case 13: /* Thin. */
2644 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2645 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2646 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2647 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2648 NUM_BANKS(ADDR_SURF_16_BANK) |
2649 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2650 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2651 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2653 case 14: /* Thin 8 bpp. */
2654 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2655 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2656 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2657 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2658 NUM_BANKS(ADDR_SURF_16_BANK) |
2659 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2660 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2661 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2663 case 15: /* Thin 16 bpp. */
2664 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2665 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2666 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2667 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2668 NUM_BANKS(ADDR_SURF_16_BANK) |
2669 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2670 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2671 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2673 case 16: /* Thin 32 bpp. */
2674 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2675 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2676 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2677 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2678 NUM_BANKS(ADDR_SURF_16_BANK) |
2679 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2680 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2681 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2683 case 17: /* Thin 64 bpp. */
2684 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2685 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2686 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2687 TILE_SPLIT(split_equal_to_row_size) |
2688 NUM_BANKS(ADDR_SURF_16_BANK) |
2689 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2690 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2691 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
/* NOTE(review): the PRT entries below use the 8-pipe P8_32x32_8x16 pipe
 * config even on these 4-pipe parts, matching the visible source */
2693 case 21: /* 8 bpp PRT. */
2694 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2695 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2696 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2697 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2698 NUM_BANKS(ADDR_SURF_16_BANK) |
2699 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2700 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2701 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2703 case 22: /* 16 bpp PRT */
2704 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2705 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2706 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2707 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2708 NUM_BANKS(ADDR_SURF_16_BANK) |
2709 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2710 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2711 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2713 case 23: /* 32 bpp PRT */
2714 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2715 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2716 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2717 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2718 NUM_BANKS(ADDR_SURF_16_BANK) |
2719 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2720 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2721 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2723 case 24: /* 64 bpp PRT */
2724 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2725 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2726 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2727 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2728 NUM_BANKS(ADDR_SURF_16_BANK) |
2729 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2730 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2731 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2733 case 25: /* 128 bpp PRT */
2734 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2735 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2736 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2737 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2738 NUM_BANKS(ADDR_SURF_8_BANK) |
2739 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2740 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2741 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
/* cache the entry for the CS checker, then program the hw register */
2747 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2748 WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
/* unknown family: nothing programmed */
2751 DRM_ERROR("unknown asic: 0x%x\n", rdev->family);
/*
 * si_select_se_sh - steer indexed register access to a SE/SH instance.
 *
 * @rdev:   radeon device handle
 * @se_num: shader engine index, or 0xffffffff to broadcast to all SEs
 * @sh_num: shader array index, or 0xffffffff to broadcast to all SHs
 *
 * Writes GRBM_GFX_INDEX so that subsequent per-instance register reads and
 * writes target the requested shader engine / shader array, or broadcast
 * when the corresponding argument is 0xffffffff.  Instance writes are
 * always broadcast.
 */
2754 static void si_select_se_sh(struct radeon_device *rdev,
2755 u32 se_num, u32 sh_num)
2757 u32 data = INSTANCE_BROADCAST_WRITES;
2759 if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
2760 data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
2761 else if (se_num == 0xffffffff)
2762 data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
2763 else if (sh_num == 0xffffffff)
2764 data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
/* neither argument is a broadcast: address one specific SE/SH pair */
2766 data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
2767 WREG32(GRBM_GFX_INDEX, data);
/*
 * si_create_bitmask - build a mask with the low @bit_width bits set.
 *
 * @bit_width: number of low-order bits to set in the returned mask
 *
 * NOTE(review): the loop body and return statement are not visible in this
 * extract; presumably each iteration accumulates one set bit — confirm
 * against the full source.
 */
2770 static u32 si_create_bitmask(u32 bit_width)
2774 for (i = 0; i < bit_width; i++) {
/*
 * si_get_cu_enabled - query the enabled compute units of the selected SH.
 *
 * @rdev:      radeon device handle
 * @cu_per_sh: maximum number of CUs per shader array (mask width)
 *
 * Combines the hard-fused (CC_GC_SHADER_ARRAY_CONFIG) and user-disabled
 * (GC_USER_SHADER_ARRAY_CONFIG) inactive-CU fields for the SE/SH currently
 * selected via si_select_se_sh(), then inverts the result against a
 * cu_per_sh-wide mask so set bits mean "CU enabled".
 */
2781 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh)
2785 data = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
2787 data &= INACTIVE_CUS_MASK;
/* merge user-disabled CUs on top of the factory-fused ones */
2790 data |= RREG32(GC_USER_SHADER_ARRAY_CONFIG);
2792 data >>= INACTIVE_CUS_SHIFT;
2794 mask = si_create_bitmask(cu_per_sh);
/* invert: inactive bits -> active bits, clipped to the CU count */
2796 return ~data & mask;
/*
 * si_setup_spi - program SPI static thread management per shader array.
 *
 * @rdev:      radeon device handle
 * @se_num:    number of shader engines to iterate
 * @sh_per_se: number of shader arrays per shader engine
 * @cu_per_sh: maximum number of compute units per shader array
 *
 * For every SE/SH pair, reads the active-CU mask and updates
 * SPI_STATIC_THREAD_MGMT_3 accordingly (the per-CU bit manipulation lines
 * are not visible in this extract).  Restores broadcast selection on exit.
 */
2799 static void si_setup_spi(struct radeon_device *rdev,
2800 u32 se_num, u32 sh_per_se,
2804 u32 data, mask, active_cu;
2806 for (i = 0; i < se_num; i++) {
2807 for (j = 0; j < sh_per_se; j++) {
/* steer register access to this specific SE/SH before reading CU state */
2808 si_select_se_sh(rdev, i, j);
2809 data = RREG32(SPI_STATIC_THREAD_MGMT_3);
2810 active_cu = si_get_cu_enabled(rdev, cu_per_sh);
/* scan up to 16 CU bits; write-back occurs for an active CU
 * (mask computation lines are missing from this extract) */
2813 for (k = 0; k < 16; k++) {
2815 if (active_cu & mask) {
2817 WREG32(SPI_STATIC_THREAD_MGMT_3, data);
/* return to broadcast so later register writes hit all instances */
2823 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
/*
 * si_get_rb_disabled - query disabled render backends of the selected SH.
 *
 * @rdev:              radeon device handle
 * @max_rb_num_per_se: maximum render backends per shader engine
 * @sh_per_se:         shader arrays per shader engine (divides the RB count)
 *
 * Combines the hard-fused (CC_RB_BACKEND_DISABLE) and user-disabled
 * (GC_USER_RB_BACKEND_DISABLE) backend-disable fields for the SE/SH
 * currently selected via si_select_se_sh().  The final mask/return lines
 * are not visible in this extract.
 */
2826 static u32 si_get_rb_disabled(struct radeon_device *rdev,
2827 u32 max_rb_num_per_se,
2832 data = RREG32(CC_RB_BACKEND_DISABLE);
2834 data &= BACKEND_DISABLE_MASK;
/* merge user-disabled backends on top of the factory-fused ones */
2837 data |= RREG32(GC_USER_RB_BACKEND_DISABLE);
2839 data >>= BACKEND_DISABLE_SHIFT;
/* mask width is the per-SH share of the SE's render backends */
2841 mask = si_create_bitmask(max_rb_num_per_se / sh_per_se);
/*
 * si_setup_rb - derive the enabled render-backend map and program
 * PA_SC_RASTER_CONFIG.
 *
 * @rdev:              radeon device handle
 * @se_num:            number of shader engines
 * @sh_per_se:         shader arrays per shader engine
 * @max_rb_num_per_se: maximum render backends per shader engine
 *
 * First pass collects the per-SH disabled-RB bits into one bitmap; the
 * complement becomes the enabled-RB mask, cached in
 * rdev->config.si.backend_enable_mask for userspace queries.  Second pass
 * programs a raster config RB mapping per SE based on which of each pair
 * of RBs is enabled.  Several loop-interior lines (mask shifting,
 * enabled_rbs consumption, remaining switch cases) are missing from this
 * extract.
 */
2846 static void si_setup_rb(struct radeon_device *rdev,
2847 u32 se_num, u32 sh_per_se,
2848 u32 max_rb_num_per_se)
2852 u32 disabled_rbs = 0;
2853 u32 enabled_rbs = 0;
/* gather disabled-RB bits from every SE/SH into one packed bitmap */
2855 for (i = 0; i < se_num; i++) {
2856 for (j = 0; j < sh_per_se; j++) {
2857 si_select_se_sh(rdev, i, j);
2858 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
2859 disabled_rbs |= data << ((i * sh_per_se + j) * TAHITI_RB_BITMAP_WIDTH_PER_SH);
2862 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
/* invert into an enabled-RB bitmap, one bit per backend */
2865 for (i = 0; i < max_rb_num_per_se * se_num; i++) {
2866 if (!(disabled_rbs & mask))
2867 enabled_rbs |= mask;
/* exported so userspace (CS checker/ioctls) can see the active backends */
2871 rdev->config.si.backend_enable_mask = enabled_rbs;
/* program the per-SE raster configuration from the enabled-RB pairs */
2873 for (i = 0; i < se_num; i++) {
2874 si_select_se_sh(rdev, i, 0xffffffff);
2876 for (j = 0; j < sh_per_se; j++) {
2877 switch (enabled_rbs & 3) {
2879 data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
2882 data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
2886 data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
2891 WREG32(PA_SC_RASTER_CONFIG, data);
/* restore broadcast selection for subsequent register programming */
2893 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2896 static void si_gpu_init(struct radeon_device *rdev)
2898 u32 gb_addr_config = 0;
2899 u32 mc_shared_chmap, mc_arb_ramcfg;
2901 u32 hdp_host_path_cntl;
2905 switch (rdev->family) {
2907 rdev->config.si.max_shader_engines = 2;
2908 rdev->config.si.max_tile_pipes = 12;
2909 rdev->config.si.max_cu_per_sh = 8;
2910 rdev->config.si.max_sh_per_se = 2;
2911 rdev->config.si.max_backends_per_se = 4;
2912 rdev->config.si.max_texture_channel_caches = 12;
2913 rdev->config.si.max_gprs = 256;
2914 rdev->config.si.max_gs_threads = 32;
2915 rdev->config.si.max_hw_contexts = 8;
2917 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2918 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2919 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2920 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2921 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2924 rdev->config.si.max_shader_engines = 2;
2925 rdev->config.si.max_tile_pipes = 8;
2926 rdev->config.si.max_cu_per_sh = 5;
2927 rdev->config.si.max_sh_per_se = 2;
2928 rdev->config.si.max_backends_per_se = 4;
2929 rdev->config.si.max_texture_channel_caches = 8;
2930 rdev->config.si.max_gprs = 256;
2931 rdev->config.si.max_gs_threads = 32;
2932 rdev->config.si.max_hw_contexts = 8;
2934 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2935 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2936 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2937 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2938 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2942 rdev->config.si.max_shader_engines = 1;
2943 rdev->config.si.max_tile_pipes = 4;
2944 rdev->config.si.max_cu_per_sh = 5;
2945 rdev->config.si.max_sh_per_se = 2;
2946 rdev->config.si.max_backends_per_se = 4;
2947 rdev->config.si.max_texture_channel_caches = 4;
2948 rdev->config.si.max_gprs = 256;
2949 rdev->config.si.max_gs_threads = 32;
2950 rdev->config.si.max_hw_contexts = 8;
2952 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2953 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2954 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2955 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2956 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2959 rdev->config.si.max_shader_engines = 1;
2960 rdev->config.si.max_tile_pipes = 4;
2961 rdev->config.si.max_cu_per_sh = 6;
2962 rdev->config.si.max_sh_per_se = 1;
2963 rdev->config.si.max_backends_per_se = 2;
2964 rdev->config.si.max_texture_channel_caches = 4;
2965 rdev->config.si.max_gprs = 256;
2966 rdev->config.si.max_gs_threads = 16;
2967 rdev->config.si.max_hw_contexts = 8;
2969 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2970 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2971 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2972 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2973 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2976 rdev->config.si.max_shader_engines = 1;
2977 rdev->config.si.max_tile_pipes = 4;
2978 rdev->config.si.max_cu_per_sh = 5;
2979 rdev->config.si.max_sh_per_se = 1;
2980 rdev->config.si.max_backends_per_se = 1;
2981 rdev->config.si.max_texture_channel_caches = 2;
2982 rdev->config.si.max_gprs = 256;
2983 rdev->config.si.max_gs_threads = 16;
2984 rdev->config.si.max_hw_contexts = 8;
2986 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2987 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2988 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2989 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2990 gb_addr_config = HAINAN_GB_ADDR_CONFIG_GOLDEN;
/* NOTE(review): this span is the tail of a larger GPU init routine (the
 * register names match si_gpu_init in radeon/si.c); its opening lines fall
 * outside this listing, and gaps in the embedded line numbers show that
 * source lines (case labels, breaks, closing braces) were elided.
 * Code left byte-identical; comments only. */
2994 /* Initialize HDP */
2995 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2996 WREG32((0x2c14 + j), 0x00000000);
2997 WREG32((0x2c18 + j), 0x00000000);
2998 WREG32((0x2c1c + j), 0x00000000);
2999 WREG32((0x2c20 + j), 0x00000000);
3000 WREG32((0x2c24 + j), 0x00000000);
3003 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3005 evergreen_fix_pci_max_read_req_size(rdev);
3007 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
/* Read memory-controller config used to derive tiling/row-size fields below */
3009 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3010 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3012 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes;
3013 rdev->config.si.mem_max_burst_length_bytes = 256;
3014 tmp = (mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT;
3015 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
/* clamp row size to 4KB */
3016 if (rdev->config.si.mem_row_size_in_kb > 4)
3017 rdev->config.si.mem_row_size_in_kb = 4;
3018 /* XXX use MC settings? */
3019 rdev->config.si.shader_engine_tile_size = 32;
3020 rdev->config.si.num_gpus = 1;
3021 rdev->config.si.multi_gpu_tile_size = 64;
3023 /* fix up row size */
3024 gb_addr_config &= ~ROW_SIZE_MASK;
/* NOTE(review): the case labels for this switch were elided in the listing;
 * presumably cases 1/2/4 KB map to ROW_SIZE(0)/(1)/(2) — confirm upstream */
3025 switch (rdev->config.si.mem_row_size_in_kb) {
3028 gb_addr_config |= ROW_SIZE(0);
3031 gb_addr_config |= ROW_SIZE(1);
3034 gb_addr_config |= ROW_SIZE(2);
3038 /* setup tiling info dword. gb_addr_config is not adequate since it does
3039 * not have bank info, so create a custom tiling dword.
3040 * bits 3:0 num_pipes
3041 * bits 7:4 num_banks
3042 * bits 11:8 group_size
3043 * bits 15:12 row_size
3045 rdev->config.si.tile_config = 0;
3046 switch (rdev->config.si.num_tile_pipes) {
3048 rdev->config.si.tile_config |= (0 << 0);
3051 rdev->config.si.tile_config |= (1 << 0);
3054 rdev->config.si.tile_config |= (2 << 0);
3058 /* XXX what about 12? */
3059 rdev->config.si.tile_config |= (3 << 0);
3062 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3063 case 0: /* four banks */
3064 rdev->config.si.tile_config |= 0 << 4;
3066 case 1: /* eight banks */
3067 rdev->config.si.tile_config |= 1 << 4;
3069 case 2: /* sixteen banks */
3071 rdev->config.si.tile_config |= 2 << 4;
/* pack group size and row size into bits 11:8 and 15:12 of tile_config */
3074 rdev->config.si.tile_config |=
3075 ((gb_addr_config & PIPE_INTERLEAVE_SIZE_MASK) >> PIPE_INTERLEAVE_SIZE_SHIFT) << 8;
3076 rdev->config.si.tile_config |=
3077 ((gb_addr_config & ROW_SIZE_MASK) >> ROW_SIZE_SHIFT) << 12;
/* program the derived address config into all clients that need it */
3079 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3080 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3081 WREG32(DMIF_ADDR_CALC, gb_addr_config);
3082 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3083 WREG32(DMA_TILING_CONFIG + DMA0_REGISTER_OFFSET, gb_addr_config);
3084 WREG32(DMA_TILING_CONFIG + DMA1_REGISTER_OFFSET, gb_addr_config);
3085 if (rdev->has_uvd) {
3086 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3087 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3088 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3091 si_tiling_mode_table_init(rdev);
3093 si_setup_rb(rdev, rdev->config.si.max_shader_engines,
3094 rdev->config.si.max_sh_per_se,
3095 rdev->config.si.max_backends_per_se);
3097 si_setup_spi(rdev, rdev->config.si.max_shader_engines,
3098 rdev->config.si.max_sh_per_se,
3099 rdev->config.si.max_cu_per_sh);
3102 /* set HW defaults for 3D engine */
3103 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3104 ROQ_IB2_START(0x2b)));
3105 WREG32(CP_MEQ_THRESHOLDS, MEQ1_START(0x30) | MEQ2_START(0x60));
/* read-modify-write with no visible modification — presumably a required
 * posting/latch of SX_DEBUG_1; confirm against upstream */
3107 sx_debug_1 = RREG32(SX_DEBUG_1);
3108 WREG32(SX_DEBUG_1, sx_debug_1);
3110 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3112 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) |
3113 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) |
3114 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) |
3115 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size)));
3117 WREG32(VGT_NUM_INSTANCES, 1);
3119 WREG32(CP_PERFMON_CNTL, 0);
3121 WREG32(SQ_CONFIG, 0);
3123 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3124 FORCE_EOV_MAX_REZ_CNT(255)));
3126 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC) |
3127 AUTO_INVLD_EN(ES_AND_GS_AUTO));
3129 WREG32(VGT_GS_VERTEX_REUSE, 16);
3130 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
/* clear all CB perf-counter selects */
3132 WREG32(CB_PERFCOUNTER0_SELECT0, 0);
3133 WREG32(CB_PERFCOUNTER0_SELECT1, 0);
3134 WREG32(CB_PERFCOUNTER1_SELECT0, 0);
3135 WREG32(CB_PERFCOUNTER1_SELECT1, 0);
3136 WREG32(CB_PERFCOUNTER2_SELECT0, 0);
3137 WREG32(CB_PERFCOUNTER2_SELECT1, 0);
3138 WREG32(CB_PERFCOUNTER3_SELECT0, 0);
3139 WREG32(CB_PERFCOUNTER3_SELECT1, 0);
3141 tmp = RREG32(HDP_MISC_CNTL);
3142 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3143 WREG32(HDP_MISC_CNTL, tmp);
3145 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3146 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3148 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3154 * GPU scratch registers helpers function.
/* Mark the 7 CP scratch registers (SCRATCH_REG0..) as free and record each
 * register's MMIO offset (base + 4 bytes per register). */
3156 static void si_scratch_init(struct radeon_device *rdev)
3160 rdev->scratch.num_reg = 7;
3161 rdev->scratch.reg_base = SCRATCH_REG0;
3162 for (i = 0; i < rdev->scratch.num_reg; i++) {
3163 rdev->scratch.free[i] = true;
3164 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
/* Emit a fence on the given ring: first flush the CP read caches over the
 * GART, then emit an EVENT_WRITE_EOP that writes the fence sequence number
 * to the fence GPU address and raises an interrupt. */
3168 void si_fence_ring_emit(struct radeon_device *rdev,
3169 struct radeon_fence *fence)
3171 struct radeon_ring *ring = &rdev->ring[fence->ring];
3172 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3174 /* flush read cache over gart */
3175 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3176 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3177 radeon_ring_write(ring, 0);
3178 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3179 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3180 PACKET3_TC_ACTION_ENA |
3181 PACKET3_SH_KCACHE_ACTION_ENA |
3182 PACKET3_SH_ICACHE_ACTION_ENA);
/* full-range sync: size = all, base = 0 */
3183 radeon_ring_write(ring, 0xFFFFFFFF);
3184 radeon_ring_write(ring, 0);
3185 radeon_ring_write(ring, 10); /* poll interval */
3186 /* EVENT_WRITE_EOP - flush caches, send int */
3187 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
3188 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
3189 radeon_ring_write(ring, addr & 0xffffffff);
/* DATA_SEL(1) = write 32-bit fence seq, INT_SEL(2) = interrupt on write */
3190 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
3191 radeon_ring_write(ring, fence->seq);
3192 radeon_ring_write(ring, 0);
/* Schedule an indirect buffer (IB) for execution on its ring.  Const IBs get
 * a SWITCH_BUFFER preamble and use the CONST indirect-buffer packet; normal
 * IBs update next_rptr (via rptr_save_reg or writeback) and are followed by a
 * GART read-cache flush for the IB's VM id. */
3198 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3200 struct radeon_ring *ring = &rdev->ring[ib->ring];
3203 if (ib->is_const_ib) {
3204 /* set switch buffer packet before const IB */
3205 radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
3206 radeon_ring_write(ring, 0);
3208 header = PACKET3(PACKET3_INDIRECT_BUFFER_CONST, 2);
/* NOTE(review): the else-branch braces around the non-const path were elided
 * in this listing (embedded line numbers jump 3208 -> 3211). */
3211 if (ring->rptr_save_reg) {
/* next_rptr = wptr + dwords emitted by this function after this point */
3212 next_rptr = ring->wptr + 3 + 4 + 8;
3213 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3214 radeon_ring_write(ring, ((ring->rptr_save_reg -
3215 PACKET3_SET_CONFIG_REG_START) >> 2));
3216 radeon_ring_write(ring, next_rptr);
3217 } else if (rdev->wb.enabled) {
3218 next_rptr = ring->wptr + 5 + 4 + 8;
3219 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
3220 radeon_ring_write(ring, (1 << 8));
3221 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3222 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xffffffff);
3223 radeon_ring_write(ring, next_rptr);
3226 header = PACKET3(PACKET3_INDIRECT_BUFFER, 2);
3229 radeon_ring_write(ring, header);
3230 radeon_ring_write(ring,
3234 (ib->gpu_addr & 0xFFFFFFFC));
3235 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
/* length in dwords, VM id in bits 31:24 (0 = kernel context) */
3236 radeon_ring_write(ring, ib->length_dw |
3237 (ib->vm ? (ib->vm->id << 24) : 0));
3239 if (!ib->is_const_ib) {
3240 /* flush read cache over gart for this vmid */
3241 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3242 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3243 radeon_ring_write(ring, ib->vm ? ib->vm->id : 0);
3244 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3245 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3246 PACKET3_TC_ACTION_ENA |
3247 PACKET3_SH_KCACHE_ACTION_ENA |
3248 PACKET3_SH_ICACHE_ACTION_ENA);
3249 radeon_ring_write(ring, 0xFFFFFFFF);
3250 radeon_ring_write(ring, 0);
3251 radeon_ring_write(ring, 10); /* poll interval */
/* Enable or halt the command processor (CP).  On disable, halt ME/PFP/CE,
 * mask the scratch writeback register, and mark the three CP rings not ready.
 * NOTE(review): the if/else lines around the enable path were elided in this
 * listing (numbers jump 3261 -> 3263). */
3258 static void si_cp_enable(struct radeon_device *rdev, bool enable)
3261 WREG32(CP_ME_CNTL, 0);
/* shrink usable VRAM while the GFX ring (used for buffer moves) is down */
3263 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3264 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3265 WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT));
3266 WREG32(SCRATCH_UMSK, 0);
3267 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3268 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3269 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
/* Upload the PFP, CE and ME microcode images (big-endian words from the
 * request_firmware blobs) into the CP ucode RAMs, then reset all the
 * ucode address pointers to 0. */
3274 static int si_cp_load_microcode(struct radeon_device *rdev)
3276 const __be32 *fw_data;
3279 if (!rdev->me_fw || !rdev->pfp_fw)
/* halt the CP before touching its microcode RAM */
3282 si_cp_enable(rdev, false);
3285 fw_data = (const __be32 *)rdev->pfp_fw->data;
3286 WREG32(CP_PFP_UCODE_ADDR, 0);
3287 for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
3288 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3289 WREG32(CP_PFP_UCODE_ADDR, 0);
3292 fw_data = (const __be32 *)rdev->ce_fw->data;
3293 WREG32(CP_CE_UCODE_ADDR, 0);
3294 for (i = 0; i < SI_CE_UCODE_SIZE; i++)
3295 WREG32(CP_CE_UCODE_DATA, be32_to_cpup(fw_data++));
3296 WREG32(CP_CE_UCODE_ADDR, 0);
3299 fw_data = (const __be32 *)rdev->me_fw->data;
3300 WREG32(CP_ME_RAM_WADDR, 0);
3301 for (i = 0; i < SI_PM4_UCODE_SIZE; i++)
3302 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3303 WREG32(CP_ME_RAM_WADDR, 0);
3305 WREG32(CP_PFP_UCODE_ADDR, 0);
3306 WREG32(CP_CE_UCODE_ADDR, 0);
3307 WREG32(CP_ME_RAM_WADDR, 0);
3308 WREG32(CP_ME_RAM_RADDR, 0);
/* Bring the CP online: emit ME_INITIALIZE and the CE partition setup on the
 * GFX ring, enable the CP, replay the golden clear-state, then issue a
 * compute CLEAR_STATE on each of the three CP rings. */
3312 static int si_cp_start(struct radeon_device *rdev)
3314 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3317 r = radeon_ring_lock(rdev, ring, 7 + 4);
3319 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
/* ME_INITIALIZE: version, num contexts - 1, device id */
3323 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3324 radeon_ring_write(ring, 0x1);
3325 radeon_ring_write(ring, 0x0);
3326 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
3327 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3328 radeon_ring_write(ring, 0);
3329 radeon_ring_write(ring, 0);
3331 /* init the CE partitions */
3332 radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
3333 radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
3334 radeon_ring_write(ring, 0xc000);
3335 radeon_ring_write(ring, 0xe000);
3336 radeon_ring_unlock_commit(rdev, ring);
3338 si_cp_enable(rdev, true);
/* second submission: golden state + context regs (si_default_size dwords) */
3340 r = radeon_ring_lock(rdev, ring, si_default_size + 10);
3342 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3346 /* setup clear context state */
3347 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3348 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3350 for (i = 0; i < si_default_size; i++)
3351 radeon_ring_write(ring, si_default_state[i]);
3353 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3354 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3356 /* set clear context state */
3357 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3358 radeon_ring_write(ring, 0);
3360 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
3361 radeon_ring_write(ring, 0x00000316);
3362 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3363 radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3365 radeon_ring_unlock_commit(rdev, ring);
/* GFX ring index through CP2: clear compute state on all three CP rings */
3367 for (i = RADEON_RING_TYPE_GFX_INDEX; i <= CAYMAN_RING_TYPE_CP2_INDEX; ++i) {
3368 ring = &rdev->ring[i];
3369 r = radeon_ring_lock(rdev, ring, 2);
3371 /* clear the compute context state */
3372 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
3373 radeon_ring_write(ring, 0);
3375 radeon_ring_unlock_commit(rdev, ring);
/* Tear down the CP: halt it, then free each of the three CP rings and the
 * scratch register used to save its read pointer. */
3381 static void si_cp_fini(struct radeon_device *rdev)
3383 struct radeon_ring *ring;
3384 si_cp_enable(rdev, false);
3386 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3387 radeon_ring_fini(rdev, ring);
3388 radeon_scratch_free(rdev, ring->rptr_save_reg);
3390 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3391 radeon_ring_fini(rdev, ring);
3392 radeon_scratch_free(rdev, ring->rptr_save_reg);
3394 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3395 radeon_ring_fini(rdev, ring);
3396 radeon_scratch_free(rdev, ring->rptr_save_reg);
/* Program the three CP ring buffers (gfx ring 0 plus compute rings 1 and 2):
 * size, read/write pointers, writeback rptr addresses and base addresses —
 * then ring-test each one and mark it ready.  NOTE(review): several lines
 * (error returns, byte-swap #ifdefs, closing braces) were elided in this
 * listing, per the gaps in the embedded line numbers. */
3399 static int si_cp_resume(struct radeon_device *rdev)
3401 struct radeon_ring *ring;
/* quiesce gui-idle interrupts while reprogramming the rings */
3406 si_enable_gui_idle_interrupt(rdev, false);
3408 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3409 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3411 /* Set the write pointer delay */
3412 WREG32(CP_RB_WPTR_DELAY, 0);
3414 WREG32(CP_DEBUG, 0);
3415 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3417 /* ring 0 - compute and gfx */
3418 /* Set ring buffer size */
3419 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
/* log2 of ring size in 8-byte units */
3420 rb_bufsz = order_base_2(ring->ring_size / 8);
3421 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3423 tmp |= BUF_SWAP_32BIT;
3425 WREG32(CP_RB0_CNTL, tmp);
3427 /* Initialize the ring buffer's read and write pointers */
3428 WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
3430 WREG32(CP_RB0_WPTR, ring->wptr);
3432 /* set the wb address whether it's enabled or not */
3433 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
3434 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3436 if (rdev->wb.enabled)
3437 WREG32(SCRATCH_UMSK, 0xff);
3439 tmp |= RB_NO_UPDATE;
3440 WREG32(SCRATCH_UMSK, 0);
3444 WREG32(CP_RB0_CNTL, tmp);
3446 WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
3448 ring->rptr = RREG32(CP_RB0_RPTR);
3450 /* ring1 - compute only */
3451 /* Set ring buffer size */
3452 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3453 rb_bufsz = order_base_2(ring->ring_size / 8);
3454 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3456 tmp |= BUF_SWAP_32BIT;
3458 WREG32(CP_RB1_CNTL, tmp);
3460 /* Initialize the ring buffer's read and write pointers */
3461 WREG32(CP_RB1_CNTL, tmp | RB_RPTR_WR_ENA);
3463 WREG32(CP_RB1_WPTR, ring->wptr);
3465 /* set the wb address whether it's enabled or not */
3466 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);
3467 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);
3470 WREG32(CP_RB1_CNTL, tmp);
3472 WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);
3474 ring->rptr = RREG32(CP_RB1_RPTR);
3476 /* ring2 - compute only */
3477 /* Set ring buffer size */
3478 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3479 rb_bufsz = order_base_2(ring->ring_size / 8);
3480 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3482 tmp |= BUF_SWAP_32BIT;
3484 WREG32(CP_RB2_CNTL, tmp);
3486 /* Initialize the ring buffer's read and write pointers */
3487 WREG32(CP_RB2_CNTL, tmp | RB_RPTR_WR_ENA);
3489 WREG32(CP_RB2_WPTR, ring->wptr);
3491 /* set the wb address whether it's enabled or not */
3492 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC);
3493 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF);
3496 WREG32(CP_RB2_CNTL, tmp);
3498 WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);
3500 ring->rptr = RREG32(CP_RB2_RPTR);
3502 /* start the rings */
3504 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
3505 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
3506 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
/* if the gfx ring test fails, all three rings are marked not ready */
3507 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
3509 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3510 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3511 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3514 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
3516 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3518 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
3520 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3523 si_enable_gui_idle_interrupt(rdev, true);
/* restore full VRAM size now that the copy ring is back up */
3525 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3526 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
/* Inspect the GRBM/SRBM/DMA/VM status registers and build a bitmask of
 * RADEON_RESET_* flags describing which engines appear hung.  An MC-busy
 * indication is cleared at the end since a busy MC is usually not hung. */
3531 u32 si_gpu_check_soft_reset(struct radeon_device *rdev)
3537 tmp = RREG32(GRBM_STATUS);
3538 if (tmp & (PA_BUSY | SC_BUSY |
3539 BCI_BUSY | SX_BUSY |
3540 TA_BUSY | VGT_BUSY |
3542 GDS_BUSY | SPI_BUSY |
3543 IA_BUSY | IA_BUSY_NO_DMA))
3544 reset_mask |= RADEON_RESET_GFX;
3546 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3547 CP_BUSY | CP_COHERENCY_BUSY))
3548 reset_mask |= RADEON_RESET_CP;
3550 if (tmp & GRBM_EE_BUSY)
3551 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3554 tmp = RREG32(GRBM_STATUS2);
3555 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3556 reset_mask |= RADEON_RESET_RLC;
3558 /* DMA_STATUS_REG 0 */
3559 tmp = RREG32(DMA_STATUS_REG + DMA0_REGISTER_OFFSET);
3560 if (!(tmp & DMA_IDLE))
3561 reset_mask |= RADEON_RESET_DMA;
3563 /* DMA_STATUS_REG 1 */
3564 tmp = RREG32(DMA_STATUS_REG + DMA1_REGISTER_OFFSET);
3565 if (!(tmp & DMA_IDLE))
3566 reset_mask |= RADEON_RESET_DMA1;
/* NOTE(review): the DMA_BUSY test line before this |= was elided in the
 * listing (numbers jump 3569 -> 3571). */
3569 tmp = RREG32(SRBM_STATUS2);
3571 reset_mask |= RADEON_RESET_DMA;
3573 if (tmp & DMA1_BUSY)
3574 reset_mask |= RADEON_RESET_DMA1;
3577 tmp = RREG32(SRBM_STATUS);
3580 reset_mask |= RADEON_RESET_IH;
3583 reset_mask |= RADEON_RESET_SEM;
3585 if (tmp & GRBM_RQ_PENDING)
3586 reset_mask |= RADEON_RESET_GRBM;
3589 reset_mask |= RADEON_RESET_VMC;
3591 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3592 MCC_BUSY | MCD_BUSY))
3593 reset_mask |= RADEON_RESET_MC;
3595 if (evergreen_is_display_hung(rdev))
3596 reset_mask |= RADEON_RESET_DISPLAY;
3599 tmp = RREG32(VM_L2_STATUS);
3601 reset_mask |= RADEON_RESET_VMC;
3603 /* Skip MC reset as it's mostly likely not hung, just busy */
3604 if (reset_mask & RADEON_RESET_MC) {
3605 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3606 reset_mask &= ~RADEON_RESET_MC;
/* Perform a soft reset of the engines selected by reset_mask: dump fault
 * state, halt CP and DMA engines, stop the MC, then pulse the appropriate
 * GRBM_SOFT_RESET / SRBM_SOFT_RESET bits and resume the MC. */
3612 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3614 struct evergreen_mc_save save;
3615 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3618 if (reset_mask == 0)
3621 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3623 evergreen_print_gpu_status_regs(rdev);
3624 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
3625 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
3626 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3627 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
3636 /* Disable CP parsing/prefetching */
3637 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3639 if (reset_mask & RADEON_RESET_DMA) {
/* stop the DMA0 ring buffer before resetting */
3641 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3642 tmp &= ~DMA_RB_ENABLE;
3643 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3645 if (reset_mask & RADEON_RESET_DMA1) {
3647 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3648 tmp &= ~DMA_RB_ENABLE;
3649 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3654 evergreen_mc_stop(rdev, &save);
3655 if (evergreen_mc_wait_for_idle(rdev)) {
3656 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
/* NOTE(review): the rest of this OR-chain of SOFT_RESET_* block bits was
 * elided in the listing (numbers jump 3660 -> 3674). */
3659 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE | RADEON_RESET_CP)) {
3660 grbm_soft_reset = SOFT_RESET_CB |
3674 if (reset_mask & RADEON_RESET_CP) {
3675 grbm_soft_reset |= SOFT_RESET_CP | SOFT_RESET_VGT;
3677 srbm_soft_reset |= SOFT_RESET_GRBM;
3680 if (reset_mask & RADEON_RESET_DMA)
3681 srbm_soft_reset |= SOFT_RESET_DMA;
3683 if (reset_mask & RADEON_RESET_DMA1)
3684 srbm_soft_reset |= SOFT_RESET_DMA1;
3686 if (reset_mask & RADEON_RESET_DISPLAY)
3687 srbm_soft_reset |= SOFT_RESET_DC;
3689 if (reset_mask & RADEON_RESET_RLC)
3690 grbm_soft_reset |= SOFT_RESET_RLC;
3692 if (reset_mask & RADEON_RESET_SEM)
3693 srbm_soft_reset |= SOFT_RESET_SEM;
3695 if (reset_mask & RADEON_RESET_IH)
3696 srbm_soft_reset |= SOFT_RESET_IH;
3698 if (reset_mask & RADEON_RESET_GRBM)
3699 srbm_soft_reset |= SOFT_RESET_GRBM;
3701 if (reset_mask & RADEON_RESET_VMC)
3702 srbm_soft_reset |= SOFT_RESET_VMC;
3704 if (reset_mask & RADEON_RESET_MC)
3705 srbm_soft_reset |= SOFT_RESET_MC;
/* pulse the GRBM reset bits: set, read back to post, clear, read back */
3707 if (grbm_soft_reset) {
3708 tmp = RREG32(GRBM_SOFT_RESET);
3709 tmp |= grbm_soft_reset;
3710 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3711 WREG32(GRBM_SOFT_RESET, tmp);
3712 tmp = RREG32(GRBM_SOFT_RESET);
3716 tmp &= ~grbm_soft_reset;
3717 WREG32(GRBM_SOFT_RESET, tmp);
3718 tmp = RREG32(GRBM_SOFT_RESET);
3721 if (srbm_soft_reset) {
3722 tmp = RREG32(SRBM_SOFT_RESET);
3723 tmp |= srbm_soft_reset;
3724 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3725 WREG32(SRBM_SOFT_RESET, tmp);
3726 tmp = RREG32(SRBM_SOFT_RESET);
3730 tmp &= ~srbm_soft_reset;
3731 WREG32(SRBM_SOFT_RESET, tmp);
3732 tmp = RREG32(SRBM_SOFT_RESET);
3735 /* Wait a little for things to settle down */
3738 evergreen_mc_resume(rdev, &save);
3741 evergreen_print_gpu_status_regs(rdev);
/* Switch sclk/mclk to PLL-bypass mode: enable SPLL bypass, request the
 * control change and poll for SPLL_CHG_STATUS, then clear the request and
 * deselect the MPLL as the mclk source. */
3744 static void si_set_clk_bypass_mode(struct radeon_device *rdev)
3748 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3749 tmp |= SPLL_BYPASS_EN;
3750 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3752 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3753 tmp |= SPLL_CTLREQ_CHG;
3754 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
/* poll (bounded by usec_timeout) for the SPLL change to take effect */
3756 for (i = 0; i < rdev->usec_timeout; i++) {
3757 if (RREG32(SPLL_STATUS) & SPLL_CHG_STATUS)
3762 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3763 tmp &= ~(SPLL_CTLREQ_CHG | SCLK_MUX_UPDATE);
3764 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
3766 tmp = RREG32(MPLL_CNTL_MODE);
3767 tmp &= ~MPLL_MCLK_SEL;
3768 WREG32(MPLL_CNTL_MODE, tmp);
/* Power down the SPLL under software direct control.  NOTE(review): the
 * RESET/SLEEP bit ORs between the two read/write pairs were elided in this
 * listing (numbers jump 3779 -> 3781 and 3783 -> 3785). */
3771 static void si_spll_powerdown(struct radeon_device *rdev)
3775 tmp = RREG32(SPLL_CNTL_MODE);
3776 tmp |= SPLL_SW_DIR_CONTROL;
3777 WREG32(SPLL_CNTL_MODE, tmp);
3779 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3781 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3783 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3785 WREG32(CG_SPLL_FUNC_CNTL, tmp);
/* return PLL control to hardware */
3787 tmp = RREG32(SPLL_CNTL_MODE);
3788 tmp &= ~SPLL_SW_DIR_CONTROL;
3789 WREG32(SPLL_CNTL_MODE, tmp);
/* Hard reset via PCI config space: halt CP and both DMA engines, stop the
 * MC, put clocks in bypass and power down the SPLL, disable bus mastering,
 * trigger the PCI config reset, and poll CONFIG_MEMSIZE until the ASIC
 * responds again. */
3792 static void si_gpu_pci_config_reset(struct radeon_device *rdev)
3794 struct evergreen_mc_save save;
3797 dev_info(rdev->dev, "GPU pci config reset\n");
3805 /* Disable CP parsing/prefetching */
3806 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3808 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3809 tmp &= ~DMA_RB_ENABLE;
3810 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3812 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3813 tmp &= ~DMA_RB_ENABLE;
3814 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3815 /* XXX other engines? */
3817 /* halt the rlc, disable cp internal ints */
3822 /* disable mem access */
3823 evergreen_mc_stop(rdev, &save);
3824 if (evergreen_mc_wait_for_idle(rdev)) {
3825 dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
3828 /* set mclk/sclk to bypass */
3829 si_set_clk_bypass_mode(rdev);
3830 /* powerdown spll */
3831 si_spll_powerdown(rdev);
/* disable BM so in-flight DMA cannot scribble during reset */
3833 pci_clear_master(rdev->pdev);
3835 radeon_pci_config_reset(rdev);
3836 /* wait for asic to come out of reset */
3837 for (i = 0; i < rdev->usec_timeout; i++) {
/* CONFIG_MEMSIZE reads 0xffffffff while the ASIC is still in reset */
3838 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
/* Reset escalation: try a soft reset of whatever si_gpu_check_soft_reset
 * reports hung; if engines are still hung (and the radeon_hard_reset module
 * option allows), fall back to a PCI config reset.  Finally re-check and
 * update the BIOS scratch "engine hung" flag. */
3844 int si_asic_reset(struct radeon_device *rdev)
3848 reset_mask = si_gpu_check_soft_reset(rdev)
3851 r600_set_bios_scratch_engine_hung(rdev, true);
3853 /* try soft reset */
3854 si_gpu_soft_reset(rdev, reset_mask);
3856 reset_mask = si_gpu_check_soft_reset(rdev);
3858 /* try pci config reset */
3859 if (reset_mask && radeon_hard_reset)
3860 si_gpu_pci_config_reset(rdev);
3862 reset_mask = si_gpu_check_soft_reset(rdev);
3865 r600_set_bios_scratch_engine_hung(rdev, false);
3871 * si_gfx_is_lockup - Check if the GFX engine is locked up
3873 * @rdev: radeon_device pointer
3874 * @ring: radeon_ring structure holding ring information
3876 * Check if the GFX engine is locked up.
3877 * Returns true if the engine appears to be locked up, false if not.
3879 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3881 u32 reset_mask = si_gpu_check_soft_reset(rdev);
/* if no GFX/compute/CP engine reports busy, just refresh lockup tracking */
3883 if (!(reset_mask & (RADEON_RESET_GFX |
3884 RADEON_RESET_COMPUTE |
3885 RADEON_RESET_CP))) {
3886 radeon_ring_lockup_update(ring);
3889 /* force CP activities */
3890 radeon_ring_force_activity(rdev, ring);
3891 return radeon_ring_test_lockup(rdev, ring);
/* Program the memory controller: clear the HDP tile registers, stop the MC,
 * lock out VGA aperture access, set the system/VRAM aperture ranges and FB
 * location, configure HDP non-surface access and disable AGP, then resume
 * the MC and turn off the VGA renderer. */
3895 static void si_mc_program(struct radeon_device *rdev)
3897 struct evergreen_mc_save save;
3901 /* Initialize HDP */
3902 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3903 WREG32((0x2c14 + j), 0x00000000);
3904 WREG32((0x2c18 + j), 0x00000000);
3905 WREG32((0x2c1c + j), 0x00000000);
3906 WREG32((0x2c20 + j), 0x00000000);
3907 WREG32((0x2c24 + j), 0x00000000);
3909 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
3911 evergreen_mc_stop(rdev, &save);
3912 if (radeon_mc_wait_for_idle(rdev)) {
3913 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3915 if (!ASIC_IS_NODCE(rdev))
3916 /* Lockout access through VGA aperture*/
3917 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
3918 /* Update configuration */
3919 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
3920 rdev->mc.vram_start >> 12);
3921 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
3922 rdev->mc.vram_end >> 12);
/* out-of-range system-aperture accesses land on the VRAM scratch page */
3923 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR,
3924 rdev->vram_scratch.gpu_addr >> 12);
/* FB location: end in upper 16 bits, start in lower, both in 16MB units */
3925 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3926 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3927 WREG32(MC_VM_FB_LOCATION, tmp);
3928 /* XXX double check these! */
3929 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3930 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3931 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
/* disable AGP: TOP < BOT makes the aperture empty */
3932 WREG32(MC_VM_AGP_BASE, 0);
3933 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3934 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3935 if (radeon_mc_wait_for_idle(rdev)) {
3936 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3938 evergreen_mc_resume(rdev, &save);
3939 if (!ASIC_IS_NODCE(rdev)) {
3940 /* we need to own VRAM, so turn off the VGA renderer here
3941 * to stop it overwriting our objects */
3942 rv515_vga_render_disable(rdev);
/* Place VRAM and GTT in the GPU's address space.  VRAM is clamped so at
 * least ~1GB of the 40-bit (1TB) space remains for the GTT aperture. */
3946 void si_vram_gtt_location(struct radeon_device *rdev,
3947 struct radeon_mc *mc)
3949 if (mc->mc_vram_size > 0xFFC0000000ULL) {
3950 /* leave room for at least 1024M GTT */
3951 dev_warn(rdev->dev, "limiting VRAM\n");
3952 mc->real_vram_size = 0xFFC0000000ULL;
3953 mc->mc_vram_size = 0xFFC0000000ULL;
/* VRAM at GPU address 0, GTT placed after it */
3955 radeon_vram_location(rdev, &rdev->mc, 0);
3956 rdev->mc.gtt_base_align = 0;
3957 radeon_gtt_location(rdev, mc);
/* Discover VRAM properties: bus width from channel size x channel count,
 * aperture base/size from the PCI BAR, and total VRAM size (in MB) from
 * CONFIG_MEMSIZE — then place VRAM/GTT and refresh bandwidth info.
 * NOTE(review): the chansize assignments and the NOOFCHAN case bodies were
 * elided in this listing (numbers jump 3968 -> 3970 and 3976 -> 4006). */
3960 static int si_mc_init(struct radeon_device *rdev)
3963 int chansize, numchan;
3965 /* Get VRAM informations */
3966 rdev->mc.vram_is_ddr = true;
3967 tmp = RREG32(MC_ARB_RAMCFG);
3968 if (tmp & CHANSIZE_OVERRIDE) {
3970 } else if (tmp & CHANSIZE_MASK) {
3975 tmp = RREG32(MC_SHARED_CHMAP);
3976 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
4006 rdev->mc.vram_width = numchan * chansize;
4007 /* Could aper size report 0 ? */
4008 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
4009 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
4010 /* size in MB on si */
4011 tmp = RREG32(CONFIG_MEMSIZE);
4012 /* some boards may have garbage in the upper 16 bits */
4013 if (tmp & 0xffff0000) {
4014 DRM_INFO("Probable bad vram size: 0x%08x\n", tmp);
/* convert MB to bytes using 64-bit arithmetic */
4018 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
4019 rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
4020 rdev->mc.visible_vram_size = rdev->mc.aper_size;
4021 si_vram_gtt_location(rdev, &rdev->mc);
4022 radeon_update_bandwidth_info(rdev);
/* Flush the HDP cache and invalidate the VM context 0 TLB entries so CPU
 * writes to GART-mapped pages become visible to the GPU. */
4030 void si_pcie_gart_tlb_flush(struct radeon_device *rdev)
4032 /* flush hdp cache */
4033 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
4035 /* bits 0-15 are the VM contexts0-15 */
4036 WREG32(VM_INVALIDATE_REQUEST, 1);
/* Enable the PCIE GART: pin the page table in VRAM, program the L1 TLB and
 * L2 cache, set up VM context 0 as the GART mapping, point contexts 1-15 at
 * the same table for now, enable their protection-fault handling, then flush
 * the TLB and mark the GART ready. */
4039 static int si_pcie_gart_enable(struct radeon_device *rdev)
4043 if (rdev->gart.robj == NULL) {
4044 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
4047 r = radeon_gart_table_vram_pin(rdev);
4050 radeon_gart_restore(rdev);
4051 /* Setup TLB control */
4052 WREG32(MC_VM_MX_L1_TLB_CNTL,
4055 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4056 ENABLE_ADVANCED_DRIVER_MODEL |
4057 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4058 /* Setup L2 cache */
4059 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE |
4060 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4061 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4062 EFFECTIVE_L2_QUEUE_SIZE(7) |
4063 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4064 WREG32(VM_L2_CNTL2, INVALIDATE_ALL_L1_TLBS | INVALIDATE_L2_CACHE);
4065 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4066 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4067 /* setup context0 */
4068 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
4069 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
4070 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
/* faults in context 0 redirect to the dummy page */
4071 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
4072 (u32)(rdev->dummy_page.addr >> 12));
4073 WREG32(VM_CONTEXT0_CNTL2, 0);
4074 WREG32(VM_CONTEXT0_CNTL, (ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
4075 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT));
4081 /* empty context1-15 */
4082 /* set vm size, must be a multiple of 4 */
4083 WREG32(VM_CONTEXT1_PAGE_TABLE_START_ADDR, 0);
4084 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn);
4085 /* Assign the pt base to something valid for now; the pts used for
4086 * the VMs are determined by the application and setup and assigned
4087 * on the fly in the vm part of radeon_gart.c
4089 for (i = 1; i < 16; i++) {
/* NOTE(review): the i<8 / else split of this register pair was elided in
 * the listing (numbers 4090 and 4093 are missing). */
4091 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (i << 2),
4092 rdev->gart.table_addr >> 12);
4094 WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2),
4095 rdev->gart.table_addr >> 12);
4098 /* enable context1-15 */
4099 WREG32(VM_CONTEXT1_PROTECTION_FAULT_DEFAULT_ADDR,
4100 (u32)(rdev->dummy_page.addr >> 12));
4101 WREG32(VM_CONTEXT1_CNTL2, 4);
4102 WREG32(VM_CONTEXT1_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(1) |
4103 RANGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4104 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4105 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4106 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4107 PDE0_PROTECTION_FAULT_ENABLE_INTERRUPT |
4108 PDE0_PROTECTION_FAULT_ENABLE_DEFAULT |
4109 VALID_PROTECTION_FAULT_ENABLE_INTERRUPT |
4110 VALID_PROTECTION_FAULT_ENABLE_DEFAULT |
4111 READ_PROTECTION_FAULT_ENABLE_INTERRUPT |
4112 READ_PROTECTION_FAULT_ENABLE_DEFAULT |
4113 WRITE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4114 WRITE_PROTECTION_FAULT_ENABLE_DEFAULT);
4116 si_pcie_gart_tlb_flush(rdev);
4117 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
4118 (unsigned)(rdev->mc.gtt_size >> 20),
4119 (unsigned long long)rdev->gart.table_addr);
4120 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off VM contexts 0/1, drop the TLB/L2 enable
 * bits programmed by si_pcie_gart_enable, and unpin the page table. */
4124 static void si_pcie_gart_disable(struct radeon_device *rdev)
4126 /* Disable all tables */
4127 WREG32(VM_CONTEXT0_CNTL, 0);
4128 WREG32(VM_CONTEXT1_CNTL, 0);
4129 /* Setup TLB control */
4130 WREG32(MC_VM_MX_L1_TLB_CNTL, SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4131 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4132 /* Setup L2 cache */
4133 WREG32(VM_L2_CNTL, ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4134 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4135 EFFECTIVE_L2_QUEUE_SIZE(7) |
4136 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4137 WREG32(VM_L2_CNTL2, 0);
4138 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4139 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4140 radeon_gart_table_vram_unpin(rdev);
/* Full GART teardown: disable it, then free the table VRAM and GART state. */
4143 static void si_pcie_gart_fini(struct radeon_device *rdev)
4145 si_pcie_gart_disable(rdev);
4146 radeon_gart_table_vram_free(rdev);
4147 radeon_gart_fini(rdev);
/* Whitelist check for registers a VM (userspace) command stream may write.
 * Context registers are always allowed; config registers must be on the
 * explicit list below.  NOTE(review): the range checks before the switch
 * and the "return true" lines were elided in this listing. */
4151 static bool si_vm_reg_valid(u32 reg)
4153 /* context regs are fine */
4157 /* check config regs */
4159 case GRBM_GFX_INDEX:
4160 case CP_STRMOUT_CNTL:
4161 case VGT_VTX_VECT_EJECT_REG:
4162 case VGT_CACHE_INVALIDATION:
4163 case VGT_ESGS_RING_SIZE:
4164 case VGT_GSVS_RING_SIZE:
4165 case VGT_GS_VERTEX_REUSE:
4166 case VGT_PRIMITIVE_TYPE:
4167 case VGT_INDEX_TYPE:
4168 case VGT_NUM_INDICES:
4169 case VGT_NUM_INSTANCES:
4170 case VGT_TF_RING_SIZE:
4171 case VGT_HS_OFFCHIP_PARAM:
4172 case VGT_TF_MEMORY_BASE:
4174 case PA_SU_LINE_STIPPLE_VALUE:
4175 case PA_SC_LINE_STIPPLE_STATE:
4178 case SPI_STATIC_THREAD_MGMT_1:
4179 case SPI_STATIC_THREAD_MGMT_2:
4180 case SPI_STATIC_THREAD_MGMT_3:
4181 case SPI_PS_MAX_WAVE_ID:
4182 case SPI_CONFIG_CNTL:
4183 case SPI_CONFIG_CNTL_1:
/* anything not whitelisted is rejected */
4187 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
/* Validate a PACKET3 submitted on the constant engine (CE) ring: only the
 * CE-safe opcodes below are accepted; anything else is rejected with an
 * error. */
4192 static int si_vm_packet3_ce_check(struct radeon_device *rdev,
4193 u32 *ib, struct radeon_cs_packet *pkt)
4195 switch (pkt->opcode) {
4197 case PACKET3_SET_BASE:
4198 case PACKET3_SET_CE_DE_COUNTERS:
4199 case PACKET3_LOAD_CONST_RAM:
4200 case PACKET3_WRITE_CONST_RAM:
4201 case PACKET3_WRITE_CONST_RAM_OFFSET:
4202 case PACKET3_DUMP_CONST_RAM:
4203 case PACKET3_INCREMENT_CE_COUNTER:
4204 case PACKET3_WAIT_ON_DE_COUNTER:
4205 case PACKET3_CE_WRITE:
4208 DRM_ERROR("Invalid CE packet3: 0x%x\n", pkt->opcode);
/* Validate a CP_DMA packet from a VM command stream.  When the source or
 * destination address space is "register", every register touched (single
 * register if the auto-increment-disable bit is set, otherwise the whole
 * byte-count range) must pass si_vm_reg_valid(). */
4214 static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)
4216 u32 start_reg, reg, i;
4217 u32 command = ib[idx + 4];
4218 u32 info = ib[idx + 1];
4219 u32 idx_value = ib[idx];
4220 if (command & PACKET3_CP_DMA_CMD_SAS) {
4221 /* src address space is register */
4222 if (((info & 0x60000000) >> 29) == 0) {
4223 start_reg = idx_value << 2;
/* SAIC set: no auto-increment, only one source register is read */
4224 if (command & PACKET3_CP_DMA_CMD_SAIC) {
4226 if (!si_vm_reg_valid(reg)) {
4227 DRM_ERROR("CP DMA Bad SRC register\n");
/* low 21 bits of command = byte count; check each register in range */
4231 for (i = 0; i < (command & 0x1fffff); i++) {
4232 reg = start_reg + (4 * i);
4233 if (!si_vm_reg_valid(reg)) {
4234 DRM_ERROR("CP DMA Bad SRC register\n");
4241 if (command & PACKET3_CP_DMA_CMD_DAS) {
4242 /* dst address space is register */
4243 if (((info & 0x00300000) >> 20) == 0) {
4244 start_reg = ib[idx + 2];
4245 if (command & PACKET3_CP_DMA_CMD_DAIC) {
4247 if (!si_vm_reg_valid(reg)) {
4248 DRM_ERROR("CP DMA Bad DST register\n");
4252 for (i = 0; i < (command & 0x1fffff); i++) {
4253 reg = start_reg + (4 * i);
4254 if (!si_vm_reg_valid(reg)) {
4255 DRM_ERROR("CP DMA Bad DST register\n");
/* Validate a PACKET3 from a GFX-ring IB running under a VM. Harmless
 * opcodes pass straight through; opcodes that can write registers
 * (COPY_DATA, WRITE_DATA, COND_WRITE, COPY_DW, SET_CONFIG_REG, CP_DMA)
 * have each target register checked with si_vm_reg_valid().
 */
4265 static int si_vm_packet3_gfx_check(struct radeon_device *rdev,
4266 u32 *ib, struct radeon_cs_packet *pkt)
4269 u32 idx = pkt->idx + 1;
4270 u32 idx_value = ib[idx];
4271 u32 start_reg, end_reg, reg, i;
4273 switch (pkt->opcode) {
4275 case PACKET3_SET_BASE:
4276 case PACKET3_CLEAR_STATE:
4277 case PACKET3_INDEX_BUFFER_SIZE:
4278 case PACKET3_DISPATCH_DIRECT:
4279 case PACKET3_DISPATCH_INDIRECT:
4280 case PACKET3_ALLOC_GDS:
4281 case PACKET3_WRITE_GDS_RAM:
4282 case PACKET3_ATOMIC_GDS:
4283 case PACKET3_ATOMIC:
4284 case PACKET3_OCCLUSION_QUERY:
4285 case PACKET3_SET_PREDICATION:
4286 case PACKET3_COND_EXEC:
4287 case PACKET3_PRED_EXEC:
4288 case PACKET3_DRAW_INDIRECT:
4289 case PACKET3_DRAW_INDEX_INDIRECT:
4290 case PACKET3_INDEX_BASE:
4291 case PACKET3_DRAW_INDEX_2:
4292 case PACKET3_CONTEXT_CONTROL:
4293 case PACKET3_INDEX_TYPE:
4294 case PACKET3_DRAW_INDIRECT_MULTI:
4295 case PACKET3_DRAW_INDEX_AUTO:
4296 case PACKET3_DRAW_INDEX_IMMD:
4297 case PACKET3_NUM_INSTANCES:
4298 case PACKET3_DRAW_INDEX_MULTI_AUTO:
4299 case PACKET3_STRMOUT_BUFFER_UPDATE:
4300 case PACKET3_DRAW_INDEX_OFFSET_2:
4301 case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
4302 case PACKET3_DRAW_INDEX_INDIRECT_MULTI:
4303 case PACKET3_MPEG_INDEX:
4304 case PACKET3_WAIT_REG_MEM:
4305 case PACKET3_MEM_WRITE:
4306 case PACKET3_PFP_SYNC_ME:
4307 case PACKET3_SURFACE_SYNC:
4308 case PACKET3_EVENT_WRITE:
4309 case PACKET3_EVENT_WRITE_EOP:
4310 case PACKET3_EVENT_WRITE_EOS:
4311 case PACKET3_SET_CONTEXT_REG:
4312 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4313 case PACKET3_SET_SH_REG:
4314 case PACKET3_SET_SH_REG_OFFSET:
4315 case PACKET3_INCREMENT_DE_COUNTER:
4316 case PACKET3_WAIT_ON_CE_COUNTER:
4317 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4318 case PACKET3_ME_WRITE:
4320 case PACKET3_COPY_DATA:
/* dest-sel 0 means the destination is a register */
4321 if ((idx_value & 0xf00) == 0) {
4322 reg = ib[idx + 3] * 4;
4323 if (!si_vm_reg_valid(reg))
4327 case PACKET3_WRITE_DATA:
4328 if ((idx_value & 0xf00) == 0) {
4329 start_reg = ib[idx + 1] * 4;
4330 if (idx_value & 0x10000) {
4331 if (!si_vm_reg_valid(start_reg))
4334 for (i = 0; i < (pkt->count - 2); i++) {
4335 reg = start_reg + (4 * i);
4336 if (!si_vm_reg_valid(reg))
4342 case PACKET3_COND_WRITE:
4343 if (idx_value & 0x100) {
4344 reg = ib[idx + 5] * 4;
4345 if (!si_vm_reg_valid(reg))
4349 case PACKET3_COPY_DW:
4350 if (idx_value & 0x2) {
4351 reg = ib[idx + 3] * 4;
4352 if (!si_vm_reg_valid(reg))
4356 case PACKET3_SET_CONFIG_REG:
/* range-check the whole config-reg window, then each register */
4357 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
4358 end_reg = 4 * pkt->count + start_reg - 4;
4359 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
4360 (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
4361 (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
4362 DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
4365 for (i = 0; i < pkt->count; i++) {
4366 reg = start_reg + (4 * i);
4367 if (!si_vm_reg_valid(reg))
4371 case PACKET3_CP_DMA:
4372 r = si_vm_packet3_cp_dma_check(ib, idx);
4377 DRM_ERROR("Invalid GFX packet3: 0x%x\n", pkt->opcode);
/* Validate a PACKET3 from a compute-ring IB; same structure as the GFX
 * checker but with a smaller allowed-opcode list (no draw packets) and
 * no SET_CONFIG_REG handling.
 */
4383 static int si_vm_packet3_compute_check(struct radeon_device *rdev,
4384 u32 *ib, struct radeon_cs_packet *pkt)
4387 u32 idx = pkt->idx + 1;
4388 u32 idx_value = ib[idx];
4389 u32 start_reg, reg, i;
4391 switch (pkt->opcode) {
4393 case PACKET3_SET_BASE:
4394 case PACKET3_CLEAR_STATE:
4395 case PACKET3_DISPATCH_DIRECT:
4396 case PACKET3_DISPATCH_INDIRECT:
4397 case PACKET3_ALLOC_GDS:
4398 case PACKET3_WRITE_GDS_RAM:
4399 case PACKET3_ATOMIC_GDS:
4400 case PACKET3_ATOMIC:
4401 case PACKET3_OCCLUSION_QUERY:
4402 case PACKET3_SET_PREDICATION:
4403 case PACKET3_COND_EXEC:
4404 case PACKET3_PRED_EXEC:
4405 case PACKET3_CONTEXT_CONTROL:
4406 case PACKET3_STRMOUT_BUFFER_UPDATE:
4407 case PACKET3_WAIT_REG_MEM:
4408 case PACKET3_MEM_WRITE:
4409 case PACKET3_PFP_SYNC_ME:
4410 case PACKET3_SURFACE_SYNC:
4411 case PACKET3_EVENT_WRITE:
4412 case PACKET3_EVENT_WRITE_EOP:
4413 case PACKET3_EVENT_WRITE_EOS:
4414 case PACKET3_SET_CONTEXT_REG:
4415 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4416 case PACKET3_SET_SH_REG:
4417 case PACKET3_SET_SH_REG_OFFSET:
4418 case PACKET3_INCREMENT_DE_COUNTER:
4419 case PACKET3_WAIT_ON_CE_COUNTER:
4420 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4421 case PACKET3_ME_WRITE:
4423 case PACKET3_COPY_DATA:
/* dest-sel 0 means the destination is a register */
4424 if ((idx_value & 0xf00) == 0) {
4425 reg = ib[idx + 3] * 4;
4426 if (!si_vm_reg_valid(reg))
4430 case PACKET3_WRITE_DATA:
4431 if ((idx_value & 0xf00) == 0) {
4432 start_reg = ib[idx + 1] * 4;
4433 if (idx_value & 0x10000) {
4434 if (!si_vm_reg_valid(start_reg))
4437 for (i = 0; i < (pkt->count - 2); i++) {
4438 reg = start_reg + (4 * i);
4439 if (!si_vm_reg_valid(reg))
4445 case PACKET3_COND_WRITE:
4446 if (idx_value & 0x100) {
4447 reg = ib[idx + 5] * 4;
4448 if (!si_vm_reg_valid(reg))
4452 case PACKET3_COPY_DW:
4453 if (idx_value & 0x2) {
4454 reg = ib[idx + 3] * 4;
4455 if (!si_vm_reg_valid(reg))
4459 case PACKET3_CP_DMA:
4460 r = si_vm_packet3_cp_dma_check(ib, idx);
4465 DRM_ERROR("Invalid Compute packet3: 0x%x\n", pkt->opcode);
/* Walk an indirect buffer packet by packet and dispatch each PACKET3 to
 * the CE, GFX, or compute checker based on IB type and ring index.
 * Type-0 packets and unknown rings/packet types are rejected outright.
 */
4471 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
4475 struct radeon_cs_packet pkt;
4479 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
4480 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
4483 case RADEON_PACKET_TYPE0:
4484 dev_err(rdev->dev, "Packet0 not allowed!\n");
4487 case RADEON_PACKET_TYPE2:
4490 case RADEON_PACKET_TYPE3:
4491 pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
4492 if (ib->is_const_ib)
4493 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
4496 case RADEON_RING_TYPE_GFX_INDEX:
4497 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
4499 case CAYMAN_RING_TYPE_CP1_INDEX:
4500 case CAYMAN_RING_TYPE_CP2_INDEX:
4501 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
4504 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
/* advance past the packet header plus its payload dwords */
4509 idx += pkt.count + 2;
4512 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type);
4518 } while (idx < ib->length_dw);
/* Initialize the VM manager: SI exposes 16 VM contexts and uses no extra
 * VRAM base offset for page tables.
 */
4526 int si_vm_init(struct radeon_device *rdev)
4529 rdev->vm_manager.nvm = 16;
4530 /* base offset of vram pages */
4531 rdev->vm_manager.vram_base_offset = 0;
/* Tear down the VM manager (body not visible in this extract). */
4536 void si_vm_fini(struct radeon_device *rdev)
4541 * si_vm_decode_fault - print human readable fault info
4543 * @rdev: radeon_device pointer
4544 * @status: VM_CONTEXT1_PROTECTION_FAULT_STATUS register value
4545 * @addr: VM_CONTEXT1_PROTECTION_FAULT_ADDR register value
4547 * Print human readable fault information (SI).
4549 static void si_vm_decode_fault(struct radeon_device *rdev,
4550 u32 status, u32 addr)
/* Unpack client id, vmid and protection bits from the fault status. */
4552 u32 mc_id = (status & MEMORY_CLIENT_ID_MASK) >> MEMORY_CLIENT_ID_SHIFT;
4553 u32 vmid = (status & FAULT_VMID_MASK) >> FAULT_VMID_SHIFT;
4554 u32 protections = (status & PROTECTIONS_MASK) >> PROTECTIONS_SHIFT;
/* NOTE(review): the per-family mc_id -> client-name tables are elided
 * from this extract. */
4557 if (rdev->family == CHIP_TAHITI) {
4798 printk("VM fault (0x%02x, vmid %d) at page %u, %s from %s (%d)\n",
4799 protections, vmid, addr,
4800 (status & MEMORY_CLIENT_RW_MASK) ? "write" : "read",
/* Emit ring commands to switch/flush a VM context: write the new page
 * directory base for this vm->id, flush the HDP cache, invalidate the
 * VM TLB for this context, then sync PFP to ME so the PFP does not read
 * stale translations.
 */
4804 void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
4806 struct radeon_ring *ring = &rdev->ring[ridx];
4811 /* write new base address */
4812 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4813 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) |
4814 WRITE_DATA_DST_SEL(0)));
/* contexts 0-7 and 8-15 live in two separate register banks */
4817 radeon_ring_write(ring,
4818 (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2);
4820 radeon_ring_write(ring,
4821 (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm->id - 8) << 2)) >> 2);
4823 radeon_ring_write(ring, 0);
4824 radeon_ring_write(ring, vm->pd_gpu_addr >> 12);
4826 /* flush hdp cache */
4827 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4828 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4829 WRITE_DATA_DST_SEL(0)));
4830 radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
4831 radeon_ring_write(ring, 0);
4832 radeon_ring_write(ring, 0x1);
4834 /* bits 0-15 are the VM contexts0-15 */
4835 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4836 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4837 WRITE_DATA_DST_SEL(0)));
4838 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
4839 radeon_ring_write(ring, 0);
4840 radeon_ring_write(ring, 1 << vm->id);
4842 /* sync PFP to ME, otherwise we might get invalid PFP reads */
4843 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
4844 radeon_ring_write(ring, 0x0);
4848 * Power and clock gating
/* Poll (up to usec_timeout iterations) until both RLC serdes master
 * busy registers read back as idle.
 */
4850 static void si_wait_for_rlc_serdes(struct radeon_device *rdev)
4854 for (i = 0; i < rdev->usec_timeout; i++) {
4855 if (RREG32(RLC_SERDES_MASTER_BUSY_0) == 0)
4860 for (i = 0; i < rdev->usec_timeout; i++) {
4861 if (RREG32(RLC_SERDES_MASTER_BUSY_1) == 0)
/* Enable/disable the GUI idle (context busy/empty) interrupts on CP
 * ring 0. On disable, a GFX register is read and RLC_STAT polled until
 * the clock/power status bits settle.
 */
4867 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
4870 u32 tmp = RREG32(CP_INT_CNTL_RING0);
4875 tmp |= (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4877 tmp &= ~(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4878 WREG32(CP_INT_CNTL_RING0, tmp);
4881 /* read a gfx register */
4882 tmp = RREG32(DB_DEPTH_INFO);
4884 mask = RLC_BUSY_STATUS | GFX_POWER_STATUS | GFX_CLOCK_STATUS | GFX_LS_STATUS;
4885 for (i = 0; i < rdev->usec_timeout; i++) {
4886 if ((RREG32(RLC_STAT) & mask) == (GFX_CLOCK_STATUS | GFX_POWER_STATUS))
/* Configure UVD dynamic clock mode: set DCM with CG_DT=1 and CLK_OD=4
 * in UVD_CGC_CTRL, and program the companion UVD_CGC_CTRL2 context
 * register.
 */
4893 static void si_set_uvd_dcm(struct radeon_device *rdev,
4898 tmp = RREG32(UVD_CGC_CTRL);
4899 tmp &= ~(CLK_OD_MASK | CG_DT_MASK);
4900 tmp |= DCM | CG_DT(1) | CLK_OD(4);
4904 tmp2 = DYN_OR_EN | DYN_RR_EN | G_DIV_ID(7);
4910 WREG32(UVD_CGC_CTRL, tmp);
4911 WREG32_UVD_CTX(UVD_CGC_CTRL2, tmp2);
/* Initialize UVD internal clock gating; disables DCM here and rewrites
 * UVD_CGC_CTRL (hw_mode branch details elided in this extract).
 */
4914 void si_init_uvd_internal_cg(struct radeon_device *rdev)
4916 bool hw_mode = true;
4919 si_set_uvd_dcm(rdev, false);
4921 u32 tmp = RREG32(UVD_CGC_CTRL);
4923 WREG32(UVD_CGC_CTRL, tmp);
/* Stop the RLC if it is running and wait for its serdes to go idle;
 * returns the original RLC_CNTL value so the caller can restore it via
 * si_update_rlc().
 */
4927 static u32 si_halt_rlc(struct radeon_device *rdev)
4931 orig = data = RREG32(RLC_CNTL);
4933 if (data & RLC_ENABLE) {
4934 data &= ~RLC_ENABLE;
4935 WREG32(RLC_CNTL, data);
4937 si_wait_for_rlc_serdes(rdev);
/* Restore RLC_CNTL to the value previously saved by si_halt_rlc(). */
4943 static void si_update_rlc(struct radeon_device *rdev, u32 rlc)
4947 tmp = RREG32(RLC_CNTL);
4949 WREG32(RLC_CNTL, rlc);
/* Toggle SDMA power gating: set PG_CNTL_ENABLE only when requested and
 * the hardware advertises RADEON_PG_SUPPORT_SDMA.
 */
4952 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable)
4956 orig = data = RREG32(DMA_PG);
4957 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA))
4958 data |= PG_CNTL_ENABLE;
4960 data &= ~PG_CNTL_ENABLE;
4962 WREG32(DMA_PG, data);
/* Program the DMA power-gating state machine with its magic init
 * sequence, then clear the write register five times.
 */
4965 static void si_init_dma_pg(struct radeon_device *rdev)
4969 WREG32(DMA_PGFSM_WRITE, 0x00002000);
4970 WREG32(DMA_PGFSM_CONFIG, 0x100010ff);
4972 for (tmp = 0; tmp < 5; tmp++)
4973 WREG32(DMA_PGFSM_WRITE, 0);
/* Enable/disable GFX coarse-grain power gating: program the RLC TTOP
 * delay thresholds, set GFX_PG_ENABLE, and adjust the auto power-gating
 * control registers. Disable path touches DB_RENDER_CONTROL (details
 * elided in this extract).
 */
4976 static void si_enable_gfx_cgpg(struct radeon_device *rdev,
4981 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
4982 tmp = RLC_PUD(0x10) | RLC_PDD(0x10) | RLC_TTPD(0x10) | RLC_MSD(0x10);
4983 WREG32(RLC_TTOP_D, tmp);
4985 tmp = RREG32(RLC_PG_CNTL);
4986 tmp |= GFX_PG_ENABLE;
4987 WREG32(RLC_PG_CNTL, tmp);
4989 tmp = RREG32(RLC_AUTO_PG_CTRL);
4991 WREG32(RLC_AUTO_PG_CTRL, tmp);
4993 tmp = RREG32(RLC_AUTO_PG_CTRL);
4995 WREG32(RLC_AUTO_PG_CTRL, tmp);
4997 tmp = RREG32(DB_RENDER_CONTROL);
/* One-time GFX power-gating setup: point the RLC at the save/restore
 * and clear-state buffers (GPU addresses shifted to 256-byte units) and
 * program the auto-PG idle threshold (GRBM_REG_SGIT).
 */
5001 static void si_init_gfx_cgpg(struct radeon_device *rdev)
5005 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5007 tmp = RREG32(RLC_PG_CNTL);
5009 WREG32(RLC_PG_CNTL, tmp);
5011 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
5013 tmp = RREG32(RLC_AUTO_PG_CTRL);
5015 tmp &= ~GRBM_REG_SGIT_MASK;
5016 tmp |= GRBM_REG_SGIT(0x700);
5017 tmp &= ~PG_AFTER_GRBM_REG_ST_MASK;
5018 WREG32(RLC_AUTO_PG_CTRL, tmp);
/* Return a bitmap of active compute units for the given shader engine /
 * shader array: read the (inverted) disable masks for the selected
 * SE/SH, then mask to max_cu_per_sh bits.
 */
5021 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
5023 u32 mask = 0, tmp, tmp1;
5026 si_select_se_sh(rdev, se, sh);
5027 tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
5028 tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
/* restore broadcast selection so later register accesses hit all SE/SH */
5029 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5036 for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) {
5041 return (~tmp) & mask;
/* Build the always-on CU mask across all shader engines/arrays, write
 * it to RLC_PG_AO_CU_MASK, and program RLC_MAX_PG_CU with the total
 * number of active CUs.
 */
5044 static void si_init_ao_cu_mask(struct radeon_device *rdev)
5046 u32 i, j, k, active_cu_number = 0;
5047 u32 mask, counter, cu_bitmap;
5050 for (i = 0; i < rdev->config.si.max_shader_engines; i++) {
5051 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) {
5055 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) {
5056 if (si_get_cu_active_bitmap(rdev, i, j) & mask) {
5064 active_cu_number += counter;
/* pack each SH's bitmap into its byte lane of the 32-bit mask */
5065 tmp |= (cu_bitmap << (i * 16 + j * 8));
5069 WREG32(RLC_PG_AO_CU_MASK, tmp);
5071 tmp = RREG32(RLC_MAX_PG_CU);
5072 tmp &= ~MAX_PU_CU_MASK;
5073 tmp |= MAX_PU_CU(active_cu_number);
5074 WREG32(RLC_MAX_PG_CU, tmp);
/* Enable/disable coarse-grain clock gating (CGCG/CGLS) for GFX: halt
 * the RLC, broadcast a serdes write command, restore the RLC, then set
 * or clear the enable bits in RLC_CGCG_CGLS_CTRL. Disable path also
 * reads CB_CGTT_SCLK_CTRL several times (flush/settle reads).
 */
5077 static void si_enable_cgcg(struct radeon_device *rdev,
5080 u32 data, orig, tmp;
5082 orig = data = RREG32(RLC_CGCG_CGLS_CTRL);
5084 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
5085 si_enable_gui_idle_interrupt(rdev, true);
5087 WREG32(RLC_GCPM_GENERAL_3, 0x00000080);
5089 tmp = si_halt_rlc(rdev);
5091 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5092 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5093 WREG32(RLC_SERDES_WR_CTRL, 0x00b000ff);
5095 si_wait_for_rlc_serdes(rdev);
5097 si_update_rlc(rdev, tmp);
5099 WREG32(RLC_SERDES_WR_CTRL, 0x007000ff);
5101 data |= CGCG_EN | CGLS_EN;
5103 si_enable_gui_idle_interrupt(rdev, false);
5105 RREG32(CB_CGTT_SCLK_CTRL);
5106 RREG32(CB_CGTT_SCLK_CTRL);
5107 RREG32(CB_CGTT_SCLK_CTRL);
5108 RREG32(CB_CGTT_SCLK_CTRL);
5110 data &= ~(CGCG_EN | CGLS_EN);
5114 WREG32(RLC_CGCG_CGLS_CTRL, data);
/* Enable/disable medium-grain clock gating for GFX: program
 * CGTS_SM_CTRL_REG, optional CP memory light sleep, clear/set the MGCG
 * override, and issue the matching RLC serdes broadcast command
 * (0x00d000ff on enable, 0x00e000ff on disable) around an RLC halt.
 */
5117 static void si_enable_mgcg(struct radeon_device *rdev,
5120 u32 data, orig, tmp = 0;
5122 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
5123 orig = data = RREG32(CGTS_SM_CTRL_REG);
5126 WREG32(CGTS_SM_CTRL_REG, data);
5128 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
5129 orig = data = RREG32(CP_MEM_SLP_CNTL);
5130 data |= CP_MEM_LS_EN;
5132 WREG32(CP_MEM_SLP_CNTL, data);
5135 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5138 WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5140 tmp = si_halt_rlc(rdev);
5142 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5143 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5144 WREG32(RLC_SERDES_WR_CTRL, 0x00d000ff);
5146 si_update_rlc(rdev, tmp);
5148 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5151 WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5153 data = RREG32(CP_MEM_SLP_CNTL);
5154 if (data & CP_MEM_LS_EN) {
5155 data &= ~CP_MEM_LS_EN;
5156 WREG32(CP_MEM_SLP_CNTL, data);
5158 orig = data = RREG32(CGTS_SM_CTRL_REG);
5159 data |= LS_OVERRIDE | OVERRIDE;
5161 WREG32(CGTS_SM_CTRL_REG, data);
5163 tmp = si_halt_rlc(rdev);
5165 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5166 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5167 WREG32(RLC_SERDES_WR_CTRL, 0x00e000ff);
5169 si_update_rlc(rdev, tmp);
/* Enable/disable UVD medium-grain clock gating via the UVD context
 * memory-control register, UVD_CGC_CTRL, and the two SMC-indirect
 * CG_CGTT_LOCAL registers (0 = gated, 0xffffffff = ungated).
 */
5173 static void si_enable_uvd_mgcg(struct radeon_device *rdev,
5176 u32 orig, data, tmp;
5178 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
5179 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5181 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5183 orig = data = RREG32(UVD_CGC_CTRL);
5186 WREG32(UVD_CGC_CTRL, data);
5188 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0);
5189 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0);
5191 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5193 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5195 orig = data = RREG32(UVD_CGC_CTRL);
5198 WREG32(UVD_CGC_CTRL, data);
5200 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0xffffffff);
5201 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0xffffffff);
/* Memory-controller registers whose LS/CG enable bits are toggled by
 * si_enable_mc_ls()/si_enable_mc_mgcg() (initializer elided in this
 * extract).
 */
5205 static const u32 mc_cg_registers[] =
/* Set or clear MC_LS_ENABLE in every memory-controller CG register,
 * gated on the RADEON_CG_SUPPORT_MC_LS capability flag.
 */
5218 static void si_enable_mc_ls(struct radeon_device *rdev,
5224 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5225 orig = data = RREG32(mc_cg_registers[i]);
5226 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
5227 data |= MC_LS_ENABLE;
5229 data &= ~MC_LS_ENABLE;
5231 WREG32(mc_cg_registers[i], data);
/* Set or clear MC_CG_ENABLE in every memory-controller CG register,
 * gated on the RADEON_CG_SUPPORT_MC_MGCG capability flag.
 */
5235 static void si_enable_mc_mgcg(struct radeon_device *rdev,
5241 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5242 orig = data = RREG32(mc_cg_registers[i]);
5243 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
5244 data |= MC_CG_ENABLE;
5246 data &= ~MC_CG_ENABLE;
5248 WREG32(mc_cg_registers[i], data);
/* Enable/disable SDMA medium-grain clock gating for both DMA engines:
 * clear MEM_POWER_OVERRIDE and force DMA_CLK_CTRL to 0x100 on enable;
 * set the override and rewrite DMA_CLK_CTRL on disable.
 */
5252 static void si_enable_dma_mgcg(struct radeon_device *rdev,
5255 u32 orig, data, offset;
5258 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
5259 for (i = 0; i < 2; i++) {
5261 offset = DMA0_REGISTER_OFFSET;
5263 offset = DMA1_REGISTER_OFFSET;
5264 orig = data = RREG32(DMA_POWER_CNTL + offset);
5265 data &= ~MEM_POWER_OVERRIDE;
5267 WREG32(DMA_POWER_CNTL + offset, data);
5268 WREG32(DMA_CLK_CTRL + offset, 0x00000100);
5271 for (i = 0; i < 2; i++) {
5273 offset = DMA0_REGISTER_OFFSET;
5275 offset = DMA1_REGISTER_OFFSET;
5276 orig = data = RREG32(DMA_POWER_CNTL + offset);
5277 data |= MEM_POWER_OVERRIDE;
5279 WREG32(DMA_POWER_CNTL + offset, data);
5281 orig = data = RREG32(DMA_CLK_CTRL + offset);
5284 WREG32(DMA_CLK_CTRL + offset, data);
/* Enable/disable BIF (bus interface) memory light sleep bits in
 * PCIE_CNTL2, gated on the RADEON_CG_SUPPORT_BIF_LS capability flag.
 */
5289 static void si_enable_bif_mgls(struct radeon_device *rdev,
5294 orig = data = RREG32_PCIE(PCIE_CNTL2);
5296 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
5297 data |= SLV_MEM_LS_EN | MST_MEM_LS_EN |
5298 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN;
5300 data &= ~(SLV_MEM_LS_EN | MST_MEM_LS_EN |
5301 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN);
5304 WREG32_PCIE(PCIE_CNTL2, data);
/* Enable/disable HDP clock gating via the CLOCK_GATING_DIS bit (note
 * the inverted sense: enable clears the disable bit).
 */
5307 static void si_enable_hdp_mgcg(struct radeon_device *rdev,
5312 orig = data = RREG32(HDP_HOST_PATH_CNTL);
5314 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
5315 data &= ~CLOCK_GATING_DIS;
5317 data |= CLOCK_GATING_DIS;
5320 WREG32(HDP_HOST_PATH_CNTL, data);
/* Enable/disable HDP memory light sleep, gated on the
 * RADEON_CG_SUPPORT_HDP_LS capability flag.
 */
5323 static void si_enable_hdp_ls(struct radeon_device *rdev,
5328 orig = data = RREG32(HDP_MEM_POWER_LS);
5330 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
5331 data |= HDP_LS_ENABLE;
5333 data &= ~HDP_LS_ENABLE;
5336 WREG32(HDP_MEM_POWER_LS, data);
/* Dispatch clock-gating enable/disable to each requested IP block.
 * For GFX the ordering is deliberate: MGCG before CGCG on enable,
 * reversed on disable, with the GUI-idle interrupt held off in between.
 */
5339 static void si_update_cg(struct radeon_device *rdev,
5340 u32 block, bool enable)
5342 if (block & RADEON_CG_BLOCK_GFX) {
5343 si_enable_gui_idle_interrupt(rdev, false);
5344 /* order matters! */
5346 si_enable_mgcg(rdev, true);
5347 si_enable_cgcg(rdev, true);
5349 si_enable_cgcg(rdev, false);
5350 si_enable_mgcg(rdev, false);
5352 si_enable_gui_idle_interrupt(rdev, true);
5355 if (block & RADEON_CG_BLOCK_MC) {
5356 si_enable_mc_mgcg(rdev, enable);
5357 si_enable_mc_ls(rdev, enable);
5360 if (block & RADEON_CG_BLOCK_SDMA) {
5361 si_enable_dma_mgcg(rdev, enable);
5364 if (block & RADEON_CG_BLOCK_BIF) {
5365 si_enable_bif_mgls(rdev, enable);
5368 if (block & RADEON_CG_BLOCK_UVD) {
5369 if (rdev->has_uvd) {
5370 si_enable_uvd_mgcg(rdev, enable);
5374 if (block & RADEON_CG_BLOCK_HDP) {
5375 si_enable_hdp_mgcg(rdev, enable);
5376 si_enable_hdp_ls(rdev, enable);
/* Turn on clock gating for all supported blocks at init; UVD gating
 * (plus its internal CG setup) only when the ASIC has UVD.
 */
5380 static void si_init_cg(struct radeon_device *rdev)
5382 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
5383 RADEON_CG_BLOCK_MC |
5384 RADEON_CG_BLOCK_SDMA |
5385 RADEON_CG_BLOCK_BIF |
5386 RADEON_CG_BLOCK_HDP), true);
5387 if (rdev->has_uvd) {
5388 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true);
5389 si_init_uvd_internal_cg(rdev);
/* Turn off clock gating on teardown, UVD first, then the remaining
 * blocks (mirror of si_init_cg).
 */
5393 static void si_fini_cg(struct radeon_device *rdev)
5395 if (rdev->has_uvd) {
5396 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false);
5398 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
5399 RADEON_CG_BLOCK_MC |
5400 RADEON_CG_BLOCK_SDMA |
5401 RADEON_CG_BLOCK_BIF |
5402 RADEON_CG_BLOCK_HDP), false);
/* Compute the dword size of the clear-state buffer (CSB) that
 * si_get_csb_buffer() will emit: fixed preamble/context-control plus
 * 2 + reg_count dwords per SECT_CONTEXT extent, plus the
 * pa_sc_raster_config write and the end-clear-state/clear-state tail.
 */
5405 u32 si_get_csb_size(struct radeon_device *rdev)
5408 const struct cs_section_def *sect = NULL;
5409 const struct cs_extent_def *ext = NULL;
5411 if (rdev->rlc.cs_data == NULL)
5414 /* begin clear state */
5416 /* context control state */
5419 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
5420 for (ext = sect->section; ext->extent != NULL; ++ext) {
5421 if (sect->id == SECT_CONTEXT)
5422 count += 2 + ext->reg_count;
5427 /* pa_sc_raster_config */
5429 /* end clear state */
/* Fill "buffer" with the PM4 clear-state stream described by
 * rdev->rlc.cs_data (little-endian dwords): preamble, context control,
 * one SET_CONTEXT_REG run per SECT_CONTEXT extent, a per-family
 * PA_SC_RASTER_CONFIG value, then end-clear-state and CLEAR_STATE.
 * Layout must match the size computed by si_get_csb_size().
 */
5437 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
5440 const struct cs_section_def *sect = NULL;
5441 const struct cs_extent_def *ext = NULL;
5443 if (rdev->rlc.cs_data == NULL)
5448 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
5449 buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
5451 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CONTEXT_CONTROL, 1));
5452 buffer[count++] = cpu_to_le32(0x80000000);
5453 buffer[count++] = cpu_to_le32(0x80000000);
5455 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
5456 for (ext = sect->section; ext->extent != NULL; ++ext) {
5457 if (sect->id == SECT_CONTEXT) {
5459 cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, ext->reg_count));
/* context-reg offsets are relative to the 0xa000 base */
5460 buffer[count++] = cpu_to_le32(ext->reg_index - 0xa000);
5461 for (i = 0; i < ext->reg_count; i++)
5462 buffer[count++] = cpu_to_le32(ext->extent[i]);
5469 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, 1));
5470 buffer[count++] = cpu_to_le32(PA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
/* per-family raster config (case labels elided in this extract) */
5471 switch (rdev->family) {
5474 buffer[count++] = cpu_to_le32(0x2a00126a);
5477 buffer[count++] = cpu_to_le32(0x0000124a);
5480 buffer[count++] = cpu_to_le32(0x00000082);
5483 buffer[count++] = cpu_to_le32(0x00000000);
5486 buffer[count++] = cpu_to_le32(0x00000000);
5490 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
5491 buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_END_CLEAR_STATE);
5493 buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CLEAR_STATE, 0));
5494 buffer[count++] = cpu_to_le32(0);
/* Initialize power gating when any pg_flags are set: SDMA PG setup,
 * always-on CU mask, GFX CG/PG setup, and enable SDMA/GFX gating.
 * Without pg_flags, just program the RLC save/restore and clear-state
 * buffer bases.
 */
5497 static void si_init_pg(struct radeon_device *rdev)
5499 if (rdev->pg_flags) {
5500 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) {
5501 si_init_dma_pg(rdev);
5503 si_init_ao_cu_mask(rdev);
5504 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
5505 si_init_gfx_cgpg(rdev);
5507 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5508 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
5510 si_enable_dma_pg(rdev, true);
5511 si_enable_gfx_cgpg(rdev, true);
5513 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5514 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
/* Disable SDMA and GFX power gating on teardown (mirror of si_init_pg). */
5518 static void si_fini_pg(struct radeon_device *rdev)
5520 if (rdev->pg_flags) {
5521 si_enable_dma_pg(rdev, false);
5522 si_enable_gfx_cgpg(rdev, false);
/* Pulse the RLC soft-reset bit in GRBM_SOFT_RESET (assert, then
 * deassert).
 */
5529 void si_rlc_reset(struct radeon_device *rdev)
5531 u32 tmp = RREG32(GRBM_SOFT_RESET);
5533 tmp |= SOFT_RESET_RLC;
5534 WREG32(GRBM_SOFT_RESET, tmp);
5536 tmp &= ~SOFT_RESET_RLC;
5537 WREG32(GRBM_SOFT_RESET, tmp);
/* Stop the RLC: clear RLC_CNTL, mask the GUI-idle interrupt, and wait
 * for the serdes to go idle.
 */
5541 static void si_rlc_stop(struct radeon_device *rdev)
5543 WREG32(RLC_CNTL, 0);
5545 si_enable_gui_idle_interrupt(rdev, false);
5547 si_wait_for_rlc_serdes(rdev);
/* Start the RLC and re-enable the GUI-idle interrupt. */
5550 static void si_rlc_start(struct radeon_device *rdev)
5552 WREG32(RLC_CNTL, RLC_ENABLE);
5554 si_enable_gui_idle_interrupt(rdev, true);
/* Report whether load-balancing power workaround applies: keyed off the
 * memory type field in MC_SEQ_MISC0 (0xB pattern = DDR3).
 */
5559 static bool si_lbpw_supported(struct radeon_device *rdev)
5563 /* Enable LBPW only for DDR3 */
5564 tmp = RREG32(MC_SEQ_MISC0);
5565 if ((tmp & 0xF0000000) == 0xB0000000)
/* Toggle RLC load balancing (LBPW); on disable, broadcast-select all
 * SE/SH and reset SPI_LB_CU_MASK to 0x00ff.
 */
5570 static void si_enable_lbpw(struct radeon_device *rdev, bool enable)
5574 tmp = RREG32(RLC_LB_CNTL);
5576 tmp |= LOAD_BALANCE_ENABLE;
5578 tmp &= ~LOAD_BALANCE_ENABLE;
5579 WREG32(RLC_LB_CNTL, tmp);
5582 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5583 WREG32(SPI_LB_CU_MASK, 0x00ff);
/* Bring up the RLC: clear its control registers, upload the big-endian
 * RLC microcode word by word through the ADDR/DATA pair, then enable
 * LBPW when supported (ucode firmware is required; checks elided in
 * this extract).
 */
5587 static int si_rlc_resume(struct radeon_device *rdev)
5590 const __be32 *fw_data;
5603 WREG32(RLC_RL_BASE, 0);
5604 WREG32(RLC_RL_SIZE, 0);
5605 WREG32(RLC_LB_CNTL, 0);
5606 WREG32(RLC_LB_CNTR_MAX, 0xffffffff);
5607 WREG32(RLC_LB_CNTR_INIT, 0);
5608 WREG32(RLC_LB_INIT_CU_MASK, 0xffffffff);
5610 WREG32(RLC_MC_CNTL, 0);
5611 WREG32(RLC_UCODE_CNTL, 0);
5613 fw_data = (const __be32 *)rdev->rlc_fw->data;
5614 for (i = 0; i < SI_RLC_UCODE_SIZE; i++) {
5615 WREG32(RLC_UCODE_ADDR, i);
5616 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
5618 WREG32(RLC_UCODE_ADDR, 0);
5620 si_enable_lbpw(rdev, si_lbpw_supported(rdev));
/* Enable the interrupt handler: set ENABLE_INTR and IH_RB_ENABLE and
 * record the enabled state in rdev->ih.
 */
5627 static void si_enable_interrupts(struct radeon_device *rdev)
5629 u32 ih_cntl = RREG32(IH_CNTL);
5630 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5632 ih_cntl |= ENABLE_INTR;
5633 ih_rb_cntl |= IH_RB_ENABLE;
5634 WREG32(IH_CNTL, ih_cntl);
5635 WREG32(IH_RB_CNTL, ih_rb_cntl);
5636 rdev->ih.enabled = true;
/* Disable the interrupt handler: clear the enable bits, reset the ring
 * read/write pointers to 0, and record the disabled state.
 */
5639 static void si_disable_interrupts(struct radeon_device *rdev)
5641 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5642 u32 ih_cntl = RREG32(IH_CNTL);
5644 ih_rb_cntl &= ~IH_RB_ENABLE;
5645 ih_cntl &= ~ENABLE_INTR;
5646 WREG32(IH_RB_CNTL, ih_rb_cntl);
5647 WREG32(IH_CNTL, ih_cntl);
5648 /* set rptr, wptr to 0 */
5649 WREG32(IH_RB_RPTR, 0);
5650 WREG32(IH_RB_WPTR, 0);
5651 rdev->ih.enabled = false;
/* Force every interrupt source to a disabled state: CP rings, both DMA
 * engines, GRBM, per-CRTC vblank and pageflip masks (scaled by
 * num_crtc), and — on ASICs with display — DAC autodetect and the six
 * hotplug lines (preserving only each line's polarity bit).
 */
5655 static void si_disable_interrupt_state(struct radeon_device *rdev)
5659 tmp = RREG32(CP_INT_CNTL_RING0) &
5660 (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
5661 WREG32(CP_INT_CNTL_RING0, tmp);
5662 WREG32(CP_INT_CNTL_RING1, 0);
5663 WREG32(CP_INT_CNTL_RING2, 0);
5664 tmp = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5665 WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, tmp);
5666 tmp = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5667 WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, tmp);
5668 WREG32(GRBM_INT_CNTL, 0);
5669 if (rdev->num_crtc >= 2) {
5670 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5671 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5673 if (rdev->num_crtc >= 4) {
5674 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5675 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5677 if (rdev->num_crtc >= 6) {
5678 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5679 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5682 if (rdev->num_crtc >= 2) {
5683 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5684 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5686 if (rdev->num_crtc >= 4) {
5687 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5688 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5690 if (rdev->num_crtc >= 6) {
5691 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5692 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5695 if (!ASIC_IS_NODCE(rdev)) {
5696 WREG32(DAC_AUTODETECT_INT_CONTROL, 0);
5698 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5699 WREG32(DC_HPD1_INT_CONTROL, tmp);
5700 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5701 WREG32(DC_HPD2_INT_CONTROL, tmp);
5702 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5703 WREG32(DC_HPD3_INT_CONTROL, tmp);
5704 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5705 WREG32(DC_HPD4_INT_CONTROL, tmp);
5706 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5707 WREG32(DC_HPD5_INT_CONTROL, tmp);
5708 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5709 WREG32(DC_HPD6_INT_CONTROL, tmp);
/* Initialize the interrupt handler: allocate the IH ring, resume the
 * RLC (freeing the ring on failure), configure INTERRUPT_CNTL and the
 * IH ring buffer (base, size, optional writeback, rptr/wptr reset),
 * program IH_CNTL (with RPTR_REARM only when MSI is in use), force all
 * sources disabled, enable bus mastering, and finally turn interrupts
 * on.
 */
5713 static int si_irq_init(struct radeon_device *rdev)
5717 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
5720 ret = r600_ih_ring_alloc(rdev);
5725 si_disable_interrupts(rdev);
5728 ret = si_rlc_resume(rdev);
5730 r600_ih_ring_fini(rdev);
5734 /* setup interrupt control */
5735 /* set dummy read address to ring address */
5736 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
5737 interrupt_cntl = RREG32(INTERRUPT_CNTL);
5738 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
5739 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
5741 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
5742 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
5743 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
5744 WREG32(INTERRUPT_CNTL, interrupt_cntl);
5746 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
5747 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
5749 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
5750 IH_WPTR_OVERFLOW_CLEAR |
5753 if (rdev->wb.enabled)
5754 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
5756 /* set the writeback address whether it's enabled or not */
5757 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
5758 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
5760 WREG32(IH_RB_CNTL, ih_rb_cntl);
5762 /* set rptr, wptr to 0 */
5763 WREG32(IH_RB_RPTR, 0);
5764 WREG32(IH_RB_WPTR, 0);
5766 /* Default settings for IH_CNTL (disabled at first) */
5767 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
5768 /* RPTR_REARM only works if msi's are enabled */
5769 if (rdev->msi_enabled)
5770 ih_cntl |= RPTR_REARM;
5771 WREG32(IH_CNTL, ih_cntl);
5773 /* force the active interrupt state to all disabled */
5774 si_disable_interrupt_state(rdev);
5776 pci_set_master(rdev->pdev);
5779 si_enable_interrupts(rdev);
/* Program the full interrupt mask set from rdev->irq state: CP rings 0
 * through 2, both DMA engines, per-CRTC vblank/pageflip, the six hotplug
 * lines, and the thermal interrupt. Refuses to run when no handler is
 * installed; with the IH disabled it forces everything off instead.
 */
5784 int si_irq_set(struct radeon_device *rdev)
5787 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
5788 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
5789 u32 hpd1 = 0, hpd2 = 0, hpd3 = 0, hpd4 = 0, hpd5 = 0, hpd6 = 0;
5790 u32 grbm_int_cntl = 0;
5791 u32 dma_cntl, dma_cntl1;
5792 u32 thermal_int = 0;
5794 if (!rdev->irq.installed) {
5795 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
5798 /* don't enable anything if the ih is disabled */
5799 if (!rdev->ih.enabled) {
5800 si_disable_interrupts(rdev);
5801 /* force the active interrupt state to all disabled */
5802 si_disable_interrupt_state(rdev);
/* start from current hw values with the enable bits cleared */
5806 cp_int_cntl = RREG32(CP_INT_CNTL_RING0) &
5807 (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
5809 if (!ASIC_IS_NODCE(rdev)) {
5810 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
5811 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
5812 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
5813 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
5814 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
5815 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
5818 dma_cntl = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5819 dma_cntl1 = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5821 thermal_int = RREG32(CG_THERMAL_INT) &
5822 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
5824 /* enable CP interrupts on all rings */
5825 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
5826 DRM_DEBUG("si_irq_set: sw int gfx\n");
5827 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
5829 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
5830 DRM_DEBUG("si_irq_set: sw int cp1\n");
5831 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
5833 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
5834 DRM_DEBUG("si_irq_set: sw int cp2\n");
5835 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
5837 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
5838 DRM_DEBUG("si_irq_set: sw int dma\n");
5839 dma_cntl |= TRAP_ENABLE;
5842 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
5843 DRM_DEBUG("si_irq_set: sw int dma1\n");
5844 dma_cntl1 |= TRAP_ENABLE;
5846 if (rdev->irq.crtc_vblank_int[0] ||
5847 atomic_read(&rdev->irq.pflip[0])) {
5848 DRM_DEBUG("si_irq_set: vblank 0\n");
5849 crtc1 |= VBLANK_INT_MASK;
5851 if (rdev->irq.crtc_vblank_int[1] ||
5852 atomic_read(&rdev->irq.pflip[1])) {
5853 DRM_DEBUG("si_irq_set: vblank 1\n");
5854 crtc2 |= VBLANK_INT_MASK;
5856 if (rdev->irq.crtc_vblank_int[2] ||
5857 atomic_read(&rdev->irq.pflip[2])) {
5858 DRM_DEBUG("si_irq_set: vblank 2\n");
5859 crtc3 |= VBLANK_INT_MASK;
5861 if (rdev->irq.crtc_vblank_int[3] ||
5862 atomic_read(&rdev->irq.pflip[3])) {
5863 DRM_DEBUG("si_irq_set: vblank 3\n");
5864 crtc4 |= VBLANK_INT_MASK;
5866 if (rdev->irq.crtc_vblank_int[4] ||
5867 atomic_read(&rdev->irq.pflip[4])) {
5868 DRM_DEBUG("si_irq_set: vblank 4\n");
5869 crtc5 |= VBLANK_INT_MASK;
5871 if (rdev->irq.crtc_vblank_int[5] ||
5872 atomic_read(&rdev->irq.pflip[5])) {
5873 DRM_DEBUG("si_irq_set: vblank 5\n");
5874 crtc6 |= VBLANK_INT_MASK;
5876 if (rdev->irq.hpd[0]) {
5877 DRM_DEBUG("si_irq_set: hpd 1\n");
5878 hpd1 |= DC_HPDx_INT_EN;
5880 if (rdev->irq.hpd[1]) {
5881 DRM_DEBUG("si_irq_set: hpd 2\n");
5882 hpd2 |= DC_HPDx_INT_EN;
5884 if (rdev->irq.hpd[2]) {
5885 DRM_DEBUG("si_irq_set: hpd 3\n");
5886 hpd3 |= DC_HPDx_INT_EN;
5888 if (rdev->irq.hpd[3]) {
5889 DRM_DEBUG("si_irq_set: hpd 4\n");
5890 hpd4 |= DC_HPDx_INT_EN;
5892 if (rdev->irq.hpd[4]) {
5893 DRM_DEBUG("si_irq_set: hpd 5\n");
5894 hpd5 |= DC_HPDx_INT_EN;
5896 if (rdev->irq.hpd[5]) {
5897 DRM_DEBUG("si_irq_set: hpd 6\n");
5898 hpd6 |= DC_HPDx_INT_EN;
/* commit all the accumulated masks to the hardware */
5901 WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
5902 WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
5903 WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);
5905 WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, dma_cntl);
5906 WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, dma_cntl1);
5908 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
5910 if (rdev->irq.dpm_thermal) {
5911 DRM_DEBUG("dpm thermal\n");
5912 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
5915 if (rdev->num_crtc >= 2) {
5916 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
5917 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
5919 if (rdev->num_crtc >= 4) {
5920 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
5921 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
5923 if (rdev->num_crtc >= 6) {
5924 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
5925 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
5928 if (rdev->num_crtc >= 2) {
5929 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
5930 GRPH_PFLIP_INT_MASK);
5931 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
5932 GRPH_PFLIP_INT_MASK);
5934 if (rdev->num_crtc >= 4) {
5935 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
5936 GRPH_PFLIP_INT_MASK);
5937 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
5938 GRPH_PFLIP_INT_MASK);
5940 if (rdev->num_crtc >= 6) {
5941 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
5942 GRPH_PFLIP_INT_MASK);
5943 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
5944 GRPH_PFLIP_INT_MASK);
5947 if (!ASIC_IS_NODCE(rdev)) {
5948 WREG32(DC_HPD1_INT_CONTROL, hpd1);
5949 WREG32(DC_HPD2_INT_CONTROL, hpd2);
5950 WREG32(DC_HPD3_INT_CONTROL, hpd3);
5951 WREG32(DC_HPD4_INT_CONTROL, hpd4);
5952 WREG32(DC_HPD5_INT_CONTROL, hpd5);
5953 WREG32(DC_HPD6_INT_CONTROL, hpd6);
5956 WREG32(CG_THERMAL_INT, thermal_int);
5961 static inline void si_irq_ack(struct radeon_device *rdev)
5965 if (ASIC_IS_NODCE(rdev))
5968 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
5969 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
5970 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
5971 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
5972 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
5973 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
5974 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
5975 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
5976 if (rdev->num_crtc >= 4) {
5977 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
5978 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
5980 if (rdev->num_crtc >= 6) {
5981 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
5982 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
5985 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
5986 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5987 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
5988 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5989 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
5990 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
5991 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
5992 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
5993 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
5994 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
5995 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
5996 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
5998 if (rdev->num_crtc >= 4) {
5999 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
6000 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6001 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
6002 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6003 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
6004 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
6005 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
6006 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
6007 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
6008 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
6009 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
6010 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
6013 if (rdev->num_crtc >= 6) {
6014 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
6015 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6016 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
6017 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6018 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
6019 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
6020 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
6021 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
6022 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
6023 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
6024 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
6025 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
6028 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6029 tmp = RREG32(DC_HPD1_INT_CONTROL);
6030 tmp |= DC_HPDx_INT_ACK;
6031 WREG32(DC_HPD1_INT_CONTROL, tmp);
6033 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6034 tmp = RREG32(DC_HPD2_INT_CONTROL);
6035 tmp |= DC_HPDx_INT_ACK;
6036 WREG32(DC_HPD2_INT_CONTROL, tmp);
6038 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6039 tmp = RREG32(DC_HPD3_INT_CONTROL);
6040 tmp |= DC_HPDx_INT_ACK;
6041 WREG32(DC_HPD3_INT_CONTROL, tmp);
6043 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6044 tmp = RREG32(DC_HPD4_INT_CONTROL);
6045 tmp |= DC_HPDx_INT_ACK;
6046 WREG32(DC_HPD4_INT_CONTROL, tmp);
6048 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6049 tmp = RREG32(DC_HPD5_INT_CONTROL);
6050 tmp |= DC_HPDx_INT_ACK;
6051 WREG32(DC_HPD5_INT_CONTROL, tmp);
6053 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6054 tmp = RREG32(DC_HPD5_INT_CONTROL);
6055 tmp |= DC_HPDx_INT_ACK;
6056 WREG32(DC_HPD6_INT_CONTROL, tmp);
/**
 * si_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Mask all interrupt sources, give the controller a moment to settle,
 * acknowledge anything still pending, then clear the per-source enable
 * state (SI).  The existing comment promised "Wait and acknowledge"
 * but the delay and ack calls were missing between the two steps.
 */
static void si_irq_disable(struct radeon_device *rdev)
{
	si_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	si_irq_ack(rdev);
	si_disable_interrupt_state(rdev);
}
/* si_irq_suspend - quiesce the interrupt controller for suspend:
 * masks and acks everything via si_irq_disable() (SI). */
6069 static void si_irq_suspend(struct radeon_device *rdev)
6071 si_irq_disable(rdev);
/* si_irq_fini - tear down interrupt handling: quiesce the controller,
 * then free the IH ring buffer (SI). */
6075 static void si_irq_fini(struct radeon_device *rdev)
6077 si_irq_suspend(rdev);
6078 r600_ih_ring_fini(rdev);
6081 static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
6085 if (rdev->wb.enabled)
6086 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
6088 wptr = RREG32(IH_RB_WPTR);
6090 if (wptr & RB_OVERFLOW) {
6091 /* When a ring buffer overflow happen start parsing interrupt
6092 * from the last not overwritten vector (wptr + 16). Hopefully
6093 * this should allow us to catchup.
6095 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
6096 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
6097 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
6098 tmp = RREG32(IH_RB_CNTL);
6099 tmp |= IH_WPTR_OVERFLOW_CLEAR;
6100 WREG32(IH_RB_CNTL, tmp);
6101 wptr &= ~RB_OVERFLOW;
6103 return (wptr & rdev->ih.ptr_mask);
6107  * Each IV ring entry is 128 bits:
6108  * [7:0]    - interrupt source id
 * [31:8]   - reserved
6110  * [59:32]  - interrupt source data
6111  * [63:60]  - reserved
 * [71:64]  - RINGID
 * [79:72]  - VMID
6114  * [127:80] - reserved
/* si_irq_process - drain the IH ring and dispatch every pending vector:
 * D1-D6 vblank/vline, pageflip, HPD hotplug, VM protection fault,
 * CP/DMA ring fences and thermal events (SI).
 * NOTE(review): partial listing - interior lines are elided here. */
6116 int si_irq_process(struct radeon_device *rdev)
6120 u32 src_id, src_data, ring_id;
6122 bool queue_hotplug = false;
6123 bool queue_thermal = false;
6126 if (!rdev->ih.enabled || rdev->shutdown)
6129 wptr = si_get_ih_wptr(rdev);
6132 /* is somebody else already processing irqs? */
6133 if (atomic_xchg(&rdev->ih.lock, 1))
6136 rptr = rdev->ih.rptr;
6137 DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
6139 /* Order reading of wptr vs. reading of IH ring data */
6142 /* display interrupts */
/* Each 128-bit IH vector occupies 16 bytes; decode id/data/ring_id
 * from the first three dwords. */
6145 while (rptr != wptr) {
6146 /* wptr/rptr are in bytes! */
6147 ring_index = rptr / 4;
6148 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
6149 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
6150 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
6153 case 1: /* D1 vblank/vline */
6155 case 0: /* D1 vblank */
6156 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
6157 if (rdev->irq.crtc_vblank_int[0]) {
6158 drm_handle_vblank(rdev->ddev, 0);
6159 rdev->pm.vblank_sync = true;
6160 wake_up(&rdev->irq.vblank_queue);
6162 if (atomic_read(&rdev->irq.pflip[0]))
6163 radeon_crtc_handle_flip(rdev, 0);
6164 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
6165 DRM_DEBUG("IH: D1 vblank\n");
6168 case 1: /* D1 vline */
6169 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
6170 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
6171 DRM_DEBUG("IH: D1 vline\n");
6175 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6179 case 2: /* D2 vblank/vline */
6181 case 0: /* D2 vblank */
6182 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
6183 if (rdev->irq.crtc_vblank_int[1]) {
6184 drm_handle_vblank(rdev->ddev, 1);
6185 rdev->pm.vblank_sync = true;
6186 wake_up(&rdev->irq.vblank_queue);
6188 if (atomic_read(&rdev->irq.pflip[1]))
6189 radeon_crtc_handle_flip(rdev, 1);
6190 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
6191 DRM_DEBUG("IH: D2 vblank\n");
6194 case 1: /* D2 vline */
6195 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
6196 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
6197 DRM_DEBUG("IH: D2 vline\n");
6201 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6205 case 3: /* D3 vblank/vline */
6207 case 0: /* D3 vblank */
6208 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
6209 if (rdev->irq.crtc_vblank_int[2]) {
6210 drm_handle_vblank(rdev->ddev, 2);
6211 rdev->pm.vblank_sync = true;
6212 wake_up(&rdev->irq.vblank_queue);
6214 if (atomic_read(&rdev->irq.pflip[2]))
6215 radeon_crtc_handle_flip(rdev, 2);
6216 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
6217 DRM_DEBUG("IH: D3 vblank\n");
6220 case 1: /* D3 vline */
6221 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
6222 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
6223 DRM_DEBUG("IH: D3 vline\n");
6227 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6231 case 4: /* D4 vblank/vline */
6233 case 0: /* D4 vblank */
6234 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
6235 if (rdev->irq.crtc_vblank_int[3]) {
6236 drm_handle_vblank(rdev->ddev, 3);
6237 rdev->pm.vblank_sync = true;
6238 wake_up(&rdev->irq.vblank_queue);
6240 if (atomic_read(&rdev->irq.pflip[3]))
6241 radeon_crtc_handle_flip(rdev, 3);
6242 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
6243 DRM_DEBUG("IH: D4 vblank\n");
6246 case 1: /* D4 vline */
6247 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
6248 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
6249 DRM_DEBUG("IH: D4 vline\n");
6253 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6257 case 5: /* D5 vblank/vline */
6259 case 0: /* D5 vblank */
6260 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
6261 if (rdev->irq.crtc_vblank_int[4]) {
6262 drm_handle_vblank(rdev->ddev, 4);
6263 rdev->pm.vblank_sync = true;
6264 wake_up(&rdev->irq.vblank_queue);
6266 if (atomic_read(&rdev->irq.pflip[4]))
6267 radeon_crtc_handle_flip(rdev, 4);
6268 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
6269 DRM_DEBUG("IH: D5 vblank\n");
6272 case 1: /* D5 vline */
6273 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
6274 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
6275 DRM_DEBUG("IH: D5 vline\n");
6279 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6283 case 6: /* D6 vblank/vline */
6285 case 0: /* D6 vblank */
6286 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
6287 if (rdev->irq.crtc_vblank_int[5]) {
6288 drm_handle_vblank(rdev->ddev, 5);
6289 rdev->pm.vblank_sync = true;
6290 wake_up(&rdev->irq.vblank_queue);
6292 if (atomic_read(&rdev->irq.pflip[5]))
6293 radeon_crtc_handle_flip(rdev, 5);
6294 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
6295 DRM_DEBUG("IH: D6 vblank\n");
6298 case 1: /* D6 vline */
6299 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
6300 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
6301 DRM_DEBUG("IH: D6 vline\n");
6305 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* Pageflip source ids are even, 8..18: crtc = (src_id - 8) >> 1 */
6309 case 8: /* D1 page flip */
6310 case 10: /* D2 page flip */
6311 case 12: /* D3 page flip */
6312 case 14: /* D4 page flip */
6313 case 16: /* D5 page flip */
6314 case 18: /* D6 page flip */
6315 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
6316 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
6318 case 42: /* HPD hotplug */
6321 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6322 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
6323 queue_hotplug = true;
6324 DRM_DEBUG("IH: HPD1\n");
6328 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6329 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
6330 queue_hotplug = true;
6331 DRM_DEBUG("IH: HPD2\n");
6335 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6336 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
6337 queue_hotplug = true;
6338 DRM_DEBUG("IH: HPD3\n");
6342 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6343 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
6344 queue_hotplug = true;
6345 DRM_DEBUG("IH: HPD4\n");
6349 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6350 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
6351 queue_hotplug = true;
6352 DRM_DEBUG("IH: HPD5\n");
6356 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6357 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
6358 queue_hotplug = true;
6359 DRM_DEBUG("IH: HPD6\n");
6363 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6368 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
6369 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
/* VM protection fault: dump fault address/status, decode, then
 * clear via VM_CONTEXT1_CNTL2 so the next fault is captured. */
6373 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
6374 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
6375 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
6376 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
6378 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
6380 si_vm_decode_fault(rdev, status, addr);
6381 /* reset addr and status */
6382 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
6384 case 176: /* RINGID0 CP_INT */
6385 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6387 case 177: /* RINGID1 CP_INT */
6388 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6390 case 178: /* RINGID2 CP_INT */
6391 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6393 case 181: /* CP EOP event */
6394 DRM_DEBUG("IH: CP EOP\n");
6397 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6400 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6403 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6407 case 224: /* DMA trap event */
6408 DRM_DEBUG("IH: DMA trap\n");
6409 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
6411 case 230: /* thermal low to high */
6412 DRM_DEBUG("IH: thermal low to high\n");
6413 rdev->pm.dpm.thermal.high_to_low = false;
6414 queue_thermal = true;
6416 case 231: /* thermal high to low */
6417 DRM_DEBUG("IH: thermal high to low\n");
6418 rdev->pm.dpm.thermal.high_to_low = true;
6419 queue_thermal = true;
6421 case 233: /* GUI IDLE */
6422 DRM_DEBUG("IH: GUI idle\n");
6424 case 244: /* DMA trap event */
6425 DRM_DEBUG("IH: DMA1 trap\n");
6426 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6429 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6433 /* wptr/rptr are in bytes! */
6435 rptr &= rdev->ih.ptr_mask;
/* deferred work: hotplug and thermal are handled in process context */
6438 schedule_work(&rdev->hotplug_work);
6439 if (queue_thermal && rdev->pm.dpm_enabled)
6440 schedule_work(&rdev->pm.dpm.thermal.work);
6441 rdev->ih.rptr = rptr;
6442 WREG32(IH_RB_RPTR, rdev->ih.rptr);
6443 atomic_set(&rdev->ih.lock, 0);
6445 /* make sure wptr hasn't changed while processing */
6446 wptr = si_get_ih_wptr(rdev);
6454 * startup/shutdown callbacks
/* si_startup - bring the hw up: PCIe link, MC, GART, RLC, writeback,
 * fence rings (GFX/CP1/CP2/DMA0/DMA1/UVD), IRQs, CP/DMA/UVD resume,
 * IB pool and VM manager, audio.  Returns 0 on success, negative
 * errno on failure (SI).
 * NOTE(review): partial listing - error-handling lines are elided. */
6456 static int si_startup(struct radeon_device *rdev)
6458 struct radeon_ring *ring;
6461 /* enable pcie gen2/3 link */
6462 si_pcie_gen3_enable(rdev);
6464 si_program_aspm(rdev);
6466 /* scratch needs to be initialized before MC */
6467 r = r600_vram_scratch_init(rdev);
6471 si_mc_program(rdev);
/* With dpm enabled the MC ucode was already loaded by the dpm code */
6473 if (!rdev->pm.dpm_enabled) {
6474 r = si_mc_load_microcode(rdev);
6476 DRM_ERROR("Failed to load MC firmware!\n");
6481 r = si_pcie_gart_enable(rdev);
6486 /* allocate rlc buffers */
6487 if (rdev->family == CHIP_VERDE) {
6488 rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
6489 rdev->rlc.reg_list_size =
6490 (u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
6492 rdev->rlc.cs_data = si_cs_data;
6493 r = sumo_rlc_init(rdev);
6495 DRM_ERROR("Failed to init rlc BOs!\n");
6499 /* allocate wb buffer */
6500 r = radeon_wb_init(rdev);
6504 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
6506 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6510 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6512 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6516 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6518 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6522 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
6524 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6528 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6530 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6534 if (rdev->has_uvd) {
6535 r = uvd_v2_2_resume(rdev);
6537 r = radeon_fence_driver_start_ring(rdev,
6538 R600_RING_TYPE_UVD_INDEX);
6540 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
/* UVD failure is non-fatal: just disable its ring */
6543 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
6547 if (!rdev->irq.installed) {
6548 r = radeon_irq_kms_init(rdev);
6553 r = si_irq_init(rdev);
6555 DRM_ERROR("radeon: IH init failed (%d).\n", r);
6556 radeon_irq_kms_fini(rdev);
6561 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6562 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
6567 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6568 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
6573 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6574 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
6579 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6580 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
6581 DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6585 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6586 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
6587 DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6591 r = si_cp_load_microcode(rdev);
6594 r = si_cp_resume(rdev);
6598 r = cayman_dma_resume(rdev);
6602 if (rdev->has_uvd) {
6603 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6604 if (ring->ring_size) {
6605 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
6608 r = uvd_v1_0_init(rdev);
6610 DRM_ERROR("radeon: failed initializing UVD (%d).\n", r);
6614 r = radeon_ib_pool_init(rdev);
6616 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
6620 r = radeon_vm_manager_init(rdev);
6622 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
6626 r = dce6_audio_init(rdev);
/* si_resume - resume from suspend: re-post the card via atombios,
 * restore golden registers, resume PM, then run the normal startup
 * path.  Clears accel_working if startup fails (SI). */
6633 int si_resume(struct radeon_device *rdev)
6637 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
6638 * posting will perform necessary task to bring back GPU into good
6642 atom_asic_init(rdev->mode_info.atom_context);
6644 /* init golden registers */
6645 si_init_golden_registers(rdev);
6647 if (rdev->pm.pm_method == PM_METHOD_DPM)
6648 radeon_pm_resume(rdev);
6650 rdev->accel_working = true;
6651 r = si_startup(rdev);
6653 DRM_ERROR("si startup failed on resume\n");
6654 rdev->accel_working = false;
/* si_suspend - quiesce the hw for suspend: stop PM/audio/VM, halt the
 * CP and DMA engines, suspend UVD, disable IRQs, writeback and GART.
 * Teardown order is the reverse of si_startup() (SI). */
6662 int si_suspend(struct radeon_device *rdev)
6664 radeon_pm_suspend(rdev);
6665 dce6_audio_fini(rdev);
6666 radeon_vm_manager_fini(rdev);
6667 si_cp_enable(rdev, false);
6668 cayman_dma_stop(rdev);
6669 if (rdev->has_uvd) {
6670 uvd_v1_0_fini(rdev);
6671 radeon_uvd_suspend(rdev);
6675 si_irq_suspend(rdev);
6676 radeon_wb_disable(rdev);
6677 si_pcie_gart_disable(rdev);
6681 /* Plan is to move initialization in that function and use
6682 * helper function so that radeon_device_init pretty much
6683 * do nothing more than calling asic specific function. This
6684 * should also allow to remove a bunch of callback function
/* si_init - one-time driver init: read/validate the BIOS, post the
 * card if needed, init clocks/fences/MC/BO manager, load microcode,
 * size all rings, then run si_startup().  On startup failure the
 * accel paths are torn down but the driver still loads (SI).
 * NOTE(review): partial listing - error-return lines are elided. */
6687 int si_init(struct radeon_device *rdev)
6689 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6693 if (!radeon_get_bios(rdev)) {
6694 if (ASIC_IS_AVIVO(rdev))
6697 /* Must be an ATOMBIOS */
6698 if (!rdev->is_atom_bios) {
6699 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
6702 r = radeon_atombios_init(rdev);
6706 /* Post card if necessary */
6707 if (!radeon_card_posted(rdev)) {
6709 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
6712 DRM_INFO("GPU not posted. posting now...\n");
6713 atom_asic_init(rdev->mode_info.atom_context);
6715 /* init golden registers */
6716 si_init_golden_registers(rdev);
6717 /* Initialize scratch registers */
6718 si_scratch_init(rdev);
6719 /* Initialize surface registers */
6720 radeon_surface_init(rdev);
6721 /* Initialize clocks */
6722 radeon_get_clock_info(rdev->ddev);
6725 r = radeon_fence_driver_init(rdev);
6729 /* initialize memory controller */
6730 r = si_mc_init(rdev);
6733 /* Memory manager */
6734 r = radeon_bo_init(rdev);
/* load ucode only if any of the five firmware images is missing */
6738 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
6739 !rdev->rlc_fw || !rdev->mc_fw) {
6740 r = si_init_microcode(rdev);
6742 DRM_ERROR("Failed to load firmware!\n");
6747 /* Initialize power management */
6748 radeon_pm_init(rdev);
/* size the rings: 1MB for the three CP rings, 64KB per DMA ring */
6750 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6751 ring->ring_obj = NULL;
6752 r600_ring_init(rdev, ring, 1024 * 1024);
6754 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6755 ring->ring_obj = NULL;
6756 r600_ring_init(rdev, ring, 1024 * 1024);
6758 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6759 ring->ring_obj = NULL;
6760 r600_ring_init(rdev, ring, 1024 * 1024);
6762 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6763 ring->ring_obj = NULL;
6764 r600_ring_init(rdev, ring, 64 * 1024);
6766 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6767 ring->ring_obj = NULL;
6768 r600_ring_init(rdev, ring, 64 * 1024);
6770 if (rdev->has_uvd) {
6771 r = radeon_uvd_init(rdev);
6773 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6774 ring->ring_obj = NULL;
6775 r600_ring_init(rdev, ring, 4096);
6779 rdev->ih.ring_obj = NULL;
6780 r600_ih_ring_init(rdev, 64 * 1024);
6782 r = r600_pcie_gart_init(rdev);
6786 rdev->accel_working = true;
6787 r = si_startup(rdev);
6789 dev_err(rdev->dev, "disabling GPU acceleration\n");
6791 cayman_dma_fini(rdev);
6793 sumo_rlc_fini(rdev);
6794 radeon_wb_fini(rdev);
6795 radeon_ib_pool_fini(rdev);
6796 radeon_vm_manager_fini(rdev);
6797 radeon_irq_kms_fini(rdev);
6798 si_pcie_gart_fini(rdev);
6799 rdev->accel_working = false;
6802 /* Don't start up if the MC ucode is missing.
6803 * The default clocks and voltages before the MC ucode
6804 * is loaded are not sufficient for advanced operations.
6807 DRM_ERROR("radeon: MC ucode required for NI+.\n");
/* si_fini - full driver teardown: stop PM and engines, free RLC,
 * writeback, VM/IB/IRQ state, UVD, GART, scratch, GEM, fences, BO
 * manager and atombios - roughly reverse of si_init() (SI). */
6814 void si_fini(struct radeon_device *rdev)
6816 radeon_pm_fini(rdev);
6818 cayman_dma_fini(rdev);
6822 sumo_rlc_fini(rdev);
6823 radeon_wb_fini(rdev);
6824 radeon_vm_manager_fini(rdev);
6825 radeon_ib_pool_fini(rdev);
6826 radeon_irq_kms_fini(rdev);
6827 if (rdev->has_uvd) {
6828 uvd_v1_0_fini(rdev);
6829 radeon_uvd_fini(rdev);
6831 si_pcie_gart_fini(rdev);
6832 r600_vram_scratch_fini(rdev);
6833 radeon_gem_fini(rdev);
6834 radeon_fence_driver_fini(rdev);
6835 radeon_bo_fini(rdev);
6836 radeon_atombios_fini(rdev);
6842 * si_get_gpu_clock_counter - return GPU clock counter snapshot
6844 * @rdev: radeon_device pointer
6846 * Fetches a GPU clock counter snapshot (SI).
6847 * Returns the 64 bit clock counter snapshot.
6849 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
6853 mutex_lock(&rdev->gpu_clock_mutex);
6854 WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
6855 clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
6856 ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
6857 mutex_unlock(&rdev->gpu_clock_mutex);
/* si_set_uvd_clocks - program the UPLL for the requested UVD vclk/dclk
 * (both in 10 kHz units).  Sequence: bypass clocks to bclk, put the
 * PLL in bypass, compute dividers, program them, bring the PLL out of
 * reset and switch the clock muxes back.  vclk==0 or dclk==0 leaves
 * the PLL asleep in bypass.  Returns 0 on success, negative errno on
 * failure.  The register write ORDER here is hardware-mandated - do
 * not reorder.
 * NOTE(review): partial listing - error returns/delays are elided. */
6861 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
6863 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
6866 /* bypass vclk and dclk with bclk */
6867 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6868 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
6869 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
6871 /* put PLL in bypass mode */
6872 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
6874 if (!vclk || !dclk) {
6875 /* keep the Bypass mode, put PLL to sleep */
6876 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
6880 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
6881 16384, 0x03FFFFFF, 0, 128, 5,
6882 &fb_div, &vclk_div, &dclk_div);
6886 /* set RESET_ANTI_MUX to 0 */
6887 WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);
6889 /* set VCO_MODE to 1 */
6890 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
6892 /* toggle UPLL_SLEEP to 1 then back to 0 */
6893 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
6894 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
6896 /* deassert UPLL_RESET */
6897 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
6901 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6905 /* assert UPLL_RESET again */
6906 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
6908 /* disable spread spectrum. */
6909 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
6911 /* set feedback divider */
6912 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
6914 /* set ref divider to 0 */
6915 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
6917 if (fb_div < 307200)
6918 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
6920 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
6922 /* set PDIV_A and PDIV_B */
6923 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6924 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
6925 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
6927 /* give the PLL some time to settle */
6930 /* deassert PLL_RESET */
6931 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
6935 /* switch from bypass mode to normal mode */
6936 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
6938 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6942 /* switch VCLK and DCLK selection */
6943 WREG32_P(CG_UPLL_FUNC_CNTL_2,
6944 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
6945 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/* si_pcie_gen3_enable - try to train the PCIe link up to gen2/gen3.
 * Skipped for IGPs, non-PCIe parts, or when radeon.pcie_gen2=0.
 * For gen3 a redo of link equalization is required: save bridge and
 * GPU LNKCTL/LNKCTL2, force HAWD, quiesce, retrain, then restore.
 * Finally the target link speed is set in the GPU's LNKCTL2 and a
 * speed change is initiated via PCIE_LC_SPEED_CNTL.
 * NOTE(review): partial listing - early-return/retrain lines are
 * elided. */
6952 static void si_pcie_gen3_enable(struct radeon_device *rdev)
6954 struct pci_dev *root = rdev->pdev->bus->self;
6955 int bridge_pos, gpu_pos;
6956 u32 speed_cntl, mask, current_data_rate;
6960 if (radeon_pcie_gen2 == 0)
6963 if (rdev->flags & RADEON_IS_IGP)
6966 if (!(rdev->flags & RADEON_IS_PCIE))
6969 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
6973 if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
6976 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6977 current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
6978 LC_CURRENT_DATA_RATE_SHIFT;
/* current_data_rate: 1 = gen2, 2 = gen3 already active */
6979 if (mask & DRM_PCIE_SPEED_80) {
6980 if (current_data_rate == 2) {
6981 DRM_INFO("PCIE gen 3 link speeds already enabled\n");
6984 DRM_INFO("enabling PCIE gen 3 link speeds, disable with radeon.pcie_gen2=0\n");
6985 } else if (mask & DRM_PCIE_SPEED_50) {
6986 if (current_data_rate == 1) {
6987 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
6990 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
6993 bridge_pos = pci_pcie_cap(root);
6997 gpu_pos = pci_pcie_cap(rdev->pdev);
7001 if (mask & DRM_PCIE_SPEED_80) {
7002 /* re-try equalization if gen3 is not already enabled */
7003 if (current_data_rate != 2) {
7004 u16 bridge_cfg, gpu_cfg;
7005 u16 bridge_cfg2, gpu_cfg2;
7006 u32 max_lw, current_lw, tmp;
/* save link control state on both ends and force HAWD
 * (hardware autonomous width disable) during retraining */
7008 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
7009 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
7011 tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
7012 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
7014 tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
7015 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
7017 tmp = RREG32_PCIE(PCIE_LC_STATUS1);
7018 max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
7019 current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;
7021 if (current_lw < max_lw) {
7022 tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
7023 if (tmp & LC_RENEGOTIATION_SUPPORT) {
7024 tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
7025 tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
7026 tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
7027 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
/* wait for pending transactions to drain before quiescing */
7031 for (i = 0; i < 10; i++) {
7033 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16);
7034 if (tmp16 & PCI_EXP_DEVSTA_TRPND)
7037 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
7038 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
7040 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &bridge_cfg2);
7041 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2);
7043 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7044 tmp |= LC_SET_QUIESCE;
7045 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
7047 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7049 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
/* restore the saved HAWD bits on both link partners */
7054 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &tmp16);
7055 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
7056 tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
7057 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
7059 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16);
7060 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
7061 tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
7062 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
/* restore LNKCTL2: bit 4 = enter compliance, bits 11:9 =
 * transmit margin */
7065 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &tmp16);
7066 tmp16 &= ~((1 << 4) | (7 << 9));
7067 tmp16 |= (bridge_cfg2 & ((1 << 4) | (7 << 9)));
7068 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, tmp16);
7070 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
7071 tmp16 &= ~((1 << 4) | (7 << 9));
7072 tmp16 |= (gpu_cfg2 & ((1 << 4) | (7 << 9)));
7073 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
7075 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
7076 tmp &= ~LC_SET_QUIESCE;
7077 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
7082 /* set the link speed */
7083 speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
7084 speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
7085 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* program the target link speed field of LNKCTL2 (1/2/3 =
 * gen1/gen2/gen3) */
7087 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
7089 if (mask & DRM_PCIE_SPEED_80)
7090 tmp16 |= 3; /* gen3 */
7091 else if (mask & DRM_PCIE_SPEED_50)
7092 tmp16 |= 2; /* gen2 */
7094 tmp16 |= 1; /* gen1 */
7095 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
7097 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
7098 speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
7099 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* poll until the hw clears the initiate bit (speed change done) */
7101 for (i = 0; i < rdev->usec_timeout; i++) {
7102 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
7103 if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
7109 static void si_program_aspm(struct radeon_device *rdev)
7112 bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
7113 bool disable_clkreq = false;
7115 if (radeon_aspm == 0)
7118 if (!(rdev->flags & RADEON_IS_PCIE))
7121 orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
7122 data &= ~LC_XMIT_N_FTS_MASK;
7123 data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
7125 WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);
7127 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
7128 data |= LC_GO_TO_RECOVERY;
7130 WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);
7132 orig = data = RREG32_PCIE(PCIE_P_CNTL);
7133 data |= P_IGNORE_EDB_ERR;
7135 WREG32_PCIE(PCIE_P_CNTL, data);
7137 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
7138 data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
7139 data |= LC_PMI_TO_L1_DIS;
7141 data |= LC_L0S_INACTIVITY(7);
7144 data |= LC_L1_INACTIVITY(7);
7145 data &= ~LC_PMI_TO_L1_DIS;
7147 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
7149 if (!disable_plloff_in_l1) {
7150 bool clk_req_support;
7152 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
7153 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
7154 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
7156 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
7158 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
7159 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
7160 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
7162 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
7164 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
7165 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
7166 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
7168 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
7170 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
7171 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
7172 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
7174 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
7176 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
7177 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
7178 data &= ~PLL_RAMP_UP_TIME_0_MASK;
7180 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
7182 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
7183 data &= ~PLL_RAMP_UP_TIME_1_MASK;
7185 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
7187 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2);
7188 data &= ~PLL_RAMP_UP_TIME_2_MASK;
7190 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2, data);
7192 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3);
7193 data &= ~PLL_RAMP_UP_TIME_3_MASK;
7195 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3, data);
7197 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
7198 data &= ~PLL_RAMP_UP_TIME_0_MASK;
7200 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
7202 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
7203 data &= ~PLL_RAMP_UP_TIME_1_MASK;
7205 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
7207 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2);
7208 data &= ~PLL_RAMP_UP_TIME_2_MASK;
7210 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2, data);
7212 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3);
7213 data &= ~PLL_RAMP_UP_TIME_3_MASK;
7215 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3, data);
7217 orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
7218 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
7219 data |= LC_DYN_LANES_PWR_STATE(3);
7221 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
7223 orig = data = RREG32_PIF_PHY0(PB0_PIF_CNTL);
7224 data &= ~LS2_EXIT_TIME_MASK;
7225 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
7226 data |= LS2_EXIT_TIME(5);
7228 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
7230 orig = data = RREG32_PIF_PHY1(PB1_PIF_CNTL);
7231 data &= ~LS2_EXIT_TIME_MASK;
7232 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
7233 data |= LS2_EXIT_TIME(5);
7235 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
7237 if (!disable_clkreq) {
7238 struct pci_dev *root = rdev->pdev->bus->self;
7241 clk_req_support = false;
7242 pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
7243 if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
7244 clk_req_support = true;
7246 clk_req_support = false;
7249 if (clk_req_support) {
7250 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
7251 data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
7253 WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);
7255 orig = data = RREG32(THM_CLK_CNTL);
7256 data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
7257 data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
7259 WREG32(THM_CLK_CNTL, data);
7261 orig = data = RREG32(MISC_CLK_CNTL);
7262 data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
7263 data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
7265 WREG32(MISC_CLK_CNTL, data);
7267 orig = data = RREG32(CG_CLKPIN_CNTL);
7268 data &= ~BCLK_AS_XCLK;
7270 WREG32(CG_CLKPIN_CNTL, data);
7272 orig = data = RREG32(CG_CLKPIN_CNTL_2);
7273 data &= ~FORCE_BIF_REFCLK_EN;
7275 WREG32(CG_CLKPIN_CNTL_2, data);
7277 orig = data = RREG32(MPLL_BYPASSCLK_SEL);
7278 data &= ~MPLL_CLKOUT_SEL_MASK;
7279 data |= MPLL_CLKOUT_SEL(4);
7281 WREG32(MPLL_BYPASSCLK_SEL, data);
7283 orig = data = RREG32(SPLL_CNTL_MODE);
7284 data &= ~SPLL_REFCLK_SEL_MASK;
7286 WREG32(SPLL_CNTL_MODE, data);
7291 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
7294 orig = data = RREG32_PCIE(PCIE_CNTL2);
7295 data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
7297 WREG32_PCIE(PCIE_CNTL2, data);
7300 data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
7301 if((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
7302 data = RREG32_PCIE(PCIE_LC_STATUS1);
7303 if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
7304 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
7305 data &= ~LC_L0S_INACTIVITY_MASK;
7307 WREG32_PCIE_PORT(PCIE_LC_CNTL, data);