2 * Copyright 2010 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
22 * Authors: Alex Deucher
25 #include <linux/firmware.h>
26 #include <linux/pci.h>
27 #include <linux/slab.h>
29 #include <drm/drm_vblank.h>
30 #include <drm/radeon_drm.h>
31 #include <drm/drm_fourcc.h>
38 #include "evergreen.h"
39 #include "evergreen_blit_shaders.h"
40 #include "evergreen_reg.h"
41 #include "evergreend.h"
43 #include "radeon_asic.h"
44 #include "radeon_audio.h"
45 #include "radeon_ucode.h"
/*
 * Helpers to index the per-pin hot-plug-detect (HPD) register blocks:
 * each HPD pad's registers sit 0xc bytes after the previous pad's.
 * Arguments are parenthesized so expressions like DC_HPDx_CONTROL(i + 1)
 * expand with the intended precedence.
 */
#define DC_HPDx_CONTROL(x)        (DC_HPD1_CONTROL     + ((x) * 0xc))
#define DC_HPDx_INT_CONTROL(x)    (DC_HPD1_INT_CONTROL + ((x) * 0xc))
#define DC_HPDx_INT_STATUS_REG(x) (DC_HPD1_INT_STATUS  + ((x) * 0xc))
/*
 * Indirect registers accessors
 */
55 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
60 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
61 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
62 r = RREG32(EVERGREEN_CG_IND_DATA);
63 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
67 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
71 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
72 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
73 WREG32(EVERGREEN_CG_IND_DATA, (v));
74 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
77 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
82 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
83 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
84 r = RREG32(EVERGREEN_PIF_PHY0_DATA);
85 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
89 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
93 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
94 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
95 WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
96 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
99 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
104 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
105 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
106 r = RREG32(EVERGREEN_PIF_PHY1_DATA);
107 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
111 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
115 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
116 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
117 WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
118 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
121 static const u32 crtc_offsets[6] =
123 EVERGREEN_CRTC0_REGISTER_OFFSET,
124 EVERGREEN_CRTC1_REGISTER_OFFSET,
125 EVERGREEN_CRTC2_REGISTER_OFFSET,
126 EVERGREEN_CRTC3_REGISTER_OFFSET,
127 EVERGREEN_CRTC4_REGISTER_OFFSET,
128 EVERGREEN_CRTC5_REGISTER_OFFSET
131 #include "clearstate_evergreen.h"
133 static const u32 sumo_rlc_save_restore_register_list[] =
218 static void evergreen_gpu_init(struct radeon_device *rdev);
219 void evergreen_fini(struct radeon_device *rdev);
220 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
221 void evergreen_program_aspm(struct radeon_device *rdev);
223 static const u32 evergreen_golden_registers[] =
225 0x3f90, 0xffff0000, 0xff000000,
226 0x9148, 0xffff0000, 0xff000000,
227 0x3f94, 0xffff0000, 0xff000000,
228 0x914c, 0xffff0000, 0xff000000,
229 0x9b7c, 0xffffffff, 0x00000000,
230 0x8a14, 0xffffffff, 0x00000007,
231 0x8b10, 0xffffffff, 0x00000000,
232 0x960c, 0xffffffff, 0x54763210,
233 0x88c4, 0xffffffff, 0x000000c2,
234 0x88d4, 0xffffffff, 0x00000010,
235 0x8974, 0xffffffff, 0x00000000,
236 0xc78, 0x00000080, 0x00000080,
237 0x5eb4, 0xffffffff, 0x00000002,
238 0x5e78, 0xffffffff, 0x001000f0,
239 0x6104, 0x01000300, 0x00000000,
240 0x5bc0, 0x00300000, 0x00000000,
241 0x7030, 0xffffffff, 0x00000011,
242 0x7c30, 0xffffffff, 0x00000011,
243 0x10830, 0xffffffff, 0x00000011,
244 0x11430, 0xffffffff, 0x00000011,
245 0x12030, 0xffffffff, 0x00000011,
246 0x12c30, 0xffffffff, 0x00000011,
247 0xd02c, 0xffffffff, 0x08421000,
248 0x240c, 0xffffffff, 0x00000380,
249 0x8b24, 0xffffffff, 0x00ff0fff,
250 0x28a4c, 0x06000000, 0x06000000,
251 0x10c, 0x00000001, 0x00000001,
252 0x8d00, 0xffffffff, 0x100e4848,
253 0x8d04, 0xffffffff, 0x00164745,
254 0x8c00, 0xffffffff, 0xe4000003,
255 0x8c04, 0xffffffff, 0x40600060,
256 0x8c08, 0xffffffff, 0x001c001c,
257 0x8cf0, 0xffffffff, 0x08e00620,
258 0x8c20, 0xffffffff, 0x00800080,
259 0x8c24, 0xffffffff, 0x00800080,
260 0x8c18, 0xffffffff, 0x20202078,
261 0x8c1c, 0xffffffff, 0x00001010,
262 0x28350, 0xffffffff, 0x00000000,
263 0xa008, 0xffffffff, 0x00010000,
264 0x5c4, 0xffffffff, 0x00000001,
265 0x9508, 0xffffffff, 0x00000002,
266 0x913c, 0x0000000f, 0x0000000a
269 static const u32 evergreen_golden_registers2[] =
271 0x2f4c, 0xffffffff, 0x00000000,
272 0x54f4, 0xffffffff, 0x00000000,
273 0x54f0, 0xffffffff, 0x00000000,
274 0x5498, 0xffffffff, 0x00000000,
275 0x549c, 0xffffffff, 0x00000000,
276 0x5494, 0xffffffff, 0x00000000,
277 0x53cc, 0xffffffff, 0x00000000,
278 0x53c8, 0xffffffff, 0x00000000,
279 0x53c4, 0xffffffff, 0x00000000,
280 0x53c0, 0xffffffff, 0x00000000,
281 0x53bc, 0xffffffff, 0x00000000,
282 0x53b8, 0xffffffff, 0x00000000,
283 0x53b4, 0xffffffff, 0x00000000,
284 0x53b0, 0xffffffff, 0x00000000
287 static const u32 cypress_mgcg_init[] =
289 0x802c, 0xffffffff, 0xc0000000,
290 0x5448, 0xffffffff, 0x00000100,
291 0x55e4, 0xffffffff, 0x00000100,
292 0x160c, 0xffffffff, 0x00000100,
293 0x5644, 0xffffffff, 0x00000100,
294 0xc164, 0xffffffff, 0x00000100,
295 0x8a18, 0xffffffff, 0x00000100,
296 0x897c, 0xffffffff, 0x06000100,
297 0x8b28, 0xffffffff, 0x00000100,
298 0x9144, 0xffffffff, 0x00000100,
299 0x9a60, 0xffffffff, 0x00000100,
300 0x9868, 0xffffffff, 0x00000100,
301 0x8d58, 0xffffffff, 0x00000100,
302 0x9510, 0xffffffff, 0x00000100,
303 0x949c, 0xffffffff, 0x00000100,
304 0x9654, 0xffffffff, 0x00000100,
305 0x9030, 0xffffffff, 0x00000100,
306 0x9034, 0xffffffff, 0x00000100,
307 0x9038, 0xffffffff, 0x00000100,
308 0x903c, 0xffffffff, 0x00000100,
309 0x9040, 0xffffffff, 0x00000100,
310 0xa200, 0xffffffff, 0x00000100,
311 0xa204, 0xffffffff, 0x00000100,
312 0xa208, 0xffffffff, 0x00000100,
313 0xa20c, 0xffffffff, 0x00000100,
314 0x971c, 0xffffffff, 0x00000100,
315 0x977c, 0xffffffff, 0x00000100,
316 0x3f80, 0xffffffff, 0x00000100,
317 0xa210, 0xffffffff, 0x00000100,
318 0xa214, 0xffffffff, 0x00000100,
319 0x4d8, 0xffffffff, 0x00000100,
320 0x9784, 0xffffffff, 0x00000100,
321 0x9698, 0xffffffff, 0x00000100,
322 0x4d4, 0xffffffff, 0x00000200,
323 0x30cc, 0xffffffff, 0x00000100,
324 0xd0c0, 0xffffffff, 0xff000100,
325 0x802c, 0xffffffff, 0x40000000,
326 0x915c, 0xffffffff, 0x00010000,
327 0x9160, 0xffffffff, 0x00030002,
328 0x9178, 0xffffffff, 0x00070000,
329 0x917c, 0xffffffff, 0x00030002,
330 0x9180, 0xffffffff, 0x00050004,
331 0x918c, 0xffffffff, 0x00010006,
332 0x9190, 0xffffffff, 0x00090008,
333 0x9194, 0xffffffff, 0x00070000,
334 0x9198, 0xffffffff, 0x00030002,
335 0x919c, 0xffffffff, 0x00050004,
336 0x91a8, 0xffffffff, 0x00010006,
337 0x91ac, 0xffffffff, 0x00090008,
338 0x91b0, 0xffffffff, 0x00070000,
339 0x91b4, 0xffffffff, 0x00030002,
340 0x91b8, 0xffffffff, 0x00050004,
341 0x91c4, 0xffffffff, 0x00010006,
342 0x91c8, 0xffffffff, 0x00090008,
343 0x91cc, 0xffffffff, 0x00070000,
344 0x91d0, 0xffffffff, 0x00030002,
345 0x91d4, 0xffffffff, 0x00050004,
346 0x91e0, 0xffffffff, 0x00010006,
347 0x91e4, 0xffffffff, 0x00090008,
348 0x91e8, 0xffffffff, 0x00000000,
349 0x91ec, 0xffffffff, 0x00070000,
350 0x91f0, 0xffffffff, 0x00030002,
351 0x91f4, 0xffffffff, 0x00050004,
352 0x9200, 0xffffffff, 0x00010006,
353 0x9204, 0xffffffff, 0x00090008,
354 0x9208, 0xffffffff, 0x00070000,
355 0x920c, 0xffffffff, 0x00030002,
356 0x9210, 0xffffffff, 0x00050004,
357 0x921c, 0xffffffff, 0x00010006,
358 0x9220, 0xffffffff, 0x00090008,
359 0x9224, 0xffffffff, 0x00070000,
360 0x9228, 0xffffffff, 0x00030002,
361 0x922c, 0xffffffff, 0x00050004,
362 0x9238, 0xffffffff, 0x00010006,
363 0x923c, 0xffffffff, 0x00090008,
364 0x9240, 0xffffffff, 0x00070000,
365 0x9244, 0xffffffff, 0x00030002,
366 0x9248, 0xffffffff, 0x00050004,
367 0x9254, 0xffffffff, 0x00010006,
368 0x9258, 0xffffffff, 0x00090008,
369 0x925c, 0xffffffff, 0x00070000,
370 0x9260, 0xffffffff, 0x00030002,
371 0x9264, 0xffffffff, 0x00050004,
372 0x9270, 0xffffffff, 0x00010006,
373 0x9274, 0xffffffff, 0x00090008,
374 0x9278, 0xffffffff, 0x00070000,
375 0x927c, 0xffffffff, 0x00030002,
376 0x9280, 0xffffffff, 0x00050004,
377 0x928c, 0xffffffff, 0x00010006,
378 0x9290, 0xffffffff, 0x00090008,
379 0x9294, 0xffffffff, 0x00000000,
380 0x929c, 0xffffffff, 0x00000001,
381 0x802c, 0xffffffff, 0x40010000,
382 0x915c, 0xffffffff, 0x00010000,
383 0x9160, 0xffffffff, 0x00030002,
384 0x9178, 0xffffffff, 0x00070000,
385 0x917c, 0xffffffff, 0x00030002,
386 0x9180, 0xffffffff, 0x00050004,
387 0x918c, 0xffffffff, 0x00010006,
388 0x9190, 0xffffffff, 0x00090008,
389 0x9194, 0xffffffff, 0x00070000,
390 0x9198, 0xffffffff, 0x00030002,
391 0x919c, 0xffffffff, 0x00050004,
392 0x91a8, 0xffffffff, 0x00010006,
393 0x91ac, 0xffffffff, 0x00090008,
394 0x91b0, 0xffffffff, 0x00070000,
395 0x91b4, 0xffffffff, 0x00030002,
396 0x91b8, 0xffffffff, 0x00050004,
397 0x91c4, 0xffffffff, 0x00010006,
398 0x91c8, 0xffffffff, 0x00090008,
399 0x91cc, 0xffffffff, 0x00070000,
400 0x91d0, 0xffffffff, 0x00030002,
401 0x91d4, 0xffffffff, 0x00050004,
402 0x91e0, 0xffffffff, 0x00010006,
403 0x91e4, 0xffffffff, 0x00090008,
404 0x91e8, 0xffffffff, 0x00000000,
405 0x91ec, 0xffffffff, 0x00070000,
406 0x91f0, 0xffffffff, 0x00030002,
407 0x91f4, 0xffffffff, 0x00050004,
408 0x9200, 0xffffffff, 0x00010006,
409 0x9204, 0xffffffff, 0x00090008,
410 0x9208, 0xffffffff, 0x00070000,
411 0x920c, 0xffffffff, 0x00030002,
412 0x9210, 0xffffffff, 0x00050004,
413 0x921c, 0xffffffff, 0x00010006,
414 0x9220, 0xffffffff, 0x00090008,
415 0x9224, 0xffffffff, 0x00070000,
416 0x9228, 0xffffffff, 0x00030002,
417 0x922c, 0xffffffff, 0x00050004,
418 0x9238, 0xffffffff, 0x00010006,
419 0x923c, 0xffffffff, 0x00090008,
420 0x9240, 0xffffffff, 0x00070000,
421 0x9244, 0xffffffff, 0x00030002,
422 0x9248, 0xffffffff, 0x00050004,
423 0x9254, 0xffffffff, 0x00010006,
424 0x9258, 0xffffffff, 0x00090008,
425 0x925c, 0xffffffff, 0x00070000,
426 0x9260, 0xffffffff, 0x00030002,
427 0x9264, 0xffffffff, 0x00050004,
428 0x9270, 0xffffffff, 0x00010006,
429 0x9274, 0xffffffff, 0x00090008,
430 0x9278, 0xffffffff, 0x00070000,
431 0x927c, 0xffffffff, 0x00030002,
432 0x9280, 0xffffffff, 0x00050004,
433 0x928c, 0xffffffff, 0x00010006,
434 0x9290, 0xffffffff, 0x00090008,
435 0x9294, 0xffffffff, 0x00000000,
436 0x929c, 0xffffffff, 0x00000001,
437 0x802c, 0xffffffff, 0xc0000000
440 static const u32 redwood_mgcg_init[] =
442 0x802c, 0xffffffff, 0xc0000000,
443 0x5448, 0xffffffff, 0x00000100,
444 0x55e4, 0xffffffff, 0x00000100,
445 0x160c, 0xffffffff, 0x00000100,
446 0x5644, 0xffffffff, 0x00000100,
447 0xc164, 0xffffffff, 0x00000100,
448 0x8a18, 0xffffffff, 0x00000100,
449 0x897c, 0xffffffff, 0x06000100,
450 0x8b28, 0xffffffff, 0x00000100,
451 0x9144, 0xffffffff, 0x00000100,
452 0x9a60, 0xffffffff, 0x00000100,
453 0x9868, 0xffffffff, 0x00000100,
454 0x8d58, 0xffffffff, 0x00000100,
455 0x9510, 0xffffffff, 0x00000100,
456 0x949c, 0xffffffff, 0x00000100,
457 0x9654, 0xffffffff, 0x00000100,
458 0x9030, 0xffffffff, 0x00000100,
459 0x9034, 0xffffffff, 0x00000100,
460 0x9038, 0xffffffff, 0x00000100,
461 0x903c, 0xffffffff, 0x00000100,
462 0x9040, 0xffffffff, 0x00000100,
463 0xa200, 0xffffffff, 0x00000100,
464 0xa204, 0xffffffff, 0x00000100,
465 0xa208, 0xffffffff, 0x00000100,
466 0xa20c, 0xffffffff, 0x00000100,
467 0x971c, 0xffffffff, 0x00000100,
468 0x977c, 0xffffffff, 0x00000100,
469 0x3f80, 0xffffffff, 0x00000100,
470 0xa210, 0xffffffff, 0x00000100,
471 0xa214, 0xffffffff, 0x00000100,
472 0x4d8, 0xffffffff, 0x00000100,
473 0x9784, 0xffffffff, 0x00000100,
474 0x9698, 0xffffffff, 0x00000100,
475 0x4d4, 0xffffffff, 0x00000200,
476 0x30cc, 0xffffffff, 0x00000100,
477 0xd0c0, 0xffffffff, 0xff000100,
478 0x802c, 0xffffffff, 0x40000000,
479 0x915c, 0xffffffff, 0x00010000,
480 0x9160, 0xffffffff, 0x00030002,
481 0x9178, 0xffffffff, 0x00070000,
482 0x917c, 0xffffffff, 0x00030002,
483 0x9180, 0xffffffff, 0x00050004,
484 0x918c, 0xffffffff, 0x00010006,
485 0x9190, 0xffffffff, 0x00090008,
486 0x9194, 0xffffffff, 0x00070000,
487 0x9198, 0xffffffff, 0x00030002,
488 0x919c, 0xffffffff, 0x00050004,
489 0x91a8, 0xffffffff, 0x00010006,
490 0x91ac, 0xffffffff, 0x00090008,
491 0x91b0, 0xffffffff, 0x00070000,
492 0x91b4, 0xffffffff, 0x00030002,
493 0x91b8, 0xffffffff, 0x00050004,
494 0x91c4, 0xffffffff, 0x00010006,
495 0x91c8, 0xffffffff, 0x00090008,
496 0x91cc, 0xffffffff, 0x00070000,
497 0x91d0, 0xffffffff, 0x00030002,
498 0x91d4, 0xffffffff, 0x00050004,
499 0x91e0, 0xffffffff, 0x00010006,
500 0x91e4, 0xffffffff, 0x00090008,
501 0x91e8, 0xffffffff, 0x00000000,
502 0x91ec, 0xffffffff, 0x00070000,
503 0x91f0, 0xffffffff, 0x00030002,
504 0x91f4, 0xffffffff, 0x00050004,
505 0x9200, 0xffffffff, 0x00010006,
506 0x9204, 0xffffffff, 0x00090008,
507 0x9294, 0xffffffff, 0x00000000,
508 0x929c, 0xffffffff, 0x00000001,
509 0x802c, 0xffffffff, 0xc0000000
512 static const u32 cedar_golden_registers[] =
514 0x3f90, 0xffff0000, 0xff000000,
515 0x9148, 0xffff0000, 0xff000000,
516 0x3f94, 0xffff0000, 0xff000000,
517 0x914c, 0xffff0000, 0xff000000,
518 0x9b7c, 0xffffffff, 0x00000000,
519 0x8a14, 0xffffffff, 0x00000007,
520 0x8b10, 0xffffffff, 0x00000000,
521 0x960c, 0xffffffff, 0x54763210,
522 0x88c4, 0xffffffff, 0x000000c2,
523 0x88d4, 0xffffffff, 0x00000000,
524 0x8974, 0xffffffff, 0x00000000,
525 0xc78, 0x00000080, 0x00000080,
526 0x5eb4, 0xffffffff, 0x00000002,
527 0x5e78, 0xffffffff, 0x001000f0,
528 0x6104, 0x01000300, 0x00000000,
529 0x5bc0, 0x00300000, 0x00000000,
530 0x7030, 0xffffffff, 0x00000011,
531 0x7c30, 0xffffffff, 0x00000011,
532 0x10830, 0xffffffff, 0x00000011,
533 0x11430, 0xffffffff, 0x00000011,
534 0xd02c, 0xffffffff, 0x08421000,
535 0x240c, 0xffffffff, 0x00000380,
536 0x8b24, 0xffffffff, 0x00ff0fff,
537 0x28a4c, 0x06000000, 0x06000000,
538 0x10c, 0x00000001, 0x00000001,
539 0x8d00, 0xffffffff, 0x100e4848,
540 0x8d04, 0xffffffff, 0x00164745,
541 0x8c00, 0xffffffff, 0xe4000003,
542 0x8c04, 0xffffffff, 0x40600060,
543 0x8c08, 0xffffffff, 0x001c001c,
544 0x8cf0, 0xffffffff, 0x08e00410,
545 0x8c20, 0xffffffff, 0x00800080,
546 0x8c24, 0xffffffff, 0x00800080,
547 0x8c18, 0xffffffff, 0x20202078,
548 0x8c1c, 0xffffffff, 0x00001010,
549 0x28350, 0xffffffff, 0x00000000,
550 0xa008, 0xffffffff, 0x00010000,
551 0x5c4, 0xffffffff, 0x00000001,
552 0x9508, 0xffffffff, 0x00000002
555 static const u32 cedar_mgcg_init[] =
557 0x802c, 0xffffffff, 0xc0000000,
558 0x5448, 0xffffffff, 0x00000100,
559 0x55e4, 0xffffffff, 0x00000100,
560 0x160c, 0xffffffff, 0x00000100,
561 0x5644, 0xffffffff, 0x00000100,
562 0xc164, 0xffffffff, 0x00000100,
563 0x8a18, 0xffffffff, 0x00000100,
564 0x897c, 0xffffffff, 0x06000100,
565 0x8b28, 0xffffffff, 0x00000100,
566 0x9144, 0xffffffff, 0x00000100,
567 0x9a60, 0xffffffff, 0x00000100,
568 0x9868, 0xffffffff, 0x00000100,
569 0x8d58, 0xffffffff, 0x00000100,
570 0x9510, 0xffffffff, 0x00000100,
571 0x949c, 0xffffffff, 0x00000100,
572 0x9654, 0xffffffff, 0x00000100,
573 0x9030, 0xffffffff, 0x00000100,
574 0x9034, 0xffffffff, 0x00000100,
575 0x9038, 0xffffffff, 0x00000100,
576 0x903c, 0xffffffff, 0x00000100,
577 0x9040, 0xffffffff, 0x00000100,
578 0xa200, 0xffffffff, 0x00000100,
579 0xa204, 0xffffffff, 0x00000100,
580 0xa208, 0xffffffff, 0x00000100,
581 0xa20c, 0xffffffff, 0x00000100,
582 0x971c, 0xffffffff, 0x00000100,
583 0x977c, 0xffffffff, 0x00000100,
584 0x3f80, 0xffffffff, 0x00000100,
585 0xa210, 0xffffffff, 0x00000100,
586 0xa214, 0xffffffff, 0x00000100,
587 0x4d8, 0xffffffff, 0x00000100,
588 0x9784, 0xffffffff, 0x00000100,
589 0x9698, 0xffffffff, 0x00000100,
590 0x4d4, 0xffffffff, 0x00000200,
591 0x30cc, 0xffffffff, 0x00000100,
592 0xd0c0, 0xffffffff, 0xff000100,
593 0x802c, 0xffffffff, 0x40000000,
594 0x915c, 0xffffffff, 0x00010000,
595 0x9178, 0xffffffff, 0x00050000,
596 0x917c, 0xffffffff, 0x00030002,
597 0x918c, 0xffffffff, 0x00010004,
598 0x9190, 0xffffffff, 0x00070006,
599 0x9194, 0xffffffff, 0x00050000,
600 0x9198, 0xffffffff, 0x00030002,
601 0x91a8, 0xffffffff, 0x00010004,
602 0x91ac, 0xffffffff, 0x00070006,
603 0x91e8, 0xffffffff, 0x00000000,
604 0x9294, 0xffffffff, 0x00000000,
605 0x929c, 0xffffffff, 0x00000001,
606 0x802c, 0xffffffff, 0xc0000000
609 static const u32 juniper_mgcg_init[] =
611 0x802c, 0xffffffff, 0xc0000000,
612 0x5448, 0xffffffff, 0x00000100,
613 0x55e4, 0xffffffff, 0x00000100,
614 0x160c, 0xffffffff, 0x00000100,
615 0x5644, 0xffffffff, 0x00000100,
616 0xc164, 0xffffffff, 0x00000100,
617 0x8a18, 0xffffffff, 0x00000100,
618 0x897c, 0xffffffff, 0x06000100,
619 0x8b28, 0xffffffff, 0x00000100,
620 0x9144, 0xffffffff, 0x00000100,
621 0x9a60, 0xffffffff, 0x00000100,
622 0x9868, 0xffffffff, 0x00000100,
623 0x8d58, 0xffffffff, 0x00000100,
624 0x9510, 0xffffffff, 0x00000100,
625 0x949c, 0xffffffff, 0x00000100,
626 0x9654, 0xffffffff, 0x00000100,
627 0x9030, 0xffffffff, 0x00000100,
628 0x9034, 0xffffffff, 0x00000100,
629 0x9038, 0xffffffff, 0x00000100,
630 0x903c, 0xffffffff, 0x00000100,
631 0x9040, 0xffffffff, 0x00000100,
632 0xa200, 0xffffffff, 0x00000100,
633 0xa204, 0xffffffff, 0x00000100,
634 0xa208, 0xffffffff, 0x00000100,
635 0xa20c, 0xffffffff, 0x00000100,
636 0x971c, 0xffffffff, 0x00000100,
637 0xd0c0, 0xffffffff, 0xff000100,
638 0x802c, 0xffffffff, 0x40000000,
639 0x915c, 0xffffffff, 0x00010000,
640 0x9160, 0xffffffff, 0x00030002,
641 0x9178, 0xffffffff, 0x00070000,
642 0x917c, 0xffffffff, 0x00030002,
643 0x9180, 0xffffffff, 0x00050004,
644 0x918c, 0xffffffff, 0x00010006,
645 0x9190, 0xffffffff, 0x00090008,
646 0x9194, 0xffffffff, 0x00070000,
647 0x9198, 0xffffffff, 0x00030002,
648 0x919c, 0xffffffff, 0x00050004,
649 0x91a8, 0xffffffff, 0x00010006,
650 0x91ac, 0xffffffff, 0x00090008,
651 0x91b0, 0xffffffff, 0x00070000,
652 0x91b4, 0xffffffff, 0x00030002,
653 0x91b8, 0xffffffff, 0x00050004,
654 0x91c4, 0xffffffff, 0x00010006,
655 0x91c8, 0xffffffff, 0x00090008,
656 0x91cc, 0xffffffff, 0x00070000,
657 0x91d0, 0xffffffff, 0x00030002,
658 0x91d4, 0xffffffff, 0x00050004,
659 0x91e0, 0xffffffff, 0x00010006,
660 0x91e4, 0xffffffff, 0x00090008,
661 0x91e8, 0xffffffff, 0x00000000,
662 0x91ec, 0xffffffff, 0x00070000,
663 0x91f0, 0xffffffff, 0x00030002,
664 0x91f4, 0xffffffff, 0x00050004,
665 0x9200, 0xffffffff, 0x00010006,
666 0x9204, 0xffffffff, 0x00090008,
667 0x9208, 0xffffffff, 0x00070000,
668 0x920c, 0xffffffff, 0x00030002,
669 0x9210, 0xffffffff, 0x00050004,
670 0x921c, 0xffffffff, 0x00010006,
671 0x9220, 0xffffffff, 0x00090008,
672 0x9224, 0xffffffff, 0x00070000,
673 0x9228, 0xffffffff, 0x00030002,
674 0x922c, 0xffffffff, 0x00050004,
675 0x9238, 0xffffffff, 0x00010006,
676 0x923c, 0xffffffff, 0x00090008,
677 0x9240, 0xffffffff, 0x00070000,
678 0x9244, 0xffffffff, 0x00030002,
679 0x9248, 0xffffffff, 0x00050004,
680 0x9254, 0xffffffff, 0x00010006,
681 0x9258, 0xffffffff, 0x00090008,
682 0x925c, 0xffffffff, 0x00070000,
683 0x9260, 0xffffffff, 0x00030002,
684 0x9264, 0xffffffff, 0x00050004,
685 0x9270, 0xffffffff, 0x00010006,
686 0x9274, 0xffffffff, 0x00090008,
687 0x9278, 0xffffffff, 0x00070000,
688 0x927c, 0xffffffff, 0x00030002,
689 0x9280, 0xffffffff, 0x00050004,
690 0x928c, 0xffffffff, 0x00010006,
691 0x9290, 0xffffffff, 0x00090008,
692 0x9294, 0xffffffff, 0x00000000,
693 0x929c, 0xffffffff, 0x00000001,
694 0x802c, 0xffffffff, 0xc0000000,
695 0x977c, 0xffffffff, 0x00000100,
696 0x3f80, 0xffffffff, 0x00000100,
697 0xa210, 0xffffffff, 0x00000100,
698 0xa214, 0xffffffff, 0x00000100,
699 0x4d8, 0xffffffff, 0x00000100,
700 0x9784, 0xffffffff, 0x00000100,
701 0x9698, 0xffffffff, 0x00000100,
702 0x4d4, 0xffffffff, 0x00000200,
703 0x30cc, 0xffffffff, 0x00000100,
704 0x802c, 0xffffffff, 0xc0000000
707 static const u32 supersumo_golden_registers[] =
709 0x5eb4, 0xffffffff, 0x00000002,
710 0x5c4, 0xffffffff, 0x00000001,
711 0x7030, 0xffffffff, 0x00000011,
712 0x7c30, 0xffffffff, 0x00000011,
713 0x6104, 0x01000300, 0x00000000,
714 0x5bc0, 0x00300000, 0x00000000,
715 0x8c04, 0xffffffff, 0x40600060,
716 0x8c08, 0xffffffff, 0x001c001c,
717 0x8c20, 0xffffffff, 0x00800080,
718 0x8c24, 0xffffffff, 0x00800080,
719 0x8c18, 0xffffffff, 0x20202078,
720 0x8c1c, 0xffffffff, 0x00001010,
721 0x918c, 0xffffffff, 0x00010006,
722 0x91a8, 0xffffffff, 0x00010006,
723 0x91c4, 0xffffffff, 0x00010006,
724 0x91e0, 0xffffffff, 0x00010006,
725 0x9200, 0xffffffff, 0x00010006,
726 0x9150, 0xffffffff, 0x6e944040,
727 0x917c, 0xffffffff, 0x00030002,
728 0x9180, 0xffffffff, 0x00050004,
729 0x9198, 0xffffffff, 0x00030002,
730 0x919c, 0xffffffff, 0x00050004,
731 0x91b4, 0xffffffff, 0x00030002,
732 0x91b8, 0xffffffff, 0x00050004,
733 0x91d0, 0xffffffff, 0x00030002,
734 0x91d4, 0xffffffff, 0x00050004,
735 0x91f0, 0xffffffff, 0x00030002,
736 0x91f4, 0xffffffff, 0x00050004,
737 0x915c, 0xffffffff, 0x00010000,
738 0x9160, 0xffffffff, 0x00030002,
739 0x3f90, 0xffff0000, 0xff000000,
740 0x9178, 0xffffffff, 0x00070000,
741 0x9194, 0xffffffff, 0x00070000,
742 0x91b0, 0xffffffff, 0x00070000,
743 0x91cc, 0xffffffff, 0x00070000,
744 0x91ec, 0xffffffff, 0x00070000,
745 0x9148, 0xffff0000, 0xff000000,
746 0x9190, 0xffffffff, 0x00090008,
747 0x91ac, 0xffffffff, 0x00090008,
748 0x91c8, 0xffffffff, 0x00090008,
749 0x91e4, 0xffffffff, 0x00090008,
750 0x9204, 0xffffffff, 0x00090008,
751 0x3f94, 0xffff0000, 0xff000000,
752 0x914c, 0xffff0000, 0xff000000,
753 0x929c, 0xffffffff, 0x00000001,
754 0x8a18, 0xffffffff, 0x00000100,
755 0x8b28, 0xffffffff, 0x00000100,
756 0x9144, 0xffffffff, 0x00000100,
757 0x5644, 0xffffffff, 0x00000100,
758 0x9b7c, 0xffffffff, 0x00000000,
759 0x8030, 0xffffffff, 0x0000100a,
760 0x8a14, 0xffffffff, 0x00000007,
761 0x8b24, 0xffffffff, 0x00ff0fff,
762 0x8b10, 0xffffffff, 0x00000000,
763 0x28a4c, 0x06000000, 0x06000000,
764 0x4d8, 0xffffffff, 0x00000100,
765 0x913c, 0xffff000f, 0x0100000a,
766 0x960c, 0xffffffff, 0x54763210,
767 0x88c4, 0xffffffff, 0x000000c2,
768 0x88d4, 0xffffffff, 0x00000010,
769 0x8974, 0xffffffff, 0x00000000,
770 0xc78, 0x00000080, 0x00000080,
771 0x5e78, 0xffffffff, 0x001000f0,
772 0xd02c, 0xffffffff, 0x08421000,
773 0xa008, 0xffffffff, 0x00010000,
774 0x8d00, 0xffffffff, 0x100e4848,
775 0x8d04, 0xffffffff, 0x00164745,
776 0x8c00, 0xffffffff, 0xe4000003,
777 0x8cf0, 0x1fffffff, 0x08e00620,
778 0x28350, 0xffffffff, 0x00000000,
779 0x9508, 0xffffffff, 0x00000002
782 static const u32 sumo_golden_registers[] =
784 0x900c, 0x00ffffff, 0x0017071f,
785 0x8c18, 0xffffffff, 0x10101060,
786 0x8c1c, 0xffffffff, 0x00001010,
787 0x8c30, 0x0000000f, 0x00000005,
788 0x9688, 0x0000000f, 0x00000007
791 static const u32 wrestler_golden_registers[] =
793 0x5eb4, 0xffffffff, 0x00000002,
794 0x5c4, 0xffffffff, 0x00000001,
795 0x7030, 0xffffffff, 0x00000011,
796 0x7c30, 0xffffffff, 0x00000011,
797 0x6104, 0x01000300, 0x00000000,
798 0x5bc0, 0x00300000, 0x00000000,
799 0x918c, 0xffffffff, 0x00010006,
800 0x91a8, 0xffffffff, 0x00010006,
801 0x9150, 0xffffffff, 0x6e944040,
802 0x917c, 0xffffffff, 0x00030002,
803 0x9198, 0xffffffff, 0x00030002,
804 0x915c, 0xffffffff, 0x00010000,
805 0x3f90, 0xffff0000, 0xff000000,
806 0x9178, 0xffffffff, 0x00070000,
807 0x9194, 0xffffffff, 0x00070000,
808 0x9148, 0xffff0000, 0xff000000,
809 0x9190, 0xffffffff, 0x00090008,
810 0x91ac, 0xffffffff, 0x00090008,
811 0x3f94, 0xffff0000, 0xff000000,
812 0x914c, 0xffff0000, 0xff000000,
813 0x929c, 0xffffffff, 0x00000001,
814 0x8a18, 0xffffffff, 0x00000100,
815 0x8b28, 0xffffffff, 0x00000100,
816 0x9144, 0xffffffff, 0x00000100,
817 0x9b7c, 0xffffffff, 0x00000000,
818 0x8030, 0xffffffff, 0x0000100a,
819 0x8a14, 0xffffffff, 0x00000001,
820 0x8b24, 0xffffffff, 0x00ff0fff,
821 0x8b10, 0xffffffff, 0x00000000,
822 0x28a4c, 0x06000000, 0x06000000,
823 0x4d8, 0xffffffff, 0x00000100,
824 0x913c, 0xffff000f, 0x0100000a,
825 0x960c, 0xffffffff, 0x54763210,
826 0x88c4, 0xffffffff, 0x000000c2,
827 0x88d4, 0xffffffff, 0x00000010,
828 0x8974, 0xffffffff, 0x00000000,
829 0xc78, 0x00000080, 0x00000080,
830 0x5e78, 0xffffffff, 0x001000f0,
831 0xd02c, 0xffffffff, 0x08421000,
832 0xa008, 0xffffffff, 0x00010000,
833 0x8d00, 0xffffffff, 0x100e4848,
834 0x8d04, 0xffffffff, 0x00164745,
835 0x8c00, 0xffffffff, 0xe4000003,
836 0x8cf0, 0x1fffffff, 0x08e00410,
837 0x28350, 0xffffffff, 0x00000000,
838 0x9508, 0xffffffff, 0x00000002,
839 0x900c, 0xffffffff, 0x0017071f,
840 0x8c18, 0xffffffff, 0x10101060,
841 0x8c1c, 0xffffffff, 0x00001010
844 static const u32 barts_golden_registers[] =
846 0x5eb4, 0xffffffff, 0x00000002,
847 0x5e78, 0x8f311ff1, 0x001000f0,
848 0x3f90, 0xffff0000, 0xff000000,
849 0x9148, 0xffff0000, 0xff000000,
850 0x3f94, 0xffff0000, 0xff000000,
851 0x914c, 0xffff0000, 0xff000000,
852 0xc78, 0x00000080, 0x00000080,
853 0xbd4, 0x70073777, 0x00010001,
854 0xd02c, 0xbfffff1f, 0x08421000,
855 0xd0b8, 0x03773777, 0x02011003,
856 0x5bc0, 0x00200000, 0x50100000,
857 0x98f8, 0x33773777, 0x02011003,
858 0x98fc, 0xffffffff, 0x76543210,
859 0x7030, 0x31000311, 0x00000011,
860 0x2f48, 0x00000007, 0x02011003,
861 0x6b28, 0x00000010, 0x00000012,
862 0x7728, 0x00000010, 0x00000012,
863 0x10328, 0x00000010, 0x00000012,
864 0x10f28, 0x00000010, 0x00000012,
865 0x11b28, 0x00000010, 0x00000012,
866 0x12728, 0x00000010, 0x00000012,
867 0x240c, 0x000007ff, 0x00000380,
868 0x8a14, 0xf000001f, 0x00000007,
869 0x8b24, 0x3fff3fff, 0x00ff0fff,
870 0x8b10, 0x0000ff0f, 0x00000000,
871 0x28a4c, 0x07ffffff, 0x06000000,
872 0x10c, 0x00000001, 0x00010003,
873 0xa02c, 0xffffffff, 0x0000009b,
874 0x913c, 0x0000000f, 0x0100000a,
875 0x8d00, 0xffff7f7f, 0x100e4848,
876 0x8d04, 0x00ffffff, 0x00164745,
877 0x8c00, 0xfffc0003, 0xe4000003,
878 0x8c04, 0xf8ff00ff, 0x40600060,
879 0x8c08, 0x00ff00ff, 0x001c001c,
880 0x8cf0, 0x1fff1fff, 0x08e00620,
881 0x8c20, 0x0fff0fff, 0x00800080,
882 0x8c24, 0x0fff0fff, 0x00800080,
883 0x8c18, 0xffffffff, 0x20202078,
884 0x8c1c, 0x0000ffff, 0x00001010,
885 0x28350, 0x00000f01, 0x00000000,
886 0x9508, 0x3700001f, 0x00000002,
887 0x960c, 0xffffffff, 0x54763210,
888 0x88c4, 0x001f3ae3, 0x000000c2,
889 0x88d4, 0x0000001f, 0x00000010,
890 0x8974, 0xffffffff, 0x00000000
893 static const u32 turks_golden_registers[] =
895 0x5eb4, 0xffffffff, 0x00000002,
896 0x5e78, 0x8f311ff1, 0x001000f0,
897 0x8c8, 0x00003000, 0x00001070,
898 0x8cc, 0x000fffff, 0x00040035,
899 0x3f90, 0xffff0000, 0xfff00000,
900 0x9148, 0xffff0000, 0xfff00000,
901 0x3f94, 0xffff0000, 0xfff00000,
902 0x914c, 0xffff0000, 0xfff00000,
903 0xc78, 0x00000080, 0x00000080,
904 0xbd4, 0x00073007, 0x00010002,
905 0xd02c, 0xbfffff1f, 0x08421000,
906 0xd0b8, 0x03773777, 0x02010002,
907 0x5bc0, 0x00200000, 0x50100000,
908 0x98f8, 0x33773777, 0x00010002,
909 0x98fc, 0xffffffff, 0x33221100,
910 0x7030, 0x31000311, 0x00000011,
911 0x2f48, 0x33773777, 0x00010002,
912 0x6b28, 0x00000010, 0x00000012,
913 0x7728, 0x00000010, 0x00000012,
914 0x10328, 0x00000010, 0x00000012,
915 0x10f28, 0x00000010, 0x00000012,
916 0x11b28, 0x00000010, 0x00000012,
917 0x12728, 0x00000010, 0x00000012,
918 0x240c, 0x000007ff, 0x00000380,
919 0x8a14, 0xf000001f, 0x00000007,
920 0x8b24, 0x3fff3fff, 0x00ff0fff,
921 0x8b10, 0x0000ff0f, 0x00000000,
922 0x28a4c, 0x07ffffff, 0x06000000,
923 0x10c, 0x00000001, 0x00010003,
924 0xa02c, 0xffffffff, 0x0000009b,
925 0x913c, 0x0000000f, 0x0100000a,
926 0x8d00, 0xffff7f7f, 0x100e4848,
927 0x8d04, 0x00ffffff, 0x00164745,
928 0x8c00, 0xfffc0003, 0xe4000003,
929 0x8c04, 0xf8ff00ff, 0x40600060,
930 0x8c08, 0x00ff00ff, 0x001c001c,
931 0x8cf0, 0x1fff1fff, 0x08e00410,
932 0x8c20, 0x0fff0fff, 0x00800080,
933 0x8c24, 0x0fff0fff, 0x00800080,
934 0x8c18, 0xffffffff, 0x20202078,
935 0x8c1c, 0x0000ffff, 0x00001010,
936 0x28350, 0x00000f01, 0x00000000,
937 0x9508, 0x3700001f, 0x00000002,
938 0x960c, 0xffffffff, 0x54763210,
939 0x88c4, 0x001f3ae3, 0x000000c2,
940 0x88d4, 0x0000001f, 0x00000010,
941 0x8974, 0xffffffff, 0x00000000
944 static const u32 caicos_golden_registers[] =
946 0x5eb4, 0xffffffff, 0x00000002,
947 0x5e78, 0x8f311ff1, 0x001000f0,
948 0x8c8, 0x00003420, 0x00001450,
949 0x8cc, 0x000fffff, 0x00040035,
950 0x3f90, 0xffff0000, 0xfffc0000,
951 0x9148, 0xffff0000, 0xfffc0000,
952 0x3f94, 0xffff0000, 0xfffc0000,
953 0x914c, 0xffff0000, 0xfffc0000,
954 0xc78, 0x00000080, 0x00000080,
955 0xbd4, 0x00073007, 0x00010001,
956 0xd02c, 0xbfffff1f, 0x08421000,
957 0xd0b8, 0x03773777, 0x02010001,
958 0x5bc0, 0x00200000, 0x50100000,
959 0x98f8, 0x33773777, 0x02010001,
960 0x98fc, 0xffffffff, 0x33221100,
961 0x7030, 0x31000311, 0x00000011,
962 0x2f48, 0x33773777, 0x02010001,
963 0x6b28, 0x00000010, 0x00000012,
964 0x7728, 0x00000010, 0x00000012,
965 0x10328, 0x00000010, 0x00000012,
966 0x10f28, 0x00000010, 0x00000012,
967 0x11b28, 0x00000010, 0x00000012,
968 0x12728, 0x00000010, 0x00000012,
969 0x240c, 0x000007ff, 0x00000380,
970 0x8a14, 0xf000001f, 0x00000001,
971 0x8b24, 0x3fff3fff, 0x00ff0fff,
972 0x8b10, 0x0000ff0f, 0x00000000,
973 0x28a4c, 0x07ffffff, 0x06000000,
974 0x10c, 0x00000001, 0x00010003,
975 0xa02c, 0xffffffff, 0x0000009b,
976 0x913c, 0x0000000f, 0x0100000a,
977 0x8d00, 0xffff7f7f, 0x100e4848,
978 0x8d04, 0x00ffffff, 0x00164745,
979 0x8c00, 0xfffc0003, 0xe4000003,
980 0x8c04, 0xf8ff00ff, 0x40600060,
981 0x8c08, 0x00ff00ff, 0x001c001c,
982 0x8cf0, 0x1fff1fff, 0x08e00410,
983 0x8c20, 0x0fff0fff, 0x00800080,
984 0x8c24, 0x0fff0fff, 0x00800080,
985 0x8c18, 0xffffffff, 0x20202078,
986 0x8c1c, 0x0000ffff, 0x00001010,
987 0x28350, 0x00000f01, 0x00000000,
988 0x9508, 0x3700001f, 0x00000002,
989 0x960c, 0xffffffff, 0x54763210,
990 0x88c4, 0x001f3ae3, 0x000000c2,
991 0x88d4, 0x0000001f, 0x00000010,
992 0x8974, 0xffffffff, 0x00000000
/*
 * evergreen_init_golden_registers - program the per-ASIC "golden" register
 * sequences (register/mask/value triplets) via
 * radeon_program_register_sequence().  Which tables are used is selected on
 * rdev->family; the case labels and break statements are elided in this
 * listing — verify branch boundaries against the full source.
 */
995 static void evergreen_init_golden_registers(struct radeon_device *rdev)
997 switch (rdev->family) {
/* Cypress branch: common evergreen tables plus the cypress MGCG table */
1000 radeon_program_register_sequence(rdev,
1001 evergreen_golden_registers,
1002 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1003 radeon_program_register_sequence(rdev,
1004 evergreen_golden_registers2,
1005 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1006 radeon_program_register_sequence(rdev,
1008 (const u32)ARRAY_SIZE(cypress_mgcg_init));
/* Juniper branch: same common tables, juniper MGCG table */
1011 radeon_program_register_sequence(rdev,
1012 evergreen_golden_registers,
1013 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1014 radeon_program_register_sequence(rdev,
1015 evergreen_golden_registers2,
1016 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1017 radeon_program_register_sequence(rdev,
1019 (const u32)ARRAY_SIZE(juniper_mgcg_init));
/* Redwood branch: same common tables, redwood MGCG table */
1022 radeon_program_register_sequence(rdev,
1023 evergreen_golden_registers,
1024 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1025 radeon_program_register_sequence(rdev,
1026 evergreen_golden_registers2,
1027 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1028 radeon_program_register_sequence(rdev,
1030 (const u32)ARRAY_SIZE(redwood_mgcg_init));
/* Cedar branch: cedar-specific golden registers, cedar MGCG table */
1033 radeon_program_register_sequence(rdev,
1034 cedar_golden_registers,
1035 (const u32)ARRAY_SIZE(cedar_golden_registers));
1036 radeon_program_register_sequence(rdev,
1037 evergreen_golden_registers2,
1038 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1039 radeon_program_register_sequence(rdev,
1041 (const u32)ARRAY_SIZE(cedar_mgcg_init));
/* Wrestler (Palm APU) golden registers */
1044 radeon_program_register_sequence(rdev,
1045 wrestler_golden_registers,
1046 (const u32)ARRAY_SIZE(wrestler_golden_registers));
/* SuperSumo golden registers */
1049 radeon_program_register_sequence(rdev,
1050 supersumo_golden_registers,
1051 (const u32)ARRAY_SIZE(supersumo_golden_registers));
/* Sumo branch: supersumo table plus sumo-specific additions */
1054 radeon_program_register_sequence(rdev,
1055 supersumo_golden_registers,
1056 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1057 radeon_program_register_sequence(rdev,
1058 sumo_golden_registers,
1059 (const u32)ARRAY_SIZE(sumo_golden_registers));
/* Northern Islands: Barts / Turks / Caicos golden registers */
1062 radeon_program_register_sequence(rdev,
1063 barts_golden_registers,
1064 (const u32)ARRAY_SIZE(barts_golden_registers));
1067 radeon_program_register_sequence(rdev,
1068 turks_golden_registers,
1069 (const u32)ARRAY_SIZE(turks_golden_registers));
1072 radeon_program_register_sequence(rdev,
1073 caicos_golden_registers,
1074 (const u32)ARRAY_SIZE(caicos_golden_registers));
1082 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1084 * @rdev: radeon_device pointer
1085 * @reg: register offset in bytes
1086 * @val: register value
1088 * Returns 0 for success or -EINVAL for an invalid register
1091 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
/*
 * Whitelist of status registers userspace may read through the info ioctl.
 * Additional case labels, the RREG32 read and the -EINVAL default are
 * elided in this listing.
 */
1096 case GRBM_STATUS_SE0:
1097 case GRBM_STATUS_SE1:
1100 case DMA_STATUS_REG:
/*
 * evergreen_tiling_fields - decode the EG tiling fields packed into
 * tiling_flags and translate the raw bank width/height/aspect counts
 * into the EVERGREEN_ADDR_SURF_* register encodings.
 */
1109 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1110 unsigned *bankh, unsigned *mtaspect,
1111 unsigned *tile_split)
/* extract the raw fields from the flags word */
1113 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1114 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1115 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1116 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* map raw counts (1/2/4/8) to hw surface encodings; switch headers and
 * default cases are elided in this listing */
1119 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1120 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1121 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1122 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1126 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1127 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1128 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1129 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1131 switch (*mtaspect) {
1133 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1134 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1135 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1136 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
/*
 * sumo_set_uvd_clock - program one UVD clock (VCLK or DCLK).
 * Computes dividers via atombios for the requested clock, writes the
 * post divider into cntl_reg, then polls status_reg for DCLK_STATUS
 * (up to 100 iterations; timeout handling elided in this listing).
 */
1140 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1141 u32 cntl_reg, u32 status_reg)
1144 struct atom_clock_dividers dividers;
1146 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1147 clock, false, &dividers);
1151 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
/* wait for the clock to report it has settled */
1153 for (i = 0; i < 100; i++) {
1154 if (RREG32(status_reg) & DCLK_STATUS)
/*
 * sumo_set_uvd_clocks - set both UVD clocks (vclk and dclk, in 10 kHz
 * units per the /100 MHz conversion below) and record them in MHz in the
 * two halves of CG_SCRATCH1: vclk in bits 15:0, dclk in bits 31:16.
 * Error-return paths are elided in this listing.
 */
1164 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1167 u32 cg_scratch = RREG32(CG_SCRATCH1);
1169 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* low half of scratch holds vclk in MHz */
1172 cg_scratch &= 0xffff0000;
1173 cg_scratch |= vclk / 100; /* Mhz */
1175 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* high half of scratch holds dclk in MHz */
1178 cg_scratch &= 0x0000ffff;
1179 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1182 WREG32(CG_SCRATCH1, cg_scratch);
/*
 * evergreen_set_uvd_clocks - reprogram the UVD PLL (UPLL) for the
 * requested vclk/dclk.  Sequence: route VCLK/DCLK from the bypass clock,
 * put the PLL in bypass, (optionally sleep it if both clocks are 0),
 * compute dividers, program them with the PLL held in reset, release
 * reset, leave bypass, and finally switch VCLK/DCLK back to the PLL
 * outputs.  Error-return checks after the ctlreq calls are elided in
 * this listing.
 */
1187 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1189 /* start off with something large */
1190 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1193 /* bypass vclk and dclk with bclk */
1194 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1195 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1196 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1198 /* put PLL in bypass mode */
1199 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK)
1201 if (!vclk || !dclk) {
1202 /* keep the Bypass mode, put PLL to sleep */
1203 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback and post dividers for the requested clocks */
1207 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1208 16384, 0x03FFFFFF, 0, 128, 5,
1209 &fb_div, &vclk_div, &dclk_div);
1213 /* set VCO_MODE to 1 */
1214 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1216 /* toggle UPLL_SLEEP to 1 then back to 0 */
1217 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1218 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1220 /* deassert UPLL_RESET */
1221 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1225 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1229 /* assert UPLL_RESET again */
1230 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1232 /* disable spread spectrum. */
1233 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1235 /* set feedback divider */
1236 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1238 /* set ref divider to 0 */
1239 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* spare bit selects VCO range depending on feedback divider magnitude */
1241 if (fb_div < 307200)
1242 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1244 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1246 /* set PDIV_A and PDIV_B */
1247 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1248 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1249 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1251 /* give the PLL some time to settle */
1254 /* deassert PLL_RESET */
1255 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1259 /* switch from bypass mode to normal mode */
1260 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1262 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1266 /* switch VCLK and DCLK selection */
1267 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1268 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1269 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/*
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIe
 * Max_Read_Request_Size.  readrq is the current size in bytes (a power
 * of two), so ffs(readrq) - 8 maps 128B->0, 256B->1, ... 4096B->5.
 * Values 0 (128B) and 6/7 (reserved encodings) are forced to 512 bytes.
 */
1276 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1281 readrq = pcie_get_readrq(rdev->pdev);
1282 v = ffs(readrq) - 8;
1283 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1284 * to avoid hangs or perfomance issues
1286 if ((v == 0) || (v == 6) || (v == 7))
1287 pcie_set_readrq(rdev->pdev, 512);
/*
 * dce4_program_fmt - program the FMT (bit-depth/dither) block for the
 * crtc feeding this encoder.  Picks truncation vs. spatial/random dither
 * based on the connector's dither setting and monitor bpc; the bpc
 * switch headers and default handling are elided in this listing.
 */
1290 void dce4_program_fmt(struct drm_encoder *encoder)
1292 struct drm_device *dev = encoder->dev;
1293 struct radeon_device *rdev = dev->dev_private;
1294 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1295 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1296 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1299 enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1302 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1303 bpc = radeon_get_monitor_bpc(connector);
1304 dither = radeon_connector->dither;
1307 /* LVDS/eDP FMT is set up by atom */
1308 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1311 /* not needed for analog */
1312 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1313 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
/* presumably the 6-bpc branch: shallow dither or plain truncation */
1321 if (dither == RADEON_FMT_DITHER_ENABLE)
1322 /* XXX sort out optimal dither settings */
1323 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1324 FMT_SPATIAL_DITHER_EN);
1326 tmp |= FMT_TRUNCATE_EN;
/* presumably the 8-bpc branch: deeper dither/truncation depth bits set */
1329 if (dither == RADEON_FMT_DITHER_ENABLE)
1330 /* XXX sort out optimal dither settings */
1331 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1332 FMT_RGB_RANDOM_ENABLE |
1333 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1335 tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1343 WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
/* dce4_is_in_vblank - true while the crtc's status register reports the
 * vertical blanking interval (return statements elided in this listing). */
1346 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1348 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/* dce4_is_counter_moving - sample the crtc position register twice; a
 * changing value means the display timing counter is advancing
 * (comparison/return elided in this listing). */
1354 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1358 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1359 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1368 * dce4_wait_for_vblank - vblank wait asic callback.
1370 * @rdev: radeon_device pointer
1371 * @crtc: crtc to wait for vblank on
1373 * Wait for vblank on the requested crtc (evergreen+).
1375 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* bail out for invalid or disabled crtcs */
1379 if (crtc >= rdev->num_crtc)
1382 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1385 /* depending on when we hit vblank, we may be close to active; if so,
1386 * wait for another frame.
/* first wait out any vblank we are already in, then wait for the next
 * one to start; the counter-moving check every 100 iterations guards
 * against a stalled crtc (break/exit paths elided in this listing) */
1388 while (dce4_is_in_vblank(rdev, crtc)) {
1389 if (i++ % 100 == 0) {
1390 if (!dce4_is_counter_moving(rdev, crtc))
1395 while (!dce4_is_in_vblank(rdev, crtc)) {
1396 if (i++ % 100 == 0) {
1397 if (!dce4_is_counter_moving(rdev, crtc))
1404 * evergreen_page_flip - pageflip callback.
1406 * @rdev: radeon_device pointer
1407 * @crtc_id: crtc to cleanup pageflip on
1408 * @crtc_base: new address of the crtc (GPU MC address)
1409 * @async: asynchronous flip
1411 * Triggers the actual pageflip by updating the primary
1412 * surface base address (evergreen+).
1414 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1417 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1418 struct drm_framebuffer *fb = radeon_crtc->base.primary->fb;
1420 /* flip at hsync for async, default is vsync */
1421 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1422 async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
/* pitch register is in pixels, fb->pitches[] is in bytes */
1424 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset,
1425 fb->pitches[0] / fb->format->cpp[0]);
1426 /* update the scanout addresses */
1427 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1428 upper_32_bits(crtc_base));
1429 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1431 /* post the write */
1432 RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1436 * evergreen_page_flip_pending - check if page flip is still pending
1438 * @rdev: radeon_device pointer
1439 * @crtc_id: crtc to check
1441 * Returns the current update pending status.
1443 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1445 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1447 /* Return current update_pending status: */
1448 return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1449 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1452 /* get temperature in millidegrees */
1453 int evergreen_get_temp(struct radeon_device *rdev)
1456 int actual_temp = 0;
/* Juniper uses a separate thermal sensor with a signed 9-bit offset */
1458 if (rdev->family == CHIP_JUNIPER) {
1459 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1461 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* bit 8 of toffset is its sign bit */
1464 if (toffset & 0x100)
1465 actual_temp = temp / 2 - (0x200 - toffset);
1467 actual_temp = temp / 2 + toffset;
1469 actual_temp = actual_temp * 1000;
/* other evergreen parts: ASIC_T field, sign-extended from 9 bits */
1472 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1477 else if (temp & 0x200)
1479 else if (temp & 0x100) {
1480 actual_temp = temp & 0x1ff;
1481 actual_temp |= ~0x1ff;
1483 actual_temp = temp & 0xff;
/* raw value is in half-degrees; convert to millidegrees */
1485 actual_temp = (actual_temp * 1000) / 2;
/* sumo_get_temp - read the APU temperature; the raw 8-bit reading is
 * offset by 49 degrees, returned in millidegrees */
1491 int sumo_get_temp(struct radeon_device *rdev)
1493 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1494 int actual_temp = temp - 49;
1496 return actual_temp * 1000;
1500 * sumo_pm_init_profile - Initialize power profiles callback.
1502 * @rdev: radeon_device pointer
1504 * Initialize the power states used in profile mode
1505 * (sumo, trinity, SI).
1506 * Used for profile mode only.
1508 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile mirrors the BIOS default power state */
1513 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1514 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1515 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1516 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* low/mid profiles share one state: battery on mobility parts,
 * performance otherwise */
1519 if (rdev->flags & RADEON_IS_MOBILITY)
1520 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1522 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1524 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1525 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1526 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1527 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1529 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1530 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1531 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1532 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1534 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1535 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1536 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1537 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1539 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1540 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1541 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1542 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles always use the performance state, with the highest
 * clock mode while the display is on */
1545 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1546 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1547 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1548 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1549 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1550 rdev->pm.power_state[idx].num_clock_modes - 1;
1552 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1553 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1554 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1555 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1556 rdev->pm.power_state[idx].num_clock_modes - 1;
1560 * btc_pm_init_profile - Initialize power profiles callback.
1562 * @rdev: radeon_device pointer
1564 * Initialize the power states used in profile mode
1566 * Used for profile mode only.
1568 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: BIOS default state, highest (index 2) clock mode on */
1573 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1574 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1575 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1576 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1577 /* starting with BTC, there is one state that is used for both
1578 * MH and SH. Difference is that we always use the high clock index for
1581 if (rdev->flags & RADEON_IS_MOBILITY)
1582 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1584 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* single-head profiles: clock-mode index scales 0/1/2 for low/mid/high */
1586 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1587 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1588 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1589 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1591 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1592 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1593 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1594 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1596 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1597 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1598 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1599 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
/* multi-head profiles use the same state/clock-mode pattern */
1601 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1602 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1603 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1604 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1606 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1607 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1608 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1609 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1611 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1612 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1613 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1614 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1618 * evergreen_pm_misc - set additional pm hw parameters callback.
1620 * @rdev: radeon_device pointer
1622 * Set non-clock parameters associated with a power state
1623 * (voltage, etc.) (evergreen+).
1625 void evergreen_pm_misc(struct radeon_device *rdev)
1627 int req_ps_idx = rdev->pm.requested_power_state_index;
1628 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1629 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1630 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1632 if (voltage->type == VOLTAGE_SW) {
1633 /* 0xff0x are flags rather then an actual voltage */
1634 if ((voltage->voltage & 0xff00) == 0xff00)
/* only touch the regulator when the vddc value actually changes */
1636 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1637 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1638 rdev->pm.current_vddc = voltage->voltage;
1639 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1642 /* starting with BTC, there is one state that is used for both
1643 * MH and SH. Difference is that we always use the high clock index for
/* on BTC+ multi-head profiles, take vddci from the high-MH clock mode */
1646 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1647 (rdev->family >= CHIP_BARTS) &&
1648 rdev->pm.active_crtc_count &&
1649 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1650 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1651 voltage = &rdev->pm.power_state[req_ps_idx].
1652 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1654 /* 0xff0x are flags rather then an actual voltage */
1655 if ((voltage->vddci & 0xff00) == 0xff00)
1657 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1658 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1659 rdev->pm.current_vddci = voltage->vddci;
1660 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1666 * evergreen_pm_prepare - pre-power state change callback.
1668 * @rdev: radeon_device pointer
1670 * Prepare for a power state change (evergreen+).
1672 void evergreen_pm_prepare(struct radeon_device *rdev)
1674 struct drm_device *ddev = rdev->ddev;
1675 struct drm_crtc *crtc;
1676 struct radeon_crtc *radeon_crtc;
1679 /* disable any active CRTCs */
1680 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1681 radeon_crtc = to_radeon_crtc(crtc);
1682 if (radeon_crtc->enabled) {
/* set the read-request-disable bit so the crtc stops fetching from
 * memory during the state change */
1683 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1684 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1685 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1691 * evergreen_pm_finish - post-power state change callback.
1693 * @rdev: radeon_device pointer
1695 * Clean up after a power state change (evergreen+).
1697 void evergreen_pm_finish(struct radeon_device *rdev)
1699 struct drm_device *ddev = rdev->ddev;
1700 struct drm_crtc *crtc;
1701 struct radeon_crtc *radeon_crtc;
1704 /* enable any active CRTCs */
1705 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1706 radeon_crtc = to_radeon_crtc(crtc);
1707 if (radeon_crtc->enabled) {
/* clear the bit set in evergreen_pm_prepare() so scanout resumes */
1708 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1709 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1710 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1716 * evergreen_hpd_sense - hpd sense callback.
1718 * @rdev: radeon_device pointer
1719 * @hpd: hpd (hotplug detect) pin
1721 * Checks if a digital monitor is connected (evergreen+).
1722 * Returns true if connected, false if not connected.
1724 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1726 if (hpd == RADEON_HPD_NONE)
/* the SENSE bit of the per-pin status register reflects pin state */
1729 return !!(RREG32(DC_HPDx_INT_STATUS_REG(hpd)) & DC_HPDx_SENSE);
1733 * evergreen_hpd_set_polarity - hpd set polarity callback.
1735 * @rdev: radeon_device pointer
1736 * @hpd: hpd (hotplug detect) pin
1738 * Set the polarity of the hpd pin (evergreen+).
1740 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1741 enum radeon_hpd_id hpd)
1743 bool connected = evergreen_hpd_sense(rdev, hpd);
1745 if (hpd == RADEON_HPD_NONE)
/* invert the interrupt polarity so the next transition (plug or
 * unplug) generates an interrupt */
1749 WREG32_AND(DC_HPDx_INT_CONTROL(hpd), ~DC_HPDx_INT_POLARITY);
1751 WREG32_OR(DC_HPDx_INT_CONTROL(hpd), DC_HPDx_INT_POLARITY);
1755 * evergreen_hpd_init - hpd setup callback.
1757 * @rdev: radeon_device pointer
1759 * Setup the hpd pins used by the card (evergreen+).
1760 * Enable the pin, set the polarity, and enable the hpd interrupts.
1762 void evergreen_hpd_init(struct radeon_device *rdev)
1764 struct drm_device *dev = rdev->ddev;
1765 struct drm_connector *connector;
1766 unsigned enabled = 0;
/* connection timer / RX int timer values plus the pin-enable bit,
 * written to every enabled pin's control register below */
1767 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1768 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1770 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1771 enum radeon_hpd_id hpd =
1772 to_radeon_connector(connector)->hpd.hpd;
1774 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1775 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1776 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1777 * aux dp channel on imac and help (but not completely fix)
1778 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1779 * also avoid interrupt storms during dpms.
1784 if (hpd == RADEON_HPD_NONE)
1787 WREG32(DC_HPDx_CONTROL(hpd), tmp);
/* record this pin in the mask passed to the irq layer */
1788 enabled |= 1 << hpd;
1790 radeon_hpd_set_polarity(rdev, hpd);
1792 radeon_irq_kms_enable_hpd(rdev, enabled);
1796 * evergreen_hpd_fini - hpd tear down callback.
1798 * @rdev: radeon_device pointer
1800 * Tear down the hpd pins used by the card (evergreen+).
1801 * Disable the hpd interrupts.
1803 void evergreen_hpd_fini(struct radeon_device *rdev)
1805 struct drm_device *dev = rdev->ddev;
1806 struct drm_connector *connector;
1807 unsigned disabled = 0;
1809 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1810 enum radeon_hpd_id hpd =
1811 to_radeon_connector(connector)->hpd.hpd;
1813 if (hpd == RADEON_HPD_NONE)
/* clearing the control register disables the pin; collect the mask
 * of pins whose interrupts must be disabled */
1816 WREG32(DC_HPDx_CONTROL(hpd), 0);
1817 disabled |= 1 << hpd;
1819 radeon_irq_kms_disable_hpd(rdev, disabled);
1822 /* watermark setup */
/*
 * evergreen_line_buffer_adjust - partition the shared line buffer between
 * the two crtcs of a pair and allocate DMIF buffers (DCE4.1/DCE5).
 * Returns the line-buffer size granted to this crtc (the mode-width
 * comparisons and return statements are elided in this listing).
 */
1824 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1825 struct radeon_crtc *radeon_crtc,
1826 struct drm_display_mode *mode,
1827 struct drm_display_mode *other_mode)
1829 u32 tmp, buffer_alloc, i;
1830 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1833 * There are 3 line buffers, each one shared by 2 display controllers.
1834 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1835 * the display controllers. The paritioning is done via one of four
1836 * preset allocations specified in bits 2:0:
1837 * first display controller
1838 * 0 - first half of lb (3840 * 2)
1839 * 1 - first 3/4 of lb (5760 * 2)
1840 * 2 - whole lb (7680 * 2), other crtc must be disabled
1841 * 3 - first 1/4 of lb (1920 * 2)
1842 * second display controller
1843 * 4 - second half of lb (3840 * 2)
1844 * 5 - second 3/4 of lb (5760 * 2)
1845 * 6 - whole lb (7680 * 2), other crtc must be disabled
1846 * 7 - last 1/4 of lb (1920 * 2)
1848 /* this can get tricky if we have two large displays on a paired group
1849 * of crtcs. Ideally for multiple large displays we'd assign them to
1850 * non-linked crtcs for maximum line buffer allocation.
1852 if (radeon_crtc->base.enabled && mode) {
1857 tmp = 2; /* whole */
1865 /* second controller of the pair uses second half of the lb */
1866 if (radeon_crtc->crtc_id % 2)
1868 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* DCE4.1/DCE5: request DMIF buffers and wait for the allocation to
 * complete (bounded by rdev->usec_timeout) */
1870 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1871 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1872 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1873 for (i = 0; i < rdev->usec_timeout; i++) {
1874 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1875 DMIF_BUFFERS_ALLOCATED_COMPLETED)
/* translate the chosen split into the lb size returned to the
 * watermark code; DCE5 sizes differ from DCE4 (values elided here) */
1881 if (radeon_crtc->base.enabled && mode) {
1886 if (ASIC_IS_DCE5(rdev))
1892 if (ASIC_IS_DCE5(rdev))
1898 if (ASIC_IS_DCE5(rdev))
1904 if (ASIC_IS_DCE5(rdev))
1911 /* controller not enabled, so no lb used */
/* evergreen_get_number_of_dram_channels - decode the NOOFCHAN field of
 * MC_SHARED_CHMAP into a channel count (case labels/returns elided in
 * this listing) */
1915 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1917 u32 tmp = RREG32(MC_SHARED_CHMAP);
1919 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Input parameters for the display watermark calculations below. */
1932 struct evergreen_wm_params {
1933 u32 dram_channels; /* number of dram channels */
1934 u32 yclk; /* bandwidth per dram data pin in kHz */
1935 u32 sclk; /* engine clock in kHz */
1936 u32 disp_clk; /* display clock in kHz */
1937 u32 src_width; /* viewport width */
1938 u32 active_time; /* active display time in ns */
1939 u32 blank_time; /* blank time in ns */
1940 bool interlaced; /* mode is interlaced */
1941 fixed20_12 vsc; /* vertical scale ratio */
1942 u32 num_heads; /* number of active crtcs */
1943 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1944 u32 lb_size; /* line buffer allocated to pipe */
1945 u32 vtaps; /* vertical scaler taps */
/* evergreen_dram_bandwidth - raw DRAM bandwidth in MB/s:
 * (yclk/1000) * channels * 4 bytes * 0.7 efficiency, in 20.12 fixed point */
1948 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1950 /* Calculate DRAM Bandwidth and the part allocated to display. */
1951 fixed20_12 dram_efficiency; /* 0.7 */
1952 fixed20_12 yclk, dram_channels, bandwidth;
1955 a.full = dfixed_const(1000);
1956 yclk.full = dfixed_const(wm->yclk);
1957 yclk.full = dfixed_div(yclk, a);
1958 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1959 a.full = dfixed_const(10);
1960 dram_efficiency.full = dfixed_const(7);
1961 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1962 bandwidth.full = dfixed_mul(dram_channels, yclk);
1963 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1965 return dfixed_trunc(bandwidth);
/* evergreen_dram_bandwidth_for_display - DRAM bandwidth reserved for
 * display; same formula as evergreen_dram_bandwidth() but with the
 * worst-case 0.3 display allocation instead of 0.7 efficiency */
1968 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1970 /* Calculate DRAM Bandwidth and the part allocated to display. */
1971 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1972 fixed20_12 yclk, dram_channels, bandwidth;
1975 a.full = dfixed_const(1000);
1976 yclk.full = dfixed_const(wm->yclk);
1977 yclk.full = dfixed_div(yclk, a);
1978 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1979 a.full = dfixed_const(10);
1980 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1981 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1982 bandwidth.full = dfixed_mul(dram_channels, yclk);
1983 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1985 return dfixed_trunc(bandwidth);
/* evergreen_data_return_bandwidth - display data return bandwidth:
 * (sclk/1000) * 32 bytes * 0.8 efficiency */
1988 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1990 /* Calculate the display Data return Bandwidth */
1991 fixed20_12 return_efficiency; /* 0.8 */
1992 fixed20_12 sclk, bandwidth;
1995 a.full = dfixed_const(1000);
1996 sclk.full = dfixed_const(wm->sclk);
1997 sclk.full = dfixed_div(sclk, a);
1998 a.full = dfixed_const(10);
1999 return_efficiency.full = dfixed_const(8);
2000 return_efficiency.full = dfixed_div(return_efficiency, a);
2001 a.full = dfixed_const(32);
2002 bandwidth.full = dfixed_mul(a, sclk);
2003 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2005 return dfixed_trunc(bandwidth);
/* evergreen_dmif_request_bandwidth - DMIF request bandwidth:
 * (disp_clk/1000) * 32 bytes * 0.8 efficiency */
2008 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2010 /* Calculate the DMIF Request Bandwidth */
2011 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2012 fixed20_12 disp_clk, bandwidth;
2015 a.full = dfixed_const(1000);
2016 disp_clk.full = dfixed_const(wm->disp_clk);
2017 disp_clk.full = dfixed_div(disp_clk, a);
2018 a.full = dfixed_const(10);
2019 disp_clk_request_efficiency.full = dfixed_const(8);
2020 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2021 a.full = dfixed_const(32);
2022 bandwidth.full = dfixed_mul(a, disp_clk);
2023 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2025 return dfixed_trunc(bandwidth);
/* evergreen_available_bandwidth - minimum of DRAM, data-return and DMIF
 * request bandwidths; the peak the display can draw temporarily */
2028 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2030 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2031 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2032 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2033 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2035 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/* evergreen_average_bandwidth - average bandwidth the mode consumes:
 * src_width * bytes_per_pixel * vsc / line_time (line_time in us) */
2038 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2040 /* Calculate the display mode Average Bandwidth
2041 * DisplayMode should contain the source and destination dimensions,
2045 fixed20_12 line_time;
2046 fixed20_12 src_width;
2047 fixed20_12 bandwidth;
2050 a.full = dfixed_const(1000);
2051 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2052 line_time.full = dfixed_div(line_time, a);
2053 bpp.full = dfixed_const(wm->bytes_per_pixel);
2054 src_width.full = dfixed_const(wm->src_width);
2055 bandwidth.full = dfixed_mul(src_width, bpp);
2056 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2057 bandwidth.full = dfixed_div(bandwidth, line_time);
2059 return dfixed_trunc(bandwidth);
/*
 * evergreen_latency_watermark - worst-case latency (in ns) the display
 * must tolerate: memory latency plus the time other heads' chunk and
 * cursor requests can delay ours plus dc pipe latency, extended by the
 * line-fill deficit when the line buffer cannot fill within active time.
 */
2062 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2064 /* First calcualte the latency in ns */
2065 u32 mc_latency = 2000; /* 2000 ns. */
2066 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2067 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2068 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2069 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2070 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2071 (wm->num_heads * cursor_line_pair_return_time);
2072 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2073 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
/* no active heads: nothing to wait for (early return elided) */
2076 if (wm->num_heads == 0)
/* downscaling or multi-tap scaling needs up to 4 source lines per
 * destination line, otherwise 2 */
2079 a.full = dfixed_const(2);
2080 b.full = dfixed_const(1);
2081 if ((wm->vsc.full > a.full) ||
2082 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2084 ((wm->vsc.full >= a.full) && wm->interlaced))
2085 max_src_lines_per_dst_line = 4;
2087 max_src_lines_per_dst_line = 2;
/* line buffer fill bandwidth: our share of available bandwidth,
 * capped by what the display clock can consume */
2089 a.full = dfixed_const(available_bandwidth);
2090 b.full = dfixed_const(wm->num_heads);
2091 a.full = dfixed_div(a, b);
2093 lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);
2095 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2096 b.full = dfixed_const(1000);
2097 c.full = dfixed_const(lb_fill_bw);
2098 b.full = dfixed_div(c, b);
2099 a.full = dfixed_div(a, b);
2100 line_fill_time = dfixed_trunc(a);
/* if the lb fills faster than a line is displayed, latency is
 * unaffected; otherwise add the shortfall */
2102 if (line_fill_time < wm->active_time)
2105 return latency + (line_fill_time - wm->active_time);
/* true if this head's average bandwidth fits in its per-head share of
 * the display-allocated DRAM bandwidth (return statements elided) */
2109 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2111 if (evergreen_average_bandwidth(wm) <=
2112 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* true if this head's average bandwidth fits in its per-head share of
 * the overall available bandwidth (return statements elided) */
2118 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2120 if (evergreen_average_bandwidth(wm) <=
2121 (evergreen_available_bandwidth(wm) / wm->num_heads))
/*
 * evergreen_check_latency_hiding - can the allocated line buffer hide
 * the computed latency?  The number of latency-tolerant lines depends on
 * the vertical scale ratio and how many lb partitions fit the source
 * width relative to the scaler taps (return statements elided).
 */
2127 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2129 u32 lb_partitions = wm->lb_size / wm->src_width;
2130 u32 line_time = wm->active_time + wm->blank_time;
2131 u32 latency_tolerant_lines;
2135 a.full = dfixed_const(1);
2136 if (wm->vsc.full > a.full)
2137 latency_tolerant_lines = 1;
2139 if (lb_partitions <= (wm->vtaps + 1))
2140 latency_tolerant_lines = 1;
2142 latency_tolerant_lines = 2;
2145 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2147 if (evergreen_latency_watermark(wm) <= latency_hiding)
/* Compute and program the display watermarks for one CRTC: latency
 * watermarks for the high and low clock states, the arbitration priority
 * marks, and the line-buffer lead; the results are also cached on the
 * radeon_crtc for later use by DPM. */
2153 static void evergreen_program_watermarks(struct radeon_device *rdev,
2154 struct radeon_crtc *radeon_crtc,
2155 u32 lb_size, u32 num_heads)
2157 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2158 struct evergreen_wm_params wm_low, wm_high;
2162 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2163 u32 priority_a_mark = 0, priority_b_mark = 0;
2164 u32 priority_a_cnt = PRIORITY_OFF;
2165 u32 priority_b_cnt = PRIORITY_OFF;
/* each display pipe has a 16-register stride in the arbitration block */
2166 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2167 u32 tmp, arb_control3;
2170 if (radeon_crtc->base.enabled && num_heads && mode) {
/* active/total line times in microseconds-scaled units */
2171 active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
2173 line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
/* line_time is written to a 16-bit register field below */
2175 line_time = min(line_time, (u32)65535);
2178 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2180 /* watermark for high clocks */
2181 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2183 radeon_dpm_get_mclk(rdev, false) * 10;
2185 radeon_dpm_get_sclk(rdev, false) * 10;
2187 wm_high.yclk = rdev->pm.current_mclk * 10;
2188 wm_high.sclk = rdev->pm.current_sclk * 10;
2191 wm_high.disp_clk = mode->clock;
2192 wm_high.src_width = mode->crtc_hdisplay;
2193 wm_high.active_time = active_time;
2194 wm_high.blank_time = line_time - wm_high.active_time;
2195 wm_high.interlaced = false;
2196 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2197 wm_high.interlaced = true;
2198 wm_high.vsc = radeon_crtc->vsc;
2200 if (radeon_crtc->rmx_type != RMX_OFF)
2202 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2203 wm_high.lb_size = lb_size;
2204 wm_high.dram_channels = dram_channels;
2205 wm_high.num_heads = num_heads;
2207 /* watermark for low clocks */
2208 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2210 radeon_dpm_get_mclk(rdev, true) * 10;
2212 radeon_dpm_get_sclk(rdev, true) * 10;
2214 wm_low.yclk = rdev->pm.current_mclk * 10;
2215 wm_low.sclk = rdev->pm.current_sclk * 10;
2218 wm_low.disp_clk = mode->clock;
2219 wm_low.src_width = mode->crtc_hdisplay;
2220 wm_low.active_time = active_time;
2221 wm_low.blank_time = line_time - wm_low.active_time;
2222 wm_low.interlaced = false;
2223 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2224 wm_low.interlaced = true;
2225 wm_low.vsc = radeon_crtc->vsc;
2227 if (radeon_crtc->rmx_type != RMX_OFF)
2229 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2230 wm_low.lb_size = lb_size;
2231 wm_low.dram_channels = dram_channels;
2232 wm_low.num_heads = num_heads;
2234 /* set for high clocks; clamp to the 16-bit watermark field */
2235 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2236 /* set for low clocks */
2237 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2239 /* possibly force display priority to high */
2240 /* should really do this at mode validation time... */
2241 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2242 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2243 !evergreen_check_latency_hiding(&wm_high) ||
2244 (rdev->disp_priority == 2)) {
2245 DRM_DEBUG_KMS("force priority a to high\n");
2246 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2248 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2249 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2250 !evergreen_check_latency_hiding(&wm_low) ||
2251 (rdev->disp_priority == 2)) {
2252 DRM_DEBUG_KMS("force priority b to high\n");
2253 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority mark A: watermark_a (us) * pixel clock * hsc / 16 pixels */
2256 a.full = dfixed_const(1000);
2257 b.full = dfixed_const(mode->clock);
2258 b.full = dfixed_div(b, a);
2259 c.full = dfixed_const(latency_watermark_a);
2260 c.full = dfixed_mul(c, b);
2261 c.full = dfixed_mul(c, radeon_crtc->hsc);
2262 c.full = dfixed_div(c, a);
2263 a.full = dfixed_const(16);
2264 c.full = dfixed_div(c, a);
2265 priority_a_mark = dfixed_trunc(c);
2266 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* priority mark B: same computation using the low-clock watermark */
2268 a.full = dfixed_const(1000);
2269 b.full = dfixed_const(mode->clock);
2270 b.full = dfixed_div(b, a);
2271 c.full = dfixed_const(latency_watermark_b);
2272 c.full = dfixed_mul(c, b);
2273 c.full = dfixed_mul(c, radeon_crtc->hsc);
2274 c.full = dfixed_div(c, a);
2275 a.full = dfixed_const(16);
2276 c.full = dfixed_div(c, a);
2277 priority_b_mark = dfixed_trunc(c);
2278 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2280 /* Save number of lines the linebuffer leads before the scanout */
2281 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
/* program watermark set A, then set B, via the arbitration select */
2285 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2287 tmp &= ~LATENCY_WATERMARK_MASK(3);
2288 tmp |= LATENCY_WATERMARK_MASK(1);
2289 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2290 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2291 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2292 LATENCY_HIGH_WATERMARK(line_time)));
2294 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2295 tmp &= ~LATENCY_WATERMARK_MASK(3);
2296 tmp |= LATENCY_WATERMARK_MASK(2);
2297 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2298 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2299 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2300 LATENCY_HIGH_WATERMARK(line_time)));
2301 /* restore original selection */
2302 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2304 /* write the priority marks */
2305 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2306 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2308 /* save values for DPM */
2309 radeon_crtc->line_time = line_time;
2310 radeon_crtc->wm_high = latency_watermark_a;
2311 radeon_crtc->wm_low = latency_watermark_b;
2315 * evergreen_bandwidth_update - update display watermarks callback.
2317 * @rdev: radeon_device pointer
2319 * Update the display watermarks based on the requested mode(s).
2322 void evergreen_bandwidth_update(struct radeon_device *rdev)
2324 struct drm_display_mode *mode0 = NULL;
2325 struct drm_display_mode *mode1 = NULL;
2326 u32 num_heads = 0, lb_size;
/* nothing to do before modesetting is initialized */
2329 if (!rdev->mode_info.mode_config_initialized)
2332 radeon_update_display_priority(rdev);
/* count the enabled heads first; used by the watermark math */
2334 for (i = 0; i < rdev->num_crtc; i++) {
2335 if (rdev->mode_info.crtcs[i]->base.enabled)
/* CRTCs are handled in pairs because they share a line buffer */
2338 for (i = 0; i < rdev->num_crtc; i += 2) {
2339 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2340 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2341 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2342 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2343 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2344 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2349 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2351 * @rdev: radeon_device pointer
2353 * Wait for the MC (memory controller) to be idle.
2355 * Returns 0 if the MC is idle, -1 if not.
2357 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
/* poll SRBM_STATUS until the MC busy bits clear or we time out */
2362 for (i = 0; i < rdev->usec_timeout; i++) {
2363 /* read MC_STATUS */
2364 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/* Flush the PCIE GART TLB: flush HDP first, then issue a VM context 0
 * request and poll for the response with a timeout. */
2375 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
/* flush the HDP cache so writes are visible before the TLB flush */
2380 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2382 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2383 for (i = 0; i < rdev->usec_timeout; i++) {
2384 /* read MC_STATUS */
2385 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2386 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2388 pr_warn("[drm] r600 flush TLB failed\n");
/* Enable the PCIE GART: pin the page table in VRAM, configure the L2
 * cache and L1 TLBs, program VM context 0 with the GTT range and page
 * table base, then flush the TLB. Returns 0 on success or a negative
 * error code. */
2398 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2403 if (rdev->gart.robj == NULL) {
2404 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2407 r = radeon_gart_table_vram_pin(rdev);
2410 /* Setup L2 cache */
2411 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2412 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2413 EFFECTIVE_L2_QUEUE_SIZE(7));
2414 WREG32(VM_L2_CNTL2, 0);
2415 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2416 /* Setup TLB control */
2417 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2418 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2419 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2420 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* fusion (IGP) parts use the FUS_ register variants */
2421 if (rdev->flags & RADEON_IS_IGP) {
2422 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2423 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2424 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2426 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2427 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2428 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* only these families have a fourth MD L1 TLB */
2429 if ((rdev->family == CHIP_JUNIPER) ||
2430 (rdev->family == CHIP_CYPRESS) ||
2431 (rdev->family == CHIP_HEMLOCK) ||
2432 (rdev->family == CHIP_BARTS))
2433 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2435 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2436 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2437 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2438 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* VM context 0 covers the GTT aperture; addresses are 4K-page indices */
2439 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2440 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2441 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2442 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2443 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* faults fall back to the dummy page */
2444 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2445 (u32)(rdev->dummy_page.addr >> 12));
2446 WREG32(VM_CONTEXT1_CNTL, 0);
2448 evergreen_pcie_gart_tlb_flush(rdev);
2449 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2450 (unsigned)(rdev->mc.gtt_size >> 20),
2451 (unsigned long long)rdev->gart.table_addr);
2452 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off both VM contexts, reduce the L2/L1
 * setup to a minimal pass-through configuration, and unpin the page
 * table from VRAM. */
2456 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2460 /* Disable all tables */
2461 WREG32(VM_CONTEXT0_CNTL, 0);
2462 WREG32(VM_CONTEXT1_CNTL, 0);
2464 /* Setup L2 cache */
2465 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2466 EFFECTIVE_L2_QUEUE_SIZE(7));
2467 WREG32(VM_L2_CNTL2, 0);
2468 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2469 /* Setup TLB control */
2470 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2471 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2472 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2473 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2474 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2475 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2476 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2477 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2478 radeon_gart_table_vram_unpin(rdev);
/* Tear down the PCIE GART: disable it, free the page table VRAM object,
 * then release the generic GART state. */
2481 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2483 evergreen_pcie_gart_disable(rdev);
2484 radeon_gart_table_vram_free(rdev);
2485 radeon_gart_fini(rdev);
/* Configure the MC for AGP operation: program the same L2/L1 TLB setup
 * as the GART path but leave both VM contexts disabled (AGP does not use
 * the GPU page table). */
2489 static void evergreen_agp_enable(struct radeon_device *rdev)
2493 /* Setup L2 cache */
2494 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2495 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2496 EFFECTIVE_L2_QUEUE_SIZE(7));
2497 WREG32(VM_L2_CNTL2, 0);
2498 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2499 /* Setup TLB control */
2500 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2501 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2502 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2503 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2504 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2505 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2506 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2507 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2508 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2509 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2510 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* no page table: keep both VM contexts off */
2511 WREG32(VM_CONTEXT0_CNTL, 0);
2512 WREG32(VM_CONTEXT1_CNTL, 0);
/* Per-instance register block offsets, indexed by DIG/UNIPHY/DP/interrupt
 * group number. Used by the DP SST detection and blanking helpers below. */
2515 static const unsigned ni_dig_offsets[] =
2517 NI_DIG0_REGISTER_OFFSET,
2518 NI_DIG1_REGISTER_OFFSET,
2519 NI_DIG2_REGISTER_OFFSET,
2520 NI_DIG3_REGISTER_OFFSET,
2521 NI_DIG4_REGISTER_OFFSET,
2522 NI_DIG5_REGISTER_OFFSET
/* UNIPHY transmitter control register offsets, one per PHY */
2525 static const unsigned ni_tx_offsets[] =
2527 NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2528 NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2529 NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2530 NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2531 NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2532 NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
/* DP block register offsets, one per DP encoder instance */
2535 static const unsigned evergreen_dp_offsets[] =
2537 EVERGREEN_DP0_REGISTER_OFFSET,
2538 EVERGREEN_DP1_REGISTER_OFFSET,
2539 EVERGREEN_DP2_REGISTER_OFFSET,
2540 EVERGREEN_DP3_REGISTER_OFFSET,
2541 EVERGREEN_DP4_REGISTER_OFFSET,
2542 EVERGREEN_DP5_REGISTER_OFFSET
/* display interrupt status registers, one per register page */
2545 static const unsigned evergreen_disp_int_status[] =
2547 DISP_INTERRUPT_STATUS,
2548 DISP_INTERRUPT_STATUS_CONTINUE,
2549 DISP_INTERRUPT_STATUS_CONTINUE2,
2550 DISP_INTERRUPT_STATUS_CONTINUE3,
2551 DISP_INTERRUPT_STATUS_CONTINUE4,
2552 DISP_INTERRUPT_STATUS_CONTINUE5
2556 * Assumes EVERGREEN_CRTC_MASTER_EN is set for the requested CRTC.
2557 * We walk from CRTC to connector, which is not reliable since the
2558 * proper direction is the opposite one. If the CRTC is enabled,
2559 * find the dig_fe that selects this CRTC and verify it is enabled;
2560 * if such a dig_fe is found, find the dig_be that selects it and
2561 * verify that it is enabled and in DP_SST mode.
2562 * If UNIPHY_PLL_CONTROL1 is enabled, the CRTC timing should be
2563 * disconnected from the DP symbol clocks.
2565 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2566 unsigned crtc_id, unsigned *ret_dig_fe)
2572 unsigned uniphy_pll;
2573 unsigned digs_fe_selected;
2574 unsigned dig_be_mode;
2575 unsigned dig_fe_mask;
2576 bool is_enabled = false;
2577 bool found_crtc = false;
2579 /* loop through all running dig_fe to find selected crtc */
2580 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2581 dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2582 if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2583 crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2584 /* found running pipe */
/* remember which front end drives this CRTC as a bitmask */
2586 dig_fe_mask = 1 << i;
2593 /* loop through all running dig_be to find selected dig_fe */
2594 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2595 dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2596 /* if dig_fe_selected by dig_be? */
2597 digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2598 dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2599 if (dig_fe_mask & digs_fe_selected &&
2600 /* if dig_be in sst mode? */
2601 dig_be_mode == NI_DIG_BE_DPSST) {
2602 dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2604 uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2606 /* dig_be enable and tx is running */
2607 if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2608 dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2609 uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
/* report the front end index back to the caller */
2611 *ret_dig_fe = dig_fe;
2622 * Blank the DIG when it is in DP SST mode, so the
2623 * DIG ignores the CRTC timing.
2625 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2628 unsigned stream_ctrl;
2630 unsigned counter = 0;
/* guard against indexing past the DP offset table */
2632 if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2633 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2637 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2638 evergreen_dp_offsets[dig_fe]);
2639 if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2640 DRM_ERROR("dig %d , should be enable\n", dig_fe);
/* disable the video stream */
2644 stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2645 WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2646 evergreen_dp_offsets[dig_fe], stream_ctrl);
/* poll (bounded) until the stream status reports the stream stopped */
2648 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2649 evergreen_dp_offsets[dig_fe]);
2650 while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2653 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2654 evergreen_dp_offsets[dig_fe]);
2657 DRM_ERROR("counter exceeds %d\n", counter);
/* reset the steering FIFO now that the stream is down */
2659 fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2660 fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2661 WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
/* Quiesce the display side before reprogramming the MC: save VGA state,
 * blank all enabled CRTCs, blank any DP SST outputs, black out the MC,
 * and lock the double-buffered display registers. State needed to undo
 * all of this is recorded in @save for evergreen_mc_resume(). */
2665 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2667 u32 crtc_enabled, tmp, frame_count, blackout;
2671 if (!ASIC_IS_NODCE(rdev)) {
2672 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2673 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2675 /* disable VGA render */
2676 WREG32(VGA_RENDER_CONTROL, 0);
2678 /* blank the display controllers */
2679 for (i = 0; i < rdev->num_crtc; i++) {
2680 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2682 save->crtc_enabled[i] = true;
/* DCE6+ blanks via BLANK_CONTROL; older parts disable read requests */
2683 if (ASIC_IS_DCE6(rdev)) {
2684 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2685 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2686 radeon_wait_for_vblank(rdev, i);
2687 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2688 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2689 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2690 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2693 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2694 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2695 radeon_wait_for_vblank(rdev, i);
2696 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2697 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2698 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2699 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2702 /* wait for the next frame */
2703 frame_count = radeon_get_vblank_counter(rdev, i);
2704 for (j = 0; j < rdev->usec_timeout; j++) {
2705 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2709 /* we should disable the DIG if it drives DP SST, */
2710 /* but we are in radeon_device_init and the topology is unknown */
2711 /* (it only becomes available after radeon_modeset_init). */
2712 /* radeon_atom_encoder_dpms_dig would do the job */
2713 /* if we could initialize it properly; */
2714 /* for now we do it manually here. */
2716 if (ASIC_IS_DCE5(rdev) &&
2717 evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
2718 evergreen_blank_dp_output(rdev, dig_fe);
2719 /* we could remove the 6 lines below */
2720 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2721 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2722 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2723 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2724 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2725 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2726 save->crtc_enabled[i] = false;
2729 save->crtc_enabled[i] = false;
2733 radeon_mc_wait_for_idle(rdev);
2735 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2736 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2737 /* Block CPU access */
2738 WREG32(BIF_FB_EN, 0);
2739 /* blackout the MC */
2740 blackout &= ~BLACKOUT_MODE_MASK;
2741 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2743 /* wait for the MC to settle */
2746 /* lock double buffered regs */
2747 for (i = 0; i < rdev->num_crtc; i++) {
2748 if (save->crtc_enabled[i]) {
2749 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2750 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2751 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2752 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2754 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2757 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Undo evergreen_mc_stop(): point the CRTCs at the new VRAM base,
 * unlock the double-buffered registers and wait for pending surface
 * updates, lift the MC blackout, unblank the CRTCs that were enabled,
 * and restore the saved VGA state. */
2763 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2765 u32 tmp, frame_count;
2768 /* update crtc base addresses */
2769 for (i = 0; i < rdev->num_crtc; i++) {
2770 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2771 upper_32_bits(rdev->mc.vram_start));
2772 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2773 upper_32_bits(rdev->mc.vram_start));
2774 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2775 (u32)rdev->mc.vram_start);
2776 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2777 (u32)rdev->mc.vram_start);
2780 if (!ASIC_IS_NODCE(rdev)) {
2781 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2782 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2785 /* unlock regs and wait for update */
2786 for (i = 0; i < rdev->num_crtc; i++) {
2787 if (save->crtc_enabled[i]) {
2788 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2789 if ((tmp & 0x7) != 0) {
2791 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2793 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2794 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2795 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2796 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2798 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2801 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* wait until the pending surface address update has latched */
2803 for (j = 0; j < rdev->usec_timeout; j++) {
2804 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2805 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2812 /* unblackout the MC */
2813 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2814 tmp &= ~BLACKOUT_MODE_MASK;
2815 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2816 /* allow CPU access */
2817 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2819 for (i = 0; i < rdev->num_crtc; i++) {
2820 if (save->crtc_enabled[i]) {
/* DCE6+ unblanks via BLANK_CONTROL; older parts re-enable read requests */
2821 if (ASIC_IS_DCE6(rdev)) {
2822 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2823 tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2824 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2825 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2826 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2828 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2829 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2830 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2831 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2832 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2834 /* wait for the next frame */
2835 frame_count = radeon_get_vblank_counter(rdev, i);
2836 for (j = 0; j < rdev->usec_timeout; j++) {
2837 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2843 if (!ASIC_IS_NODCE(rdev)) {
2844 /* Unlock vga access */
2845 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control)
2847 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
/* Program the memory controller aperture layout: stop the display side,
 * write the system/FB/AGP aperture registers from rdev->mc, then resume
 * the display and disable the VGA renderer. */
2851 void evergreen_mc_program(struct radeon_device *rdev)
2853 struct evergreen_mc_save save;
2857 /* Initialize HDP */
2858 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2859 WREG32((0x2c14 + j), 0x00000000);
2860 WREG32((0x2c18 + j), 0x00000000);
2861 WREG32((0x2c1c + j), 0x00000000);
2862 WREG32((0x2c20 + j), 0x00000000);
2863 WREG32((0x2c24 + j), 0x00000000);
2865 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2867 evergreen_mc_stop(rdev, &save);
2868 if (evergreen_mc_wait_for_idle(rdev)) {
2869 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2871 /* Lockout access through VGA aperture*/
2872 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2873 /* Update configuration */
2874 if (rdev->flags & RADEON_IS_AGP) {
2875 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2876 /* VRAM before AGP */
2877 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2878 rdev->mc.vram_start >> 12);
2879 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2880 rdev->mc.gtt_end >> 12);
2882 /* VRAM after AGP */
2883 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2884 rdev->mc.gtt_start >> 12);
2885 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2886 rdev->mc.vram_end >> 12);
2889 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2890 rdev->mc.vram_start >> 12);
2891 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2892 rdev->mc.vram_end >> 12);
2894 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2895 /* llano/ontario only */
2896 if ((rdev->family == CHIP_PALM) ||
2897 (rdev->family == CHIP_SUMO) ||
2898 (rdev->family == CHIP_SUMO2)) {
2899 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2900 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2901 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2902 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB location is packed as start/end in 16MB units */
2904 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2905 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2906 WREG32(MC_VM_FB_LOCATION, tmp);
2907 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2908 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2909 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2910 if (rdev->flags & RADEON_IS_AGP) {
2911 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2912 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2913 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
/* no AGP: park the AGP aperture (BOT > TOP disables it) */
2915 WREG32(MC_VM_AGP_BASE, 0);
2916 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2917 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2919 if (evergreen_mc_wait_for_idle(rdev)) {
2920 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2922 evergreen_mc_resume(rdev, &save);
2923 /* we need to own VRAM, so turn off the VGA renderer here
2924 * to stop it overwriting our objects */
2925 rv515_vga_render_disable(rdev);
/* Schedule an indirect buffer on the GFX ring: switch to DX10/11 mode,
 * emit the next-rptr update (via rptr_save_reg or writeback memory),
 * then emit the INDIRECT_BUFFER packet pointing at the IB. */
2931 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2933 struct radeon_ring *ring = &rdev->ring[ib->ring];
2936 /* set to DX10/11 mode */
2937 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2938 radeon_ring_write(ring, 1);
2940 if (ring->rptr_save_reg) {
/* +3 for this SET_CONFIG_REG packet, +4 for the IB packet below */
2941 next_rptr = ring->wptr + 3 + 4;
2942 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2943 radeon_ring_write(ring, ((ring->rptr_save_reg -
2944 PACKET3_SET_CONFIG_REG_START) >> 2));
2945 radeon_ring_write(ring, next_rptr);
2946 } else if (rdev->wb.enabled) {
2947 next_rptr = ring->wptr + 5 + 4;
2948 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2949 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
/* (1 << 18) selects a 32-bit memory write */
2950 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2951 radeon_ring_write(ring, next_rptr);
2952 radeon_ring_write(ring, 0);
2955 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2956 radeon_ring_write(ring,
2960 (ib->gpu_addr & 0xFFFFFFFC));
2961 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2962 radeon_ring_write(ring, ib->length_dw);
/* Upload the CP microcode (PFP and ME) to the GPU. Requires both
 * firmware images to have been loaded already; returns an error
 * otherwise. Big-endian firmware words are byteswapped on write. */
2966 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2968 const __be32 *fw_data;
2971 if (!rdev->me_fw || !rdev->pfp_fw)
2979 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
/* upload PFP microcode word by word */
2981 fw_data = (const __be32 *)rdev->pfp_fw->data;
2982 WREG32(CP_PFP_UCODE_ADDR, 0);
2983 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2984 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2985 WREG32(CP_PFP_UCODE_ADDR, 0);
/* upload ME microcode word by word */
2987 fw_data = (const __be32 *)rdev->me_fw->data;
2988 WREG32(CP_ME_RAM_WADDR, 0);
2989 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2990 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
/* reset the ucode address pointers */
2992 WREG32(CP_PFP_UCODE_ADDR, 0);
2993 WREG32(CP_ME_RAM_WADDR, 0);
2994 WREG32(CP_ME_RAM_RADDR, 0);
/* Bring up the command processor: emit ME_INITIALIZE, then load the
 * default clear-context state and a few initial register writes on the
 * GFX ring. */
2998 static int evergreen_cp_start(struct radeon_device *rdev)
3000 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3004 r = radeon_ring_lock(rdev, ring, 7);
3006 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3009 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3010 radeon_ring_write(ring, 0x1);
3011 radeon_ring_write(ring, 0x0);
3012 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3013 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3014 radeon_ring_write(ring, 0);
3015 radeon_ring_write(ring, 0);
3016 radeon_ring_unlock_commit(rdev, ring, false);
3019 WREG32(CP_ME_CNTL, cp_me);
/* +19 covers the preamble/clear-state/register packets emitted below */
3021 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3023 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3027 /* setup clear context state */
3028 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3029 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3031 for (i = 0; i < evergreen_default_size; i++)
3032 radeon_ring_write(ring, evergreen_default_state[i]);
3034 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3035 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3037 /* set clear context state */
3038 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3039 radeon_ring_write(ring, 0);
3041 /* SQ_VTX_BASE_VTX_LOC */
3042 radeon_ring_write(ring, 0xc0026f00);
3043 radeon_ring_write(ring, 0x00000000);
3044 radeon_ring_write(ring, 0x00000000);
3045 radeon_ring_write(ring, 0x00000000);
3048 radeon_ring_write(ring, 0xc0036f00);
3049 radeon_ring_write(ring, 0x00000bc4);
3050 radeon_ring_write(ring, 0xffffffff);
3051 radeon_ring_write(ring, 0xffffffff);
3052 radeon_ring_write(ring, 0xffffffff);
3054 radeon_ring_write(ring, 0xc0026900);
3055 radeon_ring_write(ring, 0x00000316);
3056 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3057 radeon_ring_write(ring, 0x00000010); /* */
3059 radeon_ring_unlock_commit(rdev, ring, false);
/* Resume the command processor: soft-reset the CP (and the blocks that
 * must be reset with it), program the ring buffer size, pointers and
 * writeback addresses, start the CP and run a ring test. */
3064 static int evergreen_cp_resume(struct radeon_device *rdev)
3066 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3071 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3072 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* read back to post the reset write before releasing it */
3078 RREG32(GRBM_SOFT_RESET);
3080 WREG32(GRBM_SOFT_RESET, 0);
3081 RREG32(GRBM_SOFT_RESET);
3083 /* Set ring buffer size */
3084 rb_bufsz = order_base_2(ring->ring_size / 8);
3085 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3087 tmp |= BUF_SWAP_32BIT;
3089 WREG32(CP_RB_CNTL, tmp);
3090 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3091 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3093 /* Set the write pointer delay */
3094 WREG32(CP_RB_WPTR_DELAY, 0);
3096 /* Initialize the ring buffer's read and write pointers */
3097 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3098 WREG32(CP_RB_RPTR_WR, 0);
3100 WREG32(CP_RB_WPTR, ring->wptr);
3102 /* set the wb address whether it's enabled or not */
3103 WREG32(CP_RB_RPTR_ADDR,
3104 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3105 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3106 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3108 if (rdev->wb.enabled)
3109 WREG32(SCRATCH_UMSK, 0xff);
/* no writeback: keep rptr updates disabled and mask scratch writes */
3111 tmp |= RB_NO_UPDATE;
3112 WREG32(SCRATCH_UMSK, 0);
3116 WREG32(CP_RB_CNTL, tmp);
3118 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3119 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3121 evergreen_cp_start(rdev);
3123 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3125 ring->ready = false;
3134 static void evergreen_gpu_init(struct radeon_device *rdev)
3141 u32 sq_lds_resource_mgmt;
3142 u32 sq_gpr_resource_mgmt_1;
3143 u32 sq_gpr_resource_mgmt_2;
3144 u32 sq_gpr_resource_mgmt_3;
3145 u32 sq_thread_resource_mgmt;
3146 u32 sq_thread_resource_mgmt_2;
3147 u32 sq_stack_resource_mgmt_1;
3148 u32 sq_stack_resource_mgmt_2;
3149 u32 sq_stack_resource_mgmt_3;
3150 u32 vgt_cache_invalidation;
3151 u32 hdp_host_path_cntl, tmp;
3152 u32 disabled_rb_mask;
3153 int i, j, ps_thread_count;
3155 switch (rdev->family) {
3158 rdev->config.evergreen.num_ses = 2;
3159 rdev->config.evergreen.max_pipes = 4;
3160 rdev->config.evergreen.max_tile_pipes = 8;
3161 rdev->config.evergreen.max_simds = 10;
3162 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3163 rdev->config.evergreen.max_gprs = 256;
3164 rdev->config.evergreen.max_threads = 248;
3165 rdev->config.evergreen.max_gs_threads = 32;
3166 rdev->config.evergreen.max_stack_entries = 512;
3167 rdev->config.evergreen.sx_num_of_sets = 4;
3168 rdev->config.evergreen.sx_max_export_size = 256;
3169 rdev->config.evergreen.sx_max_export_pos_size = 64;
3170 rdev->config.evergreen.sx_max_export_smx_size = 192;
3171 rdev->config.evergreen.max_hw_contexts = 8;
3172 rdev->config.evergreen.sq_num_cf_insts = 2;
3174 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3175 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3176 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3177 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3180 rdev->config.evergreen.num_ses = 1;
3181 rdev->config.evergreen.max_pipes = 4;
3182 rdev->config.evergreen.max_tile_pipes = 4;
3183 rdev->config.evergreen.max_simds = 10;
3184 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3185 rdev->config.evergreen.max_gprs = 256;
3186 rdev->config.evergreen.max_threads = 248;
3187 rdev->config.evergreen.max_gs_threads = 32;
3188 rdev->config.evergreen.max_stack_entries = 512;
3189 rdev->config.evergreen.sx_num_of_sets = 4;
3190 rdev->config.evergreen.sx_max_export_size = 256;
3191 rdev->config.evergreen.sx_max_export_pos_size = 64;
3192 rdev->config.evergreen.sx_max_export_smx_size = 192;
3193 rdev->config.evergreen.max_hw_contexts = 8;
3194 rdev->config.evergreen.sq_num_cf_insts = 2;
3196 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3197 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3198 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3199 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3202 rdev->config.evergreen.num_ses = 1;
3203 rdev->config.evergreen.max_pipes = 4;
3204 rdev->config.evergreen.max_tile_pipes = 4;
3205 rdev->config.evergreen.max_simds = 5;
3206 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3207 rdev->config.evergreen.max_gprs = 256;
3208 rdev->config.evergreen.max_threads = 248;
3209 rdev->config.evergreen.max_gs_threads = 32;
3210 rdev->config.evergreen.max_stack_entries = 256;
3211 rdev->config.evergreen.sx_num_of_sets = 4;
3212 rdev->config.evergreen.sx_max_export_size = 256;
3213 rdev->config.evergreen.sx_max_export_pos_size = 64;
3214 rdev->config.evergreen.sx_max_export_smx_size = 192;
3215 rdev->config.evergreen.max_hw_contexts = 8;
3216 rdev->config.evergreen.sq_num_cf_insts = 2;
3218 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3219 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3220 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3221 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3225 rdev->config.evergreen.num_ses = 1;
3226 rdev->config.evergreen.max_pipes = 2;
3227 rdev->config.evergreen.max_tile_pipes = 2;
3228 rdev->config.evergreen.max_simds = 2;
3229 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3230 rdev->config.evergreen.max_gprs = 256;
3231 rdev->config.evergreen.max_threads = 192;
3232 rdev->config.evergreen.max_gs_threads = 16;
3233 rdev->config.evergreen.max_stack_entries = 256;
3234 rdev->config.evergreen.sx_num_of_sets = 4;
3235 rdev->config.evergreen.sx_max_export_size = 128;
3236 rdev->config.evergreen.sx_max_export_pos_size = 32;
3237 rdev->config.evergreen.sx_max_export_smx_size = 96;
3238 rdev->config.evergreen.max_hw_contexts = 4;
3239 rdev->config.evergreen.sq_num_cf_insts = 1;
3241 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3242 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3243 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3244 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3247 rdev->config.evergreen.num_ses = 1;
3248 rdev->config.evergreen.max_pipes = 2;
3249 rdev->config.evergreen.max_tile_pipes = 2;
3250 rdev->config.evergreen.max_simds = 2;
3251 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3252 rdev->config.evergreen.max_gprs = 256;
3253 rdev->config.evergreen.max_threads = 192;
3254 rdev->config.evergreen.max_gs_threads = 16;
3255 rdev->config.evergreen.max_stack_entries = 256;
3256 rdev->config.evergreen.sx_num_of_sets = 4;
3257 rdev->config.evergreen.sx_max_export_size = 128;
3258 rdev->config.evergreen.sx_max_export_pos_size = 32;
3259 rdev->config.evergreen.sx_max_export_smx_size = 96;
3260 rdev->config.evergreen.max_hw_contexts = 4;
3261 rdev->config.evergreen.sq_num_cf_insts = 1;
3263 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3264 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3265 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3266 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3269 rdev->config.evergreen.num_ses = 1;
3270 rdev->config.evergreen.max_pipes = 4;
3271 rdev->config.evergreen.max_tile_pipes = 4;
3272 if (rdev->pdev->device == 0x9648)
3273 rdev->config.evergreen.max_simds = 3;
3274 else if ((rdev->pdev->device == 0x9647) ||
3275 (rdev->pdev->device == 0x964a))
3276 rdev->config.evergreen.max_simds = 4;
3278 rdev->config.evergreen.max_simds = 5;
3279 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3280 rdev->config.evergreen.max_gprs = 256;
3281 rdev->config.evergreen.max_threads = 248;
3282 rdev->config.evergreen.max_gs_threads = 32;
3283 rdev->config.evergreen.max_stack_entries = 256;
3284 rdev->config.evergreen.sx_num_of_sets = 4;
3285 rdev->config.evergreen.sx_max_export_size = 256;
3286 rdev->config.evergreen.sx_max_export_pos_size = 64;
3287 rdev->config.evergreen.sx_max_export_smx_size = 192;
3288 rdev->config.evergreen.max_hw_contexts = 8;
3289 rdev->config.evergreen.sq_num_cf_insts = 2;
3291 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3292 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3293 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3294 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3297 rdev->config.evergreen.num_ses = 1;
3298 rdev->config.evergreen.max_pipes = 4;
3299 rdev->config.evergreen.max_tile_pipes = 4;
3300 rdev->config.evergreen.max_simds = 2;
3301 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3302 rdev->config.evergreen.max_gprs = 256;
3303 rdev->config.evergreen.max_threads = 248;
3304 rdev->config.evergreen.max_gs_threads = 32;
3305 rdev->config.evergreen.max_stack_entries = 512;
3306 rdev->config.evergreen.sx_num_of_sets = 4;
3307 rdev->config.evergreen.sx_max_export_size = 256;
3308 rdev->config.evergreen.sx_max_export_pos_size = 64;
3309 rdev->config.evergreen.sx_max_export_smx_size = 192;
3310 rdev->config.evergreen.max_hw_contexts = 4;
3311 rdev->config.evergreen.sq_num_cf_insts = 2;
3313 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3314 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3315 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3316 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3319 rdev->config.evergreen.num_ses = 2;
3320 rdev->config.evergreen.max_pipes = 4;
3321 rdev->config.evergreen.max_tile_pipes = 8;
3322 rdev->config.evergreen.max_simds = 7;
3323 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3324 rdev->config.evergreen.max_gprs = 256;
3325 rdev->config.evergreen.max_threads = 248;
3326 rdev->config.evergreen.max_gs_threads = 32;
3327 rdev->config.evergreen.max_stack_entries = 512;
3328 rdev->config.evergreen.sx_num_of_sets = 4;
3329 rdev->config.evergreen.sx_max_export_size = 256;
3330 rdev->config.evergreen.sx_max_export_pos_size = 64;
3331 rdev->config.evergreen.sx_max_export_smx_size = 192;
3332 rdev->config.evergreen.max_hw_contexts = 8;
3333 rdev->config.evergreen.sq_num_cf_insts = 2;
3335 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3336 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3337 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3338 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3341 rdev->config.evergreen.num_ses = 1;
3342 rdev->config.evergreen.max_pipes = 4;
3343 rdev->config.evergreen.max_tile_pipes = 4;
3344 rdev->config.evergreen.max_simds = 6;
3345 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3346 rdev->config.evergreen.max_gprs = 256;
3347 rdev->config.evergreen.max_threads = 248;
3348 rdev->config.evergreen.max_gs_threads = 32;
3349 rdev->config.evergreen.max_stack_entries = 256;
3350 rdev->config.evergreen.sx_num_of_sets = 4;
3351 rdev->config.evergreen.sx_max_export_size = 256;
3352 rdev->config.evergreen.sx_max_export_pos_size = 64;
3353 rdev->config.evergreen.sx_max_export_smx_size = 192;
3354 rdev->config.evergreen.max_hw_contexts = 8;
3355 rdev->config.evergreen.sq_num_cf_insts = 2;
3357 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3358 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3359 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3360 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3363 rdev->config.evergreen.num_ses = 1;
3364 rdev->config.evergreen.max_pipes = 2;
3365 rdev->config.evergreen.max_tile_pipes = 2;
3366 rdev->config.evergreen.max_simds = 2;
3367 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3368 rdev->config.evergreen.max_gprs = 256;
3369 rdev->config.evergreen.max_threads = 192;
3370 rdev->config.evergreen.max_gs_threads = 16;
3371 rdev->config.evergreen.max_stack_entries = 256;
3372 rdev->config.evergreen.sx_num_of_sets = 4;
3373 rdev->config.evergreen.sx_max_export_size = 128;
3374 rdev->config.evergreen.sx_max_export_pos_size = 32;
3375 rdev->config.evergreen.sx_max_export_smx_size = 96;
3376 rdev->config.evergreen.max_hw_contexts = 4;
3377 rdev->config.evergreen.sq_num_cf_insts = 1;
3379 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3380 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3381 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3382 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3386 /* Initialize HDP */
3387 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3388 WREG32((0x2c14 + j), 0x00000000);
3389 WREG32((0x2c18 + j), 0x00000000);
3390 WREG32((0x2c1c + j), 0x00000000);
3391 WREG32((0x2c20 + j), 0x00000000);
3392 WREG32((0x2c24 + j), 0x00000000);
3395 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3396 WREG32(SRBM_INT_CNTL, 0x1);
3397 WREG32(SRBM_INT_ACK, 0x1);
3399 evergreen_fix_pci_max_read_req_size(rdev);
3401 RREG32(MC_SHARED_CHMAP);
3402 if ((rdev->family == CHIP_PALM) ||
3403 (rdev->family == CHIP_SUMO) ||
3404 (rdev->family == CHIP_SUMO2))
3405 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3407 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3409 /* setup tiling info dword. gb_addr_config is not adequate since it does
3410 * not have bank info, so create a custom tiling dword.
3411 * bits 3:0 num_pipes
3412 * bits 7:4 num_banks
3413 * bits 11:8 group_size
3414 * bits 15:12 row_size
3416 rdev->config.evergreen.tile_config = 0;
3417 switch (rdev->config.evergreen.max_tile_pipes) {
3420 rdev->config.evergreen.tile_config |= (0 << 0);
3423 rdev->config.evergreen.tile_config |= (1 << 0);
3426 rdev->config.evergreen.tile_config |= (2 << 0);
3429 rdev->config.evergreen.tile_config |= (3 << 0);
3432 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3433 if (rdev->flags & RADEON_IS_IGP)
3434 rdev->config.evergreen.tile_config |= 1 << 4;
3436 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3437 case 0: /* four banks */
3438 rdev->config.evergreen.tile_config |= 0 << 4;
3440 case 1: /* eight banks */
3441 rdev->config.evergreen.tile_config |= 1 << 4;
3443 case 2: /* sixteen banks */
3445 rdev->config.evergreen.tile_config |= 2 << 4;
3449 rdev->config.evergreen.tile_config |= 0 << 8;
3450 rdev->config.evergreen.tile_config |=
3451 ((gb_addr_config & 0x30000000) >> 28) << 12;
3453 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3457 efuse_straps_4 = RREG32_RCU(0x204);
3458 efuse_straps_3 = RREG32_RCU(0x203);
3459 tmp = (((efuse_straps_4 & 0xf) << 4) |
3460 ((efuse_straps_3 & 0xf0000000) >> 28));
3463 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3464 u32 rb_disable_bitmap;
3466 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3467 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3468 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3470 tmp |= rb_disable_bitmap;
3473 /* enabled rb are just the one not disabled :) */
3474 disabled_rb_mask = tmp;
3476 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3478 /* if all the backends are disabled, fix it up here */
3479 if ((disabled_rb_mask & tmp) == tmp) {
3480 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3481 disabled_rb_mask &= ~(1 << i);
3484 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3485 u32 simd_disable_bitmap;
3487 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3488 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3489 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3490 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3492 tmp |= simd_disable_bitmap;
3494 rdev->config.evergreen.active_simds = hweight32(~tmp);
3496 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3497 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3499 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3500 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3501 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3502 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3503 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3504 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3505 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3507 if ((rdev->config.evergreen.max_backends == 1) &&
3508 (rdev->flags & RADEON_IS_IGP)) {
3509 if ((disabled_rb_mask & 3) == 1) {
3510 /* RB0 disabled, RB1 enabled */
3513 /* RB1 disabled, RB0 enabled */
3517 tmp = gb_addr_config & NUM_PIPES_MASK;
3518 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3519 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3521 rdev->config.evergreen.backend_map = tmp;
3522 WREG32(GB_BACKEND_MAP, tmp);
3524 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3525 WREG32(CGTS_TCC_DISABLE, 0);
3526 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3527 WREG32(CGTS_USER_TCC_DISABLE, 0);
3529 /* set HW defaults for 3D engine */
3530 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3531 ROQ_IB2_START(0x2b)));
3533 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3535 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3540 sx_debug_1 = RREG32(SX_DEBUG_1);
3541 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3542 WREG32(SX_DEBUG_1, sx_debug_1);
3545 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3546 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3547 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3548 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3550 if (rdev->family <= CHIP_SUMO2)
3551 WREG32(SMX_SAR_CTL0, 0x00010000);
3553 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3554 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3555 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3557 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3558 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3559 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3561 WREG32(VGT_NUM_INSTANCES, 1);
3562 WREG32(SPI_CONFIG_CNTL, 0);
3563 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3564 WREG32(CP_PERFMON_CNTL, 0);
3566 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3567 FETCH_FIFO_HIWATER(0x4) |
3568 DONE_FIFO_HIWATER(0xe0) |
3569 ALU_UPDATE_FIFO_HIWATER(0x8)));
3571 sq_config = RREG32(SQ_CONFIG);
3572 sq_config &= ~(PS_PRIO(3) |
3576 sq_config |= (VC_ENABLE |
3583 switch (rdev->family) {
3589 /* no vertex cache */
3590 sq_config &= ~VC_ENABLE;
3596 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3598 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3599 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3600 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3601 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3602 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3603 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3604 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3606 switch (rdev->family) {
3611 ps_thread_count = 96;
3614 ps_thread_count = 128;
3618 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3619 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3620 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3621 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3622 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3623 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3625 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3626 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3627 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3628 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3629 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3630 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3632 WREG32(SQ_CONFIG, sq_config);
3633 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3634 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3635 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3636 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3637 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3638 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3639 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3640 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3641 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3642 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3644 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3645 FORCE_EOV_MAX_REZ_CNT(255)));
3647 switch (rdev->family) {
3653 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3656 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3659 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3660 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3662 WREG32(VGT_GS_VERTEX_REUSE, 16);
3663 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3664 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3666 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3667 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3669 WREG32(CB_PERF_CTR0_SEL_0, 0);
3670 WREG32(CB_PERF_CTR0_SEL_1, 0);
3671 WREG32(CB_PERF_CTR1_SEL_0, 0);
3672 WREG32(CB_PERF_CTR1_SEL_1, 0);
3673 WREG32(CB_PERF_CTR2_SEL_0, 0);
3674 WREG32(CB_PERF_CTR2_SEL_1, 0);
3675 WREG32(CB_PERF_CTR3_SEL_0, 0);
3676 WREG32(CB_PERF_CTR3_SEL_1, 0);
3678 /* clear render buffer base addresses */
3679 WREG32(CB_COLOR0_BASE, 0);
3680 WREG32(CB_COLOR1_BASE, 0);
3681 WREG32(CB_COLOR2_BASE, 0);
3682 WREG32(CB_COLOR3_BASE, 0);
3683 WREG32(CB_COLOR4_BASE, 0);
3684 WREG32(CB_COLOR5_BASE, 0);
3685 WREG32(CB_COLOR6_BASE, 0);
3686 WREG32(CB_COLOR7_BASE, 0);
3687 WREG32(CB_COLOR8_BASE, 0);
3688 WREG32(CB_COLOR9_BASE, 0);
3689 WREG32(CB_COLOR10_BASE, 0);
3690 WREG32(CB_COLOR11_BASE, 0);
3692 /* set the shader const cache sizes to 0 */
3693 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3695 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3698 tmp = RREG32(HDP_MISC_CNTL);
3699 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3700 WREG32(HDP_MISC_CNTL, tmp);
3702 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3703 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3705 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
/*
 * evergreen_mc_init - probe VRAM configuration and place VRAM/GTT
 *
 * Derives the memory bus width from the MC channel count and per-channel
 * size, records the PCI BAR 0 aperture, reads the VRAM size from
 * CONFIG_MEMSIZE (in bytes on fusion IGPs, in megabytes on discrete
 * parts), then hands off to the r700 helper to place VRAM and GTT in the
 * GPU address space.
 *
 * NOTE(review): this excerpt is elided - the chansize/numchan
 * assignments, the switch case bodies, the else branches and the return
 * statement are not visible here.
 */
3711 int evergreen_mc_init(struct radeon_device *rdev)
3714 int chansize, numchan;
3716 /* Get VRAM information */
3717 rdev->mc.vram_is_ddr = true;
/* fusion IGPs (PALM/SUMO/SUMO2) expose the arbiter RAMCFG at a different register */
3718 if ((rdev->family == CHIP_PALM) ||
3719 (rdev->family == CHIP_SUMO) ||
3720 (rdev->family == CHIP_SUMO2))
3721 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3723 tmp = RREG32(MC_ARB_RAMCFG);
/* per-channel width: override bit wins, else the CHANSIZE field selects it */
3724 if (tmp & CHANSIZE_OVERRIDE) {
3726 } else if (tmp & CHANSIZE_MASK) {
/* number of populated memory channels from the shared channel map */
3731 tmp = RREG32(MC_SHARED_CHMAP);
3732 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3747 rdev->mc.vram_width = numchan * chansize;
3748 /* Could aper size report 0 ? */
3749 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3750 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3751 /* Setup GPU memory space */
3752 if ((rdev->family == CHIP_PALM) ||
3753 (rdev->family == CHIP_SUMO) ||
3754 (rdev->family == CHIP_SUMO2)) {
3755 /* size in bytes on fusion */
3756 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3757 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3759 /* size in MB on evergreen/cayman/tn */
3760 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3761 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
/* CPU-visible VRAM is bounded by the PCI aperture */
3763 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3764 r700_vram_gtt_location(rdev, &rdev->mc);
3765 radeon_update_bandwidth_info(rdev);
/*
 * evergreen_print_gpu_status_regs - dump engine status registers
 *
 * Logs the GRBM/SRBM status registers, the CP stall/busy counters and
 * the DMA status register(s) via dev_info; used when diagnosing GPU
 * hangs before/after a reset attempt.
 */
3770 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3772 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3773 RREG32(GRBM_STATUS));
3774 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3775 RREG32(GRBM_STATUS_SE0));
3776 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3777 RREG32(GRBM_STATUS_SE1));
3778 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3779 RREG32(SRBM_STATUS));
3780 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3781 RREG32(SRBM_STATUS2));
3782 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3783 RREG32(CP_STALLED_STAT1));
3784 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3785 RREG32(CP_STALLED_STAT2));
3786 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3787 RREG32(CP_BUSY_STAT));
3788 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3790 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3791 RREG32(DMA_STATUS_REG));
/* cayman and newer have a second DMA engine at a +0x800 register offset */
3792 if (rdev->family >= CHIP_CAYMAN) {
3793 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3794 RREG32(DMA_STATUS_REG + 0x800));
/*
 * evergreen_is_display_hung - heuristic check for hung display controllers
 *
 * Samples the H/V counter of every CRTC whose master enable bit is set,
 * then re-reads the counters up to 10 times; any enabled CRTC whose
 * counter changes between samples is cleared from the hung mask.
 *
 * NOTE(review): elided in this excerpt - the inter-sample delay, the
 * early-out when crtc_hung becomes 0, and the final return value.
 */
3798 bool evergreen_is_display_hung(struct radeon_device *rdev)
/* first pass: snapshot HV counters of all enabled CRTCs */
3804 for (i = 0; i < rdev->num_crtc; i++) {
3805 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3806 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3807 crtc_hung |= (1 << i);
/* re-sample; a moving counter means the CRTC is alive */
3811 for (j = 0; j < 10; j++) {
3812 for (i = 0; i < rdev->num_crtc; i++) {
3813 if (crtc_hung & (1 << i)) {
3814 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3815 if (tmp != crtc_status[i])
3816 crtc_hung &= ~(1 << i);
/*
 * evergreen_gpu_check_soft_reset - determine which blocks need resetting
 *
 * Reads the GRBM/SRBM/DMA/VM status registers and translates busy/pending
 * bits into a RADEON_RESET_* mask for evergreen_gpu_soft_reset(); an MC
 * reset request is deliberately dropped at the end since a busy MC is
 * almost never actually hung.
 *
 * NOTE(review): elided in this excerpt - the reset_mask initialisation,
 * several status-bit tests (IH/SEM/VMC conditions), and the return.
 */
3827 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
/* GRBM_STATUS: graphics pipeline busy bits -> GFX reset */
3833 tmp = RREG32(GRBM_STATUS);
3834 if (tmp & (PA_BUSY | SC_BUSY |
3836 TA_BUSY | VGT_BUSY |
3838 SPI_BUSY | VGT_BUSY_NO_DMA))
3839 reset_mask |= RADEON_RESET_GFX;
/* command processor busy/pending -> CP reset */
3841 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3842 CP_BUSY | CP_COHERENCY_BUSY))
3843 reset_mask |= RADEON_RESET_CP;
3845 if (tmp & GRBM_EE_BUSY)
3846 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3848 /* DMA_STATUS_REG */
3849 tmp = RREG32(DMA_STATUS_REG);
3850 if (!(tmp & DMA_IDLE))
3851 reset_mask |= RADEON_RESET_DMA;
3854 tmp = RREG32(SRBM_STATUS2);
3856 reset_mask |= RADEON_RESET_DMA;
/* SRBM_STATUS: RLC/IH/SEM/GRBM/VMC/MC request + busy bits */
3859 tmp = RREG32(SRBM_STATUS);
3860 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3861 reset_mask |= RADEON_RESET_RLC;
3864 reset_mask |= RADEON_RESET_IH;
3867 reset_mask |= RADEON_RESET_SEM;
3869 if (tmp & GRBM_RQ_PENDING)
3870 reset_mask |= RADEON_RESET_GRBM;
3873 reset_mask |= RADEON_RESET_VMC;
3875 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3876 MCC_BUSY | MCD_BUSY))
3877 reset_mask |= RADEON_RESET_MC;
3879 if (evergreen_is_display_hung(rdev))
3880 reset_mask |= RADEON_RESET_DISPLAY;
3883 tmp = RREG32(VM_L2_STATUS);
3885 reset_mask |= RADEON_RESET_VMC;
3887 /* Skip MC reset as it's mostly likely not hung, just busy */
3888 if (reset_mask & RADEON_RESET_MC) {
3889 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3890 reset_mask &= ~RADEON_RESET_MC;
/*
 * evergreen_gpu_soft_reset - soft-reset the blocks named in reset_mask
 *
 * Halts the CP (and DMA ring if requested), quiesces the memory
 * controller via evergreen_mc_stop(), translates the RADEON_RESET_*
 * mask into GRBM_SOFT_RESET/SRBM_SOFT_RESET bits, pulses those bits
 * (set, read back, clear, read back), then restores the MC state and
 * dumps the status registers again.  MC reset is only attempted on
 * discrete parts (!RADEON_IS_IGP).
 *
 * NOTE(review): elided in this excerpt - the early return, the udelay/
 * mdelay settle calls, and several grbm_soft_reset bit ORs inside the
 * GFX/COMPUTE branch.
 */
3896 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3898 struct evergreen_mc_save save;
3899 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
/* nothing flagged -> nothing to do */
3902 if (reset_mask == 0)
3905 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3907 evergreen_print_gpu_status_regs(rdev);
3909 /* Disable CP parsing/prefetching */
3910 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3912 if (reset_mask & RADEON_RESET_DMA) {
/* stop the DMA ring before resetting the engine */
3914 tmp = RREG32(DMA_RB_CNTL);
3915 tmp &= ~DMA_RB_ENABLE;
3916 WREG32(DMA_RB_CNTL, tmp);
3921 evergreen_mc_stop(rdev, &save);
3922 if (evergreen_mc_wait_for_idle(rdev)) {
3923 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
/* build the GRBM reset bit set for the graphics/compute pipeline */
3926 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3927 grbm_soft_reset |= SOFT_RESET_DB |
3940 if (reset_mask & RADEON_RESET_CP) {
3941 grbm_soft_reset |= SOFT_RESET_CP |
3944 srbm_soft_reset |= SOFT_RESET_GRBM;
/* map the remaining RADEON_RESET_* flags onto SRBM reset bits */
3947 if (reset_mask & RADEON_RESET_DMA)
3948 srbm_soft_reset |= SOFT_RESET_DMA;
3950 if (reset_mask & RADEON_RESET_DISPLAY)
3951 srbm_soft_reset |= SOFT_RESET_DC;
3953 if (reset_mask & RADEON_RESET_RLC)
3954 srbm_soft_reset |= SOFT_RESET_RLC;
3956 if (reset_mask & RADEON_RESET_SEM)
3957 srbm_soft_reset |= SOFT_RESET_SEM;
3959 if (reset_mask & RADEON_RESET_IH)
3960 srbm_soft_reset |= SOFT_RESET_IH;
3962 if (reset_mask & RADEON_RESET_GRBM)
3963 srbm_soft_reset |= SOFT_RESET_GRBM;
3965 if (reset_mask & RADEON_RESET_VMC)
3966 srbm_soft_reset |= SOFT_RESET_VMC;
/* MC soft reset only on discrete GPUs, never on IGPs */
3968 if (!(rdev->flags & RADEON_IS_IGP)) {
3969 if (reset_mask & RADEON_RESET_MC)
3970 srbm_soft_reset |= SOFT_RESET_MC;
/* pulse GRBM reset bits: set, read back to post, clear, read back */
3973 if (grbm_soft_reset) {
3974 tmp = RREG32(GRBM_SOFT_RESET);
3975 tmp |= grbm_soft_reset;
3976 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3977 WREG32(GRBM_SOFT_RESET, tmp);
3978 tmp = RREG32(GRBM_SOFT_RESET);
3982 tmp &= ~grbm_soft_reset;
3983 WREG32(GRBM_SOFT_RESET, tmp);
3984 tmp = RREG32(GRBM_SOFT_RESET);
/* same pulse sequence for the SRBM reset bits */
3987 if (srbm_soft_reset) {
3988 tmp = RREG32(SRBM_SOFT_RESET);
3989 tmp |= srbm_soft_reset;
3990 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3991 WREG32(SRBM_SOFT_RESET, tmp);
3992 tmp = RREG32(SRBM_SOFT_RESET);
3996 tmp &= ~srbm_soft_reset;
3997 WREG32(SRBM_SOFT_RESET, tmp);
3998 tmp = RREG32(SRBM_SOFT_RESET);
4001 /* Wait a little for things to settle down */
4004 evergreen_mc_resume(rdev, &save);
4007 evergreen_print_gpu_status_regs(rdev);
/*
 * evergreen_gpu_pci_config_reset - full ASIC reset via PCI config space
 *
 * Heavier hammer than the soft reset: halts the CP and DMA rings, stops
 * the RLC, forces the clocks into bypass mode, disables bus mastering,
 * quiesces the MC, then triggers the reset through PCI config space and
 * polls CONFIG_MEMSIZE until the ASIC responds again (a hung/reset chip
 * reads back 0xffffffff).
 *
 * NOTE(review): elided in this excerpt - the udelay between poll
 * iterations and the function epilogue.
 */
4010 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4012 struct evergreen_mc_save save;
4015 dev_info(rdev->dev, "GPU pci config reset\n");
4019 /* Disable CP parsing/prefetching */
4020 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
/* stop the DMA ring */
4023 tmp = RREG32(DMA_RB_CNTL);
4024 tmp &= ~DMA_RB_ENABLE;
4025 WREG32(DMA_RB_CNTL, tmp);
4026 /* XXX other engines? */
/* halt the RLC microcontroller */
4029 r600_rlc_stop(rdev);
4033 /* set mclk/sclk to bypass */
4034 rv770_set_clk_bypass_mode(rdev);
/* stop bus mastering so nothing DMAs during the reset */
4036 pci_clear_master(rdev->pdev);
4037 /* disable mem access */
4038 evergreen_mc_stop(rdev, &save);
4039 if (evergreen_mc_wait_for_idle(rdev)) {
4040 dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
4043 radeon_pci_config_reset(rdev);
4044 /* wait for asic to come out of reset */
4045 for (i = 0; i < rdev->usec_timeout; i++) {
4046 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
/*
 * evergreen_asic_reset - reset the ASIC, escalating as needed
 *
 * With @hard set, goes straight to the PCI config reset.  Otherwise:
 * check which blocks are hung, mark the engine hung in the BIOS scratch
 * register, attempt a soft reset, re-check, and escalate to a PCI
 * config reset (if radeon_hard_reset allows it) when blocks are still
 * stuck; finally clear the hung flag if the GPU came back.
 *
 * NOTE(review): elided in this excerpt - early returns, the
 * "reset_mask == 0" checks around each step, and the return value.
 */
4052 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4057 evergreen_gpu_pci_config_reset(rdev);
4061 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* tell the BIOS the engine is hung before attempting recovery */
4064 r600_set_bios_scratch_engine_hung(rdev, true);
4066 /* try soft reset */
4067 evergreen_gpu_soft_reset(rdev, reset_mask);
4069 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4071 /* try pci config reset */
4072 if (reset_mask && radeon_hard_reset)
4073 evergreen_gpu_pci_config_reset(rdev);
4075 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4078 r600_set_bios_scratch_engine_hung(rdev, false);
4084 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4086 * @rdev: radeon_device pointer
4087 * @ring: radeon_ring structure holding ring information
4089 * Check if the GFX engine is locked up.
4090 * Returns true if the engine appears to be locked up, false if not.
/* See the kernel-doc above: reports whether the GFX ring is locked up. */
4092 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4094 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* no GFX/COMPUTE/CP hang bits -> refresh the lockup tracker and report healthy */
4096 if (!(reset_mask & (RADEON_RESET_GFX |
4097 RADEON_RESET_COMPUTE |
4098 RADEON_RESET_CP))) {
4099 radeon_ring_lockup_update(rdev, ring);
/* otherwise let the ring-level tracker decide whether progress stalled */
4102 return radeon_ring_test_lockup(rdev, ring);
4108 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4109 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
/*
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * Unpins, unmaps and frees the three RLC buffer objects (save/restore
 * block, clear state block, CP table), NULLing each pointer afterwards.
 * Reserve failures are only warned about; teardown proceeds regardless.
 */
4111 void sumo_rlc_fini(struct radeon_device *rdev)
4115 /* save restore block */
4116 if (rdev->rlc.save_restore_obj) {
4117 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4118 if (unlikely(r != 0))
4119 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4120 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4121 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4123 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4124 rdev->rlc.save_restore_obj = NULL;
4127 /* clear state block */
4128 if (rdev->rlc.clear_state_obj) {
4129 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4130 if (unlikely(r != 0))
4131 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4132 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4133 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4135 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4136 rdev->rlc.clear_state_obj = NULL;
4139 /* cp table block */
4140 if (rdev->rlc.cp_table_obj) {
4141 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4142 if (unlikely(r != 0))
4143 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4144 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4145 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4147 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4148 rdev->rlc.cp_table_obj = NULL;
4152 #define CP_ME_TABLE_SIZE 96
4154 int sumo_rlc_init(struct radeon_device *rdev)
4157 volatile u32 *dst_ptr;
4158 u32 dws, data, i, j, k, reg_num;
4159 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4160 u64 reg_list_mc_addr;
4161 const struct cs_section_def *cs_data;
4164 src_ptr = rdev->rlc.reg_list;
4165 dws = rdev->rlc.reg_list_size;
4166 if (rdev->family >= CHIP_BONAIRE) {
4167 dws += (5 * 16) + 48 + 48 + 64;
4169 cs_data = rdev->rlc.cs_data;
4172 /* save restore block */
4173 if (rdev->rlc.save_restore_obj == NULL) {
4174 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4175 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4176 NULL, &rdev->rlc.save_restore_obj);
4178 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4183 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4184 if (unlikely(r != 0)) {
4185 sumo_rlc_fini(rdev);
4188 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4189 &rdev->rlc.save_restore_gpu_addr);
4191 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4192 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4193 sumo_rlc_fini(rdev);
4197 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4199 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4200 sumo_rlc_fini(rdev);
4203 /* write the sr buffer */
4204 dst_ptr = rdev->rlc.sr_ptr;
4205 if (rdev->family >= CHIP_TAHITI) {
4207 for (i = 0; i < rdev->rlc.reg_list_size; i++)
4208 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4212 * dw0: (reg2 << 16) | reg1
4213 * dw1: reg1 save space
4214 * dw2: reg2 save space
4216 for (i = 0; i < dws; i++) {
4217 data = src_ptr[i] >> 2;
4220 data |= (src_ptr[i] >> 2) << 16;
4221 j = (((i - 1) * 3) / 2);
4222 dst_ptr[j] = cpu_to_le32(data);
4225 dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4227 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4228 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4232 /* clear state block */
4233 if (rdev->family >= CHIP_BONAIRE) {
4234 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4235 } else if (rdev->family >= CHIP_TAHITI) {
4236 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4237 dws = rdev->rlc.clear_state_size + (256 / 4);
4241 for (i = 0; cs_data[i].section != NULL; i++) {
4242 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4244 dws += cs_data[i].section[j].reg_count;
4247 reg_list_blk_index = (3 * reg_list_num + 2);
4248 dws += reg_list_blk_index;
4249 rdev->rlc.clear_state_size = dws;
4252 if (rdev->rlc.clear_state_obj == NULL) {
4253 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4254 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4255 NULL, &rdev->rlc.clear_state_obj);
4257 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4258 sumo_rlc_fini(rdev);
4262 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4263 if (unlikely(r != 0)) {
4264 sumo_rlc_fini(rdev);
4267 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4268 &rdev->rlc.clear_state_gpu_addr);
4270 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4271 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4272 sumo_rlc_fini(rdev);
4276 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4278 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4279 sumo_rlc_fini(rdev);
4282 /* set up the cs buffer */
4283 dst_ptr = rdev->rlc.cs_ptr;
4284 if (rdev->family >= CHIP_BONAIRE) {
4285 cik_get_csb_buffer(rdev, dst_ptr);
4286 } else if (rdev->family >= CHIP_TAHITI) {
4287 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4288 dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4289 dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4290 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4291 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4293 reg_list_hdr_blk_index = 0;
4294 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4295 data = upper_32_bits(reg_list_mc_addr);
4296 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4297 reg_list_hdr_blk_index++;
4298 for (i = 0; cs_data[i].section != NULL; i++) {
4299 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4300 reg_num = cs_data[i].section[j].reg_count;
4301 data = reg_list_mc_addr & 0xffffffff;
4302 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4303 reg_list_hdr_blk_index++;
4305 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4306 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4307 reg_list_hdr_blk_index++;
4309 data = 0x08000000 | (reg_num * 4);
4310 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4311 reg_list_hdr_blk_index++;
4313 for (k = 0; k < reg_num; k++) {
4314 data = cs_data[i].section[j].extent[k];
4315 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4317 reg_list_mc_addr += reg_num * 4;
4318 reg_list_blk_index += reg_num;
4321 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4323 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4324 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4327 if (rdev->rlc.cp_table_size) {
4328 if (rdev->rlc.cp_table_obj == NULL) {
4329 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4331 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4332 NULL, &rdev->rlc.cp_table_obj);
4334 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4335 sumo_rlc_fini(rdev);
4340 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4341 if (unlikely(r != 0)) {
4342 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4343 sumo_rlc_fini(rdev);
4346 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4347 &rdev->rlc.cp_table_gpu_addr);
4349 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4350 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4351 sumo_rlc_fini(rdev);
4354 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4356 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4357 sumo_rlc_fini(rdev);
4361 cik_init_cp_pg_table(rdev);
4363 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4364 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
/*
 * Enable the RLC (Run List Controller).  On IGP parts graphics power
 * gating is switched on together with the core RLC enable bit.
 */
4371 static void evergreen_rlc_start(struct radeon_device *rdev)
4373 u32 mask = RLC_ENABLE;
4375 if (rdev->flags & RADEON_IS_IGP) {
4376 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4379 WREG32(RLC_CNTL, mask);
/*
 * Stop the RLC, upload its microcode and restart it.
 *
 * IGP (TN/Aruba) parts additionally program the RLC load balancer and
 * point the RLC at the save/restore and clear-state buffers set up by
 * sumo_rlc_init(); discrete parts simply zero the HB ring registers.
 * The number of ucode words written depends on the ASIC family.
 */
4382 int evergreen_rlc_resume(struct radeon_device *rdev)
4385 const __be32 *fw_data;
4390 r600_rlc_stop(rdev);
4392 WREG32(RLC_HB_CNTL, 0);
4394 if (rdev->flags & RADEON_IS_IGP) {
4395 if (rdev->family == CHIP_ARUBA) {
/* two SE groups always on; the rest depends on max_shader_engines */
4396 u32 always_on_bitmap =
4397 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4398 /* find out the number of active simds */
4399 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4400 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4401 tmp = hweight32(~tmp);
/* only enable load balancing when no SIMDs are fused off */
4402 if (tmp == rdev->config.cayman.max_simds_per_se) {
4403 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4404 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4405 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4406 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4407 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4410 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4411 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
/* GPU addresses are shifted right by 8 (256-byte aligned) */
4413 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4414 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4416 WREG32(RLC_HB_BASE, 0);
4417 WREG32(RLC_HB_RPTR, 0);
4418 WREG32(RLC_HB_WPTR, 0);
4419 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4420 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4422 WREG32(RLC_MC_CNTL, 0);
4423 WREG32(RLC_UCODE_CNTL, 0);
/* upload the RLC microcode; firmware words are big-endian */
4425 fw_data = (const __be32 *)rdev->rlc_fw->data;
4426 if (rdev->family >= CHIP_ARUBA) {
4427 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4428 WREG32(RLC_UCODE_ADDR, i);
4429 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4431 } else if (rdev->family >= CHIP_CAYMAN) {
4432 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4433 WREG32(RLC_UCODE_ADDR, i);
4434 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4437 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4438 WREG32(RLC_UCODE_ADDR, i);
4439 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4442 WREG32(RLC_UCODE_ADDR, 0);
4444 evergreen_rlc_start(rdev);
/*
 * Read back the hardware frame counter for @crtc, used for vblank
 * timestamping.  Out-of-range crtc indices bail out early
 * (NOTE(review): the early-return value is on lines not visible in
 * this chunk — confirm against the full file).
 */
4451 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4453 if (crtc >= rdev->num_crtc)
4456 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/*
 * Force every interrupt source into the disabled state: CP rings,
 * DMA engines, GRBM/SRBM, per-crtc vblank/vline and pageflip sources,
 * DAC autodetect and the six HPD pins.  Cayman+ parts program their
 * three CP rings through cayman_cp_int_cntl_setup().
 */
4459 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4464 if (rdev->family >= CHIP_CAYMAN) {
4465 cayman_cp_int_cntl_setup(rdev, 0,
4466 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4467 cayman_cp_int_cntl_setup(rdev, 1, 0);
4468 cayman_cp_int_cntl_setup(rdev, 2, 0);
4469 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4470 WREG32(CAYMAN_DMA1_CNTL, tmp);
4472 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4473 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4474 WREG32(DMA_CNTL, tmp);
4475 WREG32(GRBM_INT_CNTL, 0);
4476 WREG32(SRBM_INT_CNTL, 0);
4477 for (i = 0; i < rdev->num_crtc; i++)
4478 WREG32(INT_MASK + crtc_offsets[i], 0);
4479 for (i = 0; i < rdev->num_crtc; i++)
4480 WREG32(GRPH_INT_CONTROL + crtc_offsets[i], 0);
4482 /* only one DAC on DCE5 */
4483 if (!ASIC_IS_DCE5(rdev))
4484 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4485 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
/* keep only the polarity bit of each HPD pin; mask interrupt enables */
4487 for (i = 0; i < 6; i++)
4488 WREG32_AND(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_POLARITY);
4491 /* Note that the order we write back regs here is important */
/*
 * Program the interrupt enable registers from the current state of
 * rdev->irq: CP ring fences, DMA traps, per-crtc vblank/pflip,
 * HPD hotplug, HDMI/audio and DPM thermal interrupts.
 * Must only run after the IH is installed and enabled; otherwise all
 * sources are forced off via evergreen_disable_interrupt_state().
 */
4492 int evergreen_irq_set(struct radeon_device *rdev)
4495 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4496 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4497 u32 grbm_int_cntl = 0;
4498 u32 dma_cntl, dma_cntl1 = 0;
4499 u32 thermal_int = 0;
4501 if (!rdev->irq.installed) {
4502 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4505 /* don't enable anything if the ih is disabled */
4506 if (!rdev->ih.enabled) {
4507 r600_disable_interrupts(rdev);
4508 /* force the active interrupt state to all disabled */
4509 evergreen_disable_interrupt_state(rdev);
/* preserve the thermal register, masking only the int-enable bits */
4513 if (rdev->family == CHIP_ARUBA)
4514 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4515 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4517 thermal_int = RREG32(CG_THERMAL_INT) &
4518 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4520 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4522 if (rdev->family >= CHIP_CAYMAN) {
4523 /* enable CP interrupts on all rings */
4524 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4525 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4526 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4528 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4529 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4530 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4532 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4533 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4534 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4537 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4538 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4539 cp_int_cntl |= RB_INT_ENABLE;
4540 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4544 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4545 DRM_DEBUG("r600_irq_set: sw int dma\n");
4546 dma_cntl |= TRAP_ENABLE;
4549 if (rdev->family >= CHIP_CAYMAN) {
4550 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4551 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4552 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4553 dma_cntl1 |= TRAP_ENABLE;
4557 if (rdev->irq.dpm_thermal) {
4558 DRM_DEBUG("dpm thermal\n");
4559 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
/* write back the accumulated enable masks */
4562 if (rdev->family >= CHIP_CAYMAN) {
4563 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4564 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4565 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4567 WREG32(CP_INT_CNTL, cp_int_cntl);
4569 WREG32(DMA_CNTL, dma_cntl);
4571 if (rdev->family >= CHIP_CAYMAN)
4572 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4574 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
/* vblank on when either the DRM vblank path or a pending pflip needs it */
4576 for (i = 0; i < rdev->num_crtc; i++) {
4577 radeon_irq_kms_set_irq_n_enabled(
4578 rdev, INT_MASK + crtc_offsets[i],
4580 rdev->irq.crtc_vblank_int[i] ||
4581 atomic_read(&rdev->irq.pflip[i]), "vblank", i);
4584 for (i = 0; i < rdev->num_crtc; i++)
4585 WREG32(GRPH_INT_CONTROL + crtc_offsets[i], GRPH_PFLIP_INT_MASK);
4587 for (i = 0; i < 6; i++) {
4588 radeon_irq_kms_set_irq_n_enabled(
4589 rdev, DC_HPDx_INT_CONTROL(i),
4590 DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN,
4591 rdev->irq.hpd[i], "HPD", i);
4594 if (rdev->family == CHIP_ARUBA)
4595 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4597 WREG32(CG_THERMAL_INT, thermal_int);
4599 for (i = 0; i < 6; i++) {
4600 radeon_irq_kms_set_irq_n_enabled(
4601 rdev, AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
4602 AFMT_AZ_FORMAT_WTRIG_MASK,
4603 rdev->irq.afmt[i], "HDMI", i);
/* posting read to flush the preceding writes */
4607 RREG32(SRBM_STATUS);
4612 /* Note that the order we write back regs here is important */
/*
 * Latch all display-related interrupt status registers into
 * rdev->irq.stat_regs (for evergreen_irq_process() to consume) and
 * acknowledge every asserted source: pageflip, vblank/vline, HPD,
 * HPD-RX and HDMI audio format triggers.
 */
4613 static void evergreen_irq_ack(struct radeon_device *rdev)
4616 u32 *grph_int = rdev->irq.stat_regs.evergreen.grph_int;
4617 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
4618 u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;
/* snapshot status before acking so irq_process sees a coherent view */
4620 for (i = 0; i < 6; i++) {
4621 disp_int[i] = RREG32(evergreen_disp_int_status[i]);
4622 afmt_status[i] = RREG32(AFMT_STATUS + crtc_offsets[i]);
4623 if (i < rdev->num_crtc)
4624 grph_int[i] = RREG32(GRPH_INT_STATUS + crtc_offsets[i]);
4627 /* We write back each interrupt register in pairs of two */
4628 for (i = 0; i < rdev->num_crtc; i += 2) {
4629 for (j = i; j < (i + 2); j++) {
4630 if (grph_int[j] & GRPH_PFLIP_INT_OCCURRED)
4631 WREG32(GRPH_INT_STATUS + crtc_offsets[j],
4632 GRPH_PFLIP_INT_CLEAR);
4635 for (j = i; j < (i + 2); j++) {
4636 if (disp_int[j] & LB_D1_VBLANK_INTERRUPT)
4637 WREG32(VBLANK_STATUS + crtc_offsets[j],
4639 if (disp_int[j] & LB_D1_VLINE_INTERRUPT)
4640 WREG32(VLINE_STATUS + crtc_offsets[j],
/* ack asserted HPD connect/disconnect interrupts */
4645 for (i = 0; i < 6; i++) {
4646 if (disp_int[i] & DC_HPD1_INTERRUPT)
4647 WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_ACK);
/* ack asserted HPD RX (short-pulse / DP) interrupts */
4650 for (i = 0; i < 6; i++) {
4651 if (disp_int[i] & DC_HPD1_RX_INTERRUPT)
4652 WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_RX_INT_ACK);
4655 for (i = 0; i < 6; i++) {
4656 if (afmt_status[i] & AFMT_AZ_FORMAT_WTRIG)
4657 WREG32_OR(AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
4658 AFMT_AZ_FORMAT_WTRIG_ACK);
/*
 * Turn interrupts fully off: disable delivery, ack anything still
 * pending, then force every source's enable state to disabled.
 */
4662 static void evergreen_irq_disable(struct radeon_device *rdev)
4664 r600_disable_interrupts(rdev);
4665 /* Wait and acknowledge irq */
4667 evergreen_irq_ack(rdev);
4668 evergreen_disable_interrupt_state(rdev);
/* Quiesce interrupts and stop the RLC for suspend. */
4671 void evergreen_irq_suspend(struct radeon_device *rdev)
4673 evergreen_irq_disable(rdev);
4674 r600_rlc_stop(rdev);
/*
 * Fetch the IH ring write pointer (from the writeback page when
 * enabled, else the register) and handle ring overflow by skipping
 * ahead of the overwritten entries and clearing the overflow flag.
 * Returns the wptr masked to the ring size.
 */
4677 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4681 if (rdev->wb.enabled)
4682 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4684 wptr = RREG32(IH_RB_WPTR);
4686 if (wptr & RB_OVERFLOW) {
4687 wptr &= ~RB_OVERFLOW;
4688 /* When a ring buffer overflow happen start parsing interrupt
4689 * from the last not overwritten vector (wptr + 16). Hopefully
4690 * this should allow us to catchup.
4692 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4693 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4694 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4695 tmp = RREG32(IH_RB_CNTL);
4696 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4697 WREG32(IH_RB_CNTL, tmp);
4699 return (wptr & rdev->ih.ptr_mask);
/*
 * Main IH (interrupt handler) ring processor.  Drains entries between
 * rptr and wptr, dispatching by src_id: display vblank/vline, page
 * flips, HPD hotplug/RX, HDMI audio, SRBM errors, UVD, VM faults, CP
 * and DMA fence completions, thermal events and GUI idle.  Deferred
 * work (hotplug, audio, DP, thermal) is queued after the loop.
 * Reentry is prevented with rdev->ih.lock; re-reads wptr at the end
 * in case more entries arrived while processing.
 */
4702 int evergreen_irq_process(struct radeon_device *rdev)
4704 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
4705 u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;
4706 u32 crtc_idx, hpd_idx, afmt_idx;
4710 u32 src_id, src_data;
4712 bool queue_hotplug = false;
4713 bool queue_hdmi = false;
4714 bool queue_dp = false;
4715 bool queue_thermal = false;
4717 const char *event_name;
4719 if (!rdev->ih.enabled || rdev->shutdown)
4722 wptr = evergreen_get_ih_wptr(rdev);
4725 /* is somebody else already processing irqs? */
4726 if (atomic_xchg(&rdev->ih.lock, 1))
4729 rptr = rdev->ih.rptr;
4730 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4732 /* Order reading of wptr vs. reading of IH ring data */
4735 /* display interrupts */
4736 evergreen_irq_ack(rdev);
4738 while (rptr != wptr) {
4739 /* wptr/rptr are in bytes! */
4740 ring_index = rptr / 4;
4741 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4742 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4745 case 1: /* D1 vblank/vline */
4746 case 2: /* D2 vblank/vline */
4747 case 3: /* D3 vblank/vline */
4748 case 4: /* D4 vblank/vline */
4749 case 5: /* D5 vblank/vline */
4750 case 6: /* D6 vblank/vline */
4751 crtc_idx = src_id - 1;
4753 if (src_data == 0) { /* vblank */
4754 mask = LB_D1_VBLANK_INTERRUPT;
4755 event_name = "vblank";
4757 if (rdev->irq.crtc_vblank_int[crtc_idx]) {
4758 drm_handle_vblank(rdev->ddev, crtc_idx);
4759 rdev->pm.vblank_sync = true;
4760 wake_up(&rdev->irq.vblank_queue);
4762 if (atomic_read(&rdev->irq.pflip[crtc_idx])) {
4763 radeon_crtc_handle_vblank(rdev,
4767 } else if (src_data == 1) { /* vline */
4768 mask = LB_D1_VLINE_INTERRUPT;
4769 event_name = "vline";
4771 DRM_DEBUG("Unhandled interrupt: %d %d\n",
/* spurious event: status bit was never latched by irq_ack */
4776 if (!(disp_int[crtc_idx] & mask)) {
4777 DRM_DEBUG("IH: D%d %s - IH event w/o asserted irq bit?\n",
4778 crtc_idx + 1, event_name);
4781 disp_int[crtc_idx] &= ~mask;
4782 DRM_DEBUG("IH: D%d %s\n", crtc_idx + 1, event_name);
4785 case 8: /* D1 page flip */
4786 case 10: /* D2 page flip */
4787 case 12: /* D3 page flip */
4788 case 14: /* D4 page flip */
4789 case 16: /* D5 page flip */
4790 case 18: /* D6 page flip */
4791 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4792 if (radeon_use_pflipirq > 0)
4793 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4795 case 42: /* HPD hotplug */
/* src_data 0-5: HPD pins, 6-11: HPD RX (short pulse) */
4796 if (src_data <= 5) {
4798 mask = DC_HPD1_INTERRUPT;
4799 queue_hotplug = true;
4802 } else if (src_data <= 11) {
4803 hpd_idx = src_data - 6;
4804 mask = DC_HPD1_RX_INTERRUPT;
4806 event_name = "HPD_RX";
4809 DRM_DEBUG("Unhandled interrupt: %d %d\n",
4814 if (!(disp_int[hpd_idx] & mask))
4815 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4817 disp_int[hpd_idx] &= ~mask;
4818 DRM_DEBUG("IH: %s%d\n", event_name, hpd_idx + 1);
4822 afmt_idx = src_data;
4823 if (!(afmt_status[afmt_idx] & AFMT_AZ_FORMAT_WTRIG))
4824 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4827 DRM_ERROR("Unhandled interrupt: %d %d\n",
4831 afmt_status[afmt_idx] &= ~AFMT_AZ_FORMAT_WTRIG;
4833 DRM_DEBUG("IH: HDMI%d\n", afmt_idx + 1);
4836 DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
4837 WREG32(SRBM_INT_ACK, 0x1);
4840 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4841 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
/* VM protection fault: report and clear fault address/status */
4845 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4846 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
4847 /* reset addr and status */
4848 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4849 if (addr == 0x0 && status == 0x0)
4851 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4852 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4854 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4856 cayman_vm_decode_fault(rdev, status, addr);
4858 case 176: /* CP_INT in ring buffer */
4859 case 177: /* CP_INT in IB1 */
4860 case 178: /* CP_INT in IB2 */
4861 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4862 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4864 case 181: /* CP EOP event */
4865 DRM_DEBUG("IH: CP EOP\n");
4866 if (rdev->family >= CHIP_CAYMAN) {
4869 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4872 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4875 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4879 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4881 case 224: /* DMA trap event */
4882 DRM_DEBUG("IH: DMA trap\n");
4883 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4885 case 230: /* thermal low to high */
4886 DRM_DEBUG("IH: thermal low to high\n");
4887 rdev->pm.dpm.thermal.high_to_low = false;
4888 queue_thermal = true;
4890 case 231: /* thermal high to low */
4891 DRM_DEBUG("IH: thermal high to low\n");
4892 rdev->pm.dpm.thermal.high_to_low = true;
4893 queue_thermal = true;
4895 case 233: /* GUI IDLE */
4896 DRM_DEBUG("IH: GUI idle\n");
4898 case 244: /* DMA trap event */
4899 if (rdev->family >= CHIP_CAYMAN) {
4900 DRM_DEBUG("IH: DMA1 trap\n");
4901 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4905 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4909 /* wptr/rptr are in bytes! */
4911 rptr &= rdev->ih.ptr_mask;
4912 WREG32(IH_RB_RPTR, rptr);
/* kick off deferred work collected while draining the ring */
4915 schedule_work(&rdev->dp_work);
4917 schedule_delayed_work(&rdev->hotplug_work, 0);
4919 schedule_work(&rdev->audio_work);
4920 if (queue_thermal && rdev->pm.dpm_enabled)
4921 schedule_work(&rdev->pm.dpm.thermal.work);
4922 rdev->ih.rptr = rptr;
4923 atomic_set(&rdev->ih.lock, 0);
4925 /* make sure wptr hasn't changed while processing */
4926 wptr = evergreen_get_ih_wptr(rdev);
/*
 * One-time UVD setup at driver init.  A radeon_uvd_init() failure is
 * not fatal: UVD is simply disabled on the device and the rest of the
 * driver carries on.  On success the UVD ring object is initialized.
 */
4933 static void evergreen_uvd_init(struct radeon_device *rdev)
4940 r = radeon_uvd_init(rdev);
4942 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
4944 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
4945 * to early fails uvd_v2_2_resume() and thus nothing happens
4946 * there. So it is pointless to try to go through that code
4947 * hence why we disable uvd here.
4949 rdev->has_uvd = false;
4952 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
4953 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
/*
 * Resume the UVD block and start its fence ring.  On any failure the
 * UVD ring size is zeroed so evergreen_uvd_resume() will skip it —
 * failures are tolerated, not propagated.
 */
4956 static void evergreen_uvd_start(struct radeon_device *rdev)
4963 r = uvd_v2_2_resume(rdev);
4965 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
4968 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
4970 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
/* mark the ring unusable so the resume step is skipped */
4976 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
/*
 * Bring up the UVD ring proper (ring init + uvd_v1_0_init); no-op if
 * UVD is absent or evergreen_uvd_start() zeroed the ring size.
 */
4979 static void evergreen_uvd_resume(struct radeon_device *rdev)
4981 struct radeon_ring *ring;
4984 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
4987 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
4988 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
4990 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
4993 r = uvd_v1_0_init(rdev);
4995 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
/*
 * Common hardware bring-up used by both init and resume: PCIe gen2 /
 * ASPM, VRAM scratch, MC programming (incl. DCE5 MC ucode), GART or
 * AGP, GPU init, RLC buffers (IGP), writeback, fence rings, UVD,
 * interrupts, CP/DMA ring init and microcode, IB pool and audio.
 * Ordering matters; comments below mark the required sequencing.
 */
5002 static int evergreen_startup(struct radeon_device *rdev)
5005 /* enable pcie gen2 link */
5006 evergreen_pcie_gen2_enable(rdev);
5008 evergreen_program_aspm(rdev);
5010 /* scratch needs to be initialized before MC */
5011 r = r600_vram_scratch_init(rdev);
5015 evergreen_mc_program(rdev);
5017 if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5018 r = ni_mc_load_microcode(rdev);
5020 DRM_ERROR("Failed to load MC firmware!\n");
5025 if (rdev->flags & RADEON_IS_AGP) {
5026 evergreen_agp_enable(rdev);
5028 r = evergreen_pcie_gart_enable(rdev);
5032 evergreen_gpu_init(rdev);
5034 /* allocate rlc buffers */
5035 if (rdev->flags & RADEON_IS_IGP) {
5036 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5037 rdev->rlc.reg_list_size =
5038 (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5039 rdev->rlc.cs_data = evergreen_cs_data;
5040 r = sumo_rlc_init(rdev);
5042 DRM_ERROR("Failed to init rlc BOs!\n");
5047 /* allocate wb buffer */
5048 r = radeon_wb_init(rdev);
5052 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5054 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5058 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5060 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5064 evergreen_uvd_start(rdev);
/* interrupt controller must be up before enabling ring interrupts */
5067 if (!rdev->irq.installed) {
5068 r = radeon_irq_kms_init(rdev);
5073 r = r600_irq_init(rdev);
5075 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5076 radeon_irq_kms_fini(rdev);
5079 evergreen_irq_set(rdev);
5081 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5082 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5087 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5088 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5089 DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5093 r = evergreen_cp_load_microcode(rdev);
5096 r = evergreen_cp_resume(rdev);
5099 r = r600_dma_resume(rdev);
5103 evergreen_uvd_resume(rdev);
5105 r = radeon_ib_pool_init(rdev);
5107 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5111 r = radeon_audio_init(rdev);
5113 DRM_ERROR("radeon: audio init failed\n");
/*
 * System resume entry point: reset the ASIC, re-post via ATOM,
 * restore golden registers, resume DPM if active, then run the common
 * evergreen_startup() path.  accel_working is cleared again if
 * startup fails so userspace sees acceleration as unavailable.
 */
5120 int evergreen_resume(struct radeon_device *rdev)
5124 /* reset the asic, the gfx blocks are often in a bad state
5125 * after the driver is unloaded or after a resume
5127 if (radeon_asic_reset(rdev))
5128 dev_warn(rdev->dev, "GPU reset failed !\n");
5129 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5130 * posting will perform necessary task to bring back GPU into good
5134 atom_asic_init(rdev->mode_info.atom_context);
5136 /* init golden registers */
5137 evergreen_init_golden_registers(rdev);
5139 if (rdev->pm.pm_method == PM_METHOD_DPM)
5140 radeon_pm_resume(rdev);
5142 rdev->accel_working = true;
5143 r = evergreen_startup(rdev);
5145 DRM_ERROR("evergreen startup failed on resume\n");
5146 rdev->accel_working = false;
/*
 * System suspend: stop PM and audio, quiesce UVD, halt the DMA
 * engine, disable interrupts/RLC, then tear down writeback and GART.
 */
5154 int evergreen_suspend(struct radeon_device *rdev)
5156 radeon_pm_suspend(rdev);
5157 radeon_audio_fini(rdev);
5158 if (rdev->has_uvd) {
5159 uvd_v1_0_fini(rdev);
5160 radeon_uvd_suspend(rdev);
5163 r600_dma_stop(rdev);
5164 evergreen_irq_suspend(rdev);
5165 radeon_wb_disable(rdev);
5166 evergreen_pcie_gart_disable(rdev);
5171 /* Plan is to move initialization in that function and use
5172 * helper function so that radeon_device_init pretty much
5173 * do nothing more than calling asic specific function. This
5174 * should also allow to remove a bunch of callback function
/*
 * Driver init entry point for Evergreen-class ASICs: BIOS fetch and
 * ATOM validation, ASIC reset + posting, golden/scratch/surface
 * registers, clocks, fence driver, AGP, MC, BO manager, microcode,
 * PM, ring sizing, IH ring and GART init, then first startup.  On a
 * startup failure acceleration is torn back down but init still
 * succeeds (accel_working = false).  BTC (DCE5) discrete parts
 * additionally require MC ucode to proceed.
 */
5177 int evergreen_init(struct radeon_device *rdev)
5182 if (!radeon_get_bios(rdev)) {
5183 if (ASIC_IS_AVIVO(rdev))
5186 /* Must be an ATOMBIOS */
5187 if (!rdev->is_atom_bios) {
5188 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5191 r = radeon_atombios_init(rdev);
5194 /* reset the asic, the gfx blocks are often in a bad state
5195 * after the driver is unloaded or after a resume
5197 if (radeon_asic_reset(rdev))
5198 dev_warn(rdev->dev, "GPU reset failed !\n");
5199 /* Post card if necessary */
5200 if (!radeon_card_posted(rdev)) {
5202 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5205 DRM_INFO("GPU not posted. posting now...\n");
5206 atom_asic_init(rdev->mode_info.atom_context);
5208 /* init golden registers */
5209 evergreen_init_golden_registers(rdev);
5210 /* Initialize scratch registers */
5211 r600_scratch_init(rdev);
5212 /* Initialize surface registers */
5213 radeon_surface_init(rdev);
5214 /* Initialize clocks */
5215 radeon_get_clock_info(rdev->ddev);
5217 radeon_fence_driver_init(rdev);
5218 /* initialize AGP */
5219 if (rdev->flags & RADEON_IS_AGP) {
5220 r = radeon_agp_init(rdev);
5222 radeon_agp_disable(rdev);
5224 /* initialize memory controller */
5225 r = evergreen_mc_init(rdev);
5228 /* Memory manager */
5229 r = radeon_bo_init(rdev);
/* DCE5 (NI/BTC) also needs MC firmware; older parts do not */
5233 if (ASIC_IS_DCE5(rdev)) {
5234 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5235 r = ni_init_microcode(rdev);
5237 DRM_ERROR("Failed to load firmware!\n");
5242 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5243 r = r600_init_microcode(rdev);
5245 DRM_ERROR("Failed to load firmware!\n");
5251 /* Initialize power management */
5252 radeon_pm_init(rdev);
5254 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5255 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5257 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5258 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5260 evergreen_uvd_init(rdev);
5262 rdev->ih.ring_obj = NULL;
5263 r600_ih_ring_init(rdev, 64 * 1024);
5265 r = r600_pcie_gart_init(rdev);
5269 rdev->accel_working = true;
5270 r = evergreen_startup(rdev);
/* startup failure: tear down acceleration but keep the KMS side alive */
5272 dev_err(rdev->dev, "disabling GPU acceleration\n");
5274 r600_dma_fini(rdev);
5275 r600_irq_fini(rdev);
5276 if (rdev->flags & RADEON_IS_IGP)
5277 sumo_rlc_fini(rdev);
5278 radeon_wb_fini(rdev);
5279 radeon_ib_pool_fini(rdev);
5280 radeon_irq_kms_fini(rdev);
5281 evergreen_pcie_gart_fini(rdev);
5282 rdev->accel_working = false;
5285 /* Don't start up if the MC ucode is missing on BTC parts.
5286 * The default clocks and voltages before the MC ucode
5287 * is loaded are not suffient for advanced operations.
5289 if (ASIC_IS_DCE5(rdev)) {
5290 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5291 DRM_ERROR("radeon: MC ucode required for NI+.\n");
/*
 * Full driver teardown, releasing everything evergreen_init() /
 * evergreen_startup() acquired, in roughly reverse order of setup.
 */
5299 void evergreen_fini(struct radeon_device *rdev)
5301 radeon_pm_fini(rdev);
5302 radeon_audio_fini(rdev);
5304 r600_dma_fini(rdev);
5305 r600_irq_fini(rdev);
5306 if (rdev->flags & RADEON_IS_IGP)
5307 sumo_rlc_fini(rdev);
5308 radeon_wb_fini(rdev);
5309 radeon_ib_pool_fini(rdev);
5310 radeon_irq_kms_fini(rdev);
5311 uvd_v1_0_fini(rdev);
5312 radeon_uvd_fini(rdev);
5313 evergreen_pcie_gart_fini(rdev);
5314 r600_vram_scratch_fini(rdev);
5315 radeon_gem_fini(rdev);
5316 radeon_fence_driver_fini(rdev);
5317 radeon_agp_fini(rdev);
5318 radeon_bo_fini(rdev);
5319 radeon_atombios_fini(rdev);
/*
 * Try to train the PCIe link up to gen2 speed.  Skipped when disabled
 * via the radeon.pcie_gen2 module parameter, on IGP/non-PCIE/X2
 * boards, when the upstream bus cannot do 5.0/8.0 GT/s, or when the
 * link is already running at gen2.  The sequence toggles the speed-
 * change counters and sets the GEN2 strap per the LC (link control)
 * register protocol.
 */
5324 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5326 u32 link_width_cntl, speed_cntl;
5328 if (radeon_pcie_gen2 == 0)
5331 if (rdev->flags & RADEON_IS_IGP)
5334 if (!(rdev->flags & RADEON_IS_PCIE))
5337 /* x2 cards have a special sequence */
5338 if (ASIC_IS_X2(rdev))
5341 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5342 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5345 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5346 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5347 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5351 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* only retrain if the other side has ever advertised gen2 support */
5353 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5354 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5356 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5357 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5358 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5360 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5361 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5362 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* pulse the failed-speed-change counter clear bit */
5364 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5365 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5366 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5368 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5369 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5370 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5372 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5373 speed_cntl |= LC_GEN2_EN_STRAP;
5374 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5377 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5378 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5380 link_width_cntl |= LC_UPCONFIGURE_DIS;
5382 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5383 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
/*
 * Configure PCIe ASPM (L0s/L1 link power states) and the PIF PHY PLL
 * power-down/ramp-up timings.  Skipped when disabled via radeon.aspm
 * or on non-PCIE parts.  Family-dependent inactivity timeouts and
 * (Barts+) PLL ramp/LS2 exit timings are applied; fusion platforms
 * get a different PIF pairing value.
 * NOTE(review): the function tail is beyond this chunk's last visible
 * line — confirm trailing logic against the full file.
 */
5387 void evergreen_program_aspm(struct radeon_device *rdev)
5390 u32 pcie_lc_cntl, pcie_lc_cntl_old;
5391 bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5392 /* fusion_platform = true
5393 * if the system is a fusion system
5394 * (APU or DGPU in a fusion system).
5395 * todo: check if the system is a fusion platform.
5397 bool fusion_platform = false;
5399 if (radeon_aspm == 0)
5402 if (!(rdev->flags & RADEON_IS_PCIE))
5405 switch (rdev->family) {
5418 disable_l0s = false;
5422 if (rdev->flags & RADEON_IS_IGP)
5423 fusion_platform = true; /* XXX also dGPUs in a fusion system */
/* select the PIF pairing value for both PHYs based on platform type */
5425 data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5426 if (fusion_platform)
5431 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5433 data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5434 if (fusion_platform)
5439 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5441 pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5442 pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
/* inactivity timeouts differ between Barts+ and older families */
5444 if (rdev->family >= CHIP_BARTS)
5445 pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5447 pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5451 if (rdev->family >= CHIP_BARTS)
5452 pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5454 pcie_lc_cntl |= LC_L1_INACTIVITY(8);
/* allow the PHY PLLs to power down in L1 on both PIFs */
5456 if (!disable_plloff_in_l1) {
5457 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5458 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5459 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5461 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5463 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5464 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5465 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5467 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5469 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5470 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5471 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5473 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5475 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5476 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5477 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5479 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
/* Barts+ also shortens the PLL ramp-up time on both PIFs */
5481 if (rdev->family >= CHIP_BARTS) {
5482 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5483 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5484 data |= PLL_RAMP_UP_TIME_0(4);
5486 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5488 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5489 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5490 data |= PLL_RAMP_UP_TIME_1(4);
5492 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5494 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5495 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5496 data |= PLL_RAMP_UP_TIME_0(4);
5498 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5500 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5501 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5502 data |= PLL_RAMP_UP_TIME_1(4);
5504 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5507 data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5508 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5509 data |= LC_DYN_LANES_PWR_STATE(3);
5511 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5513 if (rdev->family >= CHIP_BARTS) {
5514 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5515 data &= ~LS2_EXIT_TIME_MASK;
5516 data |= LS2_EXIT_TIME(1);
5518 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5520 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5521 data &= ~LS2_EXIT_TIME_MASK;
5522 data |= LS2_EXIT_TIME(1);
5524 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5529 /* evergreen parts only */
5530 if (rdev->family < CHIP_BARTS)
5531 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
/* only touch the register when the value actually changed */
5533 if (pcie_lc_cntl != pcie_lc_cntl_old)
5534 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);