1 /* radeon_cp.c -- CP support for Radeon -*- linux-c -*- */
3 * Copyright 2000 Precision Insight, Inc., Cedar Park, Texas.
4 * Copyright 2000 VA Linux Systems, Inc., Fremont, California.
5 * Copyright 2007 Advanced Micro Devices, Inc.
8 * Permission is hereby granted, free of charge, to any person obtaining a
9 * copy of this software and associated documentation files (the "Software"),
10 * to deal in the Software without restriction, including without limitation
11 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 * and/or sell copies of the Software, and to permit persons to whom the
13 * Software is furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice (including the next
16 * paragraph) shall be included in all copies or substantial portions of the
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
24 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 * DEALINGS IN THE SOFTWARE.
28 * Kevin E. Martin <martin@valinux.com>
29 * Gareth Hughes <gareth@valinux.com>
34 #include "drm_sarea.h"
35 #include "radeon_drm.h"
36 #include "radeon_drv.h"
39 #include "radeon_microcode.h"
40 #define RADEON_FIFO_DEBUG 0
42 static int radeon_do_cleanup_cp(struct drm_device * dev);
43 static void radeon_do_cp_start(drm_radeon_private_t * dev_priv);
/* Read an R5xx memory-controller register via the R520 indirect
 * index/data register pair: select the register (0x7f0000 appears to be
 * enable bits - TODO confirm against register docs), read the data,
 * then clear the index.
 * NOTE(review): this chunk looks truncated - the declaration of `ret`,
 * the return statement and the braces are not visible here.
 */
45 static u32 R500_READ_MCIND(drm_radeon_private_t *dev_priv, int addr)
48 RADEON_WRITE(R520_MC_IND_INDEX, 0x7f0000 | (addr & 0xff));
49 ret = RADEON_READ(R520_MC_IND_DATA);
50 RADEON_WRITE(R520_MC_IND_INDEX, 0);
/* Read an RS480 northbridge MC register through the NB_MC index/data
 * pair; the index is restored to 0xff afterwards.
 * NOTE(review): `ret` declaration/return not visible in this chunk.
 */
54 static u32 RS480_READ_MCIND(drm_radeon_private_t *dev_priv, int addr)
57 RADEON_WRITE(RS480_NB_MC_INDEX, addr & 0xff);
58 ret = RADEON_READ(RS480_NB_MC_DATA);
59 RADEON_WRITE(RS480_NB_MC_INDEX, 0xff);
/* Read an RS690 MC register through the RS690 MC index/data pair; the
 * index is parked at RS690_MC_INDEX_MASK after the access.
 * NOTE(review): `ret` declaration/return not visible in this chunk.
 */
63 static u32 RS690_READ_MCIND(drm_radeon_private_t *dev_priv, int addr)
66 RADEON_WRITE(RS690_MC_INDEX, (addr & RS690_MC_INDEX_MASK));
67 ret = RADEON_READ(RS690_MC_DATA);
68 RADEON_WRITE(RS690_MC_INDEX, RS690_MC_INDEX_MASK);
/* IGP MC read dispatcher: RS690 uses its own MC index/data pair, every
 * other IGP family visible here falls back to the RS480 path.
 */
72 static u32 IGP_READ_MCIND(drm_radeon_private_t *dev_priv, int addr)
74 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
75 return RS690_READ_MCIND(dev_priv, addr);
77 return RS480_READ_MCIND(dev_priv, addr);
/* Family-dispatching MC register read: RS690 goes through the IGP
 * indirect path, RV515 and newer through the R5xx indirect path.
 * NOTE(review): the fallback for chips older than RV515 is not visible
 * in this chunk - presumably a direct RADEON_READ; confirm in the full
 * file.
 */
80 u32 radeon_read_mc_reg(drm_radeon_private_t *dev_priv, int addr)
82 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
83 return IGP_READ_MCIND(dev_priv, addr);
84 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV515)
85 return R500_READ_MCIND(dev_priv, addr);
/* Family-dispatching MC register write, mirroring radeon_read_mc_reg:
 * RS690 via the IGP indirect path, RV515+ via the R5xx indirect path.
 * NOTE(review): any fallback branch for older chips is not visible in
 * this chunk.
 */
89 void radeon_write_mc_reg(drm_radeon_private_t *dev_priv, u32 addr, u32 val)
91 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
92 IGP_WRITE_MCIND(addr, val);
93 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV515)
94 R500_WRITE_MCIND(addr, val);
/* Return the framebuffer aperture location register for the current
 * chip family. Each generation keeps this in a different place: RV515
 * and R520+ behind the MC indirect pair, RS690 behind its own MC pair,
 * R600/R700 as directly-mapped VM registers, and everything older as
 * the classic RADEON_MC_FB_LOCATION register.
 */
97 u32 radeon_read_fb_location(drm_radeon_private_t *dev_priv)
100 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV515)
101 return R500_READ_MCIND(dev_priv, RV515_MC_FB_LOCATION);
102 else if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
103 return RS690_READ_MCIND(dev_priv, RS690_MC_FB_LOCATION);
104 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV770)
105 return RADEON_READ(R700_MC_VM_FB_LOCATION);
106 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600)
107 return RADEON_READ(R600_MC_VM_FB_LOCATION);
108 else if ((dev_priv->flags & RADEON_FAMILY_MASK) > CHIP_RV515)
109 return R500_READ_MCIND(dev_priv, R520_MC_FB_LOCATION);
111 return RADEON_READ(RADEON_MC_FB_LOCATION);
/* Read the AGP aperture location into *agp_lo / *agp_hi, dispatching on
 * chip_family. On R600/RV770 the bottom and top live in two separate VM
 * registers; on older chips a single MC register is read into *agp_lo.
 * NOTE(review): several branches (RV515, RS600, RS690/RS740, R520+) do
 * not visibly set *agp_hi in this chunk - the lines that do so were
 * presumably stripped by extraction; confirm in the full file before
 * relying on *agp_hi from those paths.
 */
114 void radeon_read_agp_location(drm_radeon_private_t *dev_priv, u32 *agp_lo, u32 *agp_hi)
116 if (dev_priv->chip_family == CHIP_RV770) {
117 *agp_lo = RADEON_READ(R600_MC_VM_AGP_BOT);
118 *agp_hi = RADEON_READ(R600_MC_VM_AGP_TOP);
119 } else if (dev_priv->chip_family == CHIP_R600) {
120 *agp_lo = RADEON_READ(R600_MC_VM_AGP_BOT);
121 *agp_hi = RADEON_READ(R600_MC_VM_AGP_TOP);
122 } else if (dev_priv->chip_family == CHIP_RV515) {
123 *agp_lo = radeon_read_mc_reg(dev_priv, RV515_MC_AGP_LOCATION);
125 } else if (dev_priv->chip_family == CHIP_RS600) {
128 } else if (dev_priv->chip_family == CHIP_RS690 ||
129 dev_priv->chip_family == CHIP_RS740) {
130 *agp_lo = radeon_read_mc_reg(dev_priv, RS690_MC_AGP_LOCATION);
132 } else if (dev_priv->chip_family >= CHIP_R520) {
133 *agp_lo = radeon_read_mc_reg(dev_priv, R520_MC_AGP_LOCATION);
136 *agp_lo = RADEON_READ(RADEON_MC_AGP_LOCATION);
/* Write the framebuffer aperture location register; mirror image of
 * radeon_read_fb_location with the same per-family register selection.
 */
141 void radeon_write_fb_location(drm_radeon_private_t *dev_priv, u32 fb_loc)
143 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV515)
144 R500_WRITE_MCIND(RV515_MC_FB_LOCATION, fb_loc);
145 else if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
146 RS690_WRITE_MCIND(RS690_MC_FB_LOCATION, fb_loc);
147 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV770)
148 RADEON_WRITE(R700_MC_VM_FB_LOCATION, fb_loc);
149 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600)
150 RADEON_WRITE(R600_MC_VM_FB_LOCATION, fb_loc);
151 else if ((dev_priv->flags & RADEON_FAMILY_MASK) > CHIP_RV515)
152 R500_WRITE_MCIND(R520_MC_FB_LOCATION, fb_loc);
154 RADEON_WRITE(RADEON_MC_FB_LOCATION, fb_loc);
/* Program the AGP aperture location for the current family. agp_loc_hi
 * is only used on R600+ where bottom/top live in two VM registers; all
 * older families take a single packed location word.
 */
157 static void radeon_write_agp_location(drm_radeon_private_t *dev_priv, u32 agp_loc, u32 agp_loc_hi)
159 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV515)
160 R500_WRITE_MCIND(RV515_MC_AGP_LOCATION, agp_loc);
161 else if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
162 RS690_WRITE_MCIND(RS690_MC_AGP_LOCATION, agp_loc);
163 else if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600) {
164 RADEON_WRITE(R600_MC_VM_AGP_BOT, agp_loc);
165 RADEON_WRITE(R600_MC_VM_AGP_TOP, agp_loc_hi);
166 } else if ((dev_priv->flags & RADEON_FAMILY_MASK) > CHIP_RV515)
167 R500_WRITE_MCIND(R520_MC_AGP_LOCATION, agp_loc);
169 RADEON_WRITE(RADEON_MC_AGP_LOCATION, agp_loc);
/* Program the 64-bit AGP base address, split into low/high 32-bit
 * halves. R5xx families use the MC indirect BASE/BASE_2 pair; RS400/
 * RS480 use the classic AGP_BASE plus RS480_AGP_BASE_2; everything else
 * writes AGP_BASE and, on R200+, AGP_BASE_2 for the high word.
 */
172 static void radeon_write_agp_base(drm_radeon_private_t *dev_priv, u64 agp_base)
174 u32 agp_base_hi = upper_32_bits(agp_base);
175 u32 agp_base_lo = agp_base & 0xffffffff;
177 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV515) {
178 R500_WRITE_MCIND(RV515_MC_AGP_BASE, agp_base_lo);
179 R500_WRITE_MCIND(RV515_MC_AGP_BASE_2, agp_base_hi);
180 } else if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690) {
181 RS690_WRITE_MCIND(RS690_MC_AGP_BASE, agp_base_lo);
182 RS690_WRITE_MCIND(RS690_MC_AGP_BASE_2, agp_base_hi);
183 } else if ((dev_priv->flags & RADEON_FAMILY_MASK) > CHIP_RV515) {
184 R500_WRITE_MCIND(R520_MC_AGP_BASE, agp_base_lo);
185 R500_WRITE_MCIND(R520_MC_AGP_BASE_2, agp_base_hi);
186 } else if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS400) ||
187 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS480)) {
188 RADEON_WRITE(RADEON_AGP_BASE, agp_base_lo);
189 RADEON_WRITE(RS480_AGP_BASE_2, agp_base_hi);
191 RADEON_WRITE(RADEON_AGP_BASE, agp_base_lo);
192 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R200)
193 RADEON_WRITE(RADEON_AGP_BASE_2, agp_base_hi);
/* PLL errata workaround: on chips flagged with
 * CHIP_ERRATA_PLL_DUMMYREADS, issue two dummy MMIO reads after writing
 * CLOCK_CNTL_INDEX; chips without the flag return early (the early
 * return itself is not visible in this truncated chunk).
 */
198 void radeon_pll_errata_after_index(struct drm_radeon_private *dev_priv)
200 if (!(dev_priv->pll_errata & CHIP_ERRATA_PLL_DUMMYREADS))
203 (void)RADEON_READ(RADEON_CLOCK_CNTL_DATA);
204 (void)RADEON_READ(RADEON_CRTC_GEN_CNTL);
/* PLL errata workarounds applied after a CLOCK_CNTL_DATA access:
 * - CHIP_ERRATA_PLL_DELAY: a delay is needed (RV100/RS100/RS200) or the
 *   chip can hang; the actual delay call is not visible in this chunk.
 * - CHIP_ERRATA_R300_CG: R300 clock-gating bug - rewrite the index with
 *   the PLL address/write-enable bits masked off, read DATA once, then
 *   restore the saved index so subsequent reads are correct.
 */
207 void radeon_pll_errata_after_data(struct drm_radeon_private *dev_priv)
209 /* This workarounds is necessary on RV100, RS100 and RS200 chips
210 * or the chip could hang on a subsequent access
212 if (dev_priv->pll_errata & CHIP_ERRATA_PLL_DELAY)
215 /* This function is required to workaround a hardware bug in some (all?)
216 * revisions of the R300. This workaround should be called after every
217 * CLOCK_CNTL_INDEX register access. If not, register reads afterward
218 * may not be correct.
220 if (dev_priv->pll_errata & CHIP_ERRATA_R300_CG) {
223 save = RADEON_READ(RADEON_CLOCK_CNTL_INDEX);
224 tmp = save & ~(0x3f | RADEON_PLL_WR_EN);
225 RADEON_WRITE(RADEON_CLOCK_CNTL_INDEX, tmp);
226 tmp = RADEON_READ(RADEON_CLOCK_CNTL_DATA);
227 RADEON_WRITE(RADEON_CLOCK_CNTL_INDEX, save);
/* Read a PLL register (6-bit address space) through the
 * CLOCK_CNTL_INDEX/DATA pair, applying the errata workarounds after
 * both the index write and the data read.
 * NOTE(review): `data` declaration and return not visible in this
 * chunk.
 */
231 u32 RADEON_READ_PLL(struct drm_radeon_private *dev_priv, int addr)
235 RADEON_WRITE8(RADEON_CLOCK_CNTL_INDEX, addr & 0x3f);
236 radeon_pll_errata_after_index(dev_priv);
237 data = RADEON_READ(RADEON_CLOCK_CNTL_DATA);
238 radeon_pll_errata_after_data(dev_priv);
/* Write a PLL register: set the index with the write-enable bit,
 * write the data, with errata workarounds after each step.
 */
242 void RADEON_WRITE_PLL(struct drm_radeon_private *dev_priv, int addr, uint32_t data)
244 RADEON_WRITE8(RADEON_CLOCK_CNTL_INDEX, ((addr & 0x3f) | RADEON_PLL_WR_EN));
245 radeon_pll_errata_after_index(dev_priv);
246 RADEON_WRITE(RADEON_CLOCK_CNTL_DATA, data);
247 radeon_pll_errata_after_data(dev_priv);
/* Read a PCIE register through the PCIE index/data pair (8-bit index
 * space). No errata handling is needed on this path.
 */
250 u32 RADEON_READ_PCIE(drm_radeon_private_t *dev_priv, int addr)
252 RADEON_WRITE8(RADEON_PCIE_INDEX, addr & 0xff);
253 return RADEON_READ(RADEON_PCIE_DATA);
256 /* ATOM accessor methods */
/* ATOM BIOS callback: read an MC register on behalf of the AtomBIOS
 * interpreter, delegating to the family-dispatching helper.
 */
257 static uint32_t cail_mc_read(struct card_info *info, uint32_t reg)
259 uint32_t ret = radeon_read_mc_reg(info->dev->dev_private, reg);
261 // DRM_DEBUG("(%x) = %x\n", reg, ret);
/* ATOM BIOS callback: write an MC register on behalf of the AtomBIOS
 * interpreter.
 */
265 static void cail_mc_write(struct card_info *info, uint32_t reg, uint32_t val)
267 // DRM_DEBUG("(%x, %x)\n", reg, val);
268 radeon_write_mc_reg(info->dev->dev_private, reg, val);
/* ATOM BIOS callback: MMIO register write. AtomBIOS passes dword
 * offsets, hence the reg*4 conversion to a byte offset.
 */
271 static void cail_reg_write(struct card_info *info, uint32_t reg, uint32_t val)
273 drm_radeon_private_t *dev_priv = info->dev->dev_private;
275 // DRM_DEBUG("(%x, %x)\n", reg*4, val);
276 RADEON_WRITE(reg*4, val);
/* ATOM BIOS callback: MMIO register read (dword offset, see
 * cail_reg_write). The return statement is not visible in this chunk.
 */
279 static uint32_t cail_reg_read(struct card_info *info, uint32_t reg)
282 drm_radeon_private_t *dev_priv = info->dev->dev_private;
284 ret = RADEON_READ(reg*4);
285 // DRM_DEBUG("(%x) = %x\n", reg*4, ret);
289 #if RADEON_FIFO_DEBUG
/* Debug-only helper (compiled in when RADEON_FIFO_DEBUG is set): dump
 * the RBBM status, CP ring read/write pointers and the AIC (PCI GART)
 * registers to the kernel log for FIFO-hang diagnosis.
 */
290 static void radeon_status(drm_radeon_private_t * dev_priv)
292 printk("%s:\n", __FUNCTION__);
293 printk("RBBM_STATUS = 0x%08x\n",
294 (unsigned int)RADEON_READ(RADEON_RBBM_STATUS));
295 printk("CP_RB_RTPR = 0x%08x\n",
296 (unsigned int)RADEON_READ(RADEON_CP_RB_RPTR));
297 printk("CP_RB_WTPR = 0x%08x\n",
298 (unsigned int)RADEON_READ(RADEON_CP_RB_WPTR));
299 printk("AIC_CNTL = 0x%08x\n",
300 (unsigned int)RADEON_READ(RADEON_AIC_CNTL));
301 printk("AIC_STAT = 0x%08x\n",
302 (unsigned int)RADEON_READ(RADEON_AIC_STAT));
303 printk("AIC_PT_BASE = 0x%08x\n",
304 (unsigned int)RADEON_READ(RADEON_AIC_PT_BASE));
305 printk("TLB_ADDR = 0x%08x\n",
306 (unsigned int)RADEON_READ(RADEON_AIC_TLB_ADDR));
307 printk("TLB_DATA = 0x%08x\n",
308 (unsigned int)RADEON_READ(RADEON_AIC_TLB_DATA));
312 /* ================================================================
313 * Engine, FIFO control
/* Flush the 2D destination cache and poll (up to usec_timeout
 * iterations) until it is no longer busy. Only chips up to RV280 get
 * the explicit RB3D flush; newer chips deliberately skip it (see the
 * "lockup" comment below). The success/timeout return paths are not
 * visible in this truncated chunk.
 */
316 static int radeon_do_pixcache_flush(drm_radeon_private_t * dev_priv)
321 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
323 if ((dev_priv->flags & RADEON_FAMILY_MASK) <= CHIP_RV280) {
324 tmp = RADEON_READ(RADEON_RB3D_DSTCACHE_CTLSTAT);
325 tmp |= RADEON_RB3D_DC_FLUSH_ALL;
326 RADEON_WRITE(RADEON_RB3D_DSTCACHE_CTLSTAT, tmp);
328 for (i = 0; i < dev_priv->usec_timeout; i++) {
329 if (!(RADEON_READ(RADEON_RB3D_DSTCACHE_CTLSTAT)
330 & RADEON_RB3D_DC_BUSY)) {
336 /* don't flush or purge cache here or lockup */
340 #if RADEON_FIFO_DEBUG
341 DRM_ERROR("failed!\n");
342 radeon_status(dev_priv);
/* Poll the RBBM status until at least `entries` command FIFO slots are
 * free, giving up after usec_timeout iterations and logging the status
 * registers. Success/failure return paths are not visible in this
 * truncated chunk.
 */
347 static int radeon_do_wait_for_fifo(drm_radeon_private_t * dev_priv, int entries)
351 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
353 for (i = 0; i < dev_priv->usec_timeout; i++) {
354 int slots = (RADEON_READ(RADEON_RBBM_STATUS)
355 & RADEON_RBBM_FIFOCNT_MASK);
356 if (slots >= entries)
360 DRM_INFO("wait for fifo failed status : 0x%08X 0x%08X\n",
361 RADEON_READ(RADEON_RBBM_STATUS),
362 RADEON_READ(R300_VAP_CNTL_STATUS));
364 #if RADEON_FIFO_DEBUG
365 DRM_ERROR("failed!\n");
366 radeon_status(dev_priv);
/* Wait for the whole engine to go idle: first wait for 64 free FIFO
 * slots, then poll RBBM_STATUS until the ACTIVE bit clears, finishing
 * with a pixcache flush. Logs the status registers on timeout. The
 * early-return on fifo failure and the final return are not visible in
 * this truncated chunk.
 */
371 int radeon_do_wait_for_idle(drm_radeon_private_t * dev_priv)
375 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
377 ret = radeon_do_wait_for_fifo(dev_priv, 64);
381 for (i = 0; i < dev_priv->usec_timeout; i++) {
382 if (!(RADEON_READ(RADEON_RBBM_STATUS)
383 & RADEON_RBBM_ACTIVE)) {
384 radeon_do_pixcache_flush(dev_priv);
389 DRM_INFO("wait idle failed status : 0x%08X 0x%08X\n",
390 RADEON_READ(RADEON_RBBM_STATUS),
391 RADEON_READ(R300_VAP_CNTL_STATUS));
393 #if RADEON_FIFO_DEBUG
394 DRM_ERROR("failed!\n");
395 radeon_status(dev_priv);
/* Detect the number of GB (raster) pipes for R3xx+ chips and program
 * the tiling/pipe configuration registers accordingly. R420+ reads the
 * pipe count from GB_PIPE_SELECT; R300/R350 are hard-wired to 2 pipes;
 * everything else defaults to 1. On RV515+ the pipe clock gating and
 * SU_REG_DEST masks are also programmed.
 */
400 static void radeon_init_pipes(drm_radeon_private_t * dev_priv)
402 uint32_t gb_tile_config, gb_pipe_sel = 0;
404 /* RS4xx/RS6xx/R4xx/R5xx */
405 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R420) {
406 gb_pipe_sel = RADEON_READ(R400_GB_PIPE_SELECT);
407 dev_priv->num_gb_pipes = ((gb_pipe_sel >> 12) & 0x3) + 1;
410 if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R300) ||
411 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R350)) {
412 dev_priv->num_gb_pipes = 2;
415 dev_priv->num_gb_pipes = 1;
418 DRM_INFO("Num pipes: %d\n", dev_priv->num_gb_pipes);
420 gb_tile_config = (R300_ENABLE_TILING | R300_TILE_SIZE_16 /*| R300_SUBPIXEL_1_16*/);
422 switch(dev_priv->num_gb_pipes) {
423 case 2: gb_tile_config |= R300_PIPE_COUNT_R300; break;
424 case 3: gb_tile_config |= R300_PIPE_COUNT_R420_3P; break;
425 case 4: gb_tile_config |= R300_PIPE_COUNT_R420; break;
427 case 1: gb_tile_config |= R300_PIPE_COUNT_RV350; break;
430 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV515) {
431 RADEON_WRITE_PLL(dev_priv, R500_DYN_SCLK_PWMEM_PIPE, (1 | ((gb_pipe_sel >> 8) & 0xf) << 4));
432 RADEON_WRITE(R500_SU_REG_DEST, ((1 << dev_priv->num_gb_pipes) - 1));
434 RADEON_WRITE(R300_GB_TILE_CONFIG, gb_tile_config);
435 radeon_do_wait_for_idle(dev_priv);
436 RADEON_WRITE(R300_DST_PIPE_CONFIG, RADEON_READ(R300_DST_PIPE_CONFIG) | R300_PIPE_AUTO_CONFIG);
437 RADEON_WRITE(R300_RB2D_DSTCACHE_MODE, (RADEON_READ(R300_RB2D_DSTCACHE_MODE) |
438 R300_DC_AUTOFLUSH_ENABLE |
439 R300_DC_DC_DISABLE_IGNORE_PE));
444 /* ================================================================
445 * CP control, initialization
448 /* Load the microcode for the CP */
/* Upload the 256-entry CP microcode into the ME RAM, selecting the
 * image by chip family (R100/R200/R300/R400/RS690/R500 class). Each
 * entry is written as a high/low dword pair through the
 * CP_ME_RAM_DATAH/DATAL auto-incrementing port after resetting
 * CP_ME_RAM_ADDR to 0. The microcode tables come from
 * radeon_microcode.h.
 */
449 static void radeon_cp_load_microcode(drm_radeon_private_t * dev_priv)
454 radeon_do_wait_for_idle(dev_priv);
456 RADEON_WRITE(RADEON_CP_ME_RAM_ADDR, 0);
458 if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R100) ||
459 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV100) ||
460 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV200) ||
461 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS100) ||
462 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS200)) {
463 DRM_INFO("Loading R100 Microcode\n");
464 for (i = 0; i < 256; i++) {
465 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
466 R100_cp_microcode[i][1]);
467 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
468 R100_cp_microcode[i][0]);
470 } else if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R200) ||
471 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV250) ||
472 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV280) ||
473 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS300)) {
474 DRM_INFO("Loading R200 Microcode\n");
475 for (i = 0; i < 256; i++) {
476 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
477 R200_cp_microcode[i][1]);
478 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
479 R200_cp_microcode[i][0]);
481 } else if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R300) ||
482 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R350) ||
483 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV350) ||
484 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV380) ||
485 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS400) ||
486 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS480)) {
487 DRM_INFO("Loading R300 Microcode\n");
488 for (i = 0; i < 256; i++) {
489 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
490 R300_cp_microcode[i][1]);
491 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
492 R300_cp_microcode[i][0]);
494 } else if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R420) ||
495 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV410)) {
496 DRM_INFO("Loading R400 Microcode\n");
497 for (i = 0; i < 256; i++) {
498 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
499 R420_cp_microcode[i][1]);
500 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
501 R420_cp_microcode[i][0]);
503 } else if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690) {
504 DRM_INFO("Loading RS690 Microcode\n");
505 for (i = 0; i < 256; i++) {
506 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
507 RS690_cp_microcode[i][1]);
508 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
509 RS690_cp_microcode[i][0]);
511 } else if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV515) ||
512 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R520) ||
513 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV530) ||
514 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_R580) ||
515 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV560) ||
516 ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RV570)) {
517 DRM_INFO("Loading R500 Microcode\n");
518 for (i = 0; i < 256; i++) {
519 RADEON_WRITE(RADEON_CP_ME_RAM_DATAH,
520 R520_cp_microcode[i][1]);
521 RADEON_WRITE(RADEON_CP_ME_RAM_DATAL,
522 R520_cp_microcode[i][0]);
527 /* Flush any pending commands to the CP. This should only be used just
528 * prior to a wait for idle, as it informs the engine that the command
/* Kick the CP by rewriting the ring write pointer with bit 31 set;
 * intended to be used just before a wait-for-idle (see comment above
 * this function in the file).
 */
531 static void radeon_do_cp_flush(drm_radeon_private_t * dev_priv)
536 tmp = RADEON_READ(RADEON_CP_RB_WPTR) | (1 << 31);
537 RADEON_WRITE(RADEON_CP_RB_WPTR, tmp);
541 /* Wait for the CP to go idle.
/* Emit cache purge + wait-until-idle packets into the ring, then block
 * until the engine reports idle. The RING/ADVANCE macros that bracket
 * the OUT_RING-style macros are not visible in this truncated chunk.
 */
543 int radeon_do_cp_idle(drm_radeon_private_t * dev_priv)
550 RADEON_PURGE_CACHE();
551 RADEON_PURGE_ZCACHE();
552 RADEON_WAIT_UNTIL_IDLE();
557 return radeon_do_wait_for_idle(dev_priv);
560 /* Start the Command Processor.
/* Start the Command Processor: wait for idle, program the CP queue mode
 * (cp_mode), mark it running, then emit an initial ISYNC_CNTL setup and
 * cache purge through the ring (on R5xx ISYNC can only be written via
 * the CP - see comment below). Sets the flush-tracking flags so the
 * first frame does not emit a redundant flush.
 */
562 static void radeon_do_cp_start(drm_radeon_private_t * dev_priv)
567 radeon_do_wait_for_idle(dev_priv);
569 RADEON_WRITE(RADEON_CP_CSQ_CNTL, dev_priv->cp_mode);
571 dev_priv->cp_running = 1;
574 /* isync can only be written through cp on r5xx write it here */
575 OUT_RING(CP_PACKET0(RADEON_ISYNC_CNTL, 0));
576 OUT_RING(RADEON_ISYNC_ANY2D_IDLE3D |
577 RADEON_ISYNC_ANY3D_IDLE2D |
578 RADEON_ISYNC_WAIT_IDLEGUI |
579 RADEON_ISYNC_CPSCRATCH_IDLEGUI);
580 RADEON_PURGE_CACHE();
581 RADEON_PURGE_ZCACHE();
582 RADEON_WAIT_UNTIL_IDLE();
586 dev_priv->track_flush |= RADEON_FLUSH_EMITED | RADEON_PURGE_EMITED;
589 /* Reset the Command Processor. This will not flush any pending
590 * commands, so you must wait for the CP command stream to complete
591 * before calling this routine.
/* Reset the CP ring pointers: make the write pointer and the software
 * head/tail all equal the current hardware read pointer, effectively
 * emptying the ring without flushing pending commands (callers must
 * have waited for idle first - see comment above this function).
 */
593 static void radeon_do_cp_reset(drm_radeon_private_t * dev_priv)
598 cur_read_ptr = RADEON_READ(RADEON_CP_RB_RPTR);
599 RADEON_WRITE(RADEON_CP_RB_WPTR, cur_read_ptr);
600 SET_RING_HEAD(dev_priv, cur_read_ptr);
601 dev_priv->ring.tail = cur_read_ptr;
604 /* Stop the Command Processor. This will not flush any pending
605 * commands, so you must flush the command stream and wait for the CP
606 * to go idle before calling this routine.
/* Stop the CP by disabling both the primary and indirect queues; does
 * not flush pending commands (callers must idle the CP first - see the
 * comment above this function).
 */
608 static void radeon_do_cp_stop(drm_radeon_private_t * dev_priv)
612 RADEON_WRITE(RADEON_CP_CSQ_CNTL, RADEON_CSQ_PRIDIS_INDDIS);
614 dev_priv->cp_running = 0;
617 /* Reset the engine. This will stop the CP if it is running.
/* Full engine soft reset. Sequence: flush the pixel cache; on pre-R5xx
 * chips (<= RV410) save CLOCK_CNTL_INDEX/MCLK_CNTL and force all memory
 * clocks on; pulse every RBBM soft-reset bit (set then clear, with
 * read-back barriers); restore the saved clock state; reinitialise the
 * raster pipes on R300+; and finally reset the CP ring and the buffer
 * freelist. The CP is left stopped (cp_running = 0). The return value
 * line is not visible in this truncated chunk.
 */
619 static int radeon_do_engine_reset(struct drm_device * dev)
621 drm_radeon_private_t *dev_priv = dev->dev_private;
622 u32 clock_cntl_index = 0, mclk_cntl = 0, rbbm_soft_reset;
625 radeon_do_pixcache_flush(dev_priv);
627 if ((dev_priv->flags & RADEON_FAMILY_MASK) <= CHIP_RV410) {
628 /* may need something similar for newer chips */
629 clock_cntl_index = RADEON_READ(RADEON_CLOCK_CNTL_INDEX);
630 mclk_cntl = RADEON_READ_PLL(dev_priv, RADEON_MCLK_CNTL);
632 RADEON_WRITE_PLL(dev_priv, RADEON_MCLK_CNTL, (mclk_cntl |
633 RADEON_FORCEON_MCLKA |
634 RADEON_FORCEON_MCLKB |
635 RADEON_FORCEON_YCLKA |
636 RADEON_FORCEON_YCLKB |
638 RADEON_FORCEON_AIC));
641 rbbm_soft_reset = RADEON_READ(RADEON_RBBM_SOFT_RESET);
643 RADEON_WRITE(RADEON_RBBM_SOFT_RESET, (rbbm_soft_reset |
644 RADEON_SOFT_RESET_CP |
645 RADEON_SOFT_RESET_HI |
646 RADEON_SOFT_RESET_SE |
647 RADEON_SOFT_RESET_RE |
648 RADEON_SOFT_RESET_PP |
649 RADEON_SOFT_RESET_E2 |
650 RADEON_SOFT_RESET_RB));
651 RADEON_READ(RADEON_RBBM_SOFT_RESET);
652 RADEON_WRITE(RADEON_RBBM_SOFT_RESET, (rbbm_soft_reset &
653 ~(RADEON_SOFT_RESET_CP |
654 RADEON_SOFT_RESET_HI |
655 RADEON_SOFT_RESET_SE |
656 RADEON_SOFT_RESET_RE |
657 RADEON_SOFT_RESET_PP |
658 RADEON_SOFT_RESET_E2 |
659 RADEON_SOFT_RESET_RB)));
660 RADEON_READ(RADEON_RBBM_SOFT_RESET);
662 if ((dev_priv->flags & RADEON_FAMILY_MASK) <= CHIP_RV410) {
663 RADEON_WRITE_PLL(dev_priv, RADEON_MCLK_CNTL, mclk_cntl);
664 RADEON_WRITE(RADEON_CLOCK_CNTL_INDEX, clock_cntl_index);
665 RADEON_WRITE(RADEON_RBBM_SOFT_RESET, rbbm_soft_reset);
668 /* setup the raster pipes */
669 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R300)
670 radeon_init_pipes(dev_priv);
672 /* Reset the CP ring */
673 radeon_do_cp_reset(dev_priv);
675 /* The CP is no longer running after an engine reset */
676 dev_priv->cp_running = 0;
678 /* Reset any pending vertex, indirect buffers */
680 radeon_freelist_reset(dev);
/* Program the CP ring buffer: memory-controller aperture (legacy memmap
 * only), ring base address (from the TTM buffer object, AGP map, or SG
 * virtual address depending on configuration), read/write pointers,
 * the writeback read-pointer address, ring size/fetch parameters,
 * scratch register writeback, bus mastering, and the initial scratch
 * values, finishing with an ISYNC_CNTL setup. Several else branches and
 * closing braces are not visible in this truncated chunk.
 */
685 static void radeon_cp_init_ring_buffer(struct drm_device * dev,
686 drm_radeon_private_t * dev_priv)
688 u32 ring_start, cur_read_ptr;
691 /* Initialize the memory controller. With new memory map, the fb location
692 * is not changed, it should have been properly initialized already. Part
693 * of the problem is that the code below is bogus, assuming the GART is
694 * always appended to the fb which is not necessarily the case
696 if (!dev_priv->new_memmap)
697 radeon_write_fb_location(dev_priv,
698 ((dev_priv->gart_vm_start - 1) & 0xffff0000)
699 | (dev_priv->fb_location >> 16));
/* Ring base: prefer the TTM-managed ring BO; otherwise fall back to the
 * AGP or scatter/gather map offset.
 */
701 if (dev_priv->mm.ring.bo) {
702 ring_start = dev_priv->mm.ring.bo->offset +
703 dev_priv->gart_vm_start;
706 if (dev_priv->flags & RADEON_IS_AGP) {
707 radeon_write_agp_base(dev_priv, dev->agp->base);
709 radeon_write_agp_location(dev_priv,
710 (((dev_priv->gart_vm_start - 1 +
711 dev_priv->gart_size) & 0xffff0000) |
712 (dev_priv->gart_vm_start >> 16)), 0);
714 ring_start = (dev_priv->cp_ring->offset
716 + dev_priv->gart_vm_start);
719 ring_start = (dev_priv->cp_ring->offset
720 - (unsigned long)dev->sg->virtual
721 + dev_priv->gart_vm_start);
723 RADEON_WRITE(RADEON_CP_RB_BASE, ring_start);
725 /* Set the write pointer delay */
726 RADEON_WRITE(RADEON_CP_RB_WPTR_DELAY, 0);
728 /* Initialize the ring buffer's read and write pointers */
729 cur_read_ptr = RADEON_READ(RADEON_CP_RB_RPTR);
730 RADEON_WRITE(RADEON_CP_RB_WPTR, cur_read_ptr);
731 SET_RING_HEAD(dev_priv, cur_read_ptr);
732 dev_priv->ring.tail = cur_read_ptr;
/* Read-pointer writeback address: TTM BO, AGP, or SG bus address. */
735 if (dev_priv->mm.ring_read.bo) {
736 RADEON_WRITE(RADEON_CP_RB_RPTR_ADDR,
737 dev_priv->mm.ring_read.bo->offset +
738 dev_priv->gart_vm_start);
741 if (dev_priv->flags & RADEON_IS_AGP) {
742 RADEON_WRITE(RADEON_CP_RB_RPTR_ADDR,
743 dev_priv->ring_rptr->offset
744 - dev->agp->base + dev_priv->gart_vm_start);
748 struct drm_sg_mem *entry = dev->sg;
749 unsigned long tmp_ofs, page_ofs;
751 tmp_ofs = dev_priv->ring_rptr->offset -
752 (unsigned long)dev->sg->virtual;
753 page_ofs = tmp_ofs >> PAGE_SHIFT;
755 RADEON_WRITE(RADEON_CP_RB_RPTR_ADDR, entry->busaddr[page_ofs]);
756 DRM_DEBUG("ring rptr: offset=0x%08lx handle=0x%08lx\n",
757 (unsigned long)entry->busaddr[page_ofs],
758 entry->handle + tmp_ofs);
761 /* Set ring buffer size */
763 RADEON_WRITE(RADEON_CP_RB_CNTL,
764 RADEON_BUF_SWAP_32BIT |
765 (dev_priv->ring.fetch_size_l2ow << 18) |
766 (dev_priv->ring.rptr_update_l2qw << 8) |
767 dev_priv->ring.size_l2qw);
769 RADEON_WRITE(RADEON_CP_RB_CNTL,
770 (dev_priv->ring.fetch_size_l2ow << 18) |
771 (dev_priv->ring.rptr_update_l2qw << 8) |
772 dev_priv->ring.size_l2qw);
775 /* Initialize the scratch register pointer. This will cause
776 * the scratch register values to be written out to memory
777 * whenever they are updated.
779 * We simply put this behind the ring read pointer, this works
780 * with PCI GART as well as (whatever kind of) AGP GART
782 RADEON_WRITE(RADEON_SCRATCH_ADDR, RADEON_READ(RADEON_CP_RB_RPTR_ADDR)
783 + RADEON_SCRATCH_REG_OFFSET);
785 if (dev_priv->mm.ring_read.bo)
786 dev_priv->scratch = ((__volatile__ u32 *)
787 dev_priv->mm.ring_read.kmap.virtual +
788 (RADEON_SCRATCH_REG_OFFSET / sizeof(u32)));
790 dev_priv->scratch = ((__volatile__ u32 *)
791 dev_priv->ring_rptr->handle +
792 (RADEON_SCRATCH_REG_OFFSET / sizeof(u32)));
794 RADEON_WRITE(RADEON_SCRATCH_UMSK, 0x7);
796 /* Turn on bus mastering */
797 tmp = RADEON_READ(RADEON_BUS_CNTL) & ~RADEON_BUS_MASTER_DIS;
798 RADEON_WRITE(RADEON_BUS_CNTL, tmp);
800 dev_priv->scratch[0] = 0;
801 RADEON_WRITE(RADEON_LAST_FRAME_REG, 0);
803 dev_priv->scratch[1] = 0;
804 RADEON_WRITE(RADEON_LAST_DISPATCH_REG, 0);
806 dev_priv->scratch[2] = 0;
807 RADEON_WRITE(RADEON_LAST_CLEAR_REG, 0);
809 radeon_do_wait_for_idle(dev_priv);
811 /* Sync everything up */
812 if (dev_priv->chip_family > CHIP_RV280) {
813 RADEON_WRITE(RADEON_ISYNC_CNTL,
814 (RADEON_ISYNC_ANY2D_IDLE3D |
815 RADEON_ISYNC_ANY3D_IDLE2D |
816 RADEON_ISYNC_WAIT_IDLEGUI |
817 RADEON_ISYNC_CPSCRATCH_IDLEGUI));
819 RADEON_WRITE(RADEON_ISYNC_CNTL,
820 (RADEON_ISYNC_ANY2D_IDLE3D |
821 RADEON_ISYNC_ANY3D_IDLE2D |
822 RADEON_ISYNC_WAIT_IDLEGUI));
/* Probe whether scratch-register writeback to system memory works on
 * this platform: zero a scratch slot, write 0xdeadbeef through the GPU
 * scratch register, and poll the memory copy for up to usec_timeout
 * iterations. Sets dev_priv->writeback_works accordingly (radeon_no_wb
 * forces it off) and disables ring/scratch writeback entirely when it
 * does not work, to avoid pointless bus-master traffic.
 */
826 static void radeon_test_writeback(drm_radeon_private_t * dev_priv)
831 if (dev_priv->mm.ring_read.bo)
832 ring_read_ptr = dev_priv->mm.ring_read.kmap.virtual;
834 ring_read_ptr = dev_priv->ring_rptr->handle;
836 /* Writeback doesn't seem to work everywhere, test it here and possibly
837 * enable it if it appears to work
839 writel(0, ring_read_ptr + RADEON_SCRATCHOFF(1));
840 RADEON_WRITE(RADEON_SCRATCH_REG1, 0xdeadbeef);
842 for (tmp = 0; tmp < dev_priv->usec_timeout; tmp++) {
843 if (readl(ring_read_ptr + RADEON_SCRATCHOFF(1)) ==
849 if (tmp < dev_priv->usec_timeout) {
850 dev_priv->writeback_works = 1;
851 DRM_INFO("writeback test succeeded in %d usecs\n", tmp);
853 dev_priv->writeback_works = 0;
854 DRM_INFO("writeback test failed\n");
856 if (radeon_no_wb == 1) {
857 dev_priv->writeback_works = 0;
858 DRM_INFO("writeback forced off\n");
861 if (!dev_priv->writeback_works) {
862 /* Disable writeback to avoid unnecessary bus master transfers */
863 RADEON_WRITE(RADEON_CP_RB_CNTL, RADEON_READ(RADEON_CP_RB_CNTL) | RADEON_RB_NO_UPDATE);
864 RADEON_WRITE(RADEON_SCRATCH_UMSK, 0);
868 /* Enable or disable IGP GART on the chip */
/* Enable or disable the IGP (RS480/RS690 class) on-chip GART. The
 * enable path programs the MC misc control, a fixed 32MB GART address
 * space, the page-table base (with the upper address bits packed into
 * bits 4..11), the AGP mode/base, the aperture location, and then
 * invalidates the GART TLB cache with polled handshakes. The disable
 * path clears the address-space-size register. The `on` branch
 * structure and the polling loops' braces are not fully visible in this
 * truncated chunk.
 */
869 static void radeon_set_igpgart(drm_radeon_private_t * dev_priv, int on)
874 DRM_DEBUG("programming igp gart %08X %08lX %08X\n",
875 dev_priv->gart_vm_start,
876 (long)dev_priv->gart_info.bus_addr,
877 dev_priv->gart_size);
879 temp = IGP_READ_MCIND(dev_priv, RS480_MC_MISC_CNTL);
881 if ((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690)
882 IGP_WRITE_MCIND(RS480_MC_MISC_CNTL, (RS480_GART_INDEX_REG_EN |
883 RS690_BLOCK_GFX_D3_EN));
885 IGP_WRITE_MCIND(RS480_MC_MISC_CNTL, RS480_GART_INDEX_REG_EN);
887 IGP_WRITE_MCIND(RS480_AGP_ADDRESS_SPACE_SIZE, (RS480_GART_EN |
888 RS480_VA_SIZE_32MB));
890 temp = IGP_READ_MCIND(dev_priv, RS480_GART_FEATURE_ID);
891 IGP_WRITE_MCIND(RS480_GART_FEATURE_ID, (RS480_HANG_EN |
/* Page table base: low bits page-aligned, upper physical-address bits
 * packed into the low nibble field.
 */
896 temp = dev_priv->gart_info.bus_addr & 0xfffff000;
897 temp |= (upper_32_bits(dev_priv->gart_info.bus_addr) & 0xff) << 4;
898 IGP_WRITE_MCIND(RS480_GART_BASE, temp);
900 temp = IGP_READ_MCIND(dev_priv, RS480_AGP_MODE_CNTL);
901 IGP_WRITE_MCIND(RS480_AGP_MODE_CNTL, ((1 << RS480_REQ_TYPE_SNOOP_SHIFT) |
902 RS480_REQ_TYPE_SNOOP_DIS));
904 radeon_write_agp_base(dev_priv, dev_priv->gart_vm_start);
906 dev_priv->gart_size = 32*1024*1024;
907 temp = (((dev_priv->gart_vm_start - 1 + dev_priv->gart_size) &
908 0xffff0000) | (dev_priv->gart_vm_start >> 16));
910 radeon_write_agp_location(dev_priv, temp, 0);
912 temp = IGP_READ_MCIND(dev_priv, RS480_AGP_ADDRESS_SPACE_SIZE);
913 IGP_WRITE_MCIND(RS480_AGP_ADDRESS_SPACE_SIZE, (RS480_GART_EN |
914 RS480_VA_SIZE_32MB));
/* TLB invalidate handshake: wait for the invalidate bit to clear, set
 * it, wait again, then clear it.
 */
917 temp = IGP_READ_MCIND(dev_priv, RS480_GART_CACHE_CNTRL);
918 if ((temp & RS480_GART_CACHE_INVALIDATE) == 0)
923 IGP_WRITE_MCIND(RS480_GART_CACHE_CNTRL,
924 RS480_GART_CACHE_INVALIDATE);
927 temp = IGP_READ_MCIND(dev_priv, RS480_GART_CACHE_CNTRL);
928 if ((temp & RS480_GART_CACHE_INVALIDATE) == 0)
933 IGP_WRITE_MCIND(RS480_GART_CACHE_CNTRL, 0);
935 IGP_WRITE_MCIND(RS480_AGP_ADDRESS_SPACE_SIZE, 0);
/* Enable or disable the PCIE GART: on enable, program the discard
 * read address, page-table base, and start/end of the GART aperture,
 * park the AGP aperture out of the way (0xffffffc0 - original author
 * was unsure, note the "??"), and set the GART enable bit; on disable,
 * just clear the enable bit while preserving the other CNTL bits.
 */
939 static void radeon_set_pciegart(drm_radeon_private_t * dev_priv, int on)
941 u32 tmp = RADEON_READ_PCIE(dev_priv, RADEON_PCIE_TX_GART_CNTL);
944 DRM_DEBUG("programming pcie %08X %08lX %08X\n",
945 dev_priv->gart_vm_start,
946 (long)dev_priv->gart_info.bus_addr,
947 dev_priv->gart_size);
948 RADEON_WRITE_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_LO,
949 dev_priv->gart_vm_start);
950 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_BASE,
951 dev_priv->gart_info.bus_addr);
952 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_START_LO,
953 dev_priv->gart_vm_start);
954 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_END_LO,
955 dev_priv->gart_vm_start +
956 dev_priv->gart_size - 1);
958 radeon_write_agp_location(dev_priv, 0xffffffc0, 0); /* ?? */
960 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_CNTL,
961 RADEON_PCIE_TX_GART_EN);
963 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_CNTL,
964 tmp & ~RADEON_PCIE_TX_GART_EN);
968 /* Enable or disable PCI GART on the chip */
/* Top-level GART enable/disable dispatcher: IGP chips (RS690 or the
 * IGPGART flag) and PCIE chips are delegated to their helpers; the
 * remaining (classic PCI) chips are programmed directly via the AIC
 * registers - translate enable, page-table base, address range - with
 * the AGP aperture parked and AGP_COMMAND cleared while enabled.
 */
969 void radeon_set_pcigart(drm_radeon_private_t * dev_priv, int on)
973 if (((dev_priv->flags & RADEON_FAMILY_MASK) == CHIP_RS690) ||
974 (dev_priv->flags & RADEON_IS_IGPGART)) {
975 radeon_set_igpgart(dev_priv, on);
979 if (dev_priv->flags & RADEON_IS_PCIE) {
980 radeon_set_pciegart(dev_priv, on);
984 tmp = RADEON_READ(RADEON_AIC_CNTL);
987 RADEON_WRITE(RADEON_AIC_CNTL,
988 tmp | RADEON_PCIGART_TRANSLATE_EN);
990 /* set PCI GART page-table base address
992 RADEON_WRITE(RADEON_AIC_PT_BASE, dev_priv->gart_info.bus_addr);
994 /* set address range for PCI address translate
996 RADEON_WRITE(RADEON_AIC_LO_ADDR, dev_priv->gart_vm_start);
997 RADEON_WRITE(RADEON_AIC_HI_ADDR, dev_priv->gart_vm_start
998 + dev_priv->gart_size - 1);
1000 /* Turn off AGP aperture -- is this required for PCI GART?
1002 radeon_write_agp_location(dev_priv, 0xffffffc0, 0);
1003 RADEON_WRITE(RADEON_AGP_COMMAND, 0); /* clear AGP_COMMAND */
1005 RADEON_WRITE(RADEON_AIC_CNTL,
1006 tmp & ~RADEON_PCIGART_TRANSLATE_EN);
/*
 * radeon_do_init_cp() - one-time CP/engine initialisation from the
 * DRM_RADEON_CP_INIT ioctl path.  Validates the drm_radeon_init_t request
 * from userspace, resolves all map regions (sarea, CP ring, ring read
 * pointer, DMA buffers, optional GART textures), programs the card memory
 * map and GART, loads the CP microcode and initialises the ring buffer.
 * Every visible error path tears down via radeon_do_cleanup_cp(dev).
 * NOTE(review): this listing is an elided excerpt of the original file;
 * returns, closing braces and some statements between the numbered lines
 * are not shown here.
 */
1010 static int radeon_do_init_cp(struct drm_device *dev, drm_radeon_init_t *init,
1011 struct drm_file *file_priv)
1013 drm_radeon_private_t *dev_priv = dev->dev_private;
1014 struct drm_radeon_master_private *master_priv = file_priv->master->driver_priv;
1018 /* if we require new memory map but we don't have it fail */
1019 if ((dev_priv->flags & RADEON_NEW_MEMMAP) && !dev_priv->new_memmap) {
1020 DRM_ERROR("Cannot initialise DRM on this card\nThis card requires a new X.org DDX for 3D\n");
1021 radeon_do_cleanup_cp(dev);
/* Userspace may force an AGP card into PCI mode; conversely, restore the
 * AGP flag when no bus-type flag would be left set at all. */
1025 if (init->is_pci && (dev_priv->flags & RADEON_IS_AGP))
1027 DRM_DEBUG("Forcing AGP card to PCI mode\n");
1028 dev_priv->flags &= ~RADEON_IS_AGP;
1030 else if (!(dev_priv->flags & (RADEON_IS_AGP | RADEON_IS_PCI | RADEON_IS_PCIE))
1033 DRM_DEBUG("Restoring AGP flag\n");
1034 dev_priv->flags |= RADEON_IS_AGP;
/* Non-AGP operation needs scatter/gather memory for the PCI GART. */
1037 if ((!(dev_priv->flags & RADEON_IS_AGP)) && !dev->sg) {
1038 DRM_ERROR("PCI GART memory not allocated!\n");
1039 radeon_do_cleanup_cp(dev);
1043 dev_priv->usec_timeout = init->usec_timeout;
1044 if (dev_priv->usec_timeout < 1 ||
1045 dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT) {
1046 DRM_DEBUG("TIMEOUT problem!\n");
1047 radeon_do_cleanup_cp(dev);
1051 /* Enable vblank on CRTC1 for older X servers
1053 dev_priv->vblank_crtc = DRM_RADEON_VBLANK_CRTC1;
1055 dev_priv->do_boxes = 0;
1056 dev_priv->cp_mode = init->cp_mode;
1058 /* We don't support anything other than bus-mastering ring mode,
1059 * but the ring can be in either AGP or PCI space for the ring
1062 if ((init->cp_mode != RADEON_CSQ_PRIBM_INDDIS) &&
1063 (init->cp_mode != RADEON_CSQ_PRIBM_INDBM)) {
1064 DRM_DEBUG("BAD cp_mode (%x)!\n", init->cp_mode);
1065 radeon_do_cleanup_cp(dev);
/* Colour/depth buffer formats derived from the requested bits-per-pixel
 * (case labels for the switch values are elided in this excerpt). */
1069 switch (init->fb_bpp) {
1071 dev_priv->color_fmt = RADEON_COLOR_FORMAT_RGB565;
1075 dev_priv->color_fmt = RADEON_COLOR_FORMAT_ARGB8888;
1078 dev_priv->front_offset = init->front_offset;
1079 dev_priv->front_pitch = init->front_pitch;
1080 dev_priv->back_offset = init->back_offset;
1081 dev_priv->back_pitch = init->back_pitch;
1083 switch (init->depth_bpp) {
1085 dev_priv->depth_fmt = RADEON_DEPTH_FORMAT_16BIT_INT_Z;
1089 dev_priv->depth_fmt = RADEON_DEPTH_FORMAT_24BIT_INT_Z;
1092 dev_priv->depth_offset = init->depth_offset;
1093 dev_priv->depth_pitch = init->depth_pitch;
1095 /* Hardware state for depth clears. Remove this if/when we no
1096 * longer clear the depth buffer with a 3D rectangle. Hard-code
1097 * all values to prevent unwanted 3D state from slipping through
1098 * and screwing with the clear operation.
1100 dev_priv->depth_clear.rb3d_cntl = (RADEON_PLANE_MASK_ENABLE |
1101 (dev_priv->color_fmt << 10) |
1102 (dev_priv->chip_family < CHIP_R200 ? RADEON_ZBLOCK16 : 0));
1104 dev_priv->depth_clear.rb3d_zstencilcntl =
1105 (dev_priv->depth_fmt |
1106 RADEON_Z_TEST_ALWAYS |
1107 RADEON_STENCIL_TEST_ALWAYS |
1108 RADEON_STENCIL_S_FAIL_REPLACE |
1109 RADEON_STENCIL_ZPASS_REPLACE |
1110 RADEON_STENCIL_ZFAIL_REPLACE | RADEON_Z_WRITE_ENABLE);
1112 dev_priv->depth_clear.se_cntl = (RADEON_FFACE_CULL_CW |
1113 RADEON_BFACE_SOLID |
1114 RADEON_FFACE_SOLID |
1115 RADEON_FLAT_SHADE_VTX_LAST |
1116 RADEON_DIFFUSE_SHADE_FLAT |
1117 RADEON_ALPHA_SHADE_FLAT |
1118 RADEON_SPECULAR_SHADE_FLAT |
1119 RADEON_FOG_SHADE_FLAT |
1120 RADEON_VTX_PIX_CENTER_OGL |
1121 RADEON_ROUND_MODE_TRUNC |
1122 RADEON_ROUND_PREC_8TH_PIX);
/* Resolve the map regions handed in by userspace; each failed lookup
 * cleans up and (in the elided lines) bails out. */
1125 dev_priv->ring_offset = init->ring_offset;
1126 dev_priv->ring_rptr_offset = init->ring_rptr_offset;
1127 dev_priv->buffers_offset = init->buffers_offset;
1128 dev_priv->gart_textures_offset = init->gart_textures_offset;
1130 master_priv->sarea = drm_getsarea(dev);
1131 if (!master_priv->sarea) {
1132 DRM_ERROR("could not find sarea!\n");
1133 radeon_do_cleanup_cp(dev);
1137 dev_priv->cp_ring = drm_core_findmap(dev, init->ring_offset);
1138 if (!dev_priv->cp_ring) {
1139 DRM_ERROR("could not find cp ring region!\n");
1140 radeon_do_cleanup_cp(dev);
1143 dev_priv->ring_rptr = drm_core_findmap(dev, init->ring_rptr_offset);
1144 if (!dev_priv->ring_rptr) {
1145 DRM_ERROR("could not find ring read pointer!\n");
1146 radeon_do_cleanup_cp(dev);
1149 dev->agp_buffer_token = init->buffers_offset;
1150 dev->agp_buffer_map = drm_core_findmap(dev, init->buffers_offset);
1151 if (!dev->agp_buffer_map) {
1152 DRM_ERROR("could not find dma buffer region!\n");
1153 radeon_do_cleanup_cp(dev);
1157 if (init->gart_textures_offset) {
1158 dev_priv->gart_textures =
1159 drm_core_findmap(dev, init->gart_textures_offset);
1160 if (!dev_priv->gart_textures) {
1161 DRM_ERROR("could not find GART texture region!\n");
1162 radeon_do_cleanup_cp(dev);
/* AGP: ioremap the ring/rptr/buffer apertures.  Otherwise the handles
 * are set to the raw map offsets (presumably already CPU-addressable on
 * the PCI path -- TODO confirm against the elided branch structure). */
1168 if (dev_priv->flags & RADEON_IS_AGP) {
1169 drm_core_ioremap(dev_priv->cp_ring, dev);
1170 drm_core_ioremap(dev_priv->ring_rptr, dev);
1171 drm_core_ioremap(dev->agp_buffer_map, dev);
1172 if (!dev_priv->cp_ring->handle ||
1173 !dev_priv->ring_rptr->handle ||
1174 !dev->agp_buffer_map->handle) {
1175 DRM_ERROR("could not find ioremap agp regions!\n");
1176 radeon_do_cleanup_cp(dev);
1182 dev_priv->cp_ring->handle = (void *)dev_priv->cp_ring->offset;
1183 dev_priv->ring_rptr->handle =
1184 (void *)dev_priv->ring_rptr->offset;
1185 dev->agp_buffer_map->handle =
1186 (void *)dev->agp_buffer_map->offset;
1188 DRM_DEBUG("dev_priv->cp_ring->handle %p\n",
1189 dev_priv->cp_ring->handle);
1190 DRM_DEBUG("dev_priv->ring_rptr->handle %p\n",
1191 dev_priv->ring_rptr->handle);
1192 DRM_DEBUG("dev->agp_buffer_map->handle %p\n",
1193 dev->agp_buffer_map->handle);
/* Framebuffer base/size read back from hardware; pitch/offset register
 * values pre-computed in the format the blitter expects (pitch in units
 * of 64, offset shifted down by 10). */
1196 dev_priv->fb_location = (radeon_read_fb_location(dev_priv) & 0xffff) << 16;
1198 ((radeon_read_fb_location(dev_priv) & 0xffff0000u) + 0x10000)
1199 - dev_priv->fb_location;
1201 dev_priv->front_pitch_offset = (((dev_priv->front_pitch / 64) << 22) |
1202 ((dev_priv->front_offset
1203 + dev_priv->fb_location) >> 10));
1205 dev_priv->back_pitch_offset = (((dev_priv->back_pitch / 64) << 22) |
1206 ((dev_priv->back_offset
1207 + dev_priv->fb_location) >> 10));
1209 dev_priv->depth_pitch_offset = (((dev_priv->depth_pitch / 64) << 22) |
1210 ((dev_priv->depth_offset
1211 + dev_priv->fb_location) >> 10));
1213 dev_priv->gart_size = init->gart_size;
1215 /* Now let's set the memory map ... */
1216 if (dev_priv->new_memmap) {
1219 DRM_INFO("Setting GART location based on new memory map\n");
1221 /* If using AGP, try to locate the AGP aperture at the same
1222 * location in the card and on the bus, though we have to
1226 if (dev_priv->flags & RADEON_IS_AGP) {
1227 base = dev->agp->base;
1228 /* Check if valid */
1229 if ((base + dev_priv->gart_size - 1) >= dev_priv->fb_location &&
1230 base < (dev_priv->fb_location + dev_priv->fb_size - 1)) {
1231 DRM_INFO("Can't use AGP base @0x%08lx, won't fit\n",
1237 /* If not or if AGP is at 0 (Macs), try to put it elsewhere */
1239 base = dev_priv->fb_location + dev_priv->fb_size;
1240 if (base < dev_priv->fb_location ||
1241 ((base + dev_priv->gart_size) & 0xfffffffful) < base)
1242 base = dev_priv->fb_location
1243 - dev_priv->gart_size;
/* GART base must be 4MB aligned (0xffc00000 mask); align down and
 * report if that changed the address. */
1245 dev_priv->gart_vm_start = base & 0xffc00000u;
1246 if (dev_priv->gart_vm_start != base)
1247 DRM_INFO("GART aligned down from 0x%08x to 0x%08x\n",
1248 base, dev_priv->gart_vm_start);
1250 DRM_INFO("Setting GART location based on old memory map\n");
1251 dev_priv->gart_vm_start = dev_priv->fb_location +
1252 RADEON_READ(RADEON_CONFIG_APER_SIZE);
1256 if (dev_priv->flags & RADEON_IS_AGP)
1257 dev_priv->gart_buffers_offset = (dev->agp_buffer_map->offset
1259 + dev_priv->gart_vm_start);
1262 dev_priv->gart_buffers_offset = (dev->agp_buffer_map->offset
1263 - (unsigned long)dev->sg->virtual
1264 + dev_priv->gart_vm_start);
1266 DRM_DEBUG("dev_priv->gart_size %d\n", dev_priv->gart_size);
1267 DRM_DEBUG("dev_priv->gart_vm_start 0x%x\n", dev_priv->gart_vm_start);
1268 DRM_DEBUG("dev_priv->gart_buffers_offset 0x%lx\n",
1269 dev_priv->gart_buffers_offset);
/* Ring bookkeeping: sizes are also kept as log2 of quad-words/oct-words
 * (drm_order), the encoding the CP registers expect. */
1271 dev_priv->ring.start = (u32 *) dev_priv->cp_ring->handle;
1272 dev_priv->ring.end = ((u32 *) dev_priv->cp_ring->handle
1273 + init->ring_size / sizeof(u32));
1274 dev_priv->ring.size = init->ring_size;
1275 dev_priv->ring.size_l2qw = drm_order(init->ring_size / 8);
1277 dev_priv->ring.rptr_update = /* init->rptr_update */ 4096;
1278 dev_priv->ring.rptr_update_l2qw = drm_order( /* init->rptr_update */ 4096 / 8);
1280 dev_priv->ring.fetch_size = /* init->fetch_size */ 32;
1281 dev_priv->ring.fetch_size_l2ow = drm_order( /* init->fetch_size */ 32 / 16);
1283 dev_priv->ring.tail_mask = (dev_priv->ring.size / sizeof(u32)) - 1;
1285 dev_priv->ring.high_mark = RADEON_RING_HIGH_MARK;
1288 if (dev_priv->flags & RADEON_IS_AGP) {
1289 /* Turn off PCI GART */
1290 radeon_set_pcigart(dev_priv, 0);
1294 dev_priv->gart_info.table_mask = DMA_BIT_MASK(32);
1295 /* if we have an offset set from userspace */
1296 if (dev_priv->pcigart_offset_set) {
1297 /* if it came from userspace - remap it */
1298 if (dev_priv->pcigart_offset_set == 1) {
1299 dev_priv->gart_info.bus_addr =
1300 dev_priv->pcigart_offset + dev_priv->fb_location;
1301 dev_priv->gart_info.mapping.offset =
1302 dev_priv->pcigart_offset + dev_priv->fb_aper_offset;
1303 dev_priv->gart_info.mapping.size =
1304 dev_priv->gart_info.table_size;
1306 /* this is done by the mm now */
1307 drm_core_ioremap(&dev_priv->gart_info.mapping, dev);
1308 dev_priv->gart_info.addr =
1309 dev_priv->gart_info.mapping.handle;
1311 memset(dev_priv->gart_info.addr, 0, dev_priv->gart_info.table_size);
1312 if (dev_priv->flags & RADEON_IS_PCIE)
1313 dev_priv->gart_info.gart_reg_if = DRM_ATI_GART_PCIE;
1315 dev_priv->gart_info.gart_reg_if = DRM_ATI_GART_PCI;
1316 dev_priv->gart_info.gart_table_location =
1319 DRM_DEBUG("Setting phys_pci_gart to %p %08lX\n",
1320 dev_priv->gart_info.addr,
1321 dev_priv->pcigart_offset);
/* No userspace GART offset: the table lives in system memory, which the
 * PCI Express parts cannot use -- fail those explicitly. */
1325 if (dev_priv->flags & RADEON_IS_PCIE) {
1327 ("Cannot use PCI Express without GART in FB memory\n");
1328 radeon_do_cleanup_cp(dev);
1331 if (dev_priv->flags & RADEON_IS_IGPGART)
1332 dev_priv->gart_info.gart_reg_if = DRM_ATI_GART_IGP;
1334 dev_priv->gart_info.gart_reg_if = DRM_ATI_GART_PCI;
1335 dev_priv->gart_info.gart_table_location =
1337 dev_priv->gart_info.addr = NULL;
1338 dev_priv->gart_info.bus_addr = 0;
1342 if (!drm_ati_pcigart_init(dev, &dev_priv->gart_info)) {
1343 DRM_ERROR("failed to init PCI GART!\n");
1344 radeon_do_cleanup_cp(dev);
1348 /* Turn on PCI GART */
1349 radeon_set_pcigart(dev_priv, 1);
1352 /* Start with assuming that writeback doesn't work */
1353 dev_priv->writeback_works = 0;
1355 radeon_cp_load_microcode(dev_priv);
1356 radeon_cp_init_ring_buffer(dev, dev_priv);
1358 dev_priv->last_buf = 0;
1360 radeon_do_engine_reset(dev);
1361 radeon_test_writeback(dev_priv);
/*
 * radeon_do_cleanup_cp() - undo everything radeon_do_init_cp() set up:
 * interrupts, the AGP ioremaps, the PCI GART table/mapping, and finally
 * the init-time portion of dev_priv.  Also used on init error paths, so
 * every teardown step is guarded against never-initialised state.
 * NOTE(review): elided excerpt; some lines between the numbered lines
 * are not shown.
 */
1366 static int radeon_do_cleanup_cp(struct drm_device * dev)
1368 drm_radeon_private_t *dev_priv = dev->dev_private;
1371 /* Make sure interrupts are disabled here because the uninstall ioctl
1372 * may not have been called from userspace and after dev_private
1373 * is freed, it's too late.
1375 if (dev->irq_enabled)
1376 drm_irq_uninstall(dev);
/* AGP path: release the three regions ioremapped at init time. */
1379 if (dev_priv->flags & RADEON_IS_AGP) {
1380 if (dev_priv->cp_ring != NULL) {
1381 drm_core_ioremapfree(dev_priv->cp_ring, dev);
1382 dev_priv->cp_ring = NULL;
1384 if (dev_priv->ring_rptr != NULL) {
1385 drm_core_ioremapfree(dev_priv->ring_rptr, dev);
1386 dev_priv->ring_rptr = NULL;
1388 if (dev->agp_buffer_map != NULL) {
1389 drm_core_ioremapfree(dev->agp_buffer_map, dev);
1390 dev->agp_buffer_map = NULL;
/* PCI GART path: disable translation, free the table, and drop the
 * in-FB mapping if userspace had supplied an offset. */
1396 if (dev_priv->gart_info.bus_addr) {
1397 /* Turn off PCI GART */
1398 radeon_set_pcigart(dev_priv, 0);
1399 drm_ati_pcigart_cleanup(dev, &dev_priv->gart_info);
1402 if (dev_priv->gart_info.gart_table_location == DRM_ATI_GART_FB)
1404 if (dev_priv->pcigart_offset_set == 1) {
1405 drm_core_ioremapfree(&dev_priv->gart_info.mapping, dev);
1406 dev_priv->gart_info.addr = NULL;
1407 dev_priv->pcigart_offset_set = 0;
1411 /* only clear to the start of flags */
1412 memset(dev_priv, 0, offsetof(drm_radeon_private_t, flags))
1417 /* This code will reinit the Radeon CP hardware after a resume from disc.
1418 * AFAIK, it would be very difficult to pickle the state at suspend time, so
1419 * here we make sure that all Radeon hardware initialisation is re-done without
1420 * affecting running applications.
1422 * Charl P. Botha <http://cpbotha.net>
/*
 * radeon_do_resume_cp() - re-run the hardware side of CP initialisation
 * after a resume from suspend: reprogram the GART, reload the microcode,
 * re-init the ring buffer, reset the engine and re-enable software
 * interrupts.  Software state is deliberately left untouched so running
 * clients keep working.
 * NOTE(review): elided excerpt; some lines are not shown.
 */
1424 static int radeon_do_resume_cp(struct drm_device * dev)
1426 drm_radeon_private_t *dev_priv = dev->dev_private;
1429 DRM_ERROR("Called with no initialization\n");
1433 DRM_DEBUG("Starting radeon_do_resume_cp()\n");
1436 if (dev_priv->flags & RADEON_IS_AGP) {
1437 /* Turn off PCI GART */
1438 radeon_set_pcigart(dev_priv, 0);
1442 /* Turn on PCI GART */
1443 radeon_set_pcigart(dev_priv, 1);
1446 radeon_cp_load_microcode(dev_priv);
1447 radeon_cp_init_ring_buffer(dev, dev_priv);
1449 radeon_do_engine_reset(dev);
1450 radeon_irq_set_state(dev, RADEON_SW_INT_ENABLE, 1);
1452 DRM_DEBUG("radeon_do_resume_cp() complete\n");
/*
 * radeon_cp_init() - DRM_RADEON_CP_INIT ioctl entry point.  Dispatches to
 * radeon_do_init_cp() or radeon_do_cleanup_cp() based on init->func; a
 * no-op on modesetting drivers.
 */
1457 int radeon_cp_init(struct drm_device *dev, void *data, struct drm_file *file_priv)
1459 drm_radeon_init_t *init = data;
1461 /* on a modesetting driver ignore this stuff */
1462 if (drm_core_check_feature(dev, DRIVER_MODESET))
1465 LOCK_TEST_WITH_RETURN(dev, file_priv);
/* R300-class init additionally needs the packet-checker register table. */
1467 if (init->func == RADEON_INIT_R300_CP)
1468 r300_init_reg_flags(dev);
1470 switch (init->func) {
1471 case RADEON_INIT_CP:
1472 case RADEON_INIT_R200_CP:
1473 case RADEON_INIT_R300_CP:
1474 return radeon_do_init_cp(dev, init, file_priv);
1475 case RADEON_CLEANUP_CP:
1476 return radeon_do_cleanup_cp(dev);
/*
 * radeon_cp_start() - ioctl: start the CP ring.  Refuses a redundant
 * start while already running and the fully-disabled CSQ mode; a no-op
 * on modesetting drivers.
 */
1482 int radeon_cp_start(struct drm_device *dev, void *data, struct drm_file *file_priv)
1484 drm_radeon_private_t *dev_priv = dev->dev_private;
1487 if (drm_core_check_feature(dev, DRIVER_MODESET))
1490 LOCK_TEST_WITH_RETURN(dev, file_priv);
1492 if (dev_priv->cp_running) {
1493 DRM_DEBUG("while CP running\n");
1496 if (dev_priv->cp_mode == RADEON_CSQ_PRIDIS_INDDIS) {
1497 DRM_DEBUG("called with bogus CP mode (%d)\n",
1502 radeon_do_cp_start(dev_priv);
/*
 * radeon_cp_stop() - ioctl: flush pending CP commands, wait for engine
 * idle (propagating the error so the DRM wrapper can retry), then stop
 * and reset the engine.
 */
1507 /* Stop the CP. The engine must have been idled before calling this
1510 int radeon_cp_stop(struct drm_device *dev, void *data, struct drm_file *file_priv)
1512 drm_radeon_private_t *dev_priv = dev->dev_private;
1513 drm_radeon_cp_stop_t *stop = data;
1517 if (drm_core_check_feature(dev, DRIVER_MODESET))
1520 LOCK_TEST_WITH_RETURN(dev, file_priv);
1522 if (!dev_priv->cp_running)
1525 /* Flush any pending CP commands. This ensures any outstanding
1526 * commands are executed by the engine before we turn it off.
1529 radeon_do_cp_flush(dev_priv);
1532 /* If we fail to make the engine go idle, we return an error
1533 * code so that the DRM ioctl wrapper can try again.
1536 ret = radeon_do_cp_idle(dev_priv);
1541 /* Finally, we can turn off the CP. If the engine isn't idle,
1542 * we will get some dropped triangles as they won't be fully
1543 * rendered before the CP is shut down.
1545 radeon_do_cp_stop(dev_priv);
1547 /* Reset the engine */
1548 radeon_do_engine_reset(dev);
/*
 * radeon_do_release() - driver teardown on final close: idle and stop the
 * CP, mask all interrupts, clear the surface registers, free the memory
 * heaps (and the GEM mm if enabled), then run the full cleanup path.
 * A no-op on modesetting drivers.
 * NOTE(review): elided excerpt; some lines are not shown.
 */
1553 void radeon_do_release(struct drm_device * dev)
1555 drm_radeon_private_t *dev_priv = dev->dev_private;
1558 if (drm_core_check_feature(dev, DRIVER_MODESET))
/* Poll (with short sleeps, BSD-specific primitives below) until the
 * engine reports idle before stopping it. */
1562 if (dev_priv->cp_running) {
1564 while ((ret = radeon_do_cp_idle(dev_priv)) != 0) {
1565 DRM_DEBUG("radeon_do_cp_idle %d\n", ret);
1569 #if defined(__FreeBSD__) && __FreeBSD_version > 500000
1570 mtx_sleep(&ret, &dev->dev_lock, PZERO, "rdnrel",
1573 tsleep(&ret, PZERO, "rdnrel", 1);
1577 radeon_do_cp_stop(dev_priv);
1578 radeon_do_engine_reset(dev);
1581 /* Disable *all* interrupts */
1582 if (dev_priv->mmio) /* remove this after permanent addmaps */
1583 RADEON_WRITE(RADEON_GEN_INT_CNTL, 0);
1585 if (dev_priv->mmio) { /* remove all surfaces */
1586 for (i = 0; i < RADEON_MAX_SURFACES; i++) {
1587 RADEON_WRITE(RADEON_SURFACE0_INFO + 16 * i, 0);
1588 RADEON_WRITE(RADEON_SURFACE0_LOWER_BOUND +
1590 RADEON_WRITE(RADEON_SURFACE0_UPPER_BOUND +
1595 /* Free memory heap structures */
1596 radeon_mem_takedown(&(dev_priv->gart_heap));
1597 radeon_mem_takedown(&(dev_priv->fb_heap));
1599 if (dev_priv->user_mm_enable) {
1600 radeon_gem_mm_fini(dev);
1601 dev_priv->user_mm_enable = false;
1604 /* deallocate kernel resources */
1605 radeon_do_cleanup_cp(dev);
/*
 * radeon_cp_reset() - DRM_RADEON_CP_RESET ioctl: reset the CP ring and
 * clear cp_running, since the ring does not survive an engine reset.
 */
1609 /* Just reset the CP ring. Called as part of an X Server engine reset.
1611 int radeon_cp_reset(struct drm_device *dev, void *data, struct drm_file *file_priv)
1613 drm_radeon_private_t *dev_priv = dev->dev_private;
1616 if (drm_core_check_feature(dev, DRIVER_MODESET))
1619 LOCK_TEST_WITH_RETURN(dev, file_priv);
1622 DRM_DEBUG("called before init done\n");
1626 radeon_do_cp_reset(dev_priv);
1628 /* The CP is no longer running after an engine reset */
1629 dev_priv->cp_running = 0;
/*
 * radeon_cp_idle() - ioctl: wait for the engine to go idle.  Note the
 * heavyweight-lock test is only applied on non-modesetting drivers.
 */
1634 int radeon_cp_idle(struct drm_device *dev, void *data, struct drm_file *file_priv)
1636 drm_radeon_private_t *dev_priv = dev->dev_private;
1640 if (!drm_core_check_feature(dev, DRIVER_MODESET))
1641 LOCK_TEST_WITH_RETURN(dev, file_priv);
1643 return radeon_do_cp_idle(dev_priv);
/* ioctl wrapper around radeon_do_resume_cp(); ignored under KMS. */
1646 /* Added by Charl P. Botha to call radeon_do_resume_cp().
1648 int radeon_cp_resume(struct drm_device *dev, void *data, struct drm_file *file_priv)
1651 if (drm_core_check_feature(dev, DRIVER_MODESET))
1654 return radeon_do_resume_cp(dev);
/* ioctl: hard-reset the graphics engine; ignored under KMS. */
1657 int radeon_engine_reset(struct drm_device *dev, void *data, struct drm_file *file_priv)
1661 if (drm_core_check_feature(dev, DRIVER_MODESET))
1664 LOCK_TEST_WITH_RETURN(dev, file_priv);
1666 return radeon_do_engine_reset(dev);
1669 /* ================================================================
1673 /* KW: Deprecated to say the least:
1675 int radeon_fullscreen(struct drm_device *dev, void *data, struct drm_file *file_priv)
1680 /* ================================================================
1681 * Freelist management
1684 /* Original comment: FIXME: ROTATE_BUFS is a hack to cycle through
1685 * bufs until freelist code is used. Note this hides a problem with
1686 * the scratch register * (used to keep track of last buffer
1687 * completed) being written to before * the last buffer has actually
1688 * completed rendering.
1690 * KW: It's also a good way to find free buffers quickly.
1692 * KW: Ideally this loop wouldn't exist, and freelist_get wouldn't
1693 * sleep. However, bugs in older versions of radeon_accel.c mean that
1694 * we essentially have to do this, else old clients will break.
1696 * However, it does leave open a potential deadlock where all the
1697 * buffers are held by other clients, which can't release them because
1698 * they can't get the lock.
/*
 * radeon_freelist_get() - find a reusable DMA buffer.  Scans from just
 * past the last buffer handed out, retrying up to usec_timeout times
 * while the hardware retires pending buffers (done age read from scratch
 * register 1); returns NULL when nothing frees up in time.
 * NOTE(review): elided excerpt; the inner age comparison and the success
 * return are not fully shown.
 */
1701 struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1703 struct drm_device_dma *dma = dev->dma;
1704 drm_radeon_private_t *dev_priv = dev->dev_private;
1705 drm_radeon_buf_priv_t *buf_priv;
1706 struct drm_buf *buf;
1710 if (++dev_priv->last_buf >= dma->buf_count)
1711 dev_priv->last_buf = 0;
1713 start = dev_priv->last_buf;
1715 for (t = 0; t < dev_priv->usec_timeout; t++) {
1716 u32 done_age = GET_SCRATCH(1);
1717 DRM_DEBUG("done_age = %d\n", done_age);
1718 for (i = start; i < dma->buf_count; i++) {
1719 buf = dma->buflist[i];
1720 buf_priv = buf->dev_private;
/* A buffer is reusable when unowned, or pending and already retired. */
1721 if (buf->file_priv == NULL || (buf->pending &&
1724 dev_priv->stats.requested_bufs++;
1733 dev_priv->stats.freelist_loops++;
1737 DRM_DEBUG("returning NULL!\n");
/*
 * Alternate radeon_freelist_get() implementation -- presumably the
 * compiled-out (#if 0/#else) counterpart of the version above; the
 * preprocessor lines are not visible in this excerpt.  It reads the done
 * age once via the ring-read-pointer map and makes only two scan passes
 * instead of looping up to usec_timeout.
 */
1742 struct drm_buf *radeon_freelist_get(struct drm_device * dev)
1744 struct drm_device_dma *dma = dev->dma;
1745 drm_radeon_private_t *dev_priv = dev->dev_private;
1746 drm_radeon_buf_priv_t *buf_priv;
1747 struct drm_buf *buf;
1750 u32 done_age = DRM_READ32(dev_priv->ring_rptr, RADEON_SCRATCHOFF(1));
1752 if (++dev_priv->last_buf >= dma->buf_count)
1753 dev_priv->last_buf = 0;
1755 start = dev_priv->last_buf;
1756 dev_priv->stats.freelist_loops++;
1758 for (t = 0; t < 2; t++) {
1759 for (i = start; i < dma->buf_count; i++) {
1760 buf = dma->buflist[i];
1761 buf_priv = buf->dev_private;
1762 if (buf->file_priv == 0 || (buf->pending &&
1765 dev_priv->stats.requested_bufs++;
/*
 * radeon_freelist_reset() - forget the last-allocated index and reset
 * per-buffer private state for every DMA buffer (the per-buffer reset
 * itself is in the elided loop-body lines).
 */
1777 void radeon_freelist_reset(struct drm_device * dev)
1779 struct drm_device_dma *dma = dev->dma;
1780 drm_radeon_private_t *dev_priv = dev->dev_private;
1783 dev_priv->last_buf = 0;
1784 for (i = 0; i < dma->buf_count; i++) {
1785 struct drm_buf *buf = dma->buflist[i];
1786 drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
1791 /* ================================================================
1792 * CP command submission
/*
 * radeon_wait_ring() - poll the ring head until free space exceeds n.
 * Space is tracked in bytes ((head - tail) * sizeof(u32), wrapped by
 * ring->size); the loop is bounded by usec_timeout iterations.  The
 * last_head comparison lets the (elided) code distinguish a wedged ring
 * from a merely slow one, and the wait is recorded in the stats boxes.
 * NOTE(review): elided excerpt; the success and timeout returns are not
 * shown.
 */
1795 int radeon_wait_ring(drm_radeon_private_t * dev_priv, int n)
1797 drm_radeon_ring_buffer_t *ring = &dev_priv->ring;
1799 u32 last_head = GET_RING_HEAD(dev_priv);
1801 for (i = 0; i < dev_priv->usec_timeout; i++) {
1802 u32 head = GET_RING_HEAD(dev_priv);
1804 ring->space = (head - ring->tail) * sizeof(u32);
1805 if (ring->space <= 0)
1806 ring->space += ring->size;
1807 if (ring->space > n)
1810 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
1812 if (head != last_head)
1819 /* FIXME: This return value is ignored in the BEGIN_RING macro! */
1820 #if RADEON_FIFO_DEBUG
1821 radeon_status(dev_priv);
1822 DRM_ERROR("failed!\n");
/*
 * radeon_cp_get_buffers() - grant up to d->request_count DMA buffers to
 * the caller, copying each granted buffer's index and size back to
 * userspace.  Returns -EBUSY when the freelist runs dry (broken client).
 * NOTE(review): elided excerpt; the d parameter declaration and the
 * copy-failure returns are not shown.
 */
1827 static int radeon_cp_get_buffers(struct drm_device *dev,
1828 struct drm_file *file_priv,
1832 struct drm_buf *buf;
1834 for (i = d->granted_count; i < d->request_count; i++) {
1835 buf = radeon_freelist_get(dev);
1837 return -EBUSY; /* NOTE: broken client */
1839 buf->file_priv = file_priv;
1841 if (DRM_COPY_TO_USER(&d->request_indices[i], &buf->idx,
1844 if (DRM_COPY_TO_USER(&d->request_sizes[i], &buf->total,
1845 sizeof(buf->total)))
/*
 * radeon_cp_buffers() - drmDMA ioctl handler: rejects any attempt to
 * send buffers, bounds-checks the request count against the buffer pool,
 * then hands out buffers via radeon_cp_get_buffers().
 */
1853 int radeon_cp_buffers(struct drm_device *dev, void *data, struct drm_file *file_priv)
1855 struct drm_device_dma *dma = dev->dma;
1857 struct drm_dma *d = data;
1859 LOCK_TEST_WITH_RETURN(dev, file_priv);
1861 /* Please don't send us buffers.
1863 if (d->send_count != 0) {
1864 DRM_ERROR("Process %d trying to send %d buffers via drmDMA\n",
1865 DRM_CURRENTPID, d->send_count);
1869 /* We'll send you buffers.
1871 if (d->request_count < 0 || d->request_count > dma->buf_count) {
1872 DRM_ERROR("Process %d trying to get %d buffers (of %d max)\n",
1873 DRM_CURRENTPID, d->request_count, dma->buf_count);
1877 d->granted_count = 0;
1879 if (d->request_count) {
1880 ret = radeon_cp_get_buffers(dev, file_priv, d);
/*
 * radeon_get_vram_type() - detect the memory type (DDR vs SDR) and bus
 * width per chip family, recording them in dev_priv->is_ddr and
 * dev_priv->ram_width for later bandwidth/tuning decisions.
 * NOTE(review): elided excerpt; some case labels, breaks and the
 * chansize assignments are not shown.
 */
1886 static void radeon_get_vram_type(struct drm_device *dev)
1888 struct drm_radeon_private *dev_priv = dev->dev_private;
/* IGPs and everything from R300 onward are DDR; older parts report the
 * type in MEM_SDRAM_MODE_REG. */
1891 if (dev_priv->flags & RADEON_IS_IGP || (dev_priv->chip_family >= CHIP_R300))
1892 dev_priv->is_ddr = true;
1893 else if (RADEON_READ(RADEON_MEM_SDRAM_MODE_REG) & RADEON_MEM_CFG_TYPE_DDR)
1894 dev_priv->is_ddr = true;
1896 dev_priv->is_ddr = false;
/* R600..RV635: width is a per-family multiple of the RAMCFG channel
 * size (chansize set in the elided branches). */
1898 if ((dev_priv->chip_family >= CHIP_R600) &&
1899 (dev_priv->chip_family <= CHIP_RV635)) {
1902 tmp = RADEON_READ(R600_RAMCFG);
1903 if (tmp & R600_CHANSIZE_OVERRIDE)
1905 else if (tmp & R600_CHANSIZE)
1910 if (dev_priv->chip_family == CHIP_R600)
1911 dev_priv->ram_width = 8 * chansize;
1912 else if (dev_priv->chip_family == CHIP_RV670)
1913 dev_priv->ram_width = 4 * chansize;
1914 else if ((dev_priv->chip_family == CHIP_RV610) ||
1915 (dev_priv->chip_family == CHIP_RV620))
1916 dev_priv->ram_width = chansize;
1917 else if ((dev_priv->chip_family == CHIP_RV630) ||
1918 (dev_priv->chip_family == CHIP_RV635))
1919 dev_priv->ram_width = 2 * chansize;
1920 } else if (dev_priv->chip_family == CHIP_RV515) {
1921 tmp = radeon_read_mc_reg(dev_priv, RV515_MC_CNTL);
1922 tmp &= RV515_MEM_NUM_CHANNELS_MASK;
1924 case 0: dev_priv->ram_width = 64; break;
1925 case 1: dev_priv->ram_width = 128; break;
1926 default: dev_priv->ram_width = 128; break;
1928 } else if ((dev_priv->chip_family >= CHIP_R520) &&
1929 (dev_priv->chip_family <= CHIP_RV570)) {
1930 tmp = radeon_read_mc_reg(dev_priv, R520_MC_CNTL0);
1931 switch ((tmp & R520_MEM_NUM_CHANNELS_MASK) >> R520_MEM_NUM_CHANNELS_SHIFT) {
1932 case 0: dev_priv->ram_width = 32; break;
1933 case 1: dev_priv->ram_width = 64; break;
1934 case 2: dev_priv->ram_width = 128; break;
1935 case 3: dev_priv->ram_width = 256; break;
1936 default: dev_priv->ram_width = 128; break;
1938 } else if ((dev_priv->chip_family == CHIP_RV100) ||
1939 (dev_priv->chip_family == CHIP_RS100) ||
1940 (dev_priv->chip_family == CHIP_RS200)) {
1941 tmp = RADEON_READ(RADEON_MEM_CNTL);
1942 if (tmp & RV100_HALF_MODE)
1943 dev_priv->ram_width = 32;
1945 dev_priv->ram_width = 64;
1947 if (dev_priv->flags & RADEON_SINGLE_CRTC) {
1948 dev_priv->ram_width /= 4;
1949 dev_priv->is_ddr = true;
1951 } else if (dev_priv->chip_family <= CHIP_RV280) {
1952 tmp = RADEON_READ(RADEON_MEM_CNTL);
1953 if (tmp & RADEON_MEM_NUM_CHANNELS_MASK)
1954 dev_priv->ram_width = 128;
1956 dev_priv->ram_width = 64;
/* Fallback for any family not handled above. */
1959 dev_priv->ram_width = 128;
1961 DRM_DEBUG("RAM width %d bits %cDR\n", dev_priv->ram_width, dev_priv->is_ddr ? 'D' : 'S');
/*
 * radeon_force_some_clocks() - force the CP and VIP source clocks on in
 * SCLK_CNTL so those units keep clocking even with dynamic clock gating
 * enabled.
 */
1964 static void radeon_force_some_clocks(struct drm_device *dev)
1966 struct drm_radeon_private *dev_priv = dev->dev_private;
1969 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
1970 tmp |= RADEON_SCLK_FORCE_CP | RADEON_SCLK_FORCE_VIP;
1971 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
1974 static void radeon_set_dynamic_clock(struct drm_device *dev, int mode)
1976 struct drm_radeon_private *dev_priv = dev->dev_private;
1981 if (dev_priv->flags & RADEON_SINGLE_CRTC) {
1982 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
1983 tmp |= (RADEON_SCLK_FORCE_CP | RADEON_SCLK_FORCE_HDP |
1984 RADEON_SCLK_FORCE_DISP1 | RADEON_SCLK_FORCE_TOP |
1985 RADEON_SCLK_FORCE_E2 | RADEON_SCLK_FORCE_SE |
1986 RADEON_SCLK_FORCE_IDCT | RADEON_SCLK_FORCE_VIP |
1987 RADEON_SCLK_FORCE_RE | RADEON_SCLK_FORCE_PB |
1988 RADEON_SCLK_FORCE_TAM | RADEON_SCLK_FORCE_TDM |
1989 RADEON_SCLK_FORCE_RB);
1990 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
1991 } else if (dev_priv->chip_family == CHIP_RV350) {
1992 /* for RV350/M10, no delays are required. */
1993 tmp = RADEON_READ_PLL(dev_priv, R300_SCLK_CNTL2);
1994 tmp |= (R300_SCLK_FORCE_TCL |
1995 R300_SCLK_FORCE_GA |
1996 R300_SCLK_FORCE_CBA);
1997 RADEON_WRITE_PLL(dev_priv, R300_SCLK_CNTL2, tmp);
1999 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2000 tmp &= ~(RADEON_SCLK_FORCE_DISP2 | RADEON_SCLK_FORCE_CP |
2001 RADEON_SCLK_FORCE_HDP | RADEON_SCLK_FORCE_DISP1 |
2002 RADEON_SCLK_FORCE_TOP | RADEON_SCLK_FORCE_E2 |
2003 R300_SCLK_FORCE_VAP | RADEON_SCLK_FORCE_IDCT |
2004 RADEON_SCLK_FORCE_VIP | R300_SCLK_FORCE_SR |
2005 R300_SCLK_FORCE_PX | R300_SCLK_FORCE_TX |
2006 R300_SCLK_FORCE_US | RADEON_SCLK_FORCE_TV_SCLK |
2007 R300_SCLK_FORCE_SU | RADEON_SCLK_FORCE_OV0);
2008 tmp |= RADEON_DYN_STOP_LAT_MASK;
2009 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2011 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_MORE_CNTL);
2012 tmp &= ~RADEON_SCLK_MORE_FORCEON;
2013 tmp |= RADEON_SCLK_MORE_MAX_DYN_STOP_LAT;
2014 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_MORE_CNTL, tmp);
2016 tmp = RADEON_READ_PLL(dev_priv, RADEON_VCLK_ECP_CNTL);
2017 tmp |= (RADEON_PIXCLK_ALWAYS_ONb |
2018 RADEON_PIXCLK_DAC_ALWAYS_ONb);
2019 RADEON_WRITE_PLL(dev_priv, RADEON_VCLK_ECP_CNTL, tmp);
2021 tmp = RADEON_READ_PLL(dev_priv, RADEON_PIXCLKS_CNTL);
2022 tmp |= (RADEON_PIX2CLK_ALWAYS_ONb |
2023 RADEON_PIX2CLK_DAC_ALWAYS_ONb |
2024 RADEON_DISP_TVOUT_PIXCLK_TV_ALWAYS_ONb |
2025 R300_DVOCLK_ALWAYS_ONb |
2026 RADEON_PIXCLK_BLEND_ALWAYS_ONb |
2027 RADEON_PIXCLK_GV_ALWAYS_ONb |
2028 R300_PIXCLK_DVO_ALWAYS_ONb |
2029 RADEON_PIXCLK_LVDS_ALWAYS_ONb |
2030 RADEON_PIXCLK_TMDS_ALWAYS_ONb |
2031 R300_PIXCLK_TRANS_ALWAYS_ONb |
2032 R300_PIXCLK_TVO_ALWAYS_ONb |
2033 R300_P2G2CLK_ALWAYS_ONb |
2034 R300_P2G2CLK_ALWAYS_ONb);
2035 RADEON_WRITE_PLL(dev_priv, RADEON_PIXCLKS_CNTL, tmp);
2037 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2038 tmp |= (RADEON_SCLK_FORCE_CP | RADEON_SCLK_FORCE_E2);
2039 tmp |= RADEON_SCLK_FORCE_SE;
2041 if ( dev_priv->flags & RADEON_SINGLE_CRTC ) {
2042 tmp |= ( RADEON_SCLK_FORCE_RB |
2043 RADEON_SCLK_FORCE_TDM |
2044 RADEON_SCLK_FORCE_TAM |
2045 RADEON_SCLK_FORCE_PB |
2046 RADEON_SCLK_FORCE_RE |
2047 RADEON_SCLK_FORCE_VIP |
2048 RADEON_SCLK_FORCE_IDCT |
2049 RADEON_SCLK_FORCE_TOP |
2050 RADEON_SCLK_FORCE_DISP1 |
2051 RADEON_SCLK_FORCE_DISP2 |
2052 RADEON_SCLK_FORCE_HDP );
2053 } else if ((dev_priv->chip_family == CHIP_R300) ||
2054 (dev_priv->chip_family == CHIP_R350)) {
2055 tmp |= ( RADEON_SCLK_FORCE_HDP |
2056 RADEON_SCLK_FORCE_DISP1 |
2057 RADEON_SCLK_FORCE_DISP2 |
2058 RADEON_SCLK_FORCE_TOP |
2059 RADEON_SCLK_FORCE_IDCT |
2060 RADEON_SCLK_FORCE_VIP);
2063 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2067 if ((dev_priv->chip_family == CHIP_R300) ||
2068 (dev_priv->chip_family == CHIP_R350)) {
2069 tmp = RADEON_READ_PLL(dev_priv, R300_SCLK_CNTL2);
2070 tmp |= ( R300_SCLK_FORCE_TCL |
2071 R300_SCLK_FORCE_GA |
2072 R300_SCLK_FORCE_CBA);
2073 RADEON_WRITE_PLL(dev_priv, R300_SCLK_CNTL2, tmp);
2077 if (dev_priv->flags & RADEON_IS_IGP) {
2078 tmp = RADEON_READ_PLL(dev_priv, RADEON_MCLK_CNTL);
2079 tmp &= ~(RADEON_FORCEON_MCLKA |
2080 RADEON_FORCEON_YCLKA);
2081 RADEON_WRITE_PLL(dev_priv, RADEON_MCLK_CNTL, tmp);
2085 if ((dev_priv->chip_family == CHIP_RV200) ||
2086 (dev_priv->chip_family == CHIP_RV250) ||
2087 (dev_priv->chip_family == CHIP_RV280)) {
2088 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_MORE_CNTL);
2089 tmp |= RADEON_SCLK_MORE_FORCEON;
2090 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_MORE_CNTL, tmp);
2094 tmp = RADEON_READ_PLL(dev_priv, RADEON_PIXCLKS_CNTL);
2095 tmp &= ~(RADEON_PIX2CLK_ALWAYS_ONb |
2096 RADEON_PIX2CLK_DAC_ALWAYS_ONb |
2097 RADEON_PIXCLK_BLEND_ALWAYS_ONb |
2098 RADEON_PIXCLK_GV_ALWAYS_ONb |
2099 RADEON_PIXCLK_DIG_TMDS_ALWAYS_ONb |
2100 RADEON_PIXCLK_LVDS_ALWAYS_ONb |
2101 RADEON_PIXCLK_TMDS_ALWAYS_ONb);
2103 RADEON_WRITE_PLL(dev_priv, RADEON_PIXCLKS_CNTL, tmp);
2106 tmp = RADEON_READ_PLL(dev_priv, RADEON_VCLK_ECP_CNTL);
2107 tmp &= ~(RADEON_PIXCLK_ALWAYS_ONb |
2108 RADEON_PIXCLK_DAC_ALWAYS_ONb);
2109 RADEON_WRITE_PLL(dev_priv, RADEON_VCLK_ECP_CNTL, tmp);
2111 DRM_DEBUG("Dynamic Clock Scaling Disabled\n");
2114 if (dev_priv->flags & RADEON_SINGLE_CRTC) {
2115 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2116 if ((RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) >
2117 RADEON_CFG_ATI_REV_A13) {
2118 tmp &= ~(RADEON_SCLK_FORCE_CP | RADEON_SCLK_FORCE_RB);
2120 tmp &= ~(RADEON_SCLK_FORCE_HDP | RADEON_SCLK_FORCE_DISP1 |
2121 RADEON_SCLK_FORCE_TOP | RADEON_SCLK_FORCE_SE |
2122 RADEON_SCLK_FORCE_IDCT | RADEON_SCLK_FORCE_RE |
2123 RADEON_SCLK_FORCE_PB | RADEON_SCLK_FORCE_TAM |
2124 RADEON_SCLK_FORCE_TDM);
2125 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2126 } else if ((dev_priv->chip_family == CHIP_R300) ||
2127 (dev_priv->chip_family == CHIP_R350) ||
2128 (dev_priv->chip_family == CHIP_RV350)) {
2129 if (dev_priv->chip_family == CHIP_RV350) {
2130 tmp = RADEON_READ_PLL(dev_priv, R300_SCLK_CNTL2);
2131 tmp &= ~(R300_SCLK_FORCE_TCL |
2132 R300_SCLK_FORCE_GA |
2133 R300_SCLK_FORCE_CBA);
2134 tmp |= (R300_SCLK_TCL_MAX_DYN_STOP_LAT |
2135 R300_SCLK_GA_MAX_DYN_STOP_LAT |
2136 R300_SCLK_CBA_MAX_DYN_STOP_LAT);
2137 RADEON_WRITE_PLL(dev_priv, R300_SCLK_CNTL2, tmp);
2139 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2140 tmp &= ~(RADEON_SCLK_FORCE_DISP2 | RADEON_SCLK_FORCE_CP |
2141 RADEON_SCLK_FORCE_HDP | RADEON_SCLK_FORCE_DISP1 |
2142 RADEON_SCLK_FORCE_TOP | RADEON_SCLK_FORCE_E2 |
2143 R300_SCLK_FORCE_VAP | RADEON_SCLK_FORCE_IDCT |
2144 RADEON_SCLK_FORCE_VIP | R300_SCLK_FORCE_SR |
2145 R300_SCLK_FORCE_PX | R300_SCLK_FORCE_TX |
2146 R300_SCLK_FORCE_US | RADEON_SCLK_FORCE_TV_SCLK |
2147 R300_SCLK_FORCE_SU | RADEON_SCLK_FORCE_OV0);
2148 tmp |= RADEON_DYN_STOP_LAT_MASK;
2149 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2151 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_MORE_CNTL);
2152 tmp &= ~RADEON_SCLK_MORE_FORCEON;
2153 tmp |= RADEON_SCLK_MORE_MAX_DYN_STOP_LAT;
2154 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_MORE_CNTL, tmp);
2156 tmp = RADEON_READ_PLL(dev_priv, RADEON_VCLK_ECP_CNTL);
2157 tmp |= (RADEON_PIXCLK_ALWAYS_ONb |
2158 RADEON_PIXCLK_DAC_ALWAYS_ONb);
2159 RADEON_WRITE_PLL(dev_priv, RADEON_VCLK_ECP_CNTL, tmp);
2161 tmp = RADEON_READ_PLL(dev_priv, RADEON_PIXCLKS_CNTL);
2162 tmp |= (RADEON_PIX2CLK_ALWAYS_ONb |
2163 RADEON_PIX2CLK_DAC_ALWAYS_ONb |
2164 RADEON_DISP_TVOUT_PIXCLK_TV_ALWAYS_ONb |
2165 R300_DVOCLK_ALWAYS_ONb |
2166 RADEON_PIXCLK_BLEND_ALWAYS_ONb |
2167 RADEON_PIXCLK_GV_ALWAYS_ONb |
2168 R300_PIXCLK_DVO_ALWAYS_ONb |
2169 RADEON_PIXCLK_LVDS_ALWAYS_ONb |
2170 RADEON_PIXCLK_TMDS_ALWAYS_ONb |
2171 R300_PIXCLK_TRANS_ALWAYS_ONb |
2172 R300_PIXCLK_TVO_ALWAYS_ONb |
2173 R300_P2G2CLK_ALWAYS_ONb |
2174 R300_P2G2CLK_ALWAYS_ONb);
2175 RADEON_WRITE_PLL(dev_priv, RADEON_PIXCLKS_CNTL, tmp);
2177 tmp = RADEON_READ_PLL(dev_priv, RADEON_MCLK_MISC);
2178 tmp |= (RADEON_MC_MCLK_DYN_ENABLE |
2179 RADEON_IO_MCLK_DYN_ENABLE);
2180 RADEON_WRITE_PLL(dev_priv, RADEON_MCLK_MISC, tmp);
2182 tmp = RADEON_READ_PLL(dev_priv, RADEON_MCLK_CNTL);
2183 tmp |= (RADEON_FORCEON_MCLKA |
2184 RADEON_FORCEON_MCLKB);
2186 tmp &= ~(RADEON_FORCEON_YCLKA |
2187 RADEON_FORCEON_YCLKB |
2190 /* Some releases of vbios have set DISABLE_MC_MCLKA
2191 and DISABLE_MC_MCLKB bits in the vbios table. Setting these
2192 bits will cause H/W hang when reading video memory with dynamic clocking
2194 if ((tmp & R300_DISABLE_MC_MCLKA) &&
2195 (tmp & R300_DISABLE_MC_MCLKB)) {
2196 /* If both bits are set, then check the active channels */
2197 tmp = RADEON_READ_PLL(dev_priv, RADEON_MCLK_CNTL);
2198 if (dev_priv->ram_width == 64) {
2199 if (RADEON_READ(RADEON_MEM_CNTL) & R300_MEM_USE_CD_CH_ONLY)
2200 tmp &= ~R300_DISABLE_MC_MCLKB;
2202 tmp &= ~R300_DISABLE_MC_MCLKA;
2204 tmp &= ~(R300_DISABLE_MC_MCLKA |
2205 R300_DISABLE_MC_MCLKB);
2209 RADEON_WRITE_PLL(dev_priv, RADEON_MCLK_CNTL, tmp);
2211 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2212 tmp &= ~(R300_SCLK_FORCE_VAP);
2213 tmp |= RADEON_SCLK_FORCE_CP;
2214 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2217 tmp = RADEON_READ_PLL(dev_priv, R300_SCLK_CNTL2);
2218 tmp &= ~(R300_SCLK_FORCE_TCL |
2219 R300_SCLK_FORCE_GA |
2220 R300_SCLK_FORCE_CBA);
2221 RADEON_WRITE_PLL(dev_priv, R300_SCLK_CNTL2, tmp);
2224 tmp = RADEON_READ_PLL(dev_priv, RADEON_CLK_PWRMGT_CNTL);
2225 tmp &= ~(RADEON_ACTIVE_HILO_LAT_MASK |
2226 RADEON_DISP_DYN_STOP_LAT_MASK |
2227 RADEON_DYN_STOP_MODE_MASK);
2229 tmp |= (RADEON_ENGIN_DYNCLK_MODE |
2230 (0x01 << RADEON_ACTIVE_HILO_LAT_SHIFT));
2231 RADEON_WRITE_PLL(dev_priv, RADEON_CLK_PWRMGT_CNTL, tmp);
2234 tmp = RADEON_READ_PLL(dev_priv, RADEON_CLK_PIN_CNTL);
2235 tmp |= RADEON_SCLK_DYN_START_CNTL;
2236 RADEON_WRITE_PLL(dev_priv, RADEON_CLK_PIN_CNTL, tmp);
2239 /* When DRI is enabled, setting DYN_STOP_LAT to zero can cause some R200
2240 to lockup randomly, leave them as set by BIOS.
2242 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_CNTL);
2243 /*tmp &= RADEON_SCLK_SRC_SEL_MASK;*/
2244 tmp &= ~RADEON_SCLK_FORCEON_MASK;
2246 /*RAGE_6::A11 A12 A12N1 A13, RV250::A11 A12, R300*/
2247 if (((dev_priv->chip_family == CHIP_RV250) &&
2248 ((RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) <
2249 RADEON_CFG_ATI_REV_A13)) ||
2250 ((dev_priv->chip_family == CHIP_RV100) &&
2251 ((RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) <=
2252 RADEON_CFG_ATI_REV_A13))){
2253 tmp |= RADEON_SCLK_FORCE_CP;
2254 tmp |= RADEON_SCLK_FORCE_VIP;
2257 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_CNTL, tmp);
2259 if ((dev_priv->chip_family == CHIP_RV200) ||
2260 (dev_priv->chip_family == CHIP_RV250) ||
2261 (dev_priv->chip_family == CHIP_RV280)) {
2262 tmp = RADEON_READ_PLL(dev_priv, RADEON_SCLK_MORE_CNTL);
2263 tmp &= ~RADEON_SCLK_MORE_FORCEON;
2265 /* RV200::A11 A12 RV250::A11 A12 */
2266 if (((dev_priv->chip_family == CHIP_RV200) ||
2267 (dev_priv->chip_family == CHIP_RV250)) &&
2268 ((RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) <
2269 RADEON_CFG_ATI_REV_A13)) {
2270 tmp |= RADEON_SCLK_MORE_FORCEON;
2272 RADEON_WRITE_PLL(dev_priv, RADEON_SCLK_MORE_CNTL, tmp);
2276 /* RV200::A11 A12, RV250::A11 A12 */
2277 if (((dev_priv->chip_family == CHIP_RV200) ||
2278 (dev_priv->chip_family == CHIP_RV250)) &&
2279 ((RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) <
2280 RADEON_CFG_ATI_REV_A13)) {
2281 tmp = RADEON_READ_PLL(dev_priv, RADEON_PLL_PWRMGT_CNTL);
2282 tmp |= RADEON_TCL_BYPASS_DISABLE;
2283 RADEON_WRITE_PLL(dev_priv, RADEON_PLL_PWRMGT_CNTL, tmp);
2287 /*enable dynamic mode for display clocks (PIXCLK and PIX2CLK)*/
2288 tmp = RADEON_READ_PLL(dev_priv, RADEON_PIXCLKS_CNTL);
2289 tmp |= (RADEON_PIX2CLK_ALWAYS_ONb |
2290 RADEON_PIX2CLK_DAC_ALWAYS_ONb |
2291 RADEON_PIXCLK_BLEND_ALWAYS_ONb |
2292 RADEON_PIXCLK_GV_ALWAYS_ONb |
2293 RADEON_PIXCLK_DIG_TMDS_ALWAYS_ONb |
2294 RADEON_PIXCLK_LVDS_ALWAYS_ONb |
2295 RADEON_PIXCLK_TMDS_ALWAYS_ONb);
2297 RADEON_WRITE_PLL(dev_priv, RADEON_PIXCLKS_CNTL, tmp);
2300 tmp = RADEON_READ_PLL(dev_priv, RADEON_VCLK_ECP_CNTL);
2301 tmp |= (RADEON_PIXCLK_ALWAYS_ONb |
2302 RADEON_PIXCLK_DAC_ALWAYS_ONb);
2304 RADEON_WRITE_PLL(dev_priv, RADEON_VCLK_ECP_CNTL, tmp);
2307 DRM_DEBUG("Dynamic Clock Scaling Enabled\n");
/* radeon_modeset_cp_suspend() - quiesce the command processor for suspend.
 *
 * Idles the CP (the idle failure is logged but not fatal, since the
 * engine is stopped and reset immediately afterwards), stops the CP,
 * resets the engine, and tears down the PCI GART mapping.
 *
 * NOTE(review): structural lines (braces/return) are missing from this
 * excerpt; code left byte-identical.
 */
int radeon_modeset_cp_suspend(struct drm_device *dev)
	drm_radeon_private_t *dev_priv = dev->dev_private;
	/* Drain outstanding CP work before stopping the engine. */
	ret = radeon_do_cp_idle(dev_priv);
		DRM_ERROR("failed to idle CP on suspend\n");
	radeon_do_cp_stop(dev_priv);
	radeon_do_engine_reset(dev);
	/* NOTE(review): presumably the GART disable belongs to the non-AGP
	 * path, mirroring radeon_modeset_cp_resume() — confirm against the
	 * full source; the branch body is cut off in this excerpt. */
	if (dev_priv->flags & RADEON_IS_AGP) {
		radeon_set_pcigart(dev_priv, 0);
2334 int radeon_modeset_cp_resume(struct drm_device *dev)
2336 drm_radeon_private_t *dev_priv = dev->dev_private;
2338 radeon_do_wait_for_idle(dev_priv);
2340 if (dev_priv->flags & RADEON_IS_AGP) {
2341 /* Turn off PCI GART */
2342 radeon_set_pcigart(dev_priv, 0);
2346 /* Turn on PCI GART */
2347 radeon_set_pcigart(dev_priv, 1);
2349 radeon_gart_flush(dev);
2351 radeon_cp_load_microcode(dev_priv);
2352 radeon_cp_init_ring_buffer(dev, dev_priv);
2354 radeon_do_engine_reset(dev);
2356 radeon_do_cp_start(dev_priv);
2361 int radeon_modeset_agp_init(struct drm_device *dev)
2363 drm_radeon_private_t *dev_priv = dev->dev_private;
2364 struct drm_agp_mode mode;
2365 struct drm_agp_info info;
2368 uint32_t agp_status;
2372 ret = drm_agp_acquire(dev);
2374 DRM_ERROR("Unable to acquire AGP: %d\n", ret);
2378 ret = drm_agp_info(dev, &info);
2380 DRM_ERROR("Unable to get AGP info: %d\n", ret);
2384 mode.mode = info.mode;
2386 agp_status = (RADEON_READ(RADEON_AGP_STATUS) | RADEON_AGPv3_MODE) & mode.mode;
2387 is_v3 = !!(agp_status & RADEON_AGPv3_MODE);
2390 default_mode = (agp_status & RADEON_AGPv3_8X_MODE) ? 8 : 4;
2392 if (agp_status & RADEON_AGP_4X_MODE) default_mode = 4;
2393 else if (agp_status & RADEON_AGP_2X_MODE) default_mode = 2;
2394 else default_mode = 1;
2397 if (radeon_agpmode > 0) {
2398 if ((radeon_agpmode < (is_v3 ? 4 : 1)) ||
2399 (radeon_agpmode > (is_v3 ? 8 : 4)) ||
2400 (radeon_agpmode & (radeon_agpmode - 1))) {
2401 DRM_ERROR("Illegal AGP Mode: %d (valid %s), leaving at %d\n",
2402 radeon_agpmode, is_v3 ? "4, 8" : "1, 2, 4",
2404 radeon_agpmode = default_mode;
2407 DRM_INFO("AGP mode requested: %d\n", radeon_agpmode);
2409 radeon_agpmode = default_mode;
2411 mode.mode &= ~RADEON_AGP_MODE_MASK;
2413 switch(radeon_agpmode) {
2415 mode.mode |= RADEON_AGPv3_8X_MODE;
2419 mode.mode |= RADEON_AGPv3_4X_MODE;
2423 switch(radeon_agpmode) {
2424 case 4: mode.mode |= RADEON_AGP_4X_MODE;
2425 case 2: mode.mode |= RADEON_AGP_2X_MODE;
2428 mode.mode |= RADEON_AGP_1X_MODE;
2433 mode.mode &= ~RADEON_AGP_FW_MODE; /* disable fw */
2435 ret = drm_agp_enable(dev, mode);
2437 DRM_ERROR("Unable to enable AGP (mode = 0x%lx)\n", mode);
2441 /* workaround some hw issues */
2442 if (dev_priv->chip_family <= CHIP_R200) {
2443 RADEON_WRITE(RADEON_AGP_CNTL, RADEON_READ(RADEON_AGP_CNTL) | 0x000e0000);
2449 int radeon_modeset_cp_init(struct drm_device *dev)
2451 drm_radeon_private_t *dev_priv = dev->dev_private;
2453 /* allocate a ring and ring rptr bits from GART space */
2454 /* these are allocated in GEM files */
2456 /* Start with assuming that writeback doesn't work */
2457 dev_priv->writeback_works = 0;
2459 if (dev_priv->chip_family > CHIP_R600)
2462 dev_priv->usec_timeout = RADEON_DEFAULT_CP_TIMEOUT;
2463 dev_priv->ring.size = RADEON_DEFAULT_RING_SIZE;
2464 dev_priv->cp_mode = RADEON_CSQ_PRIBM_INDBM;
2466 dev_priv->ring.start = (u32 *)(void *)(unsigned long)dev_priv->mm.ring.kmap.virtual;
2467 dev_priv->ring.end = (u32 *)(void *)(unsigned long)dev_priv->mm.ring.kmap.virtual +
2468 dev_priv->ring.size / sizeof(u32);
2469 dev_priv->ring.size_l2qw = drm_order(dev_priv->ring.size / 8);
2470 dev_priv->ring.rptr_update = 4096;
2471 dev_priv->ring.rptr_update_l2qw = drm_order(4096 / 8);
2472 dev_priv->ring.fetch_size = 32;
2473 dev_priv->ring.fetch_size_l2ow = drm_order(32 / 16);
2474 dev_priv->ring.tail_mask = (dev_priv->ring.size / sizeof(u32)) - 1;
2475 dev_priv->ring.high_mark = RADEON_RING_HIGH_MARK;
2477 dev_priv->new_memmap = true;
2479 r300_init_reg_flags(dev);
2482 if (dev_priv->flags & RADEON_IS_AGP)
2483 radeon_modeset_agp_init(dev);
2486 return radeon_modeset_cp_resume(dev);
2489 static bool radeon_get_bios(struct drm_device *dev)
2491 drm_radeon_private_t *dev_priv = dev->dev_private;
2496 bios = pci_map_rom(dev->pdev, &size);
2500 dev_priv->bios = kmalloc(size, GFP_KERNEL);
2501 if (!dev_priv->bios) {
2502 pci_unmap_rom(dev->pdev, bios);
2506 memcpy(dev_priv->bios, bios, size);
2508 pci_unmap_rom(dev->pdev, bios);
2510 if (dev_priv->bios[0] != 0x55 || dev_priv->bios[1] != 0xaa)
2513 dev_priv->bios_header_start = radeon_bios16(dev_priv, 0x48);
2515 if (!dev_priv->bios_header_start)
2518 tmp = dev_priv->bios_header_start + 4;
2520 if (!memcmp(dev_priv->bios + tmp, "ATOM", 4) ||
2521 !memcmp(dev_priv->bios + tmp, "MOTA", 4))
2522 dev_priv->is_atom_bios = true;
2524 dev_priv->is_atom_bios = false;
2526 DRM_DEBUG("%sBIOS detected\n", dev_priv->is_atom_bios ? "ATOM" : "COM");
2529 kfree(dev_priv->bios);
2530 dev_priv->bios = NULL;
2534 int radeon_modeset_preinit(struct drm_device *dev)
2536 drm_radeon_private_t *dev_priv = dev->dev_private;
2537 static struct card_info card;
2541 card.reg_read = cail_reg_read;
2542 card.reg_write = cail_reg_write;
2543 card.mc_read = cail_mc_read;
2544 card.mc_write = cail_mc_write;
2546 ret = radeon_get_bios(dev);
2550 if (dev_priv->is_atom_bios) {
2551 dev_priv->mode_info.atom_context = atom_parse(&card, dev_priv->bios);
2552 radeon_atom_initialize_bios_scratch_regs(dev);
2554 radeon_combios_initialize_bios_scratch_regs(dev);
2556 radeon_get_clock_info(dev);
/* radeon_static_clocks_init() - one-time clock gating policy setup.
 *
 * Mobility parts (pre-AVIVO) get the legacy dynamic-clock programming;
 * AVIVO parts use the ATOM power-management/dyn-clk calls when dynamic
 * clocking is requested.  Finishes by forcing a few clocks that must
 * always run.
 *
 * NOTE(review): lines are missing from this excerpt (the RS400/RS480
 * branch body, braces, return); code left byte-identical.
 */
int radeon_static_clocks_init(struct drm_device *dev)
	drm_radeon_private_t *dev_priv = dev->dev_private;
	/* NOTE(review): the action taken for RS400/RS480 IGPs is cut off
	 * here — presumably dynamic clocking is disabled for them;
	 * confirm against the full source. */
	if (dev_priv->chip_family == CHIP_RS400 ||
	    dev_priv->chip_family == CHIP_RS480)
	/* Legacy dynamic clocking only on mobility, non-AVIVO parts. */
	if ((dev_priv->flags & RADEON_IS_MOBILITY) && !radeon_is_avivo(dev_priv)) {
		radeon_set_dynamic_clock(dev, radeon_dynclks);
	} else if (radeon_is_avivo(dev_priv)) {
		if (radeon_dynclks) {
			/* AVIVO: let the ATOM BIOS handle power management. */
			radeon_atom_static_pwrmgt_setup(dev, 1);
			radeon_atom_dyn_clk_setup(dev, 1);
	/* Some clocks must never be gated regardless of policy. */
	radeon_force_some_clocks(dev);
/* radeon_driver_load() - main driver load entry point.
 *
 * Allocates and zeroes the per-device private, classifies the chip
 * (family, bus type, feature flags), maps the register BAR, performs
 * modesetting pre-init, records PLL errata, sets up the memory manager
 * and, under DRIVER_MODESET, brings up GEM, the modeset core, the CP
 * and the IRQ.
 *
 * NOTE(review): the switch cases and the error-handling labels are
 * missing from this excerpt; code left byte-identical.
 */
int radeon_driver_load(struct drm_device *dev, unsigned long flags)
	drm_radeon_private_t *dev_priv;
	dev_priv = drm_alloc(sizeof(drm_radeon_private_t), DRM_MEM_DRIVER);
	if (dev_priv == NULL)
	memset(dev_priv, 0, sizeof(drm_radeon_private_t));
	dev->dev_private = (void *)dev_priv;
	dev_priv->flags = flags;
	/* Per-family feature flags; the case labels are cut off in this
	 * excerpt. */
	switch (flags & RADEON_FAMILY_MASK) {
		dev_priv->flags |= RADEON_HAS_HIERZ;
		/* all other chips have no hierarchical z buffer */
	dev_priv->chip_family = flags & RADEON_FAMILY_MASK;
	/* Classify the bus: AGP, PCIE, or plain PCI. */
	if (drm_device_is_agp(dev))
		dev_priv->flags |= RADEON_IS_AGP;
	else if (drm_device_is_pcie(dev))
		dev_priv->flags |= RADEON_IS_PCIE;
		dev_priv->flags |= RADEON_IS_PCI;
	DRM_DEBUG("%s card detected\n",
		  ((dev_priv->flags & RADEON_IS_AGP) ? "AGP" : (((dev_priv->flags & RADEON_IS_PCIE) ? "PCIE" : "PCI"))));
	/* radeon_agpmode == -1 means "force PCI GART on an AGP card". */
	if ((dev_priv->flags & RADEON_IS_AGP) && (radeon_agpmode == -1)) {
		DRM_INFO("Forcing AGP to PCI mode\n");
		dev_priv->flags &= ~RADEON_IS_AGP;
	/* Map the register BAR (resource 2), kernel-private, read-only
	 * for userland. */
	ret = drm_addmap(dev, drm_get_resource_start(dev, 2),
			 drm_get_resource_len(dev, 2), _DRM_REGISTERS,
			 _DRM_DRIVER | _DRM_READ_ONLY, &dev_priv->mmio);
	if (drm_core_check_feature(dev, DRIVER_MODESET))
		radeon_modeset_preinit(dev);
	radeon_get_vram_type(dev);
	/* Record PLL access errata for the detected silicon revision. */
	dev_priv->pll_errata = 0;
	if (dev_priv->chip_family == CHIP_R300 &&
	    (RADEON_READ(RADEON_CONFIG_CNTL) & RADEON_CFG_ATI_REV_ID_MASK) == RADEON_CFG_ATI_REV_A11)
		dev_priv->pll_errata |= CHIP_ERRATA_R300_CG;
	if (dev_priv->chip_family == CHIP_RV200 ||
	    dev_priv->chip_family == CHIP_RS200)
		dev_priv->pll_errata |= CHIP_ERRATA_PLL_DUMMYREADS;
	if (dev_priv->chip_family == CHIP_RV100 ||
	    dev_priv->chip_family == CHIP_RS100 ||
	    dev_priv->chip_family == CHIP_RS200)
		dev_priv->pll_errata |= CHIP_ERRATA_PLL_DELAY;
	if (drm_core_check_feature(dev, DRIVER_MODESET))
		radeon_static_clocks_init(dev);
	/* init memory manager - start with all of VRAM and a 32MB GART aperture for now */
	dev_priv->fb_aper_offset = drm_get_resource_start(dev, 0);
	drm_bo_driver_init(dev);
	if (drm_core_check_feature(dev, DRIVER_MODESET)) {
		ret = radeon_gem_mm_init(dev);
		radeon_modeset_init(dev);
		radeon_modeset_cp_init(dev);
		dev->devname = kstrdup(DRIVER_NAME, GFP_KERNEL);
		drm_irq_install(dev);
	/* Error path (label cut off in this excerpt): fall back to
	 * non-modesetting operation and drop the control minor. */
	dev->driver->driver_features &= ~DRIVER_MODESET;
	drm_put_minor(&dev->control);
2688 int radeon_master_create(struct drm_device *dev, struct drm_master *master)
2690 struct drm_radeon_master_private *master_priv;
2691 unsigned long sareapage;
2694 master_priv = drm_calloc(1, sizeof(*master_priv), DRM_MEM_DRIVER);
2698 /* prebuild the SAREA */
2699 sareapage = max(SAREA_MAX, PAGE_SIZE);
2700 ret = drm_addmap(dev, 0, sareapage, _DRM_SHM, _DRM_CONTAINS_LOCK|_DRM_DRIVER,
2701 &master_priv->sarea);
2703 DRM_ERROR("SAREA setup failed\n");
2706 master_priv->sarea_priv = master_priv->sarea->handle + sizeof(struct drm_sarea);
2707 master_priv->sarea_priv->pfCurrentPage = 0;
2709 master->driver_priv = master_priv;
2713 void radeon_master_destroy(struct drm_device *dev, struct drm_master *master)
2715 struct drm_radeon_master_private *master_priv = master->driver_priv;
2716 struct drm_radeon_private *dev_priv = dev->dev_private;
2721 if (master_priv->sarea_priv &&
2722 master_priv->sarea_priv->pfCurrentPage != 0)
2723 radeon_cp_dispatch_flip(dev, master);
2725 master_priv->sarea_priv = NULL;
2726 if (master_priv->sarea)
2727 drm_rmmap_locked(dev, master_priv->sarea);
2729 drm_free(master_priv, sizeof(*master_priv), DRM_MEM_DRIVER);
2731 master->driver_priv = NULL;
2733 /* Create mappings for registers and framebuffer so userland doesn't necessarily
2734 * have to find them.
2736 int radeon_driver_firstopen(struct drm_device *dev)
2739 drm_local_map_t *map;
2740 drm_radeon_private_t *dev_priv = dev->dev_private;
2742 dev_priv->gart_info.table_size = RADEON_PCIGART_TABLE_SIZE;
2744 if (!drm_core_check_feature(dev, DRIVER_MODESET))
2745 radeon_gem_mm_init(dev);
2747 ret = drm_addmap(dev, dev_priv->fb_aper_offset,
2748 drm_get_resource_len(dev, 0), _DRM_FRAME_BUFFER,
2749 _DRM_WRITE_COMBINING, &map);
2756 int radeon_driver_unload(struct drm_device *dev)
2758 drm_radeon_private_t *dev_priv = dev->dev_private;
2760 if (drm_core_check_feature(dev, DRIVER_MODESET)) {
2761 drm_irq_uninstall(dev);
2762 radeon_modeset_cleanup(dev);
2763 radeon_gem_mm_fini(dev);
2766 drm_bo_driver_finish(dev);
2767 drm_rmmap(dev, dev_priv->mmio);
2770 drm_free(dev_priv, sizeof(*dev_priv), DRM_MEM_DRIVER);
2772 dev->dev_private = NULL;
2776 void radeon_gart_flush(struct drm_device *dev)
2778 drm_radeon_private_t *dev_priv = dev->dev_private;
2780 if (dev_priv->flags & RADEON_IS_IGPGART) {
2781 IGP_READ_MCIND(dev_priv, RS480_GART_CACHE_CNTRL);
2782 IGP_WRITE_MCIND(RS480_GART_CACHE_CNTRL, RS480_GART_CACHE_INVALIDATE);
2783 IGP_READ_MCIND(dev_priv, RS480_GART_CACHE_CNTRL);
2784 IGP_WRITE_MCIND(RS480_GART_CACHE_CNTRL, 0);
2785 } else if (dev_priv->flags & RADEON_IS_PCIE) {
2786 u32 tmp = RADEON_READ_PCIE(dev_priv, RADEON_PCIE_TX_GART_CNTL);
2787 tmp |= RADEON_PCIE_TX_GART_INVALIDATE_TLB;
2788 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp);
2789 tmp &= ~RADEON_PCIE_TX_GART_INVALIDATE_TLB;
2790 RADEON_WRITE_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp);