if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r = r600_irq_init(rdev);
if (r) {
/* Initialize clocks */
radeon_get_clock_info(rdev->ddev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
/* initialize AGP */
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
+ r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
+ r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r = r600_irq_init(rdev);
if (r) {
/* Initialize clocks */
radeon_get_clock_info(rdev->ddev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 3);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
/* initialize memory controller */
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r100_irq_set(rdev);
rdev->config.r100.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
/* initialize VRAM */
r100_mc_init(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r100_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
/* initialize memory controller */
r300_mc_init(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r100_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
r300_mc_init(rdev);
r420_debugfs(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r) {
return r;
}
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
rs600_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
r520_mc_init(rdev);
rv515_debugfs(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
struct radeon_ring *ring = &rdev->ring[fence->ring];
if (rdev->wb.use_event) {
- u64 addr = rdev->wb.gpu_addr + R600_WB_EVENT_OFFSET +
- (u64)(rdev->fence_drv[fence->ring].scratch_reg - rdev->scratch.reg_base);
+ u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
/* flush read cache over gart */
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
radeon_ring_write(ring, PACKET3_TC_ACTION_ENA |
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r = r600_irq_init(rdev);
if (r) {
/* Initialize clocks */
radeon_get_clock_info(rdev->ddev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
if (rdev->flags & RADEON_IS_AGP) {
*/
struct radeon_fence_driver {
uint32_t scratch_reg;
+ uint64_t gpu_addr;
+ volatile uint32_t *cpu_addr;
atomic_t seq;
uint32_t last_seq;
unsigned long last_jiffies;
int ring;
};
-int radeon_fence_driver_init(struct radeon_device *rdev, int num_rings);
+int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring);
+int radeon_fence_driver_init(struct radeon_device *rdev);
void radeon_fence_driver_fini(struct radeon_device *rdev);
int radeon_fence_create(struct radeon_device *rdev, struct radeon_fence **fence, int ring);
int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence *fence);
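For readers following the header change above: the patch splits fence setup into a one-time radeon_fence_driver_init() call (invoked from the per-ASIC *_init() paths) and a per-ring radeon_fence_driver_start_ring() call (invoked from the *_startup() paths, once writeback is available so the fence GPU/CPU address can be derived). Below is a minimal sketch of that calling order, modeled on the startup hunks elsewhere in this patch; example_startup() itself is illustrative and not part of the change.

static int example_startup(struct radeon_device *rdev)
{
	int r;

	/* one-time bookkeeping for every ring; in this patch this call
	 * lives in the ASIC *_init() path, not in startup */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;

	/* ... bring up MC, writeback, etc. before starting rings ... */

	/* per-ring start: picks the fence scratch register or writeback
	 * slot and precomputes its cpu/gpu address */
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}
	return 0;
}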
static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)
{
- u32 scratch_index;
-
if (rdev->wb.enabled) {
- if (rdev->wb.use_event)
- scratch_index = R600_WB_EVENT_OFFSET +
- rdev->fence_drv[ring].scratch_reg - rdev->scratch.reg_base;
- else
- scratch_index = RADEON_WB_SCRATCH_OFFSET +
- rdev->fence_drv[ring].scratch_reg - rdev->scratch.reg_base;
- rdev->wb.wb[scratch_index/4] = cpu_to_le32(seq);
- } else
+ *rdev->fence_drv[ring].cpu_addr = cpu_to_le32(seq);
+ } else {
WREG32(rdev->fence_drv[ring].scratch_reg, seq);
+ }
}
static u32 radeon_fence_read(struct radeon_device *rdev, int ring)
{
u32 seq = 0;
- u32 scratch_index;
if (rdev->wb.enabled) {
- if (rdev->wb.use_event)
- scratch_index = R600_WB_EVENT_OFFSET +
- rdev->fence_drv[ring].scratch_reg - rdev->scratch.reg_base;
- else
- scratch_index = RADEON_WB_SCRATCH_OFFSET +
- rdev->fence_drv[ring].scratch_reg - rdev->scratch.reg_base;
- seq = le32_to_cpu(rdev->wb.wb[scratch_index/4]);
- } else
+ seq = le32_to_cpu(*rdev->fence_drv[ring].cpu_addr);
+ } else {
seq = RREG32(rdev->fence_drv[ring].scratch_reg);
+ }
return seq;
}
return not_processed;
}
-int radeon_fence_driver_init(struct radeon_device *rdev, int num_rings)
+int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring)
{
unsigned long irq_flags;
- int r, ring;
+ uint64_t index;
+ int r;
- for (ring = 0; ring < num_rings; ring++) {
- write_lock_irqsave(&rdev->fence_lock, irq_flags);
+ write_lock_irqsave(&rdev->fence_lock, irq_flags);
+ radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg);
+ if (rdev->wb.use_event) {
+ rdev->fence_drv[ring].scratch_reg = 0;
+ index = R600_WB_EVENT_OFFSET + ring * 4;
+ } else {
r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg);
if (r) {
dev_err(rdev->dev, "fence failed to get scratch register\n");
write_unlock_irqrestore(&rdev->fence_lock, irq_flags);
return r;
}
- radeon_fence_write(rdev, 0, ring);
- atomic_set(&rdev->fence_drv[ring].seq, 0);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].created);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].emitted);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].signaled);
- init_waitqueue_head(&rdev->fence_drv[ring].queue);
- rdev->fence_drv[ring].initialized = true;
- write_unlock_irqrestore(&rdev->fence_lock, irq_flags);
+ index = RADEON_WB_SCRATCH_OFFSET +
+ rdev->fence_drv[ring].scratch_reg -
+ rdev->scratch.reg_base;
}
- for (ring = num_rings; ring < RADEON_NUM_RINGS; ring++) {
- write_lock_irqsave(&rdev->fence_lock, irq_flags);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].created);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].emitted);
- INIT_LIST_HEAD(&rdev->fence_drv[ring].signaled);
- rdev->fence_drv[ring].initialized = false;
- write_unlock_irqrestore(&rdev->fence_lock, irq_flags);
+ rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
+ rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;
+ radeon_fence_write(rdev, atomic_read(&rdev->fence_drv[ring].seq), ring);
+ rdev->fence_drv[ring].initialized = true;
+ DRM_INFO("fence driver on ring %d use gpu addr 0x%08Lx and cpu addr 0x%p\n",
+ ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr);
+ write_unlock_irqrestore(&rdev->fence_lock, irq_flags);
+ return 0;
+}
+
+static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring)
+{
+ rdev->fence_drv[ring].scratch_reg = -1;
+ rdev->fence_drv[ring].cpu_addr = NULL;
+ rdev->fence_drv[ring].gpu_addr = 0;
+ atomic_set(&rdev->fence_drv[ring].seq, 0);
+ INIT_LIST_HEAD(&rdev->fence_drv[ring].created);
+ INIT_LIST_HEAD(&rdev->fence_drv[ring].emitted);
+ INIT_LIST_HEAD(&rdev->fence_drv[ring].signaled);
+ init_waitqueue_head(&rdev->fence_drv[ring].queue);
+ rdev->fence_drv[ring].initialized = false;
+}
+
+int radeon_fence_driver_init(struct radeon_device *rdev)
+{
+ unsigned long irq_flags;
+ int ring;
+
+ write_lock_irqsave(&rdev->fence_lock, irq_flags);
+ for (ring = 0; ring < RADEON_NUM_RINGS; ring++) {
+ radeon_fence_driver_init_ring(rdev, ring);
}
+ write_unlock_irqrestore(&rdev->fence_lock, irq_flags);
if (radeon_debugfs_fence_init(rdev)) {
dev_err(rdev->dev, "fence debugfs file creation failed\n");
}
for (ring = 0; ring < RADEON_NUM_RINGS; ring++) {
if (!rdev->fence_drv[ring].initialized)
continue;
+ radeon_fence_wait_last(rdev, ring);
wake_up_all(&rdev->fence_drv[ring].queue);
write_lock_irqsave(&rdev->fence_lock, irq_flags);
radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r100_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
/* initialize memory controller */
rs400_mc_init(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
rs600_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
rs600_mc_init(rdev);
rs600_debugfs(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
rs600_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
rs690_mc_init(rdev);
rv515_debugfs(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
rs600_irq_set(rdev);
rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL);
rv515_mc_init(rdev);
rv515_debugfs(rdev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
r = radeon_irq_kms_init(rdev);
if (r)
return r;
+ r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
+ if (r) {
+ dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
+ return r;
+ }
+
/* Enable IRQ */
r = r600_irq_init(rdev);
if (r) {
/* Initialize clocks */
radeon_get_clock_info(rdev->ddev);
/* Fence driver */
- r = radeon_fence_driver_init(rdev, 1);
+ r = radeon_fence_driver_init(rdev);
if (r)
return r;
/* initialize AGP */