1 #ifndef _INTEL_RINGBUFFER_H_
2 #define _INTEL_RINGBUFFER_H_
/**
 * Hardware status page: one page of memory the GPU writes status values
 * (e.g. seqno breadcrumbs) into, which the CPU then reads without MMIO.
 * Read via intel_read_status_page() below.
 */
struct intel_hw_status_page {
	void		*page_addr;	/* CPU (kernel virtual) address of the page */
	unsigned int	gfx_addr;	/* GPU address of the page */
	struct		drm_gem_object *obj;	/* backing GEM object; NOTE(review): presumably NULL when a physical page is used — confirm against callers */
};
10 struct drm_i915_gem_execbuffer2;
11 struct intel_ring_buffer {
19 unsigned int ring_flag;
21 unsigned int alignment;
23 struct drm_device *dev;
24 struct drm_gem_object *gem_object;
29 struct intel_hw_status_page status_page;
31 u32 irq_gem_seqno; /* last seq seem at irq time */
32 u32 waiting_gem_seqno;
33 int user_irq_refcount;
34 void (*user_irq_get)(struct drm_device *dev,
35 struct intel_ring_buffer *ring);
36 void (*user_irq_put)(struct drm_device *dev,
37 struct intel_ring_buffer *ring);
38 void (*setup_status_page)(struct drm_device *dev,
39 struct intel_ring_buffer *ring);
41 int (*init)(struct drm_device *dev,
42 struct intel_ring_buffer *ring);
44 unsigned int (*get_head)(struct drm_device *dev,
45 struct intel_ring_buffer *ring);
46 unsigned int (*get_tail)(struct drm_device *dev,
47 struct intel_ring_buffer *ring);
48 unsigned int (*get_active_head)(struct drm_device *dev,
49 struct intel_ring_buffer *ring);
50 void (*advance_ring)(struct drm_device *dev,
51 struct intel_ring_buffer *ring);
52 void (*flush)(struct drm_device *dev,
53 struct intel_ring_buffer *ring,
54 u32 invalidate_domains,
56 u32 (*add_request)(struct drm_device *dev,
57 struct intel_ring_buffer *ring,
58 struct drm_file *file_priv,
60 u32 (*get_gem_seqno)(struct drm_device *dev,
61 struct intel_ring_buffer *ring);
62 int (*dispatch_gem_execbuffer)(struct drm_device *dev,
63 struct intel_ring_buffer *ring,
64 struct drm_i915_gem_execbuffer2 *exec,
65 struct drm_clip_rect *cliprects,
66 uint64_t exec_offset);
69 * List of objects currently involved in rendering from the
72 * Includes buffers having the contents of their GPU caches
73 * flushed, not necessarily primitives. last_rendering_seqno
74 * represents when the rendering involved will be completed.
76 * A reference is held on the buffer while on this list.
78 struct list_head active_list;
81 * List of breadcrumbs associated with GPU requests currently
84 struct list_head request_list;
86 wait_queue_head_t irq_queue;
91 intel_read_status_page(struct intel_ring_buffer *ring,
94 u32 *regs = ring->status_page.page_addr;
/* Allocate, pin and map the ring object; calls ring->init. Returns 0 or -errno. */
int intel_init_ring_buffer(struct drm_device *dev,
		struct intel_ring_buffer *ring);
/* Tear down what intel_init_ring_buffer set up. */
void intel_cleanup_ring_buffer(struct drm_device *dev,
		struct intel_ring_buffer *ring);
/* Wait until at least @n bytes are free in the ring. Returns 0 or -errno. */
int intel_wait_ring_buffer(struct drm_device *dev,
		struct intel_ring_buffer *ring, int n);
/* Pad the remaining tail of the ring so the next emit starts at offset 0. */
int intel_wrap_ring_buffer(struct drm_device *dev,
		struct intel_ring_buffer *ring);
/* Reserve space for @n bytes of commands before a run of intel_ring_emit(). */
void intel_ring_begin(struct drm_device *dev,
		struct intel_ring_buffer *ring, int n);
109 static inline void intel_ring_emit(struct drm_device *dev,
110 struct intel_ring_buffer *ring,
113 unsigned int *virt = ring->virtual_start + ring->tail;
/* Copy @len bytes from @data into the ring at the current tail. */
void intel_fill_struct(struct drm_device *dev,
		struct intel_ring_buffer *ring,
		void *data,
		unsigned int len);
/* Push the software tail to the hardware (via ring->advance_ring),
 * making the emitted commands visible to the GPU. */
void intel_ring_advance(struct drm_device *dev,
		struct intel_ring_buffer *ring);

/* Return the next seqno to use for a request on this ring. */
u32 intel_ring_get_seqno(struct drm_device *dev,
		struct intel_ring_buffer *ring);
/* The two hardware rings, defined in intel_ringbuffer.c:
 * render_ring executes 3D/blit commands, bsd_ring video (BSD) commands. */
extern struct intel_ring_buffer render_ring;
extern struct intel_ring_buffer bsd_ring;
131 #endif /* _INTEL_RINGBUFFER_H_ */