#ifndef _INTEL_RINGBUFFER_H_
#define _INTEL_RINGBUFFER_H_
struct intel_hw_status_page {
	u32 __iomem *page_addr;
	unsigned int gfx_addr;
	struct drm_i915_gem_object *obj;
};
#define I915_RING_READ(reg) i915_safe_read(dev_priv, reg)

#define I915_READ_TAIL(ring) I915_RING_READ(RING_TAIL(ring->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL(ring->mmio_base), val)

#define I915_READ_START(ring) I915_RING_READ(RING_START(ring->mmio_base))
#define I915_WRITE_START(ring, val) I915_WRITE(RING_START(ring->mmio_base), val)

#define I915_READ_HEAD(ring) I915_RING_READ(RING_HEAD(ring->mmio_base))
#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD(ring->mmio_base), val)

#define I915_READ_CTL(ring) I915_RING_READ(RING_CTL(ring->mmio_base))
#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL(ring->mmio_base), val)

#define I915_READ_NOPID(ring) I915_RING_READ(RING_NOPID(ring->mmio_base))
#define I915_READ_SYNC_0(ring) I915_RING_READ(RING_SYNC_0(ring->mmio_base))
#define I915_READ_SYNC_1(ring) I915_RING_READ(RING_SYNC_1(ring->mmio_base))
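/*
 * Example (illustrative, not part of this header): these accessors assume
 * a dev_priv in scope, since I915_RING_READ expands to i915_safe_read().
 * One common use is deriving the free space in a ring from HEAD/TAIL:
 *
 *	u32 head = I915_READ_HEAD(ring) & HEAD_ADDR;
 *	ring->space = head - (ring->tail + 8);
 *	if (ring->space < 0)
 *		ring->space += ring->size;
 */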
struct intel_ring_buffer {
	u32		mmio_base;
	void		*virtual_start;
	struct drm_device *dev;
	struct drm_i915_gem_object *obj;

	u32		head;
	u32		tail;
	int		space;
	int		size;
	struct intel_hw_status_page status_page;

	u32		irq_seqno;	/* last seq seen at irq time */
	u32		sync_seqno[I915_NUM_RINGS-1];
	atomic_t	irq_refcount;
	bool __must_check (*irq_get)(struct intel_ring_buffer *ring);
	void		(*irq_put)(struct intel_ring_buffer *ring);

	int		(*init)(struct intel_ring_buffer *ring);
	void		(*write_tail)(struct intel_ring_buffer *ring,
				      u32 value);
	void		(*flush)(struct intel_ring_buffer *ring,
				 u32 invalidate_domains,
				 u32 flush_domains);
	int		(*add_request)(struct intel_ring_buffer *ring,
				       u32 *seqno);
	u32		(*get_seqno)(struct intel_ring_buffer *ring);
	int		(*dispatch_execbuffer)(struct intel_ring_buffer *ring,
					       u32 offset, u32 length);
	void		(*cleanup)(struct intel_ring_buffer *ring);
	/**
	 * List of objects currently involved in rendering from the
	 * ringbuffer.
	 *
	 * Includes buffers having the contents of their GPU caches
	 * flushed, not necessarily primitives. last_rendering_seqno
	 * represents when the rendering involved will be completed.
	 *
	 * A reference is held on the buffer while on this list.
	 */
	struct list_head active_list;
	/**
	 * List of breadcrumbs associated with GPU requests currently
	 * outstanding.
	 */
	struct list_head request_list;
	/**
	 * List of objects currently pending a GPU write flush.
	 *
	 * All elements on this list will belong to either the
	 * active_list or flushing_list; last_rendering_seqno can
	 * be used to differentiate between the two elements.
	 */
	struct list_head gpu_write_list;
	/**
	 * Do we have some not yet emitted requests outstanding?
	 */
	u32 outstanding_lazy_request;

	wait_queue_head_t irq_queue;
};
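/*
 * Example (illustrative, not part of this header): last_rendering_seqno,
 * per the comments above, marks when the rendering involved completes.
 * Assuming active_list is kept in submission order, a minimal retire
 * sketch looks like the following (the reference held on each object
 * would also be dropped at unlink time):
 *
 *	u32 seqno = ring->get_seqno(ring);
 *	struct drm_i915_gem_object *obj, *next;
 *
 *	list_for_each_entry_safe(obj, next, &ring->active_list, ring_list) {
 *		if (!i915_seqno_passed(seqno, obj->last_rendering_seqno))
 *			break;
 *		list_del_init(&obj->ring_list);
 *	}
 */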
static inline u32
intel_ring_sync_index(struct intel_ring_buffer *ring,
		      struct intel_ring_buffer *other)
{
	int idx;

	/*
	 * cs -> 0 = vcs, 1 = bcs
	 * vcs -> 0 = bcs, 1 = cs,
	 * bcs -> 0 = cs, 1 = vcs.
	 */

	idx = (other - ring) - 1;
	if (idx < 0)
		idx += I915_NUM_RINGS;

	return idx;
}
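/*
 * Example (illustrative): sync_seqno[] above is indexed by this function,
 * so the per-ring record of "the last seqno synced against that other
 * ring" is addressed as:
 *
 *	ring->sync_seqno[intel_ring_sync_index(ring, other)] = seqno;
 */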
static inline u32
intel_read_status_page(struct intel_ring_buffer *ring,
		       int reg)
{
	return ioread32(ring->status_page.page_addr + reg);
}
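/*
 * Example (illustrative): the hardware writes the most recently completed
 * seqno into the status page, so a get_seqno() implementation can poll it
 * (I915_GEM_HWS_INDEX being the dword slot the driver reserves for this):
 *
 *	u32 seqno = intel_read_status_page(ring, I915_GEM_HWS_INDEX);
 */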
void intel_cleanup_ring_buffer(struct intel_ring_buffer *ring);
int __must_check intel_wait_ring_buffer(struct intel_ring_buffer *ring, int n);
int __must_check intel_ring_begin(struct intel_ring_buffer *ring, int n);
static inline void intel_ring_emit(struct intel_ring_buffer *ring,
				   u32 data)
{
	iowrite32(data, ring->virtual_start + ring->tail);
	ring->tail += 4;
}
void intel_ring_advance(struct intel_ring_buffer *ring);
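/*
 * Example (illustrative): commands are emitted with the begin/emit/advance
 * pattern. Reserve dword space, write the dwords, then publish the new
 * tail to the hardware:
 *
 *	ret = intel_ring_begin(ring, 2);
 *	if (ret)
 *		return ret;
 *	intel_ring_emit(ring, MI_FLUSH);
 *	intel_ring_emit(ring, MI_NOOP);
 *	intel_ring_advance(ring);
 *
 * intel_ring_begin() waits until at least that much space is free;
 * intel_ring_advance() hands ring->tail to the hardware via write_tail().
 */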
u32 intel_ring_get_seqno(struct intel_ring_buffer *ring);
int intel_ring_sync(struct intel_ring_buffer *ring,
		    struct intel_ring_buffer *to,
		    u32 seqno);
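/*
 * Example (illustrative): intel_ring_sync() makes one ring wait for a
 * seqno signalled by another using hardware semaphores, rather than
 * stalling the CPU. The waiter/signaller names here are hypothetical;
 * the .c implementation defines which argument plays which role:
 *
 *	ret = intel_ring_sync(waiter, signaller, seqno);
 *	if (ret)
 *		return ret;
 */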
int intel_init_render_ring_buffer(struct drm_device *dev);
int intel_init_bsd_ring_buffer(struct drm_device *dev);
int intel_init_blt_ring_buffer(struct drm_device *dev);
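/*
 * Example (illustrative): driver load brings up whichever rings the
 * hardware has, render ring first, gated by the HAS_BSD()/HAS_BLT()
 * checks from i915_drv.h; error unwinding elided:
 *
 *	ret = intel_init_render_ring_buffer(dev);
 *	if (ret == 0 && HAS_BSD(dev))
 *		ret = intel_init_bsd_ring_buffer(dev);
 *	if (ret == 0 && HAS_BLT(dev))
 *		ret = intel_init_blt_ring_buffer(dev);
 */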
u32 intel_ring_get_active_head(struct intel_ring_buffer *ring);
void intel_ring_setup_status_page(struct intel_ring_buffer *ring);
#endif /* _INTEL_RINGBUFFER_H_ */