drm/radeon/kms: Convert R100 to new init path (V2)
[linux-2.6/btrfs-unstable.git] / drivers/gpu/drm/radeon/radeon_asic.h
blob 39f1bb656e619221883c28b6ab97b68f36a5c1c9
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28 #ifndef __RADEON_ASIC_H__
29 #define __RADEON_ASIC_H__
32 * common functions
34 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
35 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
37 void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
38 void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
39 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
42 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
44 extern int r100_init(struct radeon_device *rdev);
45 extern void r100_fini(struct radeon_device *rdev);
46 extern int r100_suspend(struct radeon_device *rdev);
47 extern int r100_resume(struct radeon_device *rdev);
48 uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
49 void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
50 int r100_gpu_reset(struct radeon_device *rdev);
51 u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
52 void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
53 int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
54 void r100_cp_commit(struct radeon_device *rdev);
55 void r100_ring_start(struct radeon_device *rdev);
56 int r100_irq_set(struct radeon_device *rdev);
57 int r100_irq_process(struct radeon_device *rdev);
58 void r100_fence_ring_emit(struct radeon_device *rdev,
59 struct radeon_fence *fence);
60 int r100_cs_parse(struct radeon_cs_parser *p);
61 void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
62 uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
63 int r100_copy_blit(struct radeon_device *rdev,
64 uint64_t src_offset,
65 uint64_t dst_offset,
66 unsigned num_pages,
67 struct radeon_fence *fence);
68 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
69 uint32_t tiling_flags, uint32_t pitch,
70 uint32_t offset, uint32_t obj_size);
71 int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
72 void r100_bandwidth_update(struct radeon_device *rdev);
73 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
74 int r100_ib_test(struct radeon_device *rdev);
75 int r100_ring_test(struct radeon_device *rdev);
77 static struct radeon_asic r100_asic = {
78 .init = &r100_init,
79 .fini = &r100_fini,
80 .suspend = &r100_suspend,
81 .resume = &r100_resume,
82 .errata = NULL,
83 .vram_info = NULL,
84 .gpu_reset = &r100_gpu_reset,
85 .mc_init = NULL,
86 .mc_fini = NULL,
87 .wb_init = NULL,
88 .wb_fini = NULL,
89 .gart_init = NULL,
90 .gart_fini = NULL,
91 .gart_enable = NULL,
92 .gart_disable = NULL,
93 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
94 .gart_set_page = &r100_pci_gart_set_page,
95 .cp_init = NULL,
96 .cp_fini = NULL,
97 .cp_disable = NULL,
98 .cp_commit = &r100_cp_commit,
99 .ring_start = &r100_ring_start,
100 .ring_test = &r100_ring_test,
101 .ring_ib_execute = &r100_ring_ib_execute,
102 .ib_test = NULL,
103 .irq_set = &r100_irq_set,
104 .irq_process = &r100_irq_process,
105 .get_vblank_counter = &r100_get_vblank_counter,
106 .fence_ring_emit = &r100_fence_ring_emit,
107 .cs_parse = &r100_cs_parse,
108 .copy_blit = &r100_copy_blit,
109 .copy_dma = NULL,
110 .copy = &r100_copy_blit,
111 .set_engine_clock = &radeon_legacy_set_engine_clock,
112 .set_memory_clock = NULL,
113 .set_pcie_lanes = NULL,
114 .set_clock_gating = &radeon_legacy_set_clock_gating,
115 .set_surface_reg = r100_set_surface_reg,
116 .clear_surface_reg = r100_clear_surface_reg,
117 .bandwidth_update = &r100_bandwidth_update,
122 * r300,r350,rv350,rv380
124 extern int r300_init(struct radeon_device *rdev);
125 extern void r300_fini(struct radeon_device *rdev);
126 extern int r300_suspend(struct radeon_device *rdev);
127 extern int r300_resume(struct radeon_device *rdev);
128 extern int r300_gpu_reset(struct radeon_device *rdev);
129 extern void r300_ring_start(struct radeon_device *rdev);
130 extern void r300_fence_ring_emit(struct radeon_device *rdev,
131 struct radeon_fence *fence);
132 extern int r300_cs_parse(struct radeon_cs_parser *p);
133 extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
134 extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
135 extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
136 extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
137 extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
138 extern int r300_copy_dma(struct radeon_device *rdev,
139 uint64_t src_offset,
140 uint64_t dst_offset,
141 unsigned num_pages,
142 struct radeon_fence *fence);
143 static struct radeon_asic r300_asic = {
144 .init = &r300_init,
145 .fini = &r300_fini,
146 .suspend = &r300_suspend,
147 .resume = &r300_resume,
148 .errata = NULL,
149 .vram_info = NULL,
150 .gpu_reset = &r300_gpu_reset,
151 .mc_init = NULL,
152 .mc_fini = NULL,
153 .wb_init = NULL,
154 .wb_fini = NULL,
155 .gart_init = NULL,
156 .gart_fini = NULL,
157 .gart_enable = NULL,
158 .gart_disable = NULL,
159 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
160 .gart_set_page = &r100_pci_gart_set_page,
161 .cp_init = NULL,
162 .cp_fini = NULL,
163 .cp_disable = NULL,
164 .cp_commit = &r100_cp_commit,
165 .ring_start = &r300_ring_start,
166 .ring_test = &r100_ring_test,
167 .ring_ib_execute = &r100_ring_ib_execute,
168 .ib_test = NULL,
169 .irq_set = &r100_irq_set,
170 .irq_process = &r100_irq_process,
171 .get_vblank_counter = &r100_get_vblank_counter,
172 .fence_ring_emit = &r300_fence_ring_emit,
173 .cs_parse = &r300_cs_parse,
174 .copy_blit = &r100_copy_blit,
175 .copy_dma = &r300_copy_dma,
176 .copy = &r100_copy_blit,
177 .set_engine_clock = &radeon_legacy_set_engine_clock,
178 .set_memory_clock = NULL,
179 .set_pcie_lanes = &rv370_set_pcie_lanes,
180 .set_clock_gating = &radeon_legacy_set_clock_gating,
181 .set_surface_reg = r100_set_surface_reg,
182 .clear_surface_reg = r100_clear_surface_reg,
183 .bandwidth_update = &r100_bandwidth_update,
187 * r420,r423,rv410
189 extern int r420_init(struct radeon_device *rdev);
190 extern void r420_fini(struct radeon_device *rdev);
191 extern int r420_suspend(struct radeon_device *rdev);
192 extern int r420_resume(struct radeon_device *rdev);
193 static struct radeon_asic r420_asic = {
194 .init = &r420_init,
195 .fini = &r420_fini,
196 .suspend = &r420_suspend,
197 .resume = &r420_resume,
198 .errata = NULL,
199 .vram_info = NULL,
200 .gpu_reset = &r300_gpu_reset,
201 .mc_init = NULL,
202 .mc_fini = NULL,
203 .wb_init = NULL,
204 .wb_fini = NULL,
205 .gart_enable = NULL,
206 .gart_disable = NULL,
207 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
208 .gart_set_page = &rv370_pcie_gart_set_page,
209 .cp_init = NULL,
210 .cp_fini = NULL,
211 .cp_disable = NULL,
212 .cp_commit = &r100_cp_commit,
213 .ring_start = &r300_ring_start,
214 .ring_test = &r100_ring_test,
215 .ring_ib_execute = &r100_ring_ib_execute,
216 .ib_test = NULL,
217 .irq_set = &r100_irq_set,
218 .irq_process = &r100_irq_process,
219 .get_vblank_counter = &r100_get_vblank_counter,
220 .fence_ring_emit = &r300_fence_ring_emit,
221 .cs_parse = &r300_cs_parse,
222 .copy_blit = &r100_copy_blit,
223 .copy_dma = &r300_copy_dma,
224 .copy = &r100_copy_blit,
225 .set_engine_clock = &radeon_atom_set_engine_clock,
226 .set_memory_clock = &radeon_atom_set_memory_clock,
227 .set_pcie_lanes = &rv370_set_pcie_lanes,
228 .set_clock_gating = &radeon_atom_set_clock_gating,
229 .set_surface_reg = r100_set_surface_reg,
230 .clear_surface_reg = r100_clear_surface_reg,
231 .bandwidth_update = &r100_bandwidth_update,
236 * rs400,rs480
238 extern int rs400_init(struct radeon_device *rdev);
239 extern void rs400_fini(struct radeon_device *rdev);
240 extern int rs400_suspend(struct radeon_device *rdev);
241 extern int rs400_resume(struct radeon_device *rdev);
242 void rs400_gart_tlb_flush(struct radeon_device *rdev);
243 int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
244 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
245 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
246 static struct radeon_asic rs400_asic = {
247 .init = &rs400_init,
248 .fini = &rs400_fini,
249 .suspend = &rs400_suspend,
250 .resume = &rs400_resume,
251 .errata = NULL,
252 .vram_info = NULL,
253 .gpu_reset = &r300_gpu_reset,
254 .mc_init = NULL,
255 .mc_fini = NULL,
256 .wb_init = NULL,
257 .wb_fini = NULL,
258 .gart_init = NULL,
259 .gart_fini = NULL,
260 .gart_enable = NULL,
261 .gart_disable = NULL,
262 .gart_tlb_flush = &rs400_gart_tlb_flush,
263 .gart_set_page = &rs400_gart_set_page,
264 .cp_init = NULL,
265 .cp_fini = NULL,
266 .cp_disable = NULL,
267 .cp_commit = &r100_cp_commit,
268 .ring_start = &r300_ring_start,
269 .ring_test = &r100_ring_test,
270 .ring_ib_execute = &r100_ring_ib_execute,
271 .ib_test = NULL,
272 .irq_set = &r100_irq_set,
273 .irq_process = &r100_irq_process,
274 .get_vblank_counter = &r100_get_vblank_counter,
275 .fence_ring_emit = &r300_fence_ring_emit,
276 .cs_parse = &r300_cs_parse,
277 .copy_blit = &r100_copy_blit,
278 .copy_dma = &r300_copy_dma,
279 .copy = &r100_copy_blit,
280 .set_engine_clock = &radeon_legacy_set_engine_clock,
281 .set_memory_clock = NULL,
282 .set_pcie_lanes = NULL,
283 .set_clock_gating = &radeon_legacy_set_clock_gating,
284 .set_surface_reg = r100_set_surface_reg,
285 .clear_surface_reg = r100_clear_surface_reg,
286 .bandwidth_update = &r100_bandwidth_update,
291 * rs600.
293 int rs600_init(struct radeon_device *rdev);
294 void rs600_errata(struct radeon_device *rdev);
295 void rs600_vram_info(struct radeon_device *rdev);
296 int rs600_mc_init(struct radeon_device *rdev);
297 void rs600_mc_fini(struct radeon_device *rdev);
298 int rs600_irq_set(struct radeon_device *rdev);
299 int rs600_irq_process(struct radeon_device *rdev);
300 u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
301 int rs600_gart_init(struct radeon_device *rdev);
302 void rs600_gart_fini(struct radeon_device *rdev);
303 int rs600_gart_enable(struct radeon_device *rdev);
304 void rs600_gart_disable(struct radeon_device *rdev);
305 void rs600_gart_tlb_flush(struct radeon_device *rdev);
306 int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
307 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
308 void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
309 void rs600_bandwidth_update(struct radeon_device *rdev);
310 static struct radeon_asic rs600_asic = {
311 .init = &rs600_init,
312 .errata = &rs600_errata,
313 .vram_info = &rs600_vram_info,
314 .gpu_reset = &r300_gpu_reset,
315 .mc_init = &rs600_mc_init,
316 .mc_fini = &rs600_mc_fini,
317 .wb_init = &r100_wb_init,
318 .wb_fini = &r100_wb_fini,
319 .gart_init = &rs600_gart_init,
320 .gart_fini = &rs600_gart_fini,
321 .gart_enable = &rs600_gart_enable,
322 .gart_disable = &rs600_gart_disable,
323 .gart_tlb_flush = &rs600_gart_tlb_flush,
324 .gart_set_page = &rs600_gart_set_page,
325 .cp_init = &r100_cp_init,
326 .cp_fini = &r100_cp_fini,
327 .cp_disable = &r100_cp_disable,
328 .cp_commit = &r100_cp_commit,
329 .ring_start = &r300_ring_start,
330 .ring_test = &r100_ring_test,
331 .ring_ib_execute = &r100_ring_ib_execute,
332 .ib_test = &r100_ib_test,
333 .irq_set = &rs600_irq_set,
334 .irq_process = &rs600_irq_process,
335 .get_vblank_counter = &rs600_get_vblank_counter,
336 .fence_ring_emit = &r300_fence_ring_emit,
337 .cs_parse = &r300_cs_parse,
338 .copy_blit = &r100_copy_blit,
339 .copy_dma = &r300_copy_dma,
340 .copy = &r100_copy_blit,
341 .set_engine_clock = &radeon_atom_set_engine_clock,
342 .set_memory_clock = &radeon_atom_set_memory_clock,
343 .set_pcie_lanes = NULL,
344 .set_clock_gating = &radeon_atom_set_clock_gating,
345 .bandwidth_update = &rs600_bandwidth_update,
350 * rs690,rs740
352 void rs690_errata(struct radeon_device *rdev);
353 void rs690_vram_info(struct radeon_device *rdev);
354 int rs690_mc_init(struct radeon_device *rdev);
355 void rs690_mc_fini(struct radeon_device *rdev);
356 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
357 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
358 void rs690_bandwidth_update(struct radeon_device *rdev);
359 static struct radeon_asic rs690_asic = {
360 .init = &rs600_init,
361 .errata = &rs690_errata,
362 .vram_info = &rs690_vram_info,
363 .gpu_reset = &r300_gpu_reset,
364 .mc_init = &rs690_mc_init,
365 .mc_fini = &rs690_mc_fini,
366 .wb_init = &r100_wb_init,
367 .wb_fini = &r100_wb_fini,
368 .gart_init = &rs400_gart_init,
369 .gart_fini = &rs400_gart_fini,
370 .gart_enable = &rs400_gart_enable,
371 .gart_disable = &rs400_gart_disable,
372 .gart_tlb_flush = &rs400_gart_tlb_flush,
373 .gart_set_page = &rs400_gart_set_page,
374 .cp_init = &r100_cp_init,
375 .cp_fini = &r100_cp_fini,
376 .cp_disable = &r100_cp_disable,
377 .cp_commit = &r100_cp_commit,
378 .ring_start = &r300_ring_start,
379 .ring_test = &r100_ring_test,
380 .ring_ib_execute = &r100_ring_ib_execute,
381 .ib_test = &r100_ib_test,
382 .irq_set = &rs600_irq_set,
383 .irq_process = &rs600_irq_process,
384 .get_vblank_counter = &rs600_get_vblank_counter,
385 .fence_ring_emit = &r300_fence_ring_emit,
386 .cs_parse = &r300_cs_parse,
387 .copy_blit = &r100_copy_blit,
388 .copy_dma = &r300_copy_dma,
389 .copy = &r300_copy_dma,
390 .set_engine_clock = &radeon_atom_set_engine_clock,
391 .set_memory_clock = &radeon_atom_set_memory_clock,
392 .set_pcie_lanes = NULL,
393 .set_clock_gating = &radeon_atom_set_clock_gating,
394 .set_surface_reg = r100_set_surface_reg,
395 .clear_surface_reg = r100_clear_surface_reg,
396 .bandwidth_update = &rs690_bandwidth_update,
401 * rv515
403 int rv515_init(struct radeon_device *rdev);
404 void rv515_fini(struct radeon_device *rdev);
405 int rv515_gpu_reset(struct radeon_device *rdev);
406 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
407 void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
408 void rv515_ring_start(struct radeon_device *rdev);
409 uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
410 void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
411 void rv515_bandwidth_update(struct radeon_device *rdev);
412 int rv515_resume(struct radeon_device *rdev);
413 int rv515_suspend(struct radeon_device *rdev);
414 static struct radeon_asic rv515_asic = {
415 .init = &rv515_init,
416 .fini = &rv515_fini,
417 .suspend = &rv515_suspend,
418 .resume = &rv515_resume,
419 .errata = NULL,
420 .vram_info = NULL,
421 .gpu_reset = &rv515_gpu_reset,
422 .mc_init = NULL,
423 .mc_fini = NULL,
424 .wb_init = NULL,
425 .wb_fini = NULL,
426 .gart_init = &rv370_pcie_gart_init,
427 .gart_fini = &rv370_pcie_gart_fini,
428 .gart_enable = NULL,
429 .gart_disable = NULL,
430 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
431 .gart_set_page = &rv370_pcie_gart_set_page,
432 .cp_init = NULL,
433 .cp_fini = NULL,
434 .cp_disable = NULL,
435 .cp_commit = &r100_cp_commit,
436 .ring_start = &rv515_ring_start,
437 .ring_test = &r100_ring_test,
438 .ring_ib_execute = &r100_ring_ib_execute,
439 .ib_test = NULL,
440 .irq_set = &rs600_irq_set,
441 .irq_process = &rs600_irq_process,
442 .get_vblank_counter = &rs600_get_vblank_counter,
443 .fence_ring_emit = &r300_fence_ring_emit,
444 .cs_parse = &r300_cs_parse,
445 .copy_blit = &r100_copy_blit,
446 .copy_dma = &r300_copy_dma,
447 .copy = &r100_copy_blit,
448 .set_engine_clock = &radeon_atom_set_engine_clock,
449 .set_memory_clock = &radeon_atom_set_memory_clock,
450 .set_pcie_lanes = &rv370_set_pcie_lanes,
451 .set_clock_gating = &radeon_atom_set_clock_gating,
452 .set_surface_reg = r100_set_surface_reg,
453 .clear_surface_reg = r100_clear_surface_reg,
454 .bandwidth_update = &rv515_bandwidth_update,
459 * r520,rv530,rv560,rv570,r580
461 int r520_init(struct radeon_device *rdev);
462 int r520_resume(struct radeon_device *rdev);
463 static struct radeon_asic r520_asic = {
464 .init = &r520_init,
465 .fini = &rv515_fini,
466 .suspend = &rv515_suspend,
467 .resume = &r520_resume,
468 .errata = NULL,
469 .vram_info = NULL,
470 .gpu_reset = &rv515_gpu_reset,
471 .mc_init = NULL,
472 .mc_fini = NULL,
473 .wb_init = NULL,
474 .wb_fini = NULL,
475 .gart_init = NULL,
476 .gart_fini = NULL,
477 .gart_enable = NULL,
478 .gart_disable = NULL,
479 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
480 .gart_set_page = &rv370_pcie_gart_set_page,
481 .cp_init = NULL,
482 .cp_fini = NULL,
483 .cp_disable = NULL,
484 .cp_commit = &r100_cp_commit,
485 .ring_start = &rv515_ring_start,
486 .ring_test = &r100_ring_test,
487 .ring_ib_execute = &r100_ring_ib_execute,
488 .ib_test = NULL,
489 .irq_set = &rs600_irq_set,
490 .irq_process = &rs600_irq_process,
491 .get_vblank_counter = &rs600_get_vblank_counter,
492 .fence_ring_emit = &r300_fence_ring_emit,
493 .cs_parse = &r300_cs_parse,
494 .copy_blit = &r100_copy_blit,
495 .copy_dma = &r300_copy_dma,
496 .copy = &r100_copy_blit,
497 .set_engine_clock = &radeon_atom_set_engine_clock,
498 .set_memory_clock = &radeon_atom_set_memory_clock,
499 .set_pcie_lanes = &rv370_set_pcie_lanes,
500 .set_clock_gating = &radeon_atom_set_clock_gating,
501 .set_surface_reg = r100_set_surface_reg,
502 .clear_surface_reg = r100_clear_surface_reg,
503 .bandwidth_update = &rv515_bandwidth_update,
507 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
509 int r600_init(struct radeon_device *rdev);
510 void r600_fini(struct radeon_device *rdev);
511 int r600_suspend(struct radeon_device *rdev);
512 int r600_resume(struct radeon_device *rdev);
513 int r600_wb_init(struct radeon_device *rdev);
514 void r600_wb_fini(struct radeon_device *rdev);
515 void r600_cp_commit(struct radeon_device *rdev);
516 void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
517 uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
518 void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
519 int r600_cs_parse(struct radeon_cs_parser *p);
520 void r600_fence_ring_emit(struct radeon_device *rdev,
521 struct radeon_fence *fence);
522 int r600_copy_dma(struct radeon_device *rdev,
523 uint64_t src_offset,
524 uint64_t dst_offset,
525 unsigned num_pages,
526 struct radeon_fence *fence);
527 int r600_irq_process(struct radeon_device *rdev);
528 int r600_irq_set(struct radeon_device *rdev);
529 int r600_gpu_reset(struct radeon_device *rdev);
530 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
531 uint32_t tiling_flags, uint32_t pitch,
532 uint32_t offset, uint32_t obj_size);
533 int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
534 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
535 int r600_ib_test(struct radeon_device *rdev);
536 int r600_ring_test(struct radeon_device *rdev);
537 int r600_copy_blit(struct radeon_device *rdev,
538 uint64_t src_offset, uint64_t dst_offset,
539 unsigned num_pages, struct radeon_fence *fence);
541 static struct radeon_asic r600_asic = {
542 .errata = NULL,
543 .init = &r600_init,
544 .fini = &r600_fini,
545 .suspend = &r600_suspend,
546 .resume = &r600_resume,
547 .cp_commit = &r600_cp_commit,
548 .vram_info = NULL,
549 .gpu_reset = &r600_gpu_reset,
550 .mc_init = NULL,
551 .mc_fini = NULL,
552 .wb_init = &r600_wb_init,
553 .wb_fini = &r600_wb_fini,
554 .gart_enable = NULL,
555 .gart_disable = NULL,
556 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
557 .gart_set_page = &rs600_gart_set_page,
558 .cp_init = NULL,
559 .cp_fini = NULL,
560 .cp_disable = NULL,
561 .ring_start = NULL,
562 .ring_test = &r600_ring_test,
563 .ring_ib_execute = &r600_ring_ib_execute,
564 .ib_test = &r600_ib_test,
565 .irq_set = &r600_irq_set,
566 .irq_process = &r600_irq_process,
567 .fence_ring_emit = &r600_fence_ring_emit,
568 .cs_parse = &r600_cs_parse,
569 .copy_blit = &r600_copy_blit,
570 .copy_dma = &r600_copy_blit,
571 .copy = &r600_copy_blit,
572 .set_engine_clock = &radeon_atom_set_engine_clock,
573 .set_memory_clock = &radeon_atom_set_memory_clock,
574 .set_pcie_lanes = NULL,
575 .set_clock_gating = &radeon_atom_set_clock_gating,
576 .set_surface_reg = r600_set_surface_reg,
577 .clear_surface_reg = r600_clear_surface_reg,
578 .bandwidth_update = &rv515_bandwidth_update,
582 * rv770,rv730,rv710,rv740
584 int rv770_init(struct radeon_device *rdev);
585 void rv770_fini(struct radeon_device *rdev);
586 int rv770_suspend(struct radeon_device *rdev);
587 int rv770_resume(struct radeon_device *rdev);
588 int rv770_gpu_reset(struct radeon_device *rdev);
590 static struct radeon_asic rv770_asic = {
591 .errata = NULL,
592 .init = &rv770_init,
593 .fini = &rv770_fini,
594 .suspend = &rv770_suspend,
595 .resume = &rv770_resume,
596 .cp_commit = &r600_cp_commit,
597 .vram_info = NULL,
598 .gpu_reset = &rv770_gpu_reset,
599 .mc_init = NULL,
600 .mc_fini = NULL,
601 .wb_init = &r600_wb_init,
602 .wb_fini = &r600_wb_fini,
603 .gart_enable = NULL,
604 .gart_disable = NULL,
605 .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
606 .gart_set_page = &rs600_gart_set_page,
607 .cp_init = NULL,
608 .cp_fini = NULL,
609 .cp_disable = NULL,
610 .ring_start = NULL,
611 .ring_test = &r600_ring_test,
612 .ring_ib_execute = &r600_ring_ib_execute,
613 .ib_test = &r600_ib_test,
614 .irq_set = &r600_irq_set,
615 .irq_process = &r600_irq_process,
616 .fence_ring_emit = &r600_fence_ring_emit,
617 .cs_parse = &r600_cs_parse,
618 .copy_blit = &r600_copy_blit,
619 .copy_dma = &r600_copy_blit,
620 .copy = &r600_copy_blit,
621 .set_engine_clock = &radeon_atom_set_engine_clock,
622 .set_memory_clock = &radeon_atom_set_memory_clock,
623 .set_pcie_lanes = NULL,
624 .set_clock_gating = &radeon_atom_set_clock_gating,
625 .set_surface_reg = r600_set_surface_reg,
626 .clear_surface_reg = r600_clear_surface_reg,
627 .bandwidth_update = &rv515_bandwidth_update,
630 #endif