/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
28 #ifndef __RADEON_ASIC_H__
29 #define __RADEON_ASIC_H__
34 void radeon_legacy_set_engine_clock(struct radeon_device
*rdev
, uint32_t eng_clock
);
35 void radeon_legacy_set_clock_gating(struct radeon_device
*rdev
, int enable
);
37 void radeon_atom_set_engine_clock(struct radeon_device
*rdev
, uint32_t eng_clock
);
38 void radeon_atom_set_memory_clock(struct radeon_device
*rdev
, uint32_t mem_clock
);
39 void radeon_atom_set_clock_gating(struct radeon_device
*rdev
, int enable
);
/* r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 */
44 extern int r100_init(struct radeon_device
*rdev
);
45 extern void r100_fini(struct radeon_device
*rdev
);
46 extern int r100_suspend(struct radeon_device
*rdev
);
47 extern int r100_resume(struct radeon_device
*rdev
);
48 uint32_t r100_mm_rreg(struct radeon_device
*rdev
, uint32_t reg
);
49 void r100_mm_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
50 int r100_gpu_reset(struct radeon_device
*rdev
);
51 u32
r100_get_vblank_counter(struct radeon_device
*rdev
, int crtc
);
52 void r100_pci_gart_tlb_flush(struct radeon_device
*rdev
);
53 int r100_pci_gart_set_page(struct radeon_device
*rdev
, int i
, uint64_t addr
);
54 void r100_cp_commit(struct radeon_device
*rdev
);
55 void r100_ring_start(struct radeon_device
*rdev
);
56 int r100_irq_set(struct radeon_device
*rdev
);
57 int r100_irq_process(struct radeon_device
*rdev
);
58 void r100_fence_ring_emit(struct radeon_device
*rdev
,
59 struct radeon_fence
*fence
);
60 int r100_cs_parse(struct radeon_cs_parser
*p
);
61 void r100_pll_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
62 uint32_t r100_pll_rreg(struct radeon_device
*rdev
, uint32_t reg
);
63 int r100_copy_blit(struct radeon_device
*rdev
,
67 struct radeon_fence
*fence
);
68 int r100_set_surface_reg(struct radeon_device
*rdev
, int reg
,
69 uint32_t tiling_flags
, uint32_t pitch
,
70 uint32_t offset
, uint32_t obj_size
);
71 int r100_clear_surface_reg(struct radeon_device
*rdev
, int reg
);
72 void r100_bandwidth_update(struct radeon_device
*rdev
);
73 void r100_ring_ib_execute(struct radeon_device
*rdev
, struct radeon_ib
*ib
);
74 int r100_ib_test(struct radeon_device
*rdev
);
75 int r100_ring_test(struct radeon_device
*rdev
);
77 static struct radeon_asic r100_asic
= {
80 .suspend
= &r100_suspend
,
81 .resume
= &r100_resume
,
84 .gpu_reset
= &r100_gpu_reset
,
93 .gart_tlb_flush
= &r100_pci_gart_tlb_flush
,
94 .gart_set_page
= &r100_pci_gart_set_page
,
98 .cp_commit
= &r100_cp_commit
,
99 .ring_start
= &r100_ring_start
,
100 .ring_test
= &r100_ring_test
,
101 .ring_ib_execute
= &r100_ring_ib_execute
,
103 .irq_set
= &r100_irq_set
,
104 .irq_process
= &r100_irq_process
,
105 .get_vblank_counter
= &r100_get_vblank_counter
,
106 .fence_ring_emit
= &r100_fence_ring_emit
,
107 .cs_parse
= &r100_cs_parse
,
108 .copy_blit
= &r100_copy_blit
,
110 .copy
= &r100_copy_blit
,
111 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
112 .set_memory_clock
= NULL
,
113 .set_pcie_lanes
= NULL
,
114 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
115 .set_surface_reg
= r100_set_surface_reg
,
116 .clear_surface_reg
= r100_clear_surface_reg
,
117 .bandwidth_update
= &r100_bandwidth_update
,
/* r300,r350,rv350,rv380 */
/* r300 family entry points, plus the rv370 PCIE GART/lane helpers. */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
/*
 * NOTE(review): the middle parameters of r300_copy_dma were lost in the
 * mangled source; restored to match the fully-visible r600_copy_blit
 * declaration further down — confirm against upstream.
 */
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
143 static struct radeon_asic r300_asic
= {
146 .suspend
= &r300_suspend
,
147 .resume
= &r300_resume
,
150 .gpu_reset
= &r300_gpu_reset
,
158 .gart_disable
= NULL
,
159 .gart_tlb_flush
= &r100_pci_gart_tlb_flush
,
160 .gart_set_page
= &r100_pci_gart_set_page
,
164 .cp_commit
= &r100_cp_commit
,
165 .ring_start
= &r300_ring_start
,
166 .ring_test
= &r100_ring_test
,
167 .ring_ib_execute
= &r100_ring_ib_execute
,
169 .irq_set
= &r100_irq_set
,
170 .irq_process
= &r100_irq_process
,
171 .get_vblank_counter
= &r100_get_vblank_counter
,
172 .fence_ring_emit
= &r300_fence_ring_emit
,
173 .cs_parse
= &r300_cs_parse
,
174 .copy_blit
= &r100_copy_blit
,
175 .copy_dma
= &r300_copy_dma
,
176 .copy
= &r100_copy_blit
,
177 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
178 .set_memory_clock
= NULL
,
179 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
180 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
181 .set_surface_reg
= r100_set_surface_reg
,
182 .clear_surface_reg
= r100_clear_surface_reg
,
183 .bandwidth_update
= &r100_bandwidth_update
,
/* r420 family entry points; everything else is inherited from r100/r300. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
193 static struct radeon_asic r420_asic
= {
196 .suspend
= &r420_suspend
,
197 .resume
= &r420_resume
,
200 .gpu_reset
= &r300_gpu_reset
,
206 .gart_disable
= NULL
,
207 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
208 .gart_set_page
= &rv370_pcie_gart_set_page
,
212 .cp_commit
= &r100_cp_commit
,
213 .ring_start
= &r300_ring_start
,
214 .ring_test
= &r100_ring_test
,
215 .ring_ib_execute
= &r100_ring_ib_execute
,
217 .irq_set
= &r100_irq_set
,
218 .irq_process
= &r100_irq_process
,
219 .get_vblank_counter
= &r100_get_vblank_counter
,
220 .fence_ring_emit
= &r300_fence_ring_emit
,
221 .cs_parse
= &r300_cs_parse
,
222 .copy_blit
= &r100_copy_blit
,
223 .copy_dma
= &r300_copy_dma
,
224 .copy
= &r100_copy_blit
,
225 .set_engine_clock
= &radeon_atom_set_engine_clock
,
226 .set_memory_clock
= &radeon_atom_set_memory_clock
,
227 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
228 .set_clock_gating
= &radeon_atom_set_clock_gating
,
229 .set_surface_reg
= r100_set_surface_reg
,
230 .clear_surface_reg
= r100_clear_surface_reg
,
231 .bandwidth_update
= &r100_bandwidth_update
,
/* rs400/rs480 IGP entry points: own GART and MC register access. */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
246 static struct radeon_asic rs400_asic
= {
249 .suspend
= &rs400_suspend
,
250 .resume
= &rs400_resume
,
253 .gpu_reset
= &r300_gpu_reset
,
261 .gart_disable
= NULL
,
262 .gart_tlb_flush
= &rs400_gart_tlb_flush
,
263 .gart_set_page
= &rs400_gart_set_page
,
267 .cp_commit
= &r100_cp_commit
,
268 .ring_start
= &r300_ring_start
,
269 .ring_test
= &r100_ring_test
,
270 .ring_ib_execute
= &r100_ring_ib_execute
,
272 .irq_set
= &r100_irq_set
,
273 .irq_process
= &r100_irq_process
,
274 .get_vblank_counter
= &r100_get_vblank_counter
,
275 .fence_ring_emit
= &r300_fence_ring_emit
,
276 .cs_parse
= &r300_cs_parse
,
277 .copy_blit
= &r100_copy_blit
,
278 .copy_dma
= &r300_copy_dma
,
279 .copy
= &r100_copy_blit
,
280 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
281 .set_memory_clock
= NULL
,
282 .set_pcie_lanes
= NULL
,
283 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
284 .set_surface_reg
= r100_set_surface_reg
,
285 .clear_surface_reg
= r100_clear_surface_reg
,
286 .bandwidth_update
= &r100_bandwidth_update
,
293 extern int rs600_init(struct radeon_device
*rdev
);
294 extern void rs600_fini(struct radeon_device
*rdev
);
295 extern int rs600_suspend(struct radeon_device
*rdev
);
296 extern int rs600_resume(struct radeon_device
*rdev
);
297 int rs600_irq_set(struct radeon_device
*rdev
);
298 int rs600_irq_process(struct radeon_device
*rdev
);
299 u32
rs600_get_vblank_counter(struct radeon_device
*rdev
, int crtc
);
300 void rs600_gart_tlb_flush(struct radeon_device
*rdev
);
301 int rs600_gart_set_page(struct radeon_device
*rdev
, int i
, uint64_t addr
);
302 uint32_t rs600_mc_rreg(struct radeon_device
*rdev
, uint32_t reg
);
303 void rs600_mc_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
304 void rs600_bandwidth_update(struct radeon_device
*rdev
);
305 static struct radeon_asic rs600_asic
= {
308 .suspend
= &rs600_suspend
,
309 .resume
= &rs600_resume
,
312 .gpu_reset
= &r300_gpu_reset
,
320 .gart_disable
= NULL
,
321 .gart_tlb_flush
= &rs600_gart_tlb_flush
,
322 .gart_set_page
= &rs600_gart_set_page
,
326 .cp_commit
= &r100_cp_commit
,
327 .ring_start
= &r300_ring_start
,
328 .ring_test
= &r100_ring_test
,
329 .ring_ib_execute
= &r100_ring_ib_execute
,
331 .irq_set
= &rs600_irq_set
,
332 .irq_process
= &rs600_irq_process
,
333 .get_vblank_counter
= &rs600_get_vblank_counter
,
334 .fence_ring_emit
= &r300_fence_ring_emit
,
335 .cs_parse
= &r300_cs_parse
,
336 .copy_blit
= &r100_copy_blit
,
337 .copy_dma
= &r300_copy_dma
,
338 .copy
= &r100_copy_blit
,
339 .set_engine_clock
= &radeon_atom_set_engine_clock
,
340 .set_memory_clock
= &radeon_atom_set_memory_clock
,
341 .set_pcie_lanes
= NULL
,
342 .set_clock_gating
= &radeon_atom_set_clock_gating
,
343 .bandwidth_update
= &rs600_bandwidth_update
,
/* rs690/rs740 entry points: MC register access and bandwidth management. */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
357 static struct radeon_asic rs690_asic
= {
360 .suspend
= &rs690_suspend
,
361 .resume
= &rs690_resume
,
364 .gpu_reset
= &r300_gpu_reset
,
372 .gart_disable
= NULL
,
373 .gart_tlb_flush
= &rs400_gart_tlb_flush
,
374 .gart_set_page
= &rs400_gart_set_page
,
378 .cp_commit
= &r100_cp_commit
,
379 .ring_start
= &r300_ring_start
,
380 .ring_test
= &r100_ring_test
,
381 .ring_ib_execute
= &r100_ring_ib_execute
,
383 .irq_set
= &rs600_irq_set
,
384 .irq_process
= &rs600_irq_process
,
385 .get_vblank_counter
= &rs600_get_vblank_counter
,
386 .fence_ring_emit
= &r300_fence_ring_emit
,
387 .cs_parse
= &r300_cs_parse
,
388 .copy_blit
= &r100_copy_blit
,
389 .copy_dma
= &r300_copy_dma
,
390 .copy
= &r300_copy_dma
,
391 .set_engine_clock
= &radeon_atom_set_engine_clock
,
392 .set_memory_clock
= &radeon_atom_set_memory_clock
,
393 .set_pcie_lanes
= NULL
,
394 .set_clock_gating
= &radeon_atom_set_clock_gating
,
395 .set_surface_reg
= r100_set_surface_reg
,
396 .clear_surface_reg
= r100_clear_surface_reg
,
397 .bandwidth_update
= &rs690_bandwidth_update
,
/* rv515 entry points: own reset, ring start, MC/PCIE register access. */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
415 static struct radeon_asic rv515_asic
= {
418 .suspend
= &rv515_suspend
,
419 .resume
= &rv515_resume
,
422 .gpu_reset
= &rv515_gpu_reset
,
427 .gart_init
= &rv370_pcie_gart_init
,
428 .gart_fini
= &rv370_pcie_gart_fini
,
430 .gart_disable
= NULL
,
431 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
432 .gart_set_page
= &rv370_pcie_gart_set_page
,
436 .cp_commit
= &r100_cp_commit
,
437 .ring_start
= &rv515_ring_start
,
438 .ring_test
= &r100_ring_test
,
439 .ring_ib_execute
= &r100_ring_ib_execute
,
441 .irq_set
= &rs600_irq_set
,
442 .irq_process
= &rs600_irq_process
,
443 .get_vblank_counter
= &rs600_get_vblank_counter
,
444 .fence_ring_emit
= &r300_fence_ring_emit
,
445 .cs_parse
= &r300_cs_parse
,
446 .copy_blit
= &r100_copy_blit
,
447 .copy_dma
= &r300_copy_dma
,
448 .copy
= &r100_copy_blit
,
449 .set_engine_clock
= &radeon_atom_set_engine_clock
,
450 .set_memory_clock
= &radeon_atom_set_memory_clock
,
451 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
452 .set_clock_gating
= &radeon_atom_set_clock_gating
,
453 .set_surface_reg
= r100_set_surface_reg
,
454 .clear_surface_reg
= r100_clear_surface_reg
,
455 .bandwidth_update
= &rv515_bandwidth_update
,
/* r520,rv530,rv560,rv570,r580 */
/* r520 entry points; suspend and most hooks are shared with rv515. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
464 static struct radeon_asic r520_asic
= {
467 .suspend
= &rv515_suspend
,
468 .resume
= &r520_resume
,
471 .gpu_reset
= &rv515_gpu_reset
,
479 .gart_disable
= NULL
,
480 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
481 .gart_set_page
= &rv370_pcie_gart_set_page
,
485 .cp_commit
= &r100_cp_commit
,
486 .ring_start
= &rv515_ring_start
,
487 .ring_test
= &r100_ring_test
,
488 .ring_ib_execute
= &r100_ring_ib_execute
,
490 .irq_set
= &rs600_irq_set
,
491 .irq_process
= &rs600_irq_process
,
492 .get_vblank_counter
= &rs600_get_vblank_counter
,
493 .fence_ring_emit
= &r300_fence_ring_emit
,
494 .cs_parse
= &r300_cs_parse
,
495 .copy_blit
= &r100_copy_blit
,
496 .copy_dma
= &r300_copy_dma
,
497 .copy
= &r100_copy_blit
,
498 .set_engine_clock
= &radeon_atom_set_engine_clock
,
499 .set_memory_clock
= &radeon_atom_set_memory_clock
,
500 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
501 .set_clock_gating
= &radeon_atom_set_clock_gating
,
502 .set_surface_reg
= r100_set_surface_reg
,
503 .clear_surface_reg
= r100_clear_surface_reg
,
504 .bandwidth_update
= &rv515_bandwidth_update
,
/* r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880 */
/* r600 family entry points: writeback, own CP, IRQ, surface and copy hooks. */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
/*
 * NOTE(review): the middle parameters of r600_copy_dma were lost in the
 * mangled source; restored to match the fully-visible r600_copy_blit
 * declaration below — confirm against upstream.
 */
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
542 static struct radeon_asic r600_asic
= {
546 .suspend
= &r600_suspend
,
547 .resume
= &r600_resume
,
548 .cp_commit
= &r600_cp_commit
,
550 .gpu_reset
= &r600_gpu_reset
,
553 .wb_init
= &r600_wb_init
,
554 .wb_fini
= &r600_wb_fini
,
556 .gart_disable
= NULL
,
557 .gart_tlb_flush
= &r600_pcie_gart_tlb_flush
,
558 .gart_set_page
= &rs600_gart_set_page
,
563 .ring_test
= &r600_ring_test
,
564 .ring_ib_execute
= &r600_ring_ib_execute
,
565 .ib_test
= &r600_ib_test
,
566 .irq_set
= &r600_irq_set
,
567 .irq_process
= &r600_irq_process
,
568 .fence_ring_emit
= &r600_fence_ring_emit
,
569 .cs_parse
= &r600_cs_parse
,
570 .copy_blit
= &r600_copy_blit
,
571 .copy_dma
= &r600_copy_blit
,
572 .copy
= &r600_copy_blit
,
573 .set_engine_clock
= &radeon_atom_set_engine_clock
,
574 .set_memory_clock
= &radeon_atom_set_memory_clock
,
575 .set_pcie_lanes
= NULL
,
576 .set_clock_gating
= &radeon_atom_set_clock_gating
,
577 .set_surface_reg
= r600_set_surface_reg
,
578 .clear_surface_reg
= r600_clear_surface_reg
,
579 .bandwidth_update
= &rv515_bandwidth_update
,
/* rv770,rv730,rv710,rv740 */
/* rv770 family entry points; remaining hooks are shared with r600. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
591 static struct radeon_asic rv770_asic
= {
595 .suspend
= &rv770_suspend
,
596 .resume
= &rv770_resume
,
597 .cp_commit
= &r600_cp_commit
,
599 .gpu_reset
= &rv770_gpu_reset
,
602 .wb_init
= &r600_wb_init
,
603 .wb_fini
= &r600_wb_fini
,
605 .gart_disable
= NULL
,
606 .gart_tlb_flush
= &r600_pcie_gart_tlb_flush
,
607 .gart_set_page
= &rs600_gart_set_page
,
612 .ring_test
= &r600_ring_test
,
613 .ring_ib_execute
= &r600_ring_ib_execute
,
614 .ib_test
= &r600_ib_test
,
615 .irq_set
= &r600_irq_set
,
616 .irq_process
= &r600_irq_process
,
617 .fence_ring_emit
= &r600_fence_ring_emit
,
618 .cs_parse
= &r600_cs_parse
,
619 .copy_blit
= &r600_copy_blit
,
620 .copy_dma
= &r600_copy_blit
,
621 .copy
= &r600_copy_blit
,
622 .set_engine_clock
= &radeon_atom_set_engine_clock
,
623 .set_memory_clock
= &radeon_atom_set_memory_clock
,
624 .set_pcie_lanes
= NULL
,
625 .set_clock_gating
= &radeon_atom_set_clock_gating
,
626 .set_surface_reg
= r600_set_surface_reg
,
627 .clear_surface_reg
= r600_clear_surface_reg
,
628 .bandwidth_update
= &rv515_bandwidth_update
,