/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
28 #ifndef __RADEON_ASIC_H__
29 #define __RADEON_ASIC_H__
/*
 * Clock-control helpers shared by the per-ASIC dispatch tables below.
 * The "legacy" variants are used by the pre-AtomBIOS tables (r100, r300,
 * rs400) and the "atom" variants by the AtomBIOS-era tables (r420+), as
 * wired up in the struct radeon_asic initializers in this file.
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
47 extern int r100_init(struct radeon_device
*rdev
);
48 extern void r100_fini(struct radeon_device
*rdev
);
49 extern int r100_suspend(struct radeon_device
*rdev
);
50 extern int r100_resume(struct radeon_device
*rdev
);
51 uint32_t r100_mm_rreg(struct radeon_device
*rdev
, uint32_t reg
);
52 void r100_mm_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
53 int r100_gpu_reset(struct radeon_device
*rdev
);
54 u32
r100_get_vblank_counter(struct radeon_device
*rdev
, int crtc
);
55 void r100_pci_gart_tlb_flush(struct radeon_device
*rdev
);
56 int r100_pci_gart_set_page(struct radeon_device
*rdev
, int i
, uint64_t addr
);
57 void r100_cp_commit(struct radeon_device
*rdev
);
58 void r100_ring_start(struct radeon_device
*rdev
);
59 int r100_irq_set(struct radeon_device
*rdev
);
60 int r100_irq_process(struct radeon_device
*rdev
);
61 void r100_fence_ring_emit(struct radeon_device
*rdev
,
62 struct radeon_fence
*fence
);
63 int r100_cs_parse(struct radeon_cs_parser
*p
);
64 void r100_pll_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
65 uint32_t r100_pll_rreg(struct radeon_device
*rdev
, uint32_t reg
);
66 int r100_copy_blit(struct radeon_device
*rdev
,
70 struct radeon_fence
*fence
);
71 int r100_set_surface_reg(struct radeon_device
*rdev
, int reg
,
72 uint32_t tiling_flags
, uint32_t pitch
,
73 uint32_t offset
, uint32_t obj_size
);
74 int r100_clear_surface_reg(struct radeon_device
*rdev
, int reg
);
75 void r100_bandwidth_update(struct radeon_device
*rdev
);
76 void r100_ring_ib_execute(struct radeon_device
*rdev
, struct radeon_ib
*ib
);
77 int r100_ring_test(struct radeon_device
*rdev
);
79 static struct radeon_asic r100_asic
= {
82 .suspend
= &r100_suspend
,
83 .resume
= &r100_resume
,
84 .gpu_reset
= &r100_gpu_reset
,
85 .gart_tlb_flush
= &r100_pci_gart_tlb_flush
,
86 .gart_set_page
= &r100_pci_gart_set_page
,
87 .cp_commit
= &r100_cp_commit
,
88 .ring_start
= &r100_ring_start
,
89 .ring_test
= &r100_ring_test
,
90 .ring_ib_execute
= &r100_ring_ib_execute
,
91 .irq_set
= &r100_irq_set
,
92 .irq_process
= &r100_irq_process
,
93 .get_vblank_counter
= &r100_get_vblank_counter
,
94 .fence_ring_emit
= &r100_fence_ring_emit
,
95 .cs_parse
= &r100_cs_parse
,
96 .copy_blit
= &r100_copy_blit
,
98 .copy
= &r100_copy_blit
,
99 .get_engine_clock
= &radeon_legacy_get_engine_clock
,
100 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
101 .get_memory_clock
= NULL
,
102 .set_memory_clock
= NULL
,
103 .set_pcie_lanes
= NULL
,
104 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
105 .set_surface_reg
= r100_set_surface_reg
,
106 .clear_surface_reg
= r100_clear_surface_reg
,
107 .bandwidth_update
= &r100_bandwidth_update
,
/*
 * r300,r350,rv350,rv380
 */
/* r300-family entry points, plus the rv370 PCIE GART/lane helpers. */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
/*
 * NOTE(review): the src_offset/dst_offset/num_pages parameters of
 * r300_copy_dma were missing from this copy; restored to match the
 * complete r600_copy_blit prototype declared later in this file —
 * confirm against r300.c.
 */
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
133 static struct radeon_asic r300_asic
= {
136 .suspend
= &r300_suspend
,
137 .resume
= &r300_resume
,
138 .gpu_reset
= &r300_gpu_reset
,
139 .gart_tlb_flush
= &r100_pci_gart_tlb_flush
,
140 .gart_set_page
= &r100_pci_gart_set_page
,
141 .cp_commit
= &r100_cp_commit
,
142 .ring_start
= &r300_ring_start
,
143 .ring_test
= &r100_ring_test
,
144 .ring_ib_execute
= &r100_ring_ib_execute
,
145 .irq_set
= &r100_irq_set
,
146 .irq_process
= &r100_irq_process
,
147 .get_vblank_counter
= &r100_get_vblank_counter
,
148 .fence_ring_emit
= &r300_fence_ring_emit
,
149 .cs_parse
= &r300_cs_parse
,
150 .copy_blit
= &r100_copy_blit
,
151 .copy_dma
= &r300_copy_dma
,
152 .copy
= &r100_copy_blit
,
153 .get_engine_clock
= &radeon_legacy_get_engine_clock
,
154 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
155 .get_memory_clock
= NULL
,
156 .set_memory_clock
= NULL
,
157 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
158 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
159 .set_surface_reg
= r100_set_surface_reg
,
160 .clear_surface_reg
= r100_clear_surface_reg
,
161 .bandwidth_update
= &r100_bandwidth_update
,
/* r420-family entry points, referenced by the r420_asic table below. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
171 static struct radeon_asic r420_asic
= {
174 .suspend
= &r420_suspend
,
175 .resume
= &r420_resume
,
176 .gpu_reset
= &r300_gpu_reset
,
177 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
178 .gart_set_page
= &rv370_pcie_gart_set_page
,
179 .cp_commit
= &r100_cp_commit
,
180 .ring_start
= &r300_ring_start
,
181 .ring_test
= &r100_ring_test
,
182 .ring_ib_execute
= &r100_ring_ib_execute
,
183 .irq_set
= &r100_irq_set
,
184 .irq_process
= &r100_irq_process
,
185 .get_vblank_counter
= &r100_get_vblank_counter
,
186 .fence_ring_emit
= &r300_fence_ring_emit
,
187 .cs_parse
= &r300_cs_parse
,
188 .copy_blit
= &r100_copy_blit
,
189 .copy_dma
= &r300_copy_dma
,
190 .copy
= &r100_copy_blit
,
191 .get_engine_clock
= &radeon_atom_get_engine_clock
,
192 .set_engine_clock
= &radeon_atom_set_engine_clock
,
193 .get_memory_clock
= &radeon_atom_get_memory_clock
,
194 .set_memory_clock
= &radeon_atom_set_memory_clock
,
195 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
196 .set_clock_gating
= &radeon_atom_set_clock_gating
,
197 .set_surface_reg
= r100_set_surface_reg
,
198 .clear_surface_reg
= r100_clear_surface_reg
,
199 .bandwidth_update
= &r100_bandwidth_update
,
/* rs400-family entry points, including its IGP GART and MC register I/O. */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
214 static struct radeon_asic rs400_asic
= {
217 .suspend
= &rs400_suspend
,
218 .resume
= &rs400_resume
,
219 .gpu_reset
= &r300_gpu_reset
,
220 .gart_tlb_flush
= &rs400_gart_tlb_flush
,
221 .gart_set_page
= &rs400_gart_set_page
,
222 .cp_commit
= &r100_cp_commit
,
223 .ring_start
= &r300_ring_start
,
224 .ring_test
= &r100_ring_test
,
225 .ring_ib_execute
= &r100_ring_ib_execute
,
226 .irq_set
= &r100_irq_set
,
227 .irq_process
= &r100_irq_process
,
228 .get_vblank_counter
= &r100_get_vblank_counter
,
229 .fence_ring_emit
= &r300_fence_ring_emit
,
230 .cs_parse
= &r300_cs_parse
,
231 .copy_blit
= &r100_copy_blit
,
232 .copy_dma
= &r300_copy_dma
,
233 .copy
= &r100_copy_blit
,
234 .get_engine_clock
= &radeon_legacy_get_engine_clock
,
235 .set_engine_clock
= &radeon_legacy_set_engine_clock
,
236 .get_memory_clock
= NULL
,
237 .set_memory_clock
= NULL
,
238 .set_pcie_lanes
= NULL
,
239 .set_clock_gating
= &radeon_legacy_set_clock_gating
,
240 .set_surface_reg
= r100_set_surface_reg
,
241 .clear_surface_reg
= r100_clear_surface_reg
,
242 .bandwidth_update
= &r100_bandwidth_update
,
249 extern int rs600_init(struct radeon_device
*rdev
);
250 extern void rs600_fini(struct radeon_device
*rdev
);
251 extern int rs600_suspend(struct radeon_device
*rdev
);
252 extern int rs600_resume(struct radeon_device
*rdev
);
253 int rs600_irq_set(struct radeon_device
*rdev
);
254 int rs600_irq_process(struct radeon_device
*rdev
);
255 u32
rs600_get_vblank_counter(struct radeon_device
*rdev
, int crtc
);
256 void rs600_gart_tlb_flush(struct radeon_device
*rdev
);
257 int rs600_gart_set_page(struct radeon_device
*rdev
, int i
, uint64_t addr
);
258 uint32_t rs600_mc_rreg(struct radeon_device
*rdev
, uint32_t reg
);
259 void rs600_mc_wreg(struct radeon_device
*rdev
, uint32_t reg
, uint32_t v
);
260 void rs600_bandwidth_update(struct radeon_device
*rdev
);
261 static struct radeon_asic rs600_asic
= {
264 .suspend
= &rs600_suspend
,
265 .resume
= &rs600_resume
,
266 .gpu_reset
= &r300_gpu_reset
,
267 .gart_tlb_flush
= &rs600_gart_tlb_flush
,
268 .gart_set_page
= &rs600_gart_set_page
,
269 .cp_commit
= &r100_cp_commit
,
270 .ring_start
= &r300_ring_start
,
271 .ring_test
= &r100_ring_test
,
272 .ring_ib_execute
= &r100_ring_ib_execute
,
273 .irq_set
= &rs600_irq_set
,
274 .irq_process
= &rs600_irq_process
,
275 .get_vblank_counter
= &rs600_get_vblank_counter
,
276 .fence_ring_emit
= &r300_fence_ring_emit
,
277 .cs_parse
= &r300_cs_parse
,
278 .copy_blit
= &r100_copy_blit
,
279 .copy_dma
= &r300_copy_dma
,
280 .copy
= &r100_copy_blit
,
281 .get_engine_clock
= &radeon_atom_get_engine_clock
,
282 .set_engine_clock
= &radeon_atom_set_engine_clock
,
283 .get_memory_clock
= &radeon_atom_get_memory_clock
,
284 .set_memory_clock
= &radeon_atom_set_memory_clock
,
285 .set_pcie_lanes
= NULL
,
286 .set_clock_gating
= &radeon_atom_set_clock_gating
,
287 .bandwidth_update
= &rs600_bandwidth_update
,
/* rs690-family entry points, referenced by the rs690_asic table below. */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
301 static struct radeon_asic rs690_asic
= {
304 .suspend
= &rs690_suspend
,
305 .resume
= &rs690_resume
,
306 .gpu_reset
= &r300_gpu_reset
,
307 .gart_tlb_flush
= &rs400_gart_tlb_flush
,
308 .gart_set_page
= &rs400_gart_set_page
,
309 .cp_commit
= &r100_cp_commit
,
310 .ring_start
= &r300_ring_start
,
311 .ring_test
= &r100_ring_test
,
312 .ring_ib_execute
= &r100_ring_ib_execute
,
313 .irq_set
= &rs600_irq_set
,
314 .irq_process
= &rs600_irq_process
,
315 .get_vblank_counter
= &rs600_get_vblank_counter
,
316 .fence_ring_emit
= &r300_fence_ring_emit
,
317 .cs_parse
= &r300_cs_parse
,
318 .copy_blit
= &r100_copy_blit
,
319 .copy_dma
= &r300_copy_dma
,
320 .copy
= &r300_copy_dma
,
321 .get_engine_clock
= &radeon_atom_get_engine_clock
,
322 .set_engine_clock
= &radeon_atom_set_engine_clock
,
323 .get_memory_clock
= &radeon_atom_get_memory_clock
,
324 .set_memory_clock
= &radeon_atom_set_memory_clock
,
325 .set_pcie_lanes
= NULL
,
326 .set_clock_gating
= &radeon_atom_set_clock_gating
,
327 .set_surface_reg
= r100_set_surface_reg
,
328 .clear_surface_reg
= r100_clear_surface_reg
,
329 .bandwidth_update
= &rs690_bandwidth_update
,
/* rv515-family entry points, also reused by the r520 table below. */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
347 static struct radeon_asic rv515_asic
= {
350 .suspend
= &rv515_suspend
,
351 .resume
= &rv515_resume
,
352 .gpu_reset
= &rv515_gpu_reset
,
353 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
354 .gart_set_page
= &rv370_pcie_gart_set_page
,
355 .cp_commit
= &r100_cp_commit
,
356 .ring_start
= &rv515_ring_start
,
357 .ring_test
= &r100_ring_test
,
358 .ring_ib_execute
= &r100_ring_ib_execute
,
359 .irq_set
= &rs600_irq_set
,
360 .irq_process
= &rs600_irq_process
,
361 .get_vblank_counter
= &rs600_get_vblank_counter
,
362 .fence_ring_emit
= &r300_fence_ring_emit
,
363 .cs_parse
= &r300_cs_parse
,
364 .copy_blit
= &r100_copy_blit
,
365 .copy_dma
= &r300_copy_dma
,
366 .copy
= &r100_copy_blit
,
367 .get_engine_clock
= &radeon_atom_get_engine_clock
,
368 .set_engine_clock
= &radeon_atom_set_engine_clock
,
369 .get_memory_clock
= &radeon_atom_get_memory_clock
,
370 .set_memory_clock
= &radeon_atom_set_memory_clock
,
371 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
372 .set_clock_gating
= &radeon_atom_set_clock_gating
,
373 .set_surface_reg
= r100_set_surface_reg
,
374 .clear_surface_reg
= r100_clear_surface_reg
,
375 .bandwidth_update
= &rv515_bandwidth_update
,
/*
 * r520,rv530,rv560,rv570,r580
 */
/* r520-family entry points; the rest of its table reuses rv515 handlers. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
384 static struct radeon_asic r520_asic
= {
387 .suspend
= &rv515_suspend
,
388 .resume
= &r520_resume
,
389 .gpu_reset
= &rv515_gpu_reset
,
390 .gart_tlb_flush
= &rv370_pcie_gart_tlb_flush
,
391 .gart_set_page
= &rv370_pcie_gart_set_page
,
392 .cp_commit
= &r100_cp_commit
,
393 .ring_start
= &rv515_ring_start
,
394 .ring_test
= &r100_ring_test
,
395 .ring_ib_execute
= &r100_ring_ib_execute
,
396 .irq_set
= &rs600_irq_set
,
397 .irq_process
= &rs600_irq_process
,
398 .get_vblank_counter
= &rs600_get_vblank_counter
,
399 .fence_ring_emit
= &r300_fence_ring_emit
,
400 .cs_parse
= &r300_cs_parse
,
401 .copy_blit
= &r100_copy_blit
,
402 .copy_dma
= &r300_copy_dma
,
403 .copy
= &r100_copy_blit
,
404 .get_engine_clock
= &radeon_atom_get_engine_clock
,
405 .set_engine_clock
= &radeon_atom_set_engine_clock
,
406 .get_memory_clock
= &radeon_atom_get_memory_clock
,
407 .set_memory_clock
= &radeon_atom_set_memory_clock
,
408 .set_pcie_lanes
= &rv370_set_pcie_lanes
,
409 .set_clock_gating
= &radeon_atom_set_clock_gating
,
410 .set_surface_reg
= r100_set_surface_reg
,
411 .clear_surface_reg
= r100_clear_surface_reg
,
412 .bandwidth_update
= &rv515_bandwidth_update
,
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
/* r600-family entry points, referenced by the r600 and rv770 tables. */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
/*
 * NOTE(review): the src_offset/dst_offset/num_pages parameters of
 * r600_copy_dma were missing from this copy; restored to match the
 * complete r600_copy_blit prototype below — confirm against r600.c.
 */
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset, uint64_t dst_offset,
		  unsigned num_pages, struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
449 static struct radeon_asic r600_asic
= {
452 .suspend
= &r600_suspend
,
453 .resume
= &r600_resume
,
454 .cp_commit
= &r600_cp_commit
,
455 .gpu_reset
= &r600_gpu_reset
,
456 .gart_tlb_flush
= &r600_pcie_gart_tlb_flush
,
457 .gart_set_page
= &rs600_gart_set_page
,
458 .ring_test
= &r600_ring_test
,
459 .ring_ib_execute
= &r600_ring_ib_execute
,
460 .irq_set
= &r600_irq_set
,
461 .irq_process
= &r600_irq_process
,
462 .fence_ring_emit
= &r600_fence_ring_emit
,
463 .cs_parse
= &r600_cs_parse
,
464 .copy_blit
= &r600_copy_blit
,
465 .copy_dma
= &r600_copy_blit
,
466 .copy
= &r600_copy_blit
,
467 .get_engine_clock
= &radeon_atom_get_engine_clock
,
468 .set_engine_clock
= &radeon_atom_set_engine_clock
,
469 .get_memory_clock
= &radeon_atom_get_memory_clock
,
470 .set_memory_clock
= &radeon_atom_set_memory_clock
,
471 .set_pcie_lanes
= NULL
,
472 .set_clock_gating
= &radeon_atom_set_clock_gating
,
473 .set_surface_reg
= r600_set_surface_reg
,
474 .clear_surface_reg
= r600_clear_surface_reg
,
475 .bandwidth_update
= &rv515_bandwidth_update
,
/*
 * rv770,rv730,rv710,rv740
 */
/* rv770-family entry points, referenced by the rv770_asic table below. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
487 static struct radeon_asic rv770_asic
= {
490 .suspend
= &rv770_suspend
,
491 .resume
= &rv770_resume
,
492 .cp_commit
= &r600_cp_commit
,
493 .gpu_reset
= &rv770_gpu_reset
,
494 .gart_tlb_flush
= &r600_pcie_gart_tlb_flush
,
495 .gart_set_page
= &rs600_gart_set_page
,
496 .ring_test
= &r600_ring_test
,
497 .ring_ib_execute
= &r600_ring_ib_execute
,
498 .irq_set
= &r600_irq_set
,
499 .irq_process
= &r600_irq_process
,
500 .fence_ring_emit
= &r600_fence_ring_emit
,
501 .cs_parse
= &r600_cs_parse
,
502 .copy_blit
= &r600_copy_blit
,
503 .copy_dma
= &r600_copy_blit
,
504 .copy
= &r600_copy_blit
,
505 .get_engine_clock
= &radeon_atom_get_engine_clock
,
506 .set_engine_clock
= &radeon_atom_set_engine_clock
,
507 .get_memory_clock
= &radeon_atom_get_memory_clock
,
508 .set_memory_clock
= &radeon_atom_set_memory_clock
,
509 .set_pcie_lanes
= NULL
,
510 .set_clock_gating
= &radeon_atom_set_clock_gating
,
511 .set_surface_reg
= r600_set_surface_reg
,
512 .clear_surface_reg
= r600_clear_surface_reg
,
513 .bandwidth_update
= &rv515_bandwidth_update
,