/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

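/*
 * This header collects the per-family ASIC function tables. Each
 * radeon_asic struct below binds the generic driver entry points
 * (init/fini, GART handling, ring and IRQ management, copies, clock
 * control, ...) to the implementation for that chip generation,
 * reusing helpers from older families wherever the hardware block
 * is unchanged.
 */
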
/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);

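/*
 * R100-class chips use the on-chip PCI GART, the legacy (non-ATOM)
 * clock routines and the 2D blitter for copies; no DMA copy hook.
 */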
static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .fini = &r100_fini,
        .suspend = &r100_suspend,
        .resume = &r100_resume,
        .gpu_reset = &r100_gpu_reset,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r100_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
                                 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
                         uint64_t src_offset,
                         uint64_t dst_offset,
                         unsigned num_pages,
                         struct radeon_fence *fence);
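/*
 * R300 adds a DMA copy engine and PCIE lane control; this table still
 * routes the GART through the R100 PCI path, while the rv370 PCIE
 * GART helpers above serve the later PCIE-based tables.
 */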
static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .fini = &r300_fini,
        .suspend = &r300_suspend,
        .resume = &r300_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
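/* R420 switches to the RV370 PCIE GART and the ATOM BIOS clock routines. */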
static struct radeon_asic r420_asic = {
        .init = &r420_init,
        .fini = &r420_fini,
        .suspend = &r420_suspend,
        .resume = &r420_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
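/* RS400/RS480 IGPs bring their own GART but keep the legacy clock routines. */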
static struct radeon_asic rs400_asic = {
        .init = &rs400_init,
        .fini = &rs400_fini,
        .suspend = &rs400_suspend,
        .resume = &rs400_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
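/*
 * RS600 introduces its own GART, IRQ/vblank handling and bandwidth
 * code. No surface register hooks are wired up in this table, so
 * those members are left NULL by the designated initializer.
 */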
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .fini = &rs600_fini,
        .suspend = &rs600_suspend,
        .resume = &rs600_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
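/*
 * RS690/RS740 reuse the RS400 GART and RS600 IRQ paths; note that the
 * default copy method here is the DMA engine rather than the blitter.
 */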
static struct radeon_asic rs690_asic = {
        .init = &rs690_init,
        .fini = &rs690_fini,
        .suspend = &rs690_suspend,
        .resume = &rs690_resume,
        .gpu_reset = &r300_gpu_reset,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
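/*
 * RV515 pairs the RV370 PCIE GART with the RS600 IRQ/vblank hooks and
 * its own GPU reset, ring start and bandwidth code.
 */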
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &rv515_resume,
        .gpu_reset = &rv515_gpu_reset,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
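/* R520 differs from the RV515 table only in its init and resume hooks. */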
static struct radeon_asic r520_asic = {
        .init = &r520_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &r520_resume,
        .gpu_reset = &rv515_gpu_reset,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r100_set_surface_reg,
        .clear_surface_reg = &r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_pages, struct radeon_fence *fence);

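/*
 * R600 brings new CP, IRQ and surface register paths. No ring_start
 * or get_vblank_counter hooks are wired up here, and all three copy
 * methods point at the blitter even though r600_copy_dma is declared
 * above.
 */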
static struct radeon_asic r600_asic = {
        .init = &r600_init,
        .fini = &r600_fini,
        .suspend = &r600_suspend,
        .resume = &r600_resume,
        .cp_commit = &r600_cp_commit,
        .gpu_reset = &r600_gpu_reset,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r600_set_surface_reg,
        .clear_surface_reg = &r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

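/* RV770 family reuses the R600 hooks apart from init/fini, suspend/resume and GPU reset. */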
static struct radeon_asic rv770_asic = {
        .init = &rv770_init,
        .fini = &rv770_fini,
        .suspend = &rv770_suspend,
        .resume = &rv770_resume,
        .cp_commit = &r600_cp_commit,
        .gpu_reset = &rv770_gpu_reset,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = &r600_set_surface_reg,
        .clear_surface_reg = &r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

#endif