/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
/* Clock setters used by the "legacy" ASIC tables below (r100/r300/rs400). */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

/* Clock setters used by the ATOM-based ASIC tables below (r420 and newer). */
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
/* MMIO register accessors. */
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
/* MC (memory controller) setup / teardown. */
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
/* Writeback buffer setup / teardown. */
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
/* PCI GART hooks (also reused by the r300 table below). */
int r100_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* CP (command processor) ring management, shared by all pre-r600 tables. */
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
/* Interrupt enable/mask programming and top-half processing. */
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
/* PLL register accessors. */
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
/* Copy via the blit engine; the fence argument presumably signals
 * completion - NOTE(review): confirm semantics in r100.c. */
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
/* Surface (tiling) register management, reused by all tables below. */
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
80 | static struct radeon_asic r100_asic = { | |
068a117c | 81 | .init = &r100_init, |
771fe6b9 JG |
82 | .errata = &r100_errata, |
83 | .vram_info = &r100_vram_info, | |
84 | .gpu_reset = &r100_gpu_reset, | |
85 | .mc_init = &r100_mc_init, | |
86 | .mc_fini = &r100_mc_fini, | |
87 | .wb_init = &r100_wb_init, | |
88 | .wb_fini = &r100_wb_fini, | |
89 | .gart_enable = &r100_gart_enable, | |
90 | .gart_disable = &r100_pci_gart_disable, | |
91 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, | |
92 | .gart_set_page = &r100_pci_gart_set_page, | |
93 | .cp_init = &r100_cp_init, | |
94 | .cp_fini = &r100_cp_fini, | |
95 | .cp_disable = &r100_cp_disable, | |
96 | .ring_start = &r100_ring_start, | |
97 | .irq_set = &r100_irq_set, | |
98 | .irq_process = &r100_irq_process, | |
99 | .fence_ring_emit = &r100_fence_ring_emit, | |
100 | .cs_parse = &r100_cs_parse, | |
101 | .copy_blit = &r100_copy_blit, | |
102 | .copy_dma = NULL, | |
103 | .copy = &r100_copy_blit, | |
104 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
105 | .set_memory_clock = NULL, | |
106 | .set_pcie_lanes = NULL, | |
107 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
108 | .set_surface_reg = r100_set_surface_reg, |
109 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 110 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
111 | }; |
112 | ||
113 | ||
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int r300_gart_enable(struct radeon_device *rdev);
/* PCIe GART hooks; wired into the r420/rv515/r520 tables below. */
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* PCIe indirect register accessors. */
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
/* Copy via the DMA engine; blit variant is r100_copy_blit above. */
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
771fe6b9 | 140 | static struct radeon_asic r300_asic = { |
068a117c | 141 | .init = &r300_init, |
771fe6b9 JG |
142 | .errata = &r300_errata, |
143 | .vram_info = &r300_vram_info, | |
144 | .gpu_reset = &r300_gpu_reset, | |
145 | .mc_init = &r300_mc_init, | |
146 | .mc_fini = &r300_mc_fini, | |
147 | .wb_init = &r100_wb_init, | |
148 | .wb_fini = &r100_wb_fini, | |
149 | .gart_enable = &r300_gart_enable, | |
150 | .gart_disable = &r100_pci_gart_disable, | |
151 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, | |
152 | .gart_set_page = &r100_pci_gart_set_page, | |
153 | .cp_init = &r100_cp_init, | |
154 | .cp_fini = &r100_cp_fini, | |
155 | .cp_disable = &r100_cp_disable, | |
156 | .ring_start = &r300_ring_start, | |
157 | .irq_set = &r100_irq_set, | |
158 | .irq_process = &r100_irq_process, | |
159 | .fence_ring_emit = &r300_fence_ring_emit, | |
160 | .cs_parse = &r300_cs_parse, | |
161 | .copy_blit = &r100_copy_blit, | |
162 | .copy_dma = &r300_copy_dma, | |
163 | .copy = &r100_copy_blit, | |
164 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
165 | .set_memory_clock = NULL, | |
166 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
167 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
168 | .set_surface_reg = r100_set_surface_reg, |
169 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 170 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
171 | }; |
172 | ||
/*
 * r420,r423,rv410
 */
void r420_errata(struct radeon_device *rdev);
void r420_vram_info(struct radeon_device *rdev);
int r420_mc_init(struct radeon_device *rdev);
void r420_mc_fini(struct radeon_device *rdev);
180 | static struct radeon_asic r420_asic = { | |
068a117c | 181 | .init = &r300_init, |
771fe6b9 JG |
182 | .errata = &r420_errata, |
183 | .vram_info = &r420_vram_info, | |
184 | .gpu_reset = &r300_gpu_reset, | |
185 | .mc_init = &r420_mc_init, | |
186 | .mc_fini = &r420_mc_fini, | |
187 | .wb_init = &r100_wb_init, | |
188 | .wb_fini = &r100_wb_fini, | |
189 | .gart_enable = &r300_gart_enable, | |
190 | .gart_disable = &rv370_pcie_gart_disable, | |
191 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, | |
192 | .gart_set_page = &rv370_pcie_gart_set_page, | |
193 | .cp_init = &r100_cp_init, | |
194 | .cp_fini = &r100_cp_fini, | |
195 | .cp_disable = &r100_cp_disable, | |
196 | .ring_start = &r300_ring_start, | |
197 | .irq_set = &r100_irq_set, | |
198 | .irq_process = &r100_irq_process, | |
199 | .fence_ring_emit = &r300_fence_ring_emit, | |
200 | .cs_parse = &r300_cs_parse, | |
201 | .copy_blit = &r100_copy_blit, | |
202 | .copy_dma = &r300_copy_dma, | |
203 | .copy = &r100_copy_blit, | |
204 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
205 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
206 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
207 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
208 | .set_surface_reg = r100_set_surface_reg, |
209 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 210 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
211 | }; |
212 | ||
213 | ||
/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
/* IGP GART hooks (also wired into the rs690/rs740 table below). */
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* MC indirect register accessors. */
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
227 | static struct radeon_asic rs400_asic = { | |
068a117c | 228 | .init = &r300_init, |
771fe6b9 JG |
229 | .errata = &rs400_errata, |
230 | .vram_info = &rs400_vram_info, | |
231 | .gpu_reset = &r300_gpu_reset, | |
232 | .mc_init = &rs400_mc_init, | |
233 | .mc_fini = &rs400_mc_fini, | |
234 | .wb_init = &r100_wb_init, | |
235 | .wb_fini = &r100_wb_fini, | |
236 | .gart_enable = &rs400_gart_enable, | |
237 | .gart_disable = &rs400_gart_disable, | |
238 | .gart_tlb_flush = &rs400_gart_tlb_flush, | |
239 | .gart_set_page = &rs400_gart_set_page, | |
240 | .cp_init = &r100_cp_init, | |
241 | .cp_fini = &r100_cp_fini, | |
242 | .cp_disable = &r100_cp_disable, | |
243 | .ring_start = &r300_ring_start, | |
244 | .irq_set = &r100_irq_set, | |
245 | .irq_process = &r100_irq_process, | |
246 | .fence_ring_emit = &r300_fence_ring_emit, | |
247 | .cs_parse = &r300_cs_parse, | |
248 | .copy_blit = &r100_copy_blit, | |
249 | .copy_dma = &r300_copy_dma, | |
250 | .copy = &r100_copy_blit, | |
251 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
252 | .set_memory_clock = NULL, | |
253 | .set_pcie_lanes = NULL, | |
254 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
255 | .set_surface_reg = r100_set_surface_reg, |
256 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 257 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
258 | }; |
259 | ||
260 | ||
/*
 * rs600.
 */
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
/* rs600-specific IRQ programming (also reused by the rs690 table). */
int rs600_irq_set(struct radeon_device *rdev);
/* rs600-specific GART hooks. */
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* MC indirect register accessors. */
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 276 | static struct radeon_asic rs600_asic = { |
068a117c | 277 | .init = &r300_init, |
771fe6b9 JG |
278 | .errata = &rs600_errata, |
279 | .vram_info = &rs600_vram_info, | |
280 | .gpu_reset = &r300_gpu_reset, | |
281 | .mc_init = &rs600_mc_init, | |
282 | .mc_fini = &rs600_mc_fini, | |
283 | .wb_init = &r100_wb_init, | |
284 | .wb_fini = &r100_wb_fini, | |
285 | .gart_enable = &rs600_gart_enable, | |
286 | .gart_disable = &rs600_gart_disable, | |
287 | .gart_tlb_flush = &rs600_gart_tlb_flush, | |
288 | .gart_set_page = &rs600_gart_set_page, | |
289 | .cp_init = &r100_cp_init, | |
290 | .cp_fini = &r100_cp_fini, | |
291 | .cp_disable = &r100_cp_disable, | |
292 | .ring_start = &r300_ring_start, | |
293 | .irq_set = &rs600_irq_set, | |
294 | .irq_process = &r100_irq_process, | |
295 | .fence_ring_emit = &r300_fence_ring_emit, | |
296 | .cs_parse = &r300_cs_parse, | |
297 | .copy_blit = &r100_copy_blit, | |
298 | .copy_dma = &r300_copy_dma, | |
299 | .copy = &r100_copy_blit, | |
300 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
301 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
302 | .set_pcie_lanes = NULL, | |
303 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
c93bb85b | 304 | .bandwidth_update = &rs600_bandwidth_update, |
771fe6b9 JG |
305 | }; |
306 | ||
307 | ||
/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
/* MC indirect register accessors. */
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 318 | static struct radeon_asic rs690_asic = { |
068a117c | 319 | .init = &r300_init, |
771fe6b9 JG |
320 | .errata = &rs690_errata, |
321 | .vram_info = &rs690_vram_info, | |
322 | .gpu_reset = &r300_gpu_reset, | |
323 | .mc_init = &rs690_mc_init, | |
324 | .mc_fini = &rs690_mc_fini, | |
325 | .wb_init = &r100_wb_init, | |
326 | .wb_fini = &r100_wb_fini, | |
327 | .gart_enable = &rs400_gart_enable, | |
328 | .gart_disable = &rs400_gart_disable, | |
329 | .gart_tlb_flush = &rs400_gart_tlb_flush, | |
330 | .gart_set_page = &rs400_gart_set_page, | |
331 | .cp_init = &r100_cp_init, | |
332 | .cp_fini = &r100_cp_fini, | |
333 | .cp_disable = &r100_cp_disable, | |
334 | .ring_start = &r300_ring_start, | |
335 | .irq_set = &rs600_irq_set, | |
336 | .irq_process = &r100_irq_process, | |
337 | .fence_ring_emit = &r300_fence_ring_emit, | |
338 | .cs_parse = &r300_cs_parse, | |
339 | .copy_blit = &r100_copy_blit, | |
340 | .copy_dma = &r300_copy_dma, | |
341 | .copy = &r300_copy_dma, | |
342 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
343 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
344 | .set_pcie_lanes = NULL, | |
345 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
346 | .set_surface_reg = r100_set_surface_reg, |
347 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 348 | .bandwidth_update = &rs690_bandwidth_update, |
771fe6b9 JG |
349 | }; |
350 | ||
351 | ||
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
/* MC indirect register accessors. */
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
/* PCIe indirect register accessors (rv515 variant). */
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 367 | static struct radeon_asic rv515_asic = { |
068a117c | 368 | .init = &rv515_init, |
771fe6b9 JG |
369 | .errata = &rv515_errata, |
370 | .vram_info = &rv515_vram_info, | |
371 | .gpu_reset = &rv515_gpu_reset, | |
372 | .mc_init = &rv515_mc_init, | |
373 | .mc_fini = &rv515_mc_fini, | |
374 | .wb_init = &r100_wb_init, | |
375 | .wb_fini = &r100_wb_fini, | |
376 | .gart_enable = &r300_gart_enable, | |
377 | .gart_disable = &rv370_pcie_gart_disable, | |
378 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, | |
379 | .gart_set_page = &rv370_pcie_gart_set_page, | |
380 | .cp_init = &r100_cp_init, | |
381 | .cp_fini = &r100_cp_fini, | |
382 | .cp_disable = &r100_cp_disable, | |
383 | .ring_start = &rv515_ring_start, | |
384 | .irq_set = &r100_irq_set, | |
385 | .irq_process = &r100_irq_process, | |
386 | .fence_ring_emit = &r300_fence_ring_emit, | |
068a117c | 387 | .cs_parse = &r300_cs_parse, |
771fe6b9 JG |
388 | .copy_blit = &r100_copy_blit, |
389 | .copy_dma = &r300_copy_dma, | |
390 | .copy = &r100_copy_blit, | |
391 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
392 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
393 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
394 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
395 | .set_surface_reg = r100_set_surface_reg, |
396 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 397 | .bandwidth_update = &rv515_bandwidth_update, |
771fe6b9 JG |
398 | }; |
399 | ||
400 | ||
/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 409 | static struct radeon_asic r520_asic = { |
068a117c | 410 | .init = &rv515_init, |
771fe6b9 JG |
411 | .errata = &r520_errata, |
412 | .vram_info = &r520_vram_info, | |
413 | .gpu_reset = &rv515_gpu_reset, | |
414 | .mc_init = &r520_mc_init, | |
415 | .mc_fini = &r520_mc_fini, | |
416 | .wb_init = &r100_wb_init, | |
417 | .wb_fini = &r100_wb_fini, | |
418 | .gart_enable = &r300_gart_enable, | |
419 | .gart_disable = &rv370_pcie_gart_disable, | |
420 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, | |
421 | .gart_set_page = &rv370_pcie_gart_set_page, | |
422 | .cp_init = &r100_cp_init, | |
423 | .cp_fini = &r100_cp_fini, | |
424 | .cp_disable = &r100_cp_disable, | |
425 | .ring_start = &rv515_ring_start, | |
426 | .irq_set = &r100_irq_set, | |
427 | .irq_process = &r100_irq_process, | |
428 | .fence_ring_emit = &r300_fence_ring_emit, | |
068a117c | 429 | .cs_parse = &r300_cs_parse, |
771fe6b9 JG |
430 | .copy_blit = &r100_copy_blit, |
431 | .copy_dma = &r300_copy_dma, | |
432 | .copy = &r100_copy_blit, | |
433 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
434 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
435 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
436 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
437 | .set_surface_reg = r100_set_surface_reg, |
438 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 439 | .bandwidth_update = &r520_bandwidth_update, |
771fe6b9 JG |
440 | }; |
441 | ||
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
 */
/* PCIe port indirect register accessors; no r600 ASIC table exists in
 * this header yet. */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);

#endif