/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28 | #ifndef __RADEON_ASIC_H__ | |
29 | #define __RADEON_ASIC_H__ | |
30 | ||
31 | /* | |
32 | * common functions | |
33 | */ | |
34 | void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); | |
35 | void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable); | |
36 | ||
37 | void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock); | |
38 | void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock); | |
39 | void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable); | |
40 | ||
41 | /* | |
42 | * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280 | |
43 | */ | |
068a117c | 44 | int r100_init(struct radeon_device *rdev); |
551ebd83 | 45 | int r200_init(struct radeon_device *rdev); |
771fe6b9 JG |
46 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
47 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
48 | void r100_errata(struct radeon_device *rdev); | |
49 | void r100_vram_info(struct radeon_device *rdev); | |
50 | int r100_gpu_reset(struct radeon_device *rdev); | |
51 | int r100_mc_init(struct radeon_device *rdev); | |
52 | void r100_mc_fini(struct radeon_device *rdev); | |
7ed220d7 | 53 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
771fe6b9 JG |
54 | int r100_wb_init(struct radeon_device *rdev); |
55 | void r100_wb_fini(struct radeon_device *rdev); | |
4aac0473 JG |
56 | int r100_pci_gart_init(struct radeon_device *rdev); |
57 | void r100_pci_gart_fini(struct radeon_device *rdev); | |
58 | int r100_pci_gart_enable(struct radeon_device *rdev); | |
771fe6b9 JG |
59 | void r100_pci_gart_disable(struct radeon_device *rdev); |
60 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); | |
61 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); | |
62 | int r100_cp_init(struct radeon_device *rdev, unsigned ring_size); | |
63 | void r100_cp_fini(struct radeon_device *rdev); | |
64 | void r100_cp_disable(struct radeon_device *rdev); | |
3ce0a23d | 65 | void r100_cp_commit(struct radeon_device *rdev); |
771fe6b9 JG |
66 | void r100_ring_start(struct radeon_device *rdev); |
67 | int r100_irq_set(struct radeon_device *rdev); | |
68 | int r100_irq_process(struct radeon_device *rdev); | |
69 | void r100_fence_ring_emit(struct radeon_device *rdev, | |
70 | struct radeon_fence *fence); | |
71 | int r100_cs_parse(struct radeon_cs_parser *p); | |
72 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
73 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); | |
74 | int r100_copy_blit(struct radeon_device *rdev, | |
75 | uint64_t src_offset, | |
76 | uint64_t dst_offset, | |
77 | unsigned num_pages, | |
78 | struct radeon_fence *fence); | |
e024e110 DA |
79 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
80 | uint32_t tiling_flags, uint32_t pitch, | |
81 | uint32_t offset, uint32_t obj_size); | |
82 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); | |
c93bb85b | 83 | void r100_bandwidth_update(struct radeon_device *rdev); |
3ce0a23d JG |
84 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
85 | int r100_ib_test(struct radeon_device *rdev); | |
86 | int r100_ring_test(struct radeon_device *rdev); | |
771fe6b9 JG |
87 | |
88 | static struct radeon_asic r100_asic = { | |
068a117c | 89 | .init = &r100_init, |
771fe6b9 JG |
90 | .errata = &r100_errata, |
91 | .vram_info = &r100_vram_info, | |
92 | .gpu_reset = &r100_gpu_reset, | |
93 | .mc_init = &r100_mc_init, | |
94 | .mc_fini = &r100_mc_fini, | |
95 | .wb_init = &r100_wb_init, | |
96 | .wb_fini = &r100_wb_fini, | |
4aac0473 JG |
97 | .gart_init = &r100_pci_gart_init, |
98 | .gart_fini = &r100_pci_gart_fini, | |
99 | .gart_enable = &r100_pci_gart_enable, | |
771fe6b9 JG |
100 | .gart_disable = &r100_pci_gart_disable, |
101 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, | |
102 | .gart_set_page = &r100_pci_gart_set_page, | |
103 | .cp_init = &r100_cp_init, | |
104 | .cp_fini = &r100_cp_fini, | |
105 | .cp_disable = &r100_cp_disable, | |
3ce0a23d | 106 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 107 | .ring_start = &r100_ring_start, |
3ce0a23d JG |
108 | .ring_test = &r100_ring_test, |
109 | .ring_ib_execute = &r100_ring_ib_execute, | |
110 | .ib_test = &r100_ib_test, | |
771fe6b9 JG |
111 | .irq_set = &r100_irq_set, |
112 | .irq_process = &r100_irq_process, | |
7ed220d7 | 113 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
114 | .fence_ring_emit = &r100_fence_ring_emit, |
115 | .cs_parse = &r100_cs_parse, | |
116 | .copy_blit = &r100_copy_blit, | |
117 | .copy_dma = NULL, | |
118 | .copy = &r100_copy_blit, | |
119 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
120 | .set_memory_clock = NULL, | |
121 | .set_pcie_lanes = NULL, | |
122 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
123 | .set_surface_reg = r100_set_surface_reg, |
124 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 125 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
126 | }; |
127 | ||
128 | ||
/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
/*
 * rv370 PCIE GART helpers.  gart_init/gart_fini were referenced by the
 * rv515 table further down without being declared anywhere in this header;
 * declare the full set here so every table entry has a visible prototype.
 */
extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
771fe6b9 | 151 | static struct radeon_asic r300_asic = { |
068a117c | 152 | .init = &r300_init, |
207bf9e9 JG |
153 | .fini = &r300_fini, |
154 | .suspend = &r300_suspend, | |
155 | .resume = &r300_resume, | |
156 | .errata = NULL, | |
157 | .vram_info = NULL, | |
771fe6b9 | 158 | .gpu_reset = &r300_gpu_reset, |
207bf9e9 JG |
159 | .mc_init = NULL, |
160 | .mc_fini = NULL, | |
161 | .wb_init = NULL, | |
162 | .wb_fini = NULL, | |
163 | .gart_init = NULL, | |
164 | .gart_fini = NULL, | |
165 | .gart_enable = NULL, | |
166 | .gart_disable = NULL, | |
771fe6b9 JG |
167 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
168 | .gart_set_page = &r100_pci_gart_set_page, | |
207bf9e9 JG |
169 | .cp_init = NULL, |
170 | .cp_fini = NULL, | |
171 | .cp_disable = NULL, | |
3ce0a23d | 172 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 173 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
174 | .ring_test = &r100_ring_test, |
175 | .ring_ib_execute = &r100_ring_ib_execute, | |
207bf9e9 | 176 | .ib_test = NULL, |
771fe6b9 JG |
177 | .irq_set = &r100_irq_set, |
178 | .irq_process = &r100_irq_process, | |
7ed220d7 | 179 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
180 | .fence_ring_emit = &r300_fence_ring_emit, |
181 | .cs_parse = &r300_cs_parse, | |
182 | .copy_blit = &r100_copy_blit, | |
183 | .copy_dma = &r300_copy_dma, | |
184 | .copy = &r100_copy_blit, | |
185 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
186 | .set_memory_clock = NULL, | |
187 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
188 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
189 | .set_surface_reg = r100_set_surface_reg, |
190 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 191 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
192 | }; |
193 | ||
/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
771fe6b9 | 201 | static struct radeon_asic r420_asic = { |
9f022ddf JG |
202 | .init = &r420_init, |
203 | .fini = &r420_fini, | |
204 | .suspend = &r420_suspend, | |
205 | .resume = &r420_resume, | |
206 | .errata = NULL, | |
207 | .vram_info = NULL, | |
771fe6b9 | 208 | .gpu_reset = &r300_gpu_reset, |
9f022ddf JG |
209 | .mc_init = NULL, |
210 | .mc_fini = NULL, | |
211 | .wb_init = NULL, | |
212 | .wb_fini = NULL, | |
4aac0473 JG |
213 | .gart_enable = NULL, |
214 | .gart_disable = NULL, | |
771fe6b9 JG |
215 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
216 | .gart_set_page = &rv370_pcie_gart_set_page, | |
9f022ddf JG |
217 | .cp_init = NULL, |
218 | .cp_fini = NULL, | |
219 | .cp_disable = NULL, | |
3ce0a23d | 220 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 221 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
222 | .ring_test = &r100_ring_test, |
223 | .ring_ib_execute = &r100_ring_ib_execute, | |
9f022ddf | 224 | .ib_test = NULL, |
771fe6b9 JG |
225 | .irq_set = &r100_irq_set, |
226 | .irq_process = &r100_irq_process, | |
7ed220d7 | 227 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
228 | .fence_ring_emit = &r300_fence_ring_emit, |
229 | .cs_parse = &r300_cs_parse, | |
230 | .copy_blit = &r100_copy_blit, | |
231 | .copy_dma = &r300_copy_dma, | |
232 | .copy = &r100_copy_blit, | |
233 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
234 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
235 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
236 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
237 | .set_surface_reg = r100_set_surface_reg, |
238 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 239 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
240 | }; |
241 | ||
242 | ||
/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
/*
 * The rs690 table further down references the rs400 GART
 * init/fini/enable/disable entry points, but this header never declared
 * them; declare them here next to the other rs400 GART helpers.
 */
extern int rs400_gart_init(struct radeon_device *rdev);
extern void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_gart_enable(struct radeon_device *rdev);
extern void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
254 | static struct radeon_asic rs400_asic = { | |
ca6ffc64 JG |
255 | .init = &rs400_init, |
256 | .fini = &rs400_fini, | |
257 | .suspend = &rs400_suspend, | |
258 | .resume = &rs400_resume, | |
259 | .errata = NULL, | |
260 | .vram_info = NULL, | |
771fe6b9 | 261 | .gpu_reset = &r300_gpu_reset, |
ca6ffc64 JG |
262 | .mc_init = NULL, |
263 | .mc_fini = NULL, | |
264 | .wb_init = NULL, | |
265 | .wb_fini = NULL, | |
266 | .gart_init = NULL, | |
267 | .gart_fini = NULL, | |
268 | .gart_enable = NULL, | |
269 | .gart_disable = NULL, | |
771fe6b9 JG |
270 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
271 | .gart_set_page = &rs400_gart_set_page, | |
ca6ffc64 JG |
272 | .cp_init = NULL, |
273 | .cp_fini = NULL, | |
274 | .cp_disable = NULL, | |
3ce0a23d | 275 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 276 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
277 | .ring_test = &r100_ring_test, |
278 | .ring_ib_execute = &r100_ring_ib_execute, | |
ca6ffc64 | 279 | .ib_test = NULL, |
771fe6b9 JG |
280 | .irq_set = &r100_irq_set, |
281 | .irq_process = &r100_irq_process, | |
7ed220d7 | 282 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
283 | .fence_ring_emit = &r300_fence_ring_emit, |
284 | .cs_parse = &r300_cs_parse, | |
285 | .copy_blit = &r100_copy_blit, | |
286 | .copy_dma = &r300_copy_dma, | |
287 | .copy = &r100_copy_blit, | |
288 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
289 | .set_memory_clock = NULL, | |
290 | .set_pcie_lanes = NULL, | |
291 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
292 | .set_surface_reg = r100_set_surface_reg, |
293 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 294 | .bandwidth_update = &r100_bandwidth_update, |
771fe6b9 JG |
295 | }; |
296 | ||
297 | ||
298 | /* | |
299 | * rs600. | |
300 | */ | |
3f7dc91a | 301 | int rs600_init(struct radeon_device *rdev); |
771fe6b9 JG |
302 | void rs600_errata(struct radeon_device *rdev); |
303 | void rs600_vram_info(struct radeon_device *rdev); | |
304 | int rs600_mc_init(struct radeon_device *rdev); | |
305 | void rs600_mc_fini(struct radeon_device *rdev); | |
306 | int rs600_irq_set(struct radeon_device *rdev); | |
7ed220d7 MD |
307 | int rs600_irq_process(struct radeon_device *rdev); |
308 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); | |
4aac0473 JG |
309 | int rs600_gart_init(struct radeon_device *rdev); |
310 | void rs600_gart_fini(struct radeon_device *rdev); | |
771fe6b9 JG |
311 | int rs600_gart_enable(struct radeon_device *rdev); |
312 | void rs600_gart_disable(struct radeon_device *rdev); | |
313 | void rs600_gart_tlb_flush(struct radeon_device *rdev); | |
314 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); | |
315 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); | |
316 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
c93bb85b | 317 | void rs600_bandwidth_update(struct radeon_device *rdev); |
771fe6b9 | 318 | static struct radeon_asic rs600_asic = { |
3f7dc91a | 319 | .init = &rs600_init, |
771fe6b9 JG |
320 | .errata = &rs600_errata, |
321 | .vram_info = &rs600_vram_info, | |
322 | .gpu_reset = &r300_gpu_reset, | |
323 | .mc_init = &rs600_mc_init, | |
324 | .mc_fini = &rs600_mc_fini, | |
325 | .wb_init = &r100_wb_init, | |
326 | .wb_fini = &r100_wb_fini, | |
4aac0473 JG |
327 | .gart_init = &rs600_gart_init, |
328 | .gart_fini = &rs600_gart_fini, | |
771fe6b9 JG |
329 | .gart_enable = &rs600_gart_enable, |
330 | .gart_disable = &rs600_gart_disable, | |
331 | .gart_tlb_flush = &rs600_gart_tlb_flush, | |
332 | .gart_set_page = &rs600_gart_set_page, | |
333 | .cp_init = &r100_cp_init, | |
334 | .cp_fini = &r100_cp_fini, | |
335 | .cp_disable = &r100_cp_disable, | |
3ce0a23d | 336 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 337 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
338 | .ring_test = &r100_ring_test, |
339 | .ring_ib_execute = &r100_ring_ib_execute, | |
340 | .ib_test = &r100_ib_test, | |
771fe6b9 | 341 | .irq_set = &rs600_irq_set, |
7ed220d7 MD |
342 | .irq_process = &rs600_irq_process, |
343 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 JG |
344 | .fence_ring_emit = &r300_fence_ring_emit, |
345 | .cs_parse = &r300_cs_parse, | |
346 | .copy_blit = &r100_copy_blit, | |
347 | .copy_dma = &r300_copy_dma, | |
348 | .copy = &r100_copy_blit, | |
349 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
350 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
351 | .set_pcie_lanes = NULL, | |
352 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
c93bb85b | 353 | .bandwidth_update = &rs600_bandwidth_update, |
771fe6b9 JG |
354 | }; |
355 | ||
356 | ||
/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 367 | static struct radeon_asic rs690_asic = { |
3f7dc91a | 368 | .init = &rs600_init, |
771fe6b9 JG |
369 | .errata = &rs690_errata, |
370 | .vram_info = &rs690_vram_info, | |
371 | .gpu_reset = &r300_gpu_reset, | |
372 | .mc_init = &rs690_mc_init, | |
373 | .mc_fini = &rs690_mc_fini, | |
374 | .wb_init = &r100_wb_init, | |
375 | .wb_fini = &r100_wb_fini, | |
4aac0473 JG |
376 | .gart_init = &rs400_gart_init, |
377 | .gart_fini = &rs400_gart_fini, | |
771fe6b9 JG |
378 | .gart_enable = &rs400_gart_enable, |
379 | .gart_disable = &rs400_gart_disable, | |
380 | .gart_tlb_flush = &rs400_gart_tlb_flush, | |
381 | .gart_set_page = &rs400_gart_set_page, | |
382 | .cp_init = &r100_cp_init, | |
383 | .cp_fini = &r100_cp_fini, | |
384 | .cp_disable = &r100_cp_disable, | |
3ce0a23d | 385 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 386 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
387 | .ring_test = &r100_ring_test, |
388 | .ring_ib_execute = &r100_ring_ib_execute, | |
389 | .ib_test = &r100_ib_test, | |
771fe6b9 | 390 | .irq_set = &rs600_irq_set, |
7ed220d7 MD |
391 | .irq_process = &rs600_irq_process, |
392 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 JG |
393 | .fence_ring_emit = &r300_fence_ring_emit, |
394 | .cs_parse = &r300_cs_parse, | |
395 | .copy_blit = &r100_copy_blit, | |
396 | .copy_dma = &r300_copy_dma, | |
397 | .copy = &r300_copy_dma, | |
398 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
399 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
400 | .set_pcie_lanes = NULL, | |
401 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
402 | .set_surface_reg = r100_set_surface_reg, |
403 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 404 | .bandwidth_update = &rs690_bandwidth_update, |
771fe6b9 JG |
405 | }; |
406 | ||
407 | ||
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 422 | static struct radeon_asic rv515_asic = { |
068a117c | 423 | .init = &rv515_init, |
d39c3b89 JG |
424 | .fini = &rv515_fini, |
425 | .suspend = &rv515_suspend, | |
426 | .resume = &rv515_resume, | |
427 | .errata = NULL, | |
428 | .vram_info = NULL, | |
771fe6b9 | 429 | .gpu_reset = &rv515_gpu_reset, |
d39c3b89 JG |
430 | .mc_init = NULL, |
431 | .mc_fini = NULL, | |
432 | .wb_init = NULL, | |
433 | .wb_fini = NULL, | |
4aac0473 JG |
434 | .gart_init = &rv370_pcie_gart_init, |
435 | .gart_fini = &rv370_pcie_gart_fini, | |
d39c3b89 JG |
436 | .gart_enable = NULL, |
437 | .gart_disable = NULL, | |
771fe6b9 JG |
438 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
439 | .gart_set_page = &rv370_pcie_gart_set_page, | |
d39c3b89 JG |
440 | .cp_init = NULL, |
441 | .cp_fini = NULL, | |
442 | .cp_disable = NULL, | |
3ce0a23d | 443 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 444 | .ring_start = &rv515_ring_start, |
3ce0a23d JG |
445 | .ring_test = &r100_ring_test, |
446 | .ring_ib_execute = &r100_ring_ib_execute, | |
d39c3b89 | 447 | .ib_test = NULL, |
7ed220d7 MD |
448 | .irq_set = &rs600_irq_set, |
449 | .irq_process = &rs600_irq_process, | |
450 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 | 451 | .fence_ring_emit = &r300_fence_ring_emit, |
068a117c | 452 | .cs_parse = &r300_cs_parse, |
771fe6b9 JG |
453 | .copy_blit = &r100_copy_blit, |
454 | .copy_dma = &r300_copy_dma, | |
455 | .copy = &r100_copy_blit, | |
456 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
457 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
458 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
459 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
460 | .set_surface_reg = r100_set_surface_reg, |
461 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 462 | .bandwidth_update = &rv515_bandwidth_update, |
771fe6b9 JG |
463 | }; |
464 | ||
465 | ||
/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
771fe6b9 | 471 | static struct radeon_asic r520_asic = { |
d39c3b89 | 472 | .init = &r520_init, |
f0ed1f65 JG |
473 | .fini = &rv515_fini, |
474 | .suspend = &rv515_suspend, | |
475 | .resume = &r520_resume, | |
476 | .errata = NULL, | |
477 | .vram_info = NULL, | |
771fe6b9 | 478 | .gpu_reset = &rv515_gpu_reset, |
f0ed1f65 JG |
479 | .mc_init = NULL, |
480 | .mc_fini = NULL, | |
481 | .wb_init = NULL, | |
482 | .wb_fini = NULL, | |
483 | .gart_init = NULL, | |
484 | .gart_fini = NULL, | |
485 | .gart_enable = NULL, | |
486 | .gart_disable = NULL, | |
771fe6b9 JG |
487 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
488 | .gart_set_page = &rv370_pcie_gart_set_page, | |
f0ed1f65 JG |
489 | .cp_init = NULL, |
490 | .cp_fini = NULL, | |
491 | .cp_disable = NULL, | |
3ce0a23d | 492 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 493 | .ring_start = &rv515_ring_start, |
3ce0a23d JG |
494 | .ring_test = &r100_ring_test, |
495 | .ring_ib_execute = &r100_ring_ib_execute, | |
f0ed1f65 | 496 | .ib_test = NULL, |
7ed220d7 MD |
497 | .irq_set = &rs600_irq_set, |
498 | .irq_process = &rs600_irq_process, | |
499 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 | 500 | .fence_ring_emit = &r300_fence_ring_emit, |
068a117c | 501 | .cs_parse = &r300_cs_parse, |
771fe6b9 JG |
502 | .copy_blit = &r100_copy_blit, |
503 | .copy_dma = &r300_copy_dma, | |
504 | .copy = &r100_copy_blit, | |
505 | .set_engine_clock = &radeon_atom_set_engine_clock, | |
506 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
507 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
508 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
509 | .set_surface_reg = r100_set_surface_reg, |
510 | .clear_surface_reg = r100_clear_surface_reg, | |
f0ed1f65 | 511 | .bandwidth_update = &rv515_bandwidth_update, |
771fe6b9 JG |
512 | }; |
513 | ||
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);

549 | static struct radeon_asic r600_asic = { | |
550 | .errata = NULL, | |
551 | .init = &r600_init, | |
552 | .fini = &r600_fini, | |
553 | .suspend = &r600_suspend, | |
554 | .resume = &r600_resume, | |
555 | .cp_commit = &r600_cp_commit, | |
556 | .vram_info = NULL, | |
557 | .gpu_reset = &r600_gpu_reset, | |
558 | .mc_init = NULL, | |
559 | .mc_fini = NULL, | |
560 | .wb_init = &r600_wb_init, | |
561 | .wb_fini = &r600_wb_fini, | |
562 | .gart_enable = NULL, | |
563 | .gart_disable = NULL, | |
564 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, | |
565 | .gart_set_page = &rs600_gart_set_page, | |
566 | .cp_init = NULL, | |
567 | .cp_fini = NULL, | |
568 | .cp_disable = NULL, | |
569 | .ring_start = NULL, | |
570 | .ring_test = &r600_ring_test, | |
571 | .ring_ib_execute = &r600_ring_ib_execute, | |
572 | .ib_test = &r600_ib_test, | |
573 | .irq_set = &r600_irq_set, | |
574 | .irq_process = &r600_irq_process, | |
575 | .fence_ring_emit = &r600_fence_ring_emit, | |
576 | .cs_parse = &r600_cs_parse, | |
577 | .copy_blit = &r600_copy_blit, | |
578 | .copy_dma = &r600_copy_blit, | |
a3812877 | 579 | .copy = &r600_copy_blit, |
3ce0a23d JG |
580 | .set_engine_clock = &radeon_atom_set_engine_clock, |
581 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
582 | .set_pcie_lanes = NULL, | |
583 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
584 | .set_surface_reg = r600_set_surface_reg, | |
585 | .clear_surface_reg = r600_clear_surface_reg, | |
f0ed1f65 | 586 | .bandwidth_update = &rv515_bandwidth_update, |
3ce0a23d JG |
587 | }; |
588 | ||
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

598 | static struct radeon_asic rv770_asic = { | |
599 | .errata = NULL, | |
600 | .init = &rv770_init, | |
601 | .fini = &rv770_fini, | |
602 | .suspend = &rv770_suspend, | |
603 | .resume = &rv770_resume, | |
604 | .cp_commit = &r600_cp_commit, | |
605 | .vram_info = NULL, | |
606 | .gpu_reset = &rv770_gpu_reset, | |
607 | .mc_init = NULL, | |
608 | .mc_fini = NULL, | |
609 | .wb_init = &r600_wb_init, | |
610 | .wb_fini = &r600_wb_fini, | |
611 | .gart_enable = NULL, | |
612 | .gart_disable = NULL, | |
613 | .gart_tlb_flush = &r600_pcie_gart_tlb_flush, | |
614 | .gart_set_page = &rs600_gart_set_page, | |
615 | .cp_init = NULL, | |
616 | .cp_fini = NULL, | |
617 | .cp_disable = NULL, | |
618 | .ring_start = NULL, | |
619 | .ring_test = &r600_ring_test, | |
620 | .ring_ib_execute = &r600_ring_ib_execute, | |
621 | .ib_test = &r600_ib_test, | |
622 | .irq_set = &r600_irq_set, | |
623 | .irq_process = &r600_irq_process, | |
624 | .fence_ring_emit = &r600_fence_ring_emit, | |
625 | .cs_parse = &r600_cs_parse, | |
626 | .copy_blit = &r600_copy_blit, | |
627 | .copy_dma = &r600_copy_blit, | |
a3812877 | 628 | .copy = &r600_copy_blit, |
3ce0a23d JG |
629 | .set_engine_clock = &radeon_atom_set_engine_clock, |
630 | .set_memory_clock = &radeon_atom_set_memory_clock, | |
631 | .set_pcie_lanes = NULL, | |
632 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
633 | .set_surface_reg = r600_set_surface_reg, | |
634 | .clear_surface_reg = r600_clear_surface_reg, | |
f0ed1f65 | 635 | .bandwidth_update = &rv515_bandwidth_update, |
3ce0a23d | 636 | }; |
771fe6b9 JG |
637 | |
638 | #endif |