/* drm/radeon/kms: add initial colortiling support. */
/* drivers/gpu/drm/radeon/radeon_asic.h */
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28 #ifndef __RADEON_ASIC_H__
29 #define __RADEON_ASIC_H__
30
31 /*
32 * common functions
33 */
34 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
35 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
36
37 void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
38 void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
39 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
40
41 /*
42 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
43 */
44 int r100_init(struct radeon_device *rdev);
45 uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
46 void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
47 void r100_errata(struct radeon_device *rdev);
48 void r100_vram_info(struct radeon_device *rdev);
49 int r100_gpu_reset(struct radeon_device *rdev);
50 int r100_mc_init(struct radeon_device *rdev);
51 void r100_mc_fini(struct radeon_device *rdev);
52 int r100_wb_init(struct radeon_device *rdev);
53 void r100_wb_fini(struct radeon_device *rdev);
54 int r100_gart_enable(struct radeon_device *rdev);
55 void r100_pci_gart_disable(struct radeon_device *rdev);
56 void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
57 int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
58 int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
59 void r100_cp_fini(struct radeon_device *rdev);
60 void r100_cp_disable(struct radeon_device *rdev);
61 void r100_ring_start(struct radeon_device *rdev);
62 int r100_irq_set(struct radeon_device *rdev);
63 int r100_irq_process(struct radeon_device *rdev);
64 void r100_fence_ring_emit(struct radeon_device *rdev,
65 struct radeon_fence *fence);
66 int r100_cs_parse(struct radeon_cs_parser *p);
67 void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
68 uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
69 int r100_copy_blit(struct radeon_device *rdev,
70 uint64_t src_offset,
71 uint64_t dst_offset,
72 unsigned num_pages,
73 struct radeon_fence *fence);
74 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
75 uint32_t tiling_flags, uint32_t pitch,
76 uint32_t offset, uint32_t obj_size);
77 int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
78
79 static struct radeon_asic r100_asic = {
80 .init = &r100_init,
81 .errata = &r100_errata,
82 .vram_info = &r100_vram_info,
83 .gpu_reset = &r100_gpu_reset,
84 .mc_init = &r100_mc_init,
85 .mc_fini = &r100_mc_fini,
86 .wb_init = &r100_wb_init,
87 .wb_fini = &r100_wb_fini,
88 .gart_enable = &r100_gart_enable,
89 .gart_disable = &r100_pci_gart_disable,
90 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
91 .gart_set_page = &r100_pci_gart_set_page,
92 .cp_init = &r100_cp_init,
93 .cp_fini = &r100_cp_fini,
94 .cp_disable = &r100_cp_disable,
95 .ring_start = &r100_ring_start,
96 .irq_set = &r100_irq_set,
97 .irq_process = &r100_irq_process,
98 .fence_ring_emit = &r100_fence_ring_emit,
99 .cs_parse = &r100_cs_parse,
100 .copy_blit = &r100_copy_blit,
101 .copy_dma = NULL,
102 .copy = &r100_copy_blit,
103 .set_engine_clock = &radeon_legacy_set_engine_clock,
104 .set_memory_clock = NULL,
105 .set_pcie_lanes = NULL,
106 .set_clock_gating = &radeon_legacy_set_clock_gating,
107 .set_surface_reg = r100_set_surface_reg,
108 .clear_surface_reg = r100_clear_surface_reg,
109 };
110
111
112 /*
113 * r300,r350,rv350,rv380
114 */
115 int r300_init(struct radeon_device *rdev);
116 void r300_errata(struct radeon_device *rdev);
117 void r300_vram_info(struct radeon_device *rdev);
118 int r300_gpu_reset(struct radeon_device *rdev);
119 int r300_mc_init(struct radeon_device *rdev);
120 void r300_mc_fini(struct radeon_device *rdev);
121 void r300_ring_start(struct radeon_device *rdev);
122 void r300_fence_ring_emit(struct radeon_device *rdev,
123 struct radeon_fence *fence);
124 int r300_cs_parse(struct radeon_cs_parser *p);
125 int r300_gart_enable(struct radeon_device *rdev);
126 void rv370_pcie_gart_disable(struct radeon_device *rdev);
127 void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
128 int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
129 uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
130 void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
131 void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
132 int r300_copy_dma(struct radeon_device *rdev,
133 uint64_t src_offset,
134 uint64_t dst_offset,
135 unsigned num_pages,
136 struct radeon_fence *fence);
137
138 static struct radeon_asic r300_asic = {
139 .init = &r300_init,
140 .errata = &r300_errata,
141 .vram_info = &r300_vram_info,
142 .gpu_reset = &r300_gpu_reset,
143 .mc_init = &r300_mc_init,
144 .mc_fini = &r300_mc_fini,
145 .wb_init = &r100_wb_init,
146 .wb_fini = &r100_wb_fini,
147 .gart_enable = &r300_gart_enable,
148 .gart_disable = &r100_pci_gart_disable,
149 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
150 .gart_set_page = &r100_pci_gart_set_page,
151 .cp_init = &r100_cp_init,
152 .cp_fini = &r100_cp_fini,
153 .cp_disable = &r100_cp_disable,
154 .ring_start = &r300_ring_start,
155 .irq_set = &r100_irq_set,
156 .irq_process = &r100_irq_process,
157 .fence_ring_emit = &r300_fence_ring_emit,
158 .cs_parse = &r300_cs_parse,
159 .copy_blit = &r100_copy_blit,
160 .copy_dma = &r300_copy_dma,
161 .copy = &r100_copy_blit,
162 .set_engine_clock = &radeon_legacy_set_engine_clock,
163 .set_memory_clock = NULL,
164 .set_pcie_lanes = &rv370_set_pcie_lanes,
165 .set_clock_gating = &radeon_legacy_set_clock_gating,
166 .set_surface_reg = r100_set_surface_reg,
167 .clear_surface_reg = r100_clear_surface_reg,
168 };
169
170 /*
171 * r420,r423,rv410
172 */
173 void r420_errata(struct radeon_device *rdev);
174 void r420_vram_info(struct radeon_device *rdev);
175 int r420_mc_init(struct radeon_device *rdev);
176 void r420_mc_fini(struct radeon_device *rdev);
177 static struct radeon_asic r420_asic = {
178 .init = &r300_init,
179 .errata = &r420_errata,
180 .vram_info = &r420_vram_info,
181 .gpu_reset = &r300_gpu_reset,
182 .mc_init = &r420_mc_init,
183 .mc_fini = &r420_mc_fini,
184 .wb_init = &r100_wb_init,
185 .wb_fini = &r100_wb_fini,
186 .gart_enable = &r300_gart_enable,
187 .gart_disable = &rv370_pcie_gart_disable,
188 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
189 .gart_set_page = &rv370_pcie_gart_set_page,
190 .cp_init = &r100_cp_init,
191 .cp_fini = &r100_cp_fini,
192 .cp_disable = &r100_cp_disable,
193 .ring_start = &r300_ring_start,
194 .irq_set = &r100_irq_set,
195 .irq_process = &r100_irq_process,
196 .fence_ring_emit = &r300_fence_ring_emit,
197 .cs_parse = &r300_cs_parse,
198 .copy_blit = &r100_copy_blit,
199 .copy_dma = &r300_copy_dma,
200 .copy = &r100_copy_blit,
201 .set_engine_clock = &radeon_atom_set_engine_clock,
202 .set_memory_clock = &radeon_atom_set_memory_clock,
203 .set_pcie_lanes = &rv370_set_pcie_lanes,
204 .set_clock_gating = &radeon_atom_set_clock_gating,
205 .set_surface_reg = r100_set_surface_reg,
206 .clear_surface_reg = r100_clear_surface_reg,
207 };
208
209
210 /*
211 * rs400,rs480
212 */
213 void rs400_errata(struct radeon_device *rdev);
214 void rs400_vram_info(struct radeon_device *rdev);
215 int rs400_mc_init(struct radeon_device *rdev);
216 void rs400_mc_fini(struct radeon_device *rdev);
217 int rs400_gart_enable(struct radeon_device *rdev);
218 void rs400_gart_disable(struct radeon_device *rdev);
219 void rs400_gart_tlb_flush(struct radeon_device *rdev);
220 int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
221 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
222 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
223 static struct radeon_asic rs400_asic = {
224 .init = &r300_init,
225 .errata = &rs400_errata,
226 .vram_info = &rs400_vram_info,
227 .gpu_reset = &r300_gpu_reset,
228 .mc_init = &rs400_mc_init,
229 .mc_fini = &rs400_mc_fini,
230 .wb_init = &r100_wb_init,
231 .wb_fini = &r100_wb_fini,
232 .gart_enable = &rs400_gart_enable,
233 .gart_disable = &rs400_gart_disable,
234 .gart_tlb_flush = &rs400_gart_tlb_flush,
235 .gart_set_page = &rs400_gart_set_page,
236 .cp_init = &r100_cp_init,
237 .cp_fini = &r100_cp_fini,
238 .cp_disable = &r100_cp_disable,
239 .ring_start = &r300_ring_start,
240 .irq_set = &r100_irq_set,
241 .irq_process = &r100_irq_process,
242 .fence_ring_emit = &r300_fence_ring_emit,
243 .cs_parse = &r300_cs_parse,
244 .copy_blit = &r100_copy_blit,
245 .copy_dma = &r300_copy_dma,
246 .copy = &r100_copy_blit,
247 .set_engine_clock = &radeon_legacy_set_engine_clock,
248 .set_memory_clock = NULL,
249 .set_pcie_lanes = NULL,
250 .set_clock_gating = &radeon_legacy_set_clock_gating,
251 .set_surface_reg = r100_set_surface_reg,
252 .clear_surface_reg = r100_clear_surface_reg,
253 };
254
255
256 /*
257 * rs600.
258 */
259 void rs600_errata(struct radeon_device *rdev);
260 void rs600_vram_info(struct radeon_device *rdev);
261 int rs600_mc_init(struct radeon_device *rdev);
262 void rs600_mc_fini(struct radeon_device *rdev);
263 int rs600_irq_set(struct radeon_device *rdev);
264 int rs600_gart_enable(struct radeon_device *rdev);
265 void rs600_gart_disable(struct radeon_device *rdev);
266 void rs600_gart_tlb_flush(struct radeon_device *rdev);
267 int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
268 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
269 void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
270 static struct radeon_asic rs600_asic = {
271 .init = &r300_init,
272 .errata = &rs600_errata,
273 .vram_info = &rs600_vram_info,
274 .gpu_reset = &r300_gpu_reset,
275 .mc_init = &rs600_mc_init,
276 .mc_fini = &rs600_mc_fini,
277 .wb_init = &r100_wb_init,
278 .wb_fini = &r100_wb_fini,
279 .gart_enable = &rs600_gart_enable,
280 .gart_disable = &rs600_gart_disable,
281 .gart_tlb_flush = &rs600_gart_tlb_flush,
282 .gart_set_page = &rs600_gart_set_page,
283 .cp_init = &r100_cp_init,
284 .cp_fini = &r100_cp_fini,
285 .cp_disable = &r100_cp_disable,
286 .ring_start = &r300_ring_start,
287 .irq_set = &rs600_irq_set,
288 .irq_process = &r100_irq_process,
289 .fence_ring_emit = &r300_fence_ring_emit,
290 .cs_parse = &r300_cs_parse,
291 .copy_blit = &r100_copy_blit,
292 .copy_dma = &r300_copy_dma,
293 .copy = &r100_copy_blit,
294 .set_engine_clock = &radeon_atom_set_engine_clock,
295 .set_memory_clock = &radeon_atom_set_memory_clock,
296 .set_pcie_lanes = NULL,
297 .set_clock_gating = &radeon_atom_set_clock_gating,
298 };
299
300
301 /*
302 * rs690,rs740
303 */
304 void rs690_errata(struct radeon_device *rdev);
305 void rs690_vram_info(struct radeon_device *rdev);
306 int rs690_mc_init(struct radeon_device *rdev);
307 void rs690_mc_fini(struct radeon_device *rdev);
308 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
309 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
310 static struct radeon_asic rs690_asic = {
311 .init = &r300_init,
312 .errata = &rs690_errata,
313 .vram_info = &rs690_vram_info,
314 .gpu_reset = &r300_gpu_reset,
315 .mc_init = &rs690_mc_init,
316 .mc_fini = &rs690_mc_fini,
317 .wb_init = &r100_wb_init,
318 .wb_fini = &r100_wb_fini,
319 .gart_enable = &rs400_gart_enable,
320 .gart_disable = &rs400_gart_disable,
321 .gart_tlb_flush = &rs400_gart_tlb_flush,
322 .gart_set_page = &rs400_gart_set_page,
323 .cp_init = &r100_cp_init,
324 .cp_fini = &r100_cp_fini,
325 .cp_disable = &r100_cp_disable,
326 .ring_start = &r300_ring_start,
327 .irq_set = &rs600_irq_set,
328 .irq_process = &r100_irq_process,
329 .fence_ring_emit = &r300_fence_ring_emit,
330 .cs_parse = &r300_cs_parse,
331 .copy_blit = &r100_copy_blit,
332 .copy_dma = &r300_copy_dma,
333 .copy = &r300_copy_dma,
334 .set_engine_clock = &radeon_atom_set_engine_clock,
335 .set_memory_clock = &radeon_atom_set_memory_clock,
336 .set_pcie_lanes = NULL,
337 .set_clock_gating = &radeon_atom_set_clock_gating,
338 .set_surface_reg = r100_set_surface_reg,
339 .clear_surface_reg = r100_clear_surface_reg,
340 };
341
342
343 /*
344 * rv515
345 */
346 int rv515_init(struct radeon_device *rdev);
347 void rv515_errata(struct radeon_device *rdev);
348 void rv515_vram_info(struct radeon_device *rdev);
349 int rv515_gpu_reset(struct radeon_device *rdev);
350 int rv515_mc_init(struct radeon_device *rdev);
351 void rv515_mc_fini(struct radeon_device *rdev);
352 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
353 void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
354 void rv515_ring_start(struct radeon_device *rdev);
355 uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
356 void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
357 static struct radeon_asic rv515_asic = {
358 .init = &rv515_init,
359 .errata = &rv515_errata,
360 .vram_info = &rv515_vram_info,
361 .gpu_reset = &rv515_gpu_reset,
362 .mc_init = &rv515_mc_init,
363 .mc_fini = &rv515_mc_fini,
364 .wb_init = &r100_wb_init,
365 .wb_fini = &r100_wb_fini,
366 .gart_enable = &r300_gart_enable,
367 .gart_disable = &rv370_pcie_gart_disable,
368 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
369 .gart_set_page = &rv370_pcie_gart_set_page,
370 .cp_init = &r100_cp_init,
371 .cp_fini = &r100_cp_fini,
372 .cp_disable = &r100_cp_disable,
373 .ring_start = &rv515_ring_start,
374 .irq_set = &r100_irq_set,
375 .irq_process = &r100_irq_process,
376 .fence_ring_emit = &r300_fence_ring_emit,
377 .cs_parse = &r300_cs_parse,
378 .copy_blit = &r100_copy_blit,
379 .copy_dma = &r300_copy_dma,
380 .copy = &r100_copy_blit,
381 .set_engine_clock = &radeon_atom_set_engine_clock,
382 .set_memory_clock = &radeon_atom_set_memory_clock,
383 .set_pcie_lanes = &rv370_set_pcie_lanes,
384 .set_clock_gating = &radeon_atom_set_clock_gating,
385 .set_surface_reg = r100_set_surface_reg,
386 .clear_surface_reg = r100_clear_surface_reg,
387 };
388
389
390 /*
391 * r520,rv530,rv560,rv570,r580
392 */
393 void r520_errata(struct radeon_device *rdev);
394 void r520_vram_info(struct radeon_device *rdev);
395 int r520_mc_init(struct radeon_device *rdev);
396 void r520_mc_fini(struct radeon_device *rdev);
397 static struct radeon_asic r520_asic = {
398 .init = &rv515_init,
399 .errata = &r520_errata,
400 .vram_info = &r520_vram_info,
401 .gpu_reset = &rv515_gpu_reset,
402 .mc_init = &r520_mc_init,
403 .mc_fini = &r520_mc_fini,
404 .wb_init = &r100_wb_init,
405 .wb_fini = &r100_wb_fini,
406 .gart_enable = &r300_gart_enable,
407 .gart_disable = &rv370_pcie_gart_disable,
408 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
409 .gart_set_page = &rv370_pcie_gart_set_page,
410 .cp_init = &r100_cp_init,
411 .cp_fini = &r100_cp_fini,
412 .cp_disable = &r100_cp_disable,
413 .ring_start = &rv515_ring_start,
414 .irq_set = &r100_irq_set,
415 .irq_process = &r100_irq_process,
416 .fence_ring_emit = &r300_fence_ring_emit,
417 .cs_parse = &r300_cs_parse,
418 .copy_blit = &r100_copy_blit,
419 .copy_dma = &r300_copy_dma,
420 .copy = &r100_copy_blit,
421 .set_engine_clock = &radeon_atom_set_engine_clock,
422 .set_memory_clock = &radeon_atom_set_memory_clock,
423 .set_pcie_lanes = &rv370_set_pcie_lanes,
424 .set_clock_gating = &radeon_atom_set_clock_gating,
425 .set_surface_reg = r100_set_surface_reg,
426 .clear_surface_reg = r100_clear_surface_reg,
427 };
428
429 /*
430 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
431 */
432 uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
433 void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
434
435 #endif