/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
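/*
 * Clock helpers shared by the per-ASIC tables below.  As the names suggest,
 * the radeon_legacy_* variants cover boards without AtomBIOS, while the
 * radeon_atom_* variants go through the AtomBIOS command tables.
 */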
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);

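/*
 * Each radeon_asic table below binds the driver's generic entry points to
 * the implementations used by one chip family.  Entries set to NULL are not
 * implemented for that family, and later families reuse the r100/r300
 * helpers wherever the underlying hardware blocks are largely unchanged.
 */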
static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .errata = &r100_errata,
        .vram_info = &r100_vram_info,
        .gpu_reset = &r100_gpu_reset,
        .mc_init = &r100_mc_init,
        .mc_fini = &r100_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &r100_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r100_ring_start,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int r300_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);

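/*
 * The r300 table reuses the r100 CP, writeback, interrupt and PCI GART
 * hooks; the rv370 PCIE GART helpers declared above are picked up by the
 * r420, rv515 and r520 tables further down.
 */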
static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .errata = &r300_errata,
        .vram_info = &r300_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r300_mc_init,
        .mc_fini = &r300_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &r300_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r300_ring_start,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
void r420_errata(struct radeon_device *rdev);
void r420_vram_info(struct radeon_device *rdev);
int r420_mc_init(struct radeon_device *rdev);
void r420_mc_fini(struct radeon_device *rdev);
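/*
 * r420 reuses the r300 init, ring and CS code but switches to the rv370
 * PCIE GART functions and the radeon_atom_* clock helpers.
 */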
static struct radeon_asic r420_asic = {
        .init = &r300_init,
        .errata = &r420_errata,
        .vram_info = &r420_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r420_mc_init,
        .mc_fini = &r420_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &r300_gart_enable,
        .gart_disable = &rv370_pcie_gart_disable,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r300_ring_start,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
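/*
 * rs400/rs480 are IGP parts with their own GART and MC register paths;
 * the rest of the table follows the r300 configuration.
 */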
static struct radeon_asic rs400_asic = {
        .init = &r300_init,
        .errata = &rs400_errata,
        .vram_info = &rs400_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs400_mc_init,
        .mc_fini = &rs400_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r300_ring_start,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
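/*
 * rs600 brings its own GART, MC register accessors and the interrupt/vblank
 * handling that the rs690, rv515 and r520 tables below also use.
 */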
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .errata = &rs600_errata,
        .vram_info = &rs600_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs600_mc_init,
        .mc_fini = &rs600_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &rs600_gart_enable,
        .gart_disable = &rs600_gart_disable,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r300_ring_start,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
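/*
 * The rs690/rs740 IGPs reuse the rs400 GART and rs600 interrupt code; this
 * is also the only table here whose default .copy hook points at the DMA
 * engine (r300_copy_dma) rather than the blitter.
 */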
static struct radeon_asic rs690_asic = {
        .init = &rs600_init,
        .errata = &rs690_errata,
        .vram_info = &rs690_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs690_mc_init,
        .mc_fini = &rs690_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &r300_ring_start,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
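/*
 * rv515 has its own init/reset/ring-start code and MC/PCIE register
 * accessors, but keeps the r300 CS parser and the rv370 PCIE GART.
 */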
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .errata = &rv515_errata,
        .vram_info = &rv515_vram_info,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = &rv515_mc_init,
        .mc_fini = &rv515_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &r300_gart_enable,
        .gart_disable = &rv370_pcie_gart_disable,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &rv515_ring_start,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
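/*
 * The r520 family reuses the rv515 init, reset and ring code and differs
 * mainly in memory-controller setup and bandwidth calculation.
 */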
static struct radeon_asic r520_asic = {
        .init = &rv515_init,
        .errata = &r520_errata,
        .vram_info = &r520_vram_info,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = &r520_mc_init,
        .mc_fini = &r520_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_enable = &r300_gart_enable,
        .gart_disable = &rv370_pcie_gart_disable,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .ring_start = &rv515_ring_start,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r520_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
 */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
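/*
 * No radeon_asic table is defined for the r600/rv770 generation in this
 * header yet; only the PCIE port register accessors above are declared.
 */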

#endif