drm/radeon/kms: fix bandwidth computation on avivo hardware
[deliverable/linux.git] / drivers / gpu / drm / radeon / radeon_asic.h
CommitLineData
771fe6b9
JG
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
28#ifndef __RADEON_ASIC_H__
29#define __RADEON_ASIC_H__
30
31/*
32 * common functions
33 */
34void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
35void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
36
37void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
38void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
39void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
40
41/*
42 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
43 */
068a117c 44int r100_init(struct radeon_device *rdev);
771fe6b9
JG
45uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
46void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
47void r100_errata(struct radeon_device *rdev);
48void r100_vram_info(struct radeon_device *rdev);
49int r100_gpu_reset(struct radeon_device *rdev);
50int r100_mc_init(struct radeon_device *rdev);
51void r100_mc_fini(struct radeon_device *rdev);
52int r100_wb_init(struct radeon_device *rdev);
53void r100_wb_fini(struct radeon_device *rdev);
54int r100_gart_enable(struct radeon_device *rdev);
55void r100_pci_gart_disable(struct radeon_device *rdev);
56void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
57int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
58int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
59void r100_cp_fini(struct radeon_device *rdev);
60void r100_cp_disable(struct radeon_device *rdev);
61void r100_ring_start(struct radeon_device *rdev);
62int r100_irq_set(struct radeon_device *rdev);
63int r100_irq_process(struct radeon_device *rdev);
64void r100_fence_ring_emit(struct radeon_device *rdev,
65 struct radeon_fence *fence);
66int r100_cs_parse(struct radeon_cs_parser *p);
67void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
68uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
69int r100_copy_blit(struct radeon_device *rdev,
70 uint64_t src_offset,
71 uint64_t dst_offset,
72 unsigned num_pages,
73 struct radeon_fence *fence);
e024e110
DA
74int r100_set_surface_reg(struct radeon_device *rdev, int reg,
75 uint32_t tiling_flags, uint32_t pitch,
76 uint32_t offset, uint32_t obj_size);
77int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
c93bb85b 78void r100_bandwidth_update(struct radeon_device *rdev);
771fe6b9
JG
79
80static struct radeon_asic r100_asic = {
068a117c 81 .init = &r100_init,
771fe6b9
JG
82 .errata = &r100_errata,
83 .vram_info = &r100_vram_info,
84 .gpu_reset = &r100_gpu_reset,
85 .mc_init = &r100_mc_init,
86 .mc_fini = &r100_mc_fini,
87 .wb_init = &r100_wb_init,
88 .wb_fini = &r100_wb_fini,
89 .gart_enable = &r100_gart_enable,
90 .gart_disable = &r100_pci_gart_disable,
91 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
92 .gart_set_page = &r100_pci_gart_set_page,
93 .cp_init = &r100_cp_init,
94 .cp_fini = &r100_cp_fini,
95 .cp_disable = &r100_cp_disable,
96 .ring_start = &r100_ring_start,
97 .irq_set = &r100_irq_set,
98 .irq_process = &r100_irq_process,
99 .fence_ring_emit = &r100_fence_ring_emit,
100 .cs_parse = &r100_cs_parse,
101 .copy_blit = &r100_copy_blit,
102 .copy_dma = NULL,
103 .copy = &r100_copy_blit,
104 .set_engine_clock = &radeon_legacy_set_engine_clock,
105 .set_memory_clock = NULL,
106 .set_pcie_lanes = NULL,
107 .set_clock_gating = &radeon_legacy_set_clock_gating,
e024e110
DA
108 .set_surface_reg = r100_set_surface_reg,
109 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 110 .bandwidth_update = &r100_bandwidth_update,
771fe6b9
JG
111};
112
113
114/*
115 * r300,r350,rv350,rv380
116 */
068a117c 117int r300_init(struct radeon_device *rdev);
771fe6b9
JG
118void r300_errata(struct radeon_device *rdev);
119void r300_vram_info(struct radeon_device *rdev);
120int r300_gpu_reset(struct radeon_device *rdev);
121int r300_mc_init(struct radeon_device *rdev);
122void r300_mc_fini(struct radeon_device *rdev);
123void r300_ring_start(struct radeon_device *rdev);
124void r300_fence_ring_emit(struct radeon_device *rdev,
125 struct radeon_fence *fence);
126int r300_cs_parse(struct radeon_cs_parser *p);
127int r300_gart_enable(struct radeon_device *rdev);
128void rv370_pcie_gart_disable(struct radeon_device *rdev);
129void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
130int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
131uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
132void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
133void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
134int r300_copy_dma(struct radeon_device *rdev,
135 uint64_t src_offset,
136 uint64_t dst_offset,
137 unsigned num_pages,
138 struct radeon_fence *fence);
e024e110 139
771fe6b9 140static struct radeon_asic r300_asic = {
068a117c 141 .init = &r300_init,
771fe6b9
JG
142 .errata = &r300_errata,
143 .vram_info = &r300_vram_info,
144 .gpu_reset = &r300_gpu_reset,
145 .mc_init = &r300_mc_init,
146 .mc_fini = &r300_mc_fini,
147 .wb_init = &r100_wb_init,
148 .wb_fini = &r100_wb_fini,
149 .gart_enable = &r300_gart_enable,
150 .gart_disable = &r100_pci_gart_disable,
151 .gart_tlb_flush = &r100_pci_gart_tlb_flush,
152 .gart_set_page = &r100_pci_gart_set_page,
153 .cp_init = &r100_cp_init,
154 .cp_fini = &r100_cp_fini,
155 .cp_disable = &r100_cp_disable,
156 .ring_start = &r300_ring_start,
157 .irq_set = &r100_irq_set,
158 .irq_process = &r100_irq_process,
159 .fence_ring_emit = &r300_fence_ring_emit,
160 .cs_parse = &r300_cs_parse,
161 .copy_blit = &r100_copy_blit,
162 .copy_dma = &r300_copy_dma,
163 .copy = &r100_copy_blit,
164 .set_engine_clock = &radeon_legacy_set_engine_clock,
165 .set_memory_clock = NULL,
166 .set_pcie_lanes = &rv370_set_pcie_lanes,
167 .set_clock_gating = &radeon_legacy_set_clock_gating,
e024e110
DA
168 .set_surface_reg = r100_set_surface_reg,
169 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 170 .bandwidth_update = &r100_bandwidth_update,
771fe6b9
JG
171};
172
/*
 * r420,r423,rv410
 */
void r420_errata(struct radeon_device *rdev);
void r420_vram_info(struct radeon_device *rdev);
int r420_mc_init(struct radeon_device *rdev);
void r420_mc_fini(struct radeon_device *rdev);
180static struct radeon_asic r420_asic = {
068a117c 181 .init = &r300_init,
771fe6b9
JG
182 .errata = &r420_errata,
183 .vram_info = &r420_vram_info,
184 .gpu_reset = &r300_gpu_reset,
185 .mc_init = &r420_mc_init,
186 .mc_fini = &r420_mc_fini,
187 .wb_init = &r100_wb_init,
188 .wb_fini = &r100_wb_fini,
189 .gart_enable = &r300_gart_enable,
190 .gart_disable = &rv370_pcie_gart_disable,
191 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
192 .gart_set_page = &rv370_pcie_gart_set_page,
193 .cp_init = &r100_cp_init,
194 .cp_fini = &r100_cp_fini,
195 .cp_disable = &r100_cp_disable,
196 .ring_start = &r300_ring_start,
197 .irq_set = &r100_irq_set,
198 .irq_process = &r100_irq_process,
199 .fence_ring_emit = &r300_fence_ring_emit,
200 .cs_parse = &r300_cs_parse,
201 .copy_blit = &r100_copy_blit,
202 .copy_dma = &r300_copy_dma,
203 .copy = &r100_copy_blit,
204 .set_engine_clock = &radeon_atom_set_engine_clock,
205 .set_memory_clock = &radeon_atom_set_memory_clock,
206 .set_pcie_lanes = &rv370_set_pcie_lanes,
207 .set_clock_gating = &radeon_atom_set_clock_gating,
e024e110
DA
208 .set_surface_reg = r100_set_surface_reg,
209 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 210 .bandwidth_update = &r100_bandwidth_update,
771fe6b9
JG
211};
212
213
214/*
215 * rs400,rs480
216 */
217void rs400_errata(struct radeon_device *rdev);
218void rs400_vram_info(struct radeon_device *rdev);
219int rs400_mc_init(struct radeon_device *rdev);
220void rs400_mc_fini(struct radeon_device *rdev);
221int rs400_gart_enable(struct radeon_device *rdev);
222void rs400_gart_disable(struct radeon_device *rdev);
223void rs400_gart_tlb_flush(struct radeon_device *rdev);
224int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
225uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
226void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
227static struct radeon_asic rs400_asic = {
068a117c 228 .init = &r300_init,
771fe6b9
JG
229 .errata = &rs400_errata,
230 .vram_info = &rs400_vram_info,
231 .gpu_reset = &r300_gpu_reset,
232 .mc_init = &rs400_mc_init,
233 .mc_fini = &rs400_mc_fini,
234 .wb_init = &r100_wb_init,
235 .wb_fini = &r100_wb_fini,
236 .gart_enable = &rs400_gart_enable,
237 .gart_disable = &rs400_gart_disable,
238 .gart_tlb_flush = &rs400_gart_tlb_flush,
239 .gart_set_page = &rs400_gart_set_page,
240 .cp_init = &r100_cp_init,
241 .cp_fini = &r100_cp_fini,
242 .cp_disable = &r100_cp_disable,
243 .ring_start = &r300_ring_start,
244 .irq_set = &r100_irq_set,
245 .irq_process = &r100_irq_process,
246 .fence_ring_emit = &r300_fence_ring_emit,
247 .cs_parse = &r300_cs_parse,
248 .copy_blit = &r100_copy_blit,
249 .copy_dma = &r300_copy_dma,
250 .copy = &r100_copy_blit,
251 .set_engine_clock = &radeon_legacy_set_engine_clock,
252 .set_memory_clock = NULL,
253 .set_pcie_lanes = NULL,
254 .set_clock_gating = &radeon_legacy_set_clock_gating,
e024e110
DA
255 .set_surface_reg = r100_set_surface_reg,
256 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 257 .bandwidth_update = &r100_bandwidth_update,
771fe6b9
JG
258};
259
260
261/*
262 * rs600.
263 */
264void rs600_errata(struct radeon_device *rdev);
265void rs600_vram_info(struct radeon_device *rdev);
266int rs600_mc_init(struct radeon_device *rdev);
267void rs600_mc_fini(struct radeon_device *rdev);
268int rs600_irq_set(struct radeon_device *rdev);
269int rs600_gart_enable(struct radeon_device *rdev);
270void rs600_gart_disable(struct radeon_device *rdev);
271void rs600_gart_tlb_flush(struct radeon_device *rdev);
272int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
273uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
274void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
c93bb85b 275void rs600_bandwidth_update(struct radeon_device *rdev);
771fe6b9 276static struct radeon_asic rs600_asic = {
068a117c 277 .init = &r300_init,
771fe6b9
JG
278 .errata = &rs600_errata,
279 .vram_info = &rs600_vram_info,
280 .gpu_reset = &r300_gpu_reset,
281 .mc_init = &rs600_mc_init,
282 .mc_fini = &rs600_mc_fini,
283 .wb_init = &r100_wb_init,
284 .wb_fini = &r100_wb_fini,
285 .gart_enable = &rs600_gart_enable,
286 .gart_disable = &rs600_gart_disable,
287 .gart_tlb_flush = &rs600_gart_tlb_flush,
288 .gart_set_page = &rs600_gart_set_page,
289 .cp_init = &r100_cp_init,
290 .cp_fini = &r100_cp_fini,
291 .cp_disable = &r100_cp_disable,
292 .ring_start = &r300_ring_start,
293 .irq_set = &rs600_irq_set,
294 .irq_process = &r100_irq_process,
295 .fence_ring_emit = &r300_fence_ring_emit,
296 .cs_parse = &r300_cs_parse,
297 .copy_blit = &r100_copy_blit,
298 .copy_dma = &r300_copy_dma,
299 .copy = &r100_copy_blit,
300 .set_engine_clock = &radeon_atom_set_engine_clock,
301 .set_memory_clock = &radeon_atom_set_memory_clock,
302 .set_pcie_lanes = NULL,
303 .set_clock_gating = &radeon_atom_set_clock_gating,
c93bb85b 304 .bandwidth_update = &rs600_bandwidth_update,
771fe6b9
JG
305};
306
307
308/*
309 * rs690,rs740
310 */
311void rs690_errata(struct radeon_device *rdev);
312void rs690_vram_info(struct radeon_device *rdev);
313int rs690_mc_init(struct radeon_device *rdev);
314void rs690_mc_fini(struct radeon_device *rdev);
315uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
316void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
c93bb85b 317void rs690_bandwidth_update(struct radeon_device *rdev);
771fe6b9 318static struct radeon_asic rs690_asic = {
068a117c 319 .init = &r300_init,
771fe6b9
JG
320 .errata = &rs690_errata,
321 .vram_info = &rs690_vram_info,
322 .gpu_reset = &r300_gpu_reset,
323 .mc_init = &rs690_mc_init,
324 .mc_fini = &rs690_mc_fini,
325 .wb_init = &r100_wb_init,
326 .wb_fini = &r100_wb_fini,
327 .gart_enable = &rs400_gart_enable,
328 .gart_disable = &rs400_gart_disable,
329 .gart_tlb_flush = &rs400_gart_tlb_flush,
330 .gart_set_page = &rs400_gart_set_page,
331 .cp_init = &r100_cp_init,
332 .cp_fini = &r100_cp_fini,
333 .cp_disable = &r100_cp_disable,
334 .ring_start = &r300_ring_start,
335 .irq_set = &rs600_irq_set,
336 .irq_process = &r100_irq_process,
337 .fence_ring_emit = &r300_fence_ring_emit,
338 .cs_parse = &r300_cs_parse,
339 .copy_blit = &r100_copy_blit,
340 .copy_dma = &r300_copy_dma,
341 .copy = &r300_copy_dma,
342 .set_engine_clock = &radeon_atom_set_engine_clock,
343 .set_memory_clock = &radeon_atom_set_memory_clock,
344 .set_pcie_lanes = NULL,
345 .set_clock_gating = &radeon_atom_set_clock_gating,
e024e110
DA
346 .set_surface_reg = r100_set_surface_reg,
347 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 348 .bandwidth_update = &rs690_bandwidth_update,
771fe6b9
JG
349};
350
351
352/*
353 * rv515
354 */
068a117c 355int rv515_init(struct radeon_device *rdev);
771fe6b9
JG
356void rv515_errata(struct radeon_device *rdev);
357void rv515_vram_info(struct radeon_device *rdev);
358int rv515_gpu_reset(struct radeon_device *rdev);
359int rv515_mc_init(struct radeon_device *rdev);
360void rv515_mc_fini(struct radeon_device *rdev);
361uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
362void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
363void rv515_ring_start(struct radeon_device *rdev);
364uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
365void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
c93bb85b 366void rv515_bandwidth_update(struct radeon_device *rdev);
771fe6b9 367static struct radeon_asic rv515_asic = {
068a117c 368 .init = &rv515_init,
771fe6b9
JG
369 .errata = &rv515_errata,
370 .vram_info = &rv515_vram_info,
371 .gpu_reset = &rv515_gpu_reset,
372 .mc_init = &rv515_mc_init,
373 .mc_fini = &rv515_mc_fini,
374 .wb_init = &r100_wb_init,
375 .wb_fini = &r100_wb_fini,
376 .gart_enable = &r300_gart_enable,
377 .gart_disable = &rv370_pcie_gart_disable,
378 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
379 .gart_set_page = &rv370_pcie_gart_set_page,
380 .cp_init = &r100_cp_init,
381 .cp_fini = &r100_cp_fini,
382 .cp_disable = &r100_cp_disable,
383 .ring_start = &rv515_ring_start,
384 .irq_set = &r100_irq_set,
385 .irq_process = &r100_irq_process,
386 .fence_ring_emit = &r300_fence_ring_emit,
068a117c 387 .cs_parse = &r300_cs_parse,
771fe6b9
JG
388 .copy_blit = &r100_copy_blit,
389 .copy_dma = &r300_copy_dma,
390 .copy = &r100_copy_blit,
391 .set_engine_clock = &radeon_atom_set_engine_clock,
392 .set_memory_clock = &radeon_atom_set_memory_clock,
393 .set_pcie_lanes = &rv370_set_pcie_lanes,
394 .set_clock_gating = &radeon_atom_set_clock_gating,
e024e110
DA
395 .set_surface_reg = r100_set_surface_reg,
396 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 397 .bandwidth_update = &rv515_bandwidth_update,
771fe6b9
JG
398};
399
400
/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
771fe6b9 409static struct radeon_asic r520_asic = {
068a117c 410 .init = &rv515_init,
771fe6b9
JG
411 .errata = &r520_errata,
412 .vram_info = &r520_vram_info,
413 .gpu_reset = &rv515_gpu_reset,
414 .mc_init = &r520_mc_init,
415 .mc_fini = &r520_mc_fini,
416 .wb_init = &r100_wb_init,
417 .wb_fini = &r100_wb_fini,
418 .gart_enable = &r300_gart_enable,
419 .gart_disable = &rv370_pcie_gart_disable,
420 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
421 .gart_set_page = &rv370_pcie_gart_set_page,
422 .cp_init = &r100_cp_init,
423 .cp_fini = &r100_cp_fini,
424 .cp_disable = &r100_cp_disable,
425 .ring_start = &rv515_ring_start,
426 .irq_set = &r100_irq_set,
427 .irq_process = &r100_irq_process,
428 .fence_ring_emit = &r300_fence_ring_emit,
068a117c 429 .cs_parse = &r300_cs_parse,
771fe6b9
JG
430 .copy_blit = &r100_copy_blit,
431 .copy_dma = &r300_copy_dma,
432 .copy = &r100_copy_blit,
433 .set_engine_clock = &radeon_atom_set_engine_clock,
434 .set_memory_clock = &radeon_atom_set_memory_clock,
435 .set_pcie_lanes = &rv370_set_pcie_lanes,
436 .set_clock_gating = &radeon_atom_set_clock_gating,
e024e110
DA
437 .set_surface_reg = r100_set_surface_reg,
438 .clear_surface_reg = r100_clear_surface_reg,
c93bb85b 439 .bandwidth_update = &r520_bandwidth_update,
771fe6b9
JG
440};
441
442/*
443 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rv770,rv730,rv710
444 */
445uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
446void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
447
448#endif
This page took 0.082433 seconds and 5 git commands to generate.