drivers/gpu/drm/radeon/evergreen.c
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;
        /* XXX */
        return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd)
{
        /* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
        /* XXX */
}


void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
        /* XXX */
}

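/* Poll the MC busy bits in SRBM_STATUS until the memory controller goes
 * idle or rdev->usec_timeout iterations have passed.  Returns 0 on idle,
 * -1 on timeout.
 */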
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read MC_STATUS */
                tmp = RREG32(SRBM_STATUS) & 0x1F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

/*
 * GART
 */
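/* Program the L2 cache and MC TLBs, point VM context 0 at the GART page
 * table in VRAM and flush the TLB so the GART aperture becomes usable.
 */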
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r, i;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
              SYSTEM_ACCESS_MODE_NOT_IN_SYS |
              SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
              EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
               RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
               (u32)(rdev->dummy_page.addr >> 12));
        for (i = 1; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        r600_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}

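/* Disable every VM context, fall back to a minimal L1/L2 setup and unpin
 * the GART page table object if one has been allocated.
 */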
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int i, r;

        /* Disable all tables */
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}


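/* AGP systems do not use the GART page table: program the L2 cache and
 * TLBs as for GART, but leave every VM context disabled.
 */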
void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int i;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
              SYSTEM_ACCESS_MODE_NOT_IN_SYS |
              SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
              EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

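/* Save the VGA and CRTC controller state and then blank all six display
 * controllers so the memory controller can be reprogrammed safely.
 */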
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

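/* Point every display surface at the (possibly relocated) VRAM base and
 * then restore the VGA and CRTC state saved by evergreen_mc_stop().
 */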
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        /* Unlock host access */
        WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
        mdelay(1);
        /* Restore video state */
        WREG32(D1VGA_CONTROL, save->vga_control[0]);
        WREG32(D2VGA_CONTROL, save->vga_control[1]);
        WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
        WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
        WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
        WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

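/* Reprogram the memory controller: initialize the HDP registers, stop the
 * displays, program the system aperture, FB location and AGP window, then
 * restore the display state and disable the VGA renderer.
 */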
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                       rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                       rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it from overwriting our objects */
        rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
        /* XXX */
}


static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        /* XXX */

        return 0;
}


/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
        u32 backend_map = 0;

        return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
        /* XXX */
}

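/* Work out the VRAM bus width from the channel size and channel count,
 * read the VRAM size (in MB) from CONFIG_MEMSIZE and place the VRAM and
 * GTT apertures in the GPU address space.
 */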
int evergreen_mc_init(struct radeon_device *rdev)
{
        u32 tmp;
        int chansize, numchan;

        /* Get VRAM information */
        rdev->mc.vram_is_ddr = true;
        tmp = RREG32(MC_ARB_RAMCFG);
        if (tmp & CHANSIZE_OVERRIDE) {
                chansize = 16;
        } else if (tmp & CHANSIZE_MASK) {
                chansize = 64;
        } else {
                chansize = 32;
        }
        tmp = RREG32(MC_SHARED_CHMAP);
        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
        case 0:
        default:
                numchan = 1;
                break;
        case 1:
                numchan = 2;
                break;
        case 2:
                numchan = 4;
                break;
        case 3:
                numchan = 8;
                break;
        }
        rdev->mc.vram_width = numchan * chansize;
        /* Could aper size report 0? */
        rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
        rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
        /* Setup GPU memory space */
        /* size in MB on evergreen */
        rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.visible_vram_size = rdev->mc.aper_size;
        /* FIXME remove this once we support unmappable VRAM */
        if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
                rdev->mc.mc_vram_size = rdev->mc.aper_size;
                rdev->mc.real_vram_size = rdev->mc.aper_size;
        }
        r600_vram_gtt_location(rdev, &rdev->mc);
        radeon_update_bandwidth_info(rdev);

        return 0;
}

int evergreen_gpu_reset(struct radeon_device *rdev)
{
        /* FIXME: implement for evergreen */
        return 0;
}

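/* Bring the hardware up: program the memory controller and run the asic
 * specific init.  CP, blitter, IRQ and writeback bring-up is still stubbed
 * out behind #if 0.
 */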
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
        int r;

        if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
                r = r600_init_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load firmware!\n");
                        return r;
                }
        }
#endif
        evergreen_mc_program(rdev);
#if 0
        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
#endif
        evergreen_gpu_init(rdev);
#if 0
        if (!rdev->r600_blit.shader_obj) {
                r = r600_blit_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed blitter (%d).\n", r);
                        return r;
                }
        }

        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
                          &rdev->r600_blit.shader_gpu_addr);
        radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        if (r) {
                DRM_ERROR("failed to pin blit object %d\n", r);
                return r;
        }

        /* Enable IRQ */
        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        r600_irq_set(rdev);

        r = radeon_ring_init(rdev, rdev->cp.ring_size);
        if (r)
                return r;
        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = r600_cp_resume(rdev);
        if (r)
                return r;
        /* write back buffer is not vital so don't worry about failure */
        r600_wb_enable(rdev);
#endif
        return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
        int r;

        /* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
         * posting will perform the necessary tasks to bring the GPU back
         * into good shape.
         */
        /* post card */
        atom_asic_init(rdev->mode_info.atom_context);
        /* Initialize clocks */
        r = radeon_clocks_init(rdev);
        if (r) {
                return r;
        }

        r = evergreen_startup(rdev);
        if (r) {
                DRM_ERROR("evergreen startup failed on resume\n");
                return r;
        }
#if 0
        r = r600_ib_test(rdev);
        if (r) {
                DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                return r;
        }
#endif
        return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
        int r;

        /* FIXME: we should wait for ring to be empty */
        r700_cp_stop(rdev);
        rdev->cp.ready = false;
        r600_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);
        /* unpin shaders bo */
        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (likely(r == 0)) {
                radeon_bo_unpin(rdev->r600_blit.shader_obj);
                radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        }
#endif
        return 0;
}

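/* Decide whether the GPU has already been posted by checking the CRTC
 * master enables and, failing that, the CONFIG_MEMSIZE register.
 */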
static bool evergreen_card_posted(struct radeon_device *rdev)
{
        u32 reg;

        /* first check CRTCs */
        reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
              RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
        if (reg & EVERGREEN_CRTC_MASTER_EN)
                return true;

        /* then check MEM_SIZE, in case the crtcs are off */
        if (RREG32(CONFIG_MEMSIZE))
                return true;

        return false;
}

/* Plan is to move initialization into this function and to use the helper
 * functions so that radeon_device_init does pretty much nothing more than
 * calling the asic specific functions.  This should also allow removing a
 * bunch of callback functions like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
        int r;

        r = radeon_dummy_page_init(rdev);
        if (r)
                return r;
        /* This doesn't do much */
        r = radeon_gem_init(rdev);
        if (r)
                return r;
        /* Read BIOS */
        if (!radeon_get_bios(rdev)) {
                if (ASIC_IS_AVIVO(rdev))
                        return -EINVAL;
        }
        /* Must be an ATOMBIOS */
        if (!rdev->is_atom_bios) {
                dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
                return -EINVAL;
        }
        r = radeon_atombios_init(rdev);
        if (r)
                return r;
        /* Post card if necessary */
        if (!evergreen_card_posted(rdev)) {
                if (!rdev->bios) {
                        dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
                        return -EINVAL;
                }
                DRM_INFO("GPU not posted. posting now...\n");
                atom_asic_init(rdev->mode_info.atom_context);
        }
        /* Initialize scratch registers */
        r600_scratch_init(rdev);
        /* Initialize surface registers */
        radeon_surface_init(rdev);
        /* Initialize clocks */
        radeon_get_clock_info(rdev->ddev);
        r = radeon_clocks_init(rdev);
        if (r)
                return r;
        /* Initialize power management */
        radeon_pm_init(rdev);
        /* Fence driver */
        r = radeon_fence_driver_init(rdev);
        if (r)
                return r;
        /* initialize AGP */
        if (rdev->flags & RADEON_IS_AGP) {
                r = radeon_agp_init(rdev);
                if (r)
                        radeon_agp_disable(rdev);
        }
        /* initialize memory controller */
        r = evergreen_mc_init(rdev);
        if (r)
                return r;
        /* Memory manager */
        r = radeon_bo_init(rdev);
        if (r)
                return r;
#if 0
        r = radeon_irq_kms_init(rdev);
        if (r)
                return r;

        rdev->cp.ring_obj = NULL;
        r600_ring_init(rdev, 1024 * 1024);

        rdev->ih.ring_obj = NULL;
        r600_ih_ring_init(rdev, 64 * 1024);

        r = r600_pcie_gart_init(rdev);
        if (r)
                return r;
#endif
        rdev->accel_working = false;
        r = evergreen_startup(rdev);
        if (r) {
                evergreen_suspend(rdev);
                /*r600_wb_fini(rdev);*/
                /*radeon_ring_fini(rdev);*/
                /*evergreen_pcie_gart_fini(rdev);*/
                rdev->accel_working = false;
        }
        if (rdev->accel_working) {
                r = radeon_ib_pool_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
                        rdev->accel_working = false;
                }
                r = r600_ib_test(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                        rdev->accel_working = false;
                }
        }
        return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
        radeon_pm_fini(rdev);
        evergreen_suspend(rdev);
#if 0
        r600_blit_fini(rdev);
        r600_irq_fini(rdev);
        radeon_irq_kms_fini(rdev);
        radeon_ring_fini(rdev);
        r600_wb_fini(rdev);
        evergreen_pcie_gart_fini(rdev);
#endif
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_clocks_fini(rdev);
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        kfree(rdev->bios);
        rdev->bios = NULL;
        radeon_dummy_page_fini(rdev);
}