drivers/gpu/drm/radeon/rv770_dpm.c (deliverable/linux.git, as of "drm/radeon: add dpm UVD handling for evergreen/btc asics")
1 /*
2 * Copyright 2011 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24
25 #include "drmP.h"
26 #include "radeon.h"
27 #include "rv770d.h"
28 #include "r600_dpm.h"
29 #include "rv770_dpm.h"
30 #include "cypress_dpm.h"
31 #include "atom.h"
32
33 #define MC_CG_ARB_FREQ_F0 0x0a
34 #define MC_CG_ARB_FREQ_F1 0x0b
35 #define MC_CG_ARB_FREQ_F2 0x0c
36 #define MC_CG_ARB_FREQ_F3 0x0d
37
38 #define MC_CG_SEQ_DRAMCONF_S0 0x05
39 #define MC_CG_SEQ_DRAMCONF_S1 0x06
40
41 #define PCIE_BUS_CLK 10000
42 #define TCLK (PCIE_BUS_CLK / 10)
43
44 #define SMC_RAM_END 0xC000
45
46 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
47 {
48 struct rv7xx_ps *ps = rps->ps_priv;
49
50 return ps;
51 }
52
53 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
54 {
55 struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
56
57 return pi;
58 }
59
60 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
61 {
62 struct evergreen_power_info *pi = rdev->pm.dpm.priv;
63
64 return pi;
65 }
66
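/*
 * Toggle hardware-controlled PCIe gen2 switching in the BIF.  When
 * enabling, hand the voltage interface to the hardware and set the
 * gen2 strap; when disabling, only clear them if we did not boot in
 * gen2.  The register is written back only if the link partner has
 * advertised or used gen2.
 */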
67 static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
68 bool enable)
69 {
70 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
71 u32 tmp;
72
73 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
74 if (enable) {
75 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
76 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
77 tmp |= LC_GEN2_EN_STRAP;
78 } else {
79 if (!pi->boot_in_gen2) {
80 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
81 tmp &= ~LC_GEN2_EN_STRAP;
82 }
83 }
84 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
85 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
86 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
87
88 }
89
90 static void rv770_enable_l0s(struct radeon_device *rdev)
91 {
92 u32 tmp;
93
94 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
95 tmp |= LC_L0S_INACTIVITY(3);
96 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
97 }
98
99 static void rv770_enable_l1(struct radeon_device *rdev)
100 {
101 u32 tmp;
102
103 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
104 tmp &= ~LC_L1_INACTIVITY_MASK;
105 tmp |= LC_L1_INACTIVITY(4);
106 tmp &= ~LC_PMI_TO_L1_DIS;
107 tmp &= ~LC_ASPM_TO_L1_DIS;
108 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
109 }
110
111 static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
112 {
113 u32 tmp;
114
115 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
116 tmp |= LC_L1_INACTIVITY(8);
117 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
118
119 /* NOTE, this is a PCIE indirect reg, not PCIE PORT */
120 tmp = RREG32_PCIE(PCIE_P_CNTL);
121 tmp |= P_PLL_PWRDN_IN_L1L23;
122 tmp &= ~P_PLL_BUF_PDNB;
123 tmp &= ~P_PLL_PDNB;
124 tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
125 WREG32_PCIE(PCIE_P_CNTL, tmp);
126 }
127
128 static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
129 bool enable)
130 {
131 if (enable)
132 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
133 else {
134 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
135 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
136 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
137 RREG32(GB_TILING_CONFIG);
138 }
139 }
140
141 static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
142 bool enable)
143 {
144 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
145
146 if (enable) {
147 u32 mgcg_cgtt_local0;
148
149 if (rdev->family == CHIP_RV770)
150 mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
151 else
152 mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;
153
154 WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
155 WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));
156
157 if (pi->mgcgtssm)
158 WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
159 } else {
160 WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
161 WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
162 }
163 }
164
165 void rv770_restore_cgcg(struct radeon_device *rdev)
166 {
167 bool dpm_en = false, cg_en = false;
168
169 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
170 dpm_en = true;
171 if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
172 cg_en = true;
173
174 if (dpm_en && !cg_en)
175 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
176 }
177
178 static void rv770_start_dpm(struct radeon_device *rdev)
179 {
180 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
181
182 WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
183
184 WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
185 }
186
187 void rv770_stop_dpm(struct radeon_device *rdev)
188 {
189 PPSMC_Result result;
190
191 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);
192
193 if (result != PPSMC_Result_OK)
194 DRM_ERROR("Could not force DPM to low.\n");
195
196 WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
197
198 WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
199
200 WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
201 }
202
203 bool rv770_dpm_enabled(struct radeon_device *rdev)
204 {
205 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
206 return true;
207 else
208 return false;
209 }
210
211 void rv770_enable_thermal_protection(struct radeon_device *rdev,
212 bool enable)
213 {
214 if (enable)
215 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
216 else
217 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
218 }
219
220 void rv770_enable_acpi_pm(struct radeon_device *rdev)
221 {
222 WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
223 }
224
225 u8 rv770_get_seq_value(struct radeon_device *rdev,
226 struct rv7xx_pl *pl)
227 {
228 return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
229 MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
230 }
231
232 int rv770_read_smc_soft_register(struct radeon_device *rdev,
233 u16 reg_offset, u32 *value)
234 {
235 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
236
237 return rv770_read_smc_sram_dword(rdev,
238 pi->soft_regs_start + reg_offset,
239 value, pi->sram_end);
240 }
241
242 int rv770_write_smc_soft_register(struct radeon_device *rdev,
243 u16 reg_offset, u32 value)
244 {
245 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
246
247 return rv770_write_smc_sram_dword(rdev,
248 pi->soft_regs_start + reg_offset,
249 value, pi->sram_end);
250 }
251
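/*
 * Fill in the per-level aT thresholds.  The rise (r) and fall (l)
 * values are linearly interpolated across the low/medium and
 * medium/high sclk pairs, then scaled by the state (bsp) or
 * performance (pbsp) interval before being packed into the
 * CG_R/CG_L fields.
 */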
252 int rv770_populate_smc_t(struct radeon_device *rdev,
253 struct radeon_ps *radeon_state,
254 RV770_SMC_SWSTATE *smc_state)
255 {
256 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
257 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
258 int i;
259 int a_n;
260 int a_d;
261 u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
262 u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
263 u32 a_t;
264
265 l[0] = 0;
266 r[2] = 100;
267
268 a_n = (int)state->medium.sclk * pi->lmp +
269 (int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
270 a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
271 (int)state->medium.sclk * pi->lmp;
272
273 l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
274 r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);
275
276 a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
277 (R600_AH_DFLT - pi->rmp);
278 a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
279 (int)state->high.sclk * pi->lhp;
280
281 l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
282 r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);
283
284 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
285 a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
286 smc_state->levels[i].aT = cpu_to_be32(a_t);
287 }
288
289 a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
290 CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);
291
292 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
293 cpu_to_be32(a_t);
294
295 return 0;
296 }
297
298 int rv770_populate_smc_sp(struct radeon_device *rdev,
299 struct radeon_ps *radeon_state,
300 RV770_SMC_SWSTATE *smc_state)
301 {
302 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
303 int i;
304
305 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
306 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
307
308 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
309 cpu_to_be32(pi->psp);
310
311 return 0;
312 }
313
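/*
 * Work out the memory PLL feedback divider as an integer part (clkf)
 * plus a fractional part in eighths (clkfrac) for the requested
 * memory clock, using the reference and post dividers returned by the
 * ATOM tables.  GDDR5 uses a higher effective yclk multiplier than
 * DDR/GDDR3.
 */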
314 static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
315 u32 reference_clock,
316 bool gddr5,
317 struct atom_clock_dividers *dividers,
318 u32 *clkf,
319 u32 *clkfrac)
320 {
321 u32 post_divider, reference_divider, feedback_divider8;
322 u32 fyclk;
323
324 if (gddr5)
325 fyclk = (memory_clock * 8) / 2;
326 else
327 fyclk = (memory_clock * 4) / 2;
328
329 post_divider = dividers->post_div;
330 reference_divider = dividers->ref_div;
331
332 feedback_divider8 =
333 (8 * fyclk * reference_divider * post_divider) / reference_clock;
334
335 *clkf = feedback_divider8 / 8;
336 *clkfrac = feedback_divider8 % 8;
337 }
338
339 static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
340 {
341 int ret = 0;
342
343 switch (postdiv) {
344 case 1:
345 *encoded_postdiv = 0;
346 break;
347 case 2:
348 *encoded_postdiv = 1;
349 break;
350 case 4:
351 *encoded_postdiv = 2;
352 break;
353 case 8:
354 *encoded_postdiv = 3;
355 break;
356 case 16:
357 *encoded_postdiv = 4;
358 break;
359 default:
360 ret = -EINVAL;
361 break;
362 }
363
364 return ret;
365 }
366
367 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
368 {
369 if (clkf <= 0x10)
370 return 0x4B;
371 if (clkf <= 0x19)
372 return 0x5B;
373 if (clkf <= 0x21)
374 return 0x2B;
375 if (clkf <= 0x27)
376 return 0x6C;
377 if (clkf <= 0x31)
378 return 0x9D;
379 return 0xC6;
380 }
381
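/*
 * Program the RV770 memory PLL (AD and DQ sides) for the requested
 * memory clock: look up the dividers in ATOM, derive the fractional
 * feedback divider and bias current, and pack the results into the
 * MPLL_*_FUNC_CNTL values carried in the SMC mclk structure.  The DQ
 * side is only reprogrammed for GDDR5.
 */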
382 static int rv770_populate_mclk_value(struct radeon_device *rdev,
383 u32 engine_clock, u32 memory_clock,
384 RV7XX_SMC_MCLK_VALUE *mclk)
385 {
386 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
387 u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
388 u32 mpll_ad_func_cntl =
389 pi->clk_regs.rv770.mpll_ad_func_cntl;
390 u32 mpll_ad_func_cntl_2 =
391 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
392 u32 mpll_dq_func_cntl =
393 pi->clk_regs.rv770.mpll_dq_func_cntl;
394 u32 mpll_dq_func_cntl_2 =
395 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
396 u32 mclk_pwrmgt_cntl =
397 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
398 u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
399 struct atom_clock_dividers dividers;
400 u32 reference_clock = rdev->clock.mpll.reference_freq;
401 u32 clkf, clkfrac;
402 u32 postdiv_yclk;
403 u32 ibias;
404 int ret;
405
406 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
407 memory_clock, false, &dividers);
408 if (ret)
409 return ret;
410
411 if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
412 return -EINVAL;
413
414 rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
415 pi->mem_gddr5,
416 &dividers, &clkf, &clkfrac);
417
418 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
419 if (ret)
420 return ret;
421
422 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
423
424 mpll_ad_func_cntl &= ~(CLKR_MASK |
425 YCLK_POST_DIV_MASK |
426 CLKF_MASK |
427 CLKFRAC_MASK |
428 IBIAS_MASK);
429 mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
430 mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
431 mpll_ad_func_cntl |= CLKF(clkf);
432 mpll_ad_func_cntl |= CLKFRAC(clkfrac);
433 mpll_ad_func_cntl |= IBIAS(ibias);
434
435 if (dividers.vco_mode)
436 mpll_ad_func_cntl_2 |= VCO_MODE;
437 else
438 mpll_ad_func_cntl_2 &= ~VCO_MODE;
439
440 if (pi->mem_gddr5) {
441 rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
442 reference_clock,
443 pi->mem_gddr5,
444 &dividers, &clkf, &clkfrac);
445
446 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
447
448 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
449 if (ret)
450 return ret;
451
452 mpll_dq_func_cntl &= ~(CLKR_MASK |
453 YCLK_POST_DIV_MASK |
454 CLKF_MASK |
455 CLKFRAC_MASK |
456 IBIAS_MASK);
457 mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
458 mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
459 mpll_dq_func_cntl |= CLKF(clkf);
460 mpll_dq_func_cntl |= CLKFRAC(clkfrac);
461 mpll_dq_func_cntl |= IBIAS(ibias);
462
463 if (dividers.vco_mode)
464 mpll_dq_func_cntl_2 |= VCO_MODE;
465 else
466 mpll_dq_func_cntl_2 &= ~VCO_MODE;
467 }
468
469 mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
470 mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
471 mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
472 mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
473 mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
474 mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
475 mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
476
477 return 0;
478 }
479
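/*
 * Build the engine PLL (SPLL) register values for the requested sclk:
 * reference and post dividers come from ATOM, the feedback divider is
 * computed in 1/16384 units, and spread spectrum is enabled if the
 * ATOM SS table has an entry covering the resulting VCO frequency.
 */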
480 static int rv770_populate_sclk_value(struct radeon_device *rdev,
481 u32 engine_clock,
482 RV770_SMC_SCLK_VALUE *sclk)
483 {
484 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
485 struct atom_clock_dividers dividers;
486 u32 spll_func_cntl =
487 pi->clk_regs.rv770.cg_spll_func_cntl;
488 u32 spll_func_cntl_2 =
489 pi->clk_regs.rv770.cg_spll_func_cntl_2;
490 u32 spll_func_cntl_3 =
491 pi->clk_regs.rv770.cg_spll_func_cntl_3;
492 u32 cg_spll_spread_spectrum =
493 pi->clk_regs.rv770.cg_spll_spread_spectrum;
494 u32 cg_spll_spread_spectrum_2 =
495 pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
496 u64 tmp;
497 u32 reference_clock = rdev->clock.spll.reference_freq;
498 u32 reference_divider, post_divider;
499 u32 fbdiv;
500 int ret;
501
502 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
503 engine_clock, false, &dividers);
504 if (ret)
505 return ret;
506
507 reference_divider = 1 + dividers.ref_div;
508
509 if (dividers.enable_post_div)
510 post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
511 else
512 post_divider = 1;
513
514 tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
515 do_div(tmp, reference_clock);
516 fbdiv = (u32) tmp;
517
518 if (dividers.enable_post_div)
519 spll_func_cntl |= SPLL_DIVEN;
520 else
521 spll_func_cntl &= ~SPLL_DIVEN;
522 spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
523 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
524 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
525 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);
526
527 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
528 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
529
530 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
531 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
532 spll_func_cntl_3 |= SPLL_DITHEN;
533
534 if (pi->sclk_ss) {
535 struct radeon_atom_ss ss;
536 u32 vco_freq = engine_clock * post_divider;
537
538 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
539 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
540 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
541 u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);
542
543 cg_spll_spread_spectrum &= ~CLKS_MASK;
544 cg_spll_spread_spectrum |= CLKS(clk_s);
545 cg_spll_spread_spectrum |= SSEN;
546
547 cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
548 cg_spll_spread_spectrum_2 |= CLKV(clk_v);
549 }
550 }
551
552 sclk->sclk_value = cpu_to_be32(engine_clock);
553 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
554 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
555 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
556 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
557 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);
558
559 return 0;
560 }
561
562 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
563 RV770_SMC_VOLTAGE_VALUE *voltage)
564 {
565 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
566 int i;
567
568 if (!pi->voltage_control) {
569 voltage->index = 0;
570 voltage->value = 0;
571 return 0;
572 }
573
574 for (i = 0; i < pi->valid_vddc_entries; i++) {
575 if (vddc <= pi->vddc_table[i].vddc) {
576 voltage->index = pi->vddc_table[i].vddc_index;
577 voltage->value = cpu_to_be16(vddc);
578 break;
579 }
580 }
581
582 if (i == pi->valid_vddc_entries)
583 return -EINVAL;
584
585 return 0;
586 }
587
588 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
589 RV770_SMC_VOLTAGE_VALUE *voltage)
590 {
591 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
592
593 if (!pi->mvdd_control) {
594 voltage->index = MVDD_HIGH_INDEX;
595 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
596 return 0;
597 }
598
599 if (mclk <= pi->mvdd_split_frequency) {
600 voltage->index = MVDD_LOW_INDEX;
601 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
602 } else {
603 voltage->index = MVDD_HIGH_INDEX;
604 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
605 }
606
607 return 0;
608 }
609
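/*
 * Translate a single rv7xx performance level into its SMC
 * representation: PCIe gen2 flags, backbias, display watermark, and
 * the sclk/mclk/vddc/mvdd register sets, dispatching the sclk/mclk
 * population to the rv730/rv740 variants where required.
 */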
610 static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
611 struct rv7xx_pl *pl,
612 RV770_SMC_HW_PERFORMANCE_LEVEL *level,
613 u8 watermark_level)
614 {
615 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
616 int ret;
617
618 level->gen2PCIE = pi->pcie_gen2 ?
619 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
620 level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
621 level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
622 level->displayWatermark = watermark_level;
623
624 if (rdev->family == CHIP_RV740)
625 ret = rv740_populate_sclk_value(rdev, pl->sclk,
626 &level->sclk);
627 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
628 ret = rv730_populate_sclk_value(rdev, pl->sclk,
629 &level->sclk);
630 else
631 ret = rv770_populate_sclk_value(rdev, pl->sclk,
632 &level->sclk);
633 if (ret)
634 return ret;
635
636 if (rdev->family == CHIP_RV740) {
637 if (pi->mem_gddr5) {
638 if (pl->mclk <= pi->mclk_strobe_mode_threshold)
639 level->strobeMode =
640 rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
641 else
642 level->strobeMode = 0;
643
644 if (pl->mclk > pi->mclk_edc_enable_threshold)
645 level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
646 else
647 level->mcFlags = 0;
648 }
649 ret = rv740_populate_mclk_value(rdev, pl->sclk,
650 pl->mclk, &level->mclk);
651 } else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
652 ret = rv730_populate_mclk_value(rdev, pl->sclk,
653 pl->mclk, &level->mclk);
654 else
655 ret = rv770_populate_mclk_value(rdev, pl->sclk,
656 pl->mclk, &level->mclk);
657 if (ret)
658 return ret;
659
660 ret = rv770_populate_vddc_value(rdev, pl->vddc,
661 &level->vddc);
662 if (ret)
663 return ret;
664
665 ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
666
667 return ret;
668 }
669
670 static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
671 struct radeon_ps *radeon_state,
672 RV770_SMC_SWSTATE *smc_state)
673 {
674 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
675 int ret;
676
677 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
678 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
679
680 ret = rv770_convert_power_level_to_smc(rdev,
681 &state->low,
682 &smc_state->levels[0],
683 PPSMC_DISPLAY_WATERMARK_LOW);
684 if (ret)
685 return ret;
686
687 ret = rv770_convert_power_level_to_smc(rdev,
688 &state->medium,
689 &smc_state->levels[1],
690 PPSMC_DISPLAY_WATERMARK_LOW);
691 if (ret)
692 return ret;
693
694 ret = rv770_convert_power_level_to_smc(rdev,
695 &state->high,
696 &smc_state->levels[2],
697 PPSMC_DISPLAY_WATERMARK_HIGH);
698 if (ret)
699 return ret;
700
701 smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
702 smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
703 smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
704
705 smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
706 &state->low);
707 smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
708 &state->medium);
709 smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
710 &state->high);
711
712 rv770_populate_smc_sp(rdev, radeon_state, smc_state);
713
714 return rv770_populate_smc_t(rdev, radeon_state, smc_state);
715
716 }
717
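/*
 * Derive the MC arbiter refresh rate for a given engine clock from
 * the DRAM row count in MC_ARB_RAMCFG and the refresh rate field in
 * MC_SEQ_MISC0.
 */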
718 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
719 u32 engine_clock)
720 {
721 u32 dram_rows;
722 u32 dram_refresh_rate;
723 u32 mc_arb_rfsh_rate;
724 u32 tmp;
725
726 tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
727 dram_rows = 1 << (tmp + 10);
728 tmp = RREG32(MC_SEQ_MISC0) & 3;
729 dram_refresh_rate = 1 << (tmp + 3);
730 mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
731
732 return mc_arb_rfsh_rate;
733 }
734
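/*
 * Program the arbiter timing for the new state: cap the high sclk at
 * 255/64 of the low sclk, set the DRAM timings for it, and write the
 * per-state SQM ratios and refresh rates.
 */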
735 static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
736 struct radeon_ps *radeon_state)
737 {
738 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
739 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
740 u32 sqm_ratio;
741 u32 arb_refresh_rate;
742 u32 high_clock;
743
744 if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
745 high_clock = state->high.sclk;
746 else
747 high_clock = (state->low.sclk * 0xFF / 0x40);
748
749 radeon_atom_set_engine_dram_timings(rdev, high_clock,
750 state->high.mclk);
751
752 sqm_ratio =
753 STATE0(64 * high_clock / pi->boot_sclk) |
754 STATE1(64 * high_clock / state->low.sclk) |
755 STATE2(64 * high_clock / state->medium.sclk) |
756 STATE3(64 * high_clock / state->high.sclk);
757 WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
758
759 arb_refresh_rate =
760 POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
761 POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
762 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
763 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
764 WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
765 }
766
767 void rv770_enable_backbias(struct radeon_device *rdev,
768 bool enable)
769 {
770 if (enable)
771 WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
772 else
773 WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
774 }
775
776 static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
777 bool enable)
778 {
779 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
780
781 if (enable) {
782 if (pi->sclk_ss)
783 WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
784
785 if (pi->mclk_ss) {
786 if (rdev->family == CHIP_RV740)
787 rv740_enable_mclk_spread_spectrum(rdev, true);
788 }
789 } else {
790 WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
791
792 WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
793
794 WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);
795
796 if (rdev->family == CHIP_RV740)
797 rv740_enable_mclk_spread_spectrum(rdev, false);
798 }
799 }
800
801 static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
802 {
803 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
804
805 if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
806 WREG32(MPLL_TIME,
807 (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
808 MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
809 }
810 }
811
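/*
 * Convert the ASI/PASI sample intervals into the bsp/bsu encodings
 * used by CG_BSP and by the bSP fields of the SMC state and
 * performance levels.
 */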
812 void rv770_setup_bsp(struct radeon_device *rdev)
813 {
814 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
815 u32 xclk = radeon_get_xclk(rdev);
816
817 r600_calculate_u_and_p(pi->asi,
818 xclk,
819 16,
820 &pi->bsp,
821 &pi->bsu);
822
823 r600_calculate_u_and_p(pi->pasi,
824 xclk,
825 16,
826 &pi->pbsp,
827 &pi->pbsu);
828
829 pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
830 pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
831
832 WREG32(CG_BSP, pi->dsp);
833
834 }
835
836 void rv770_program_git(struct radeon_device *rdev)
837 {
838 WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
839 }
840
841 void rv770_program_tp(struct radeon_device *rdev)
842 {
843 int i;
844 enum r600_td td = R600_TD_DFLT;
845
846 for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
847 WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
848
849 if (td == R600_TD_AUTO)
850 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
851 else
852 WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
853 if (td == R600_TD_UP)
854 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
855 if (td == R600_TD_DOWN)
856 WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
857 }
858
859 void rv770_program_tpp(struct radeon_device *rdev)
860 {
861 WREG32(CG_TPC, R600_TPC_DFLT);
862 }
863
864 void rv770_program_sstp(struct radeon_device *rdev)
865 {
866 WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
867 }
868
869 void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
870 {
871 WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
872 }
873
874 static void rv770_enable_display_gap(struct radeon_device *rdev)
875 {
876 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
877
878 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
879 tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
880 DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
881 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
882 }
883
884 void rv770_program_vc(struct radeon_device *rdev)
885 {
886 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
887
888 WREG32(CG_FTV, pi->vrc);
889 }
890
891 void rv770_clear_vc(struct radeon_device *rdev)
892 {
893 WREG32(CG_FTV, 0);
894 }
895
896 int rv770_upload_firmware(struct radeon_device *rdev)
897 {
898 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
899 int ret;
900
901 rv770_reset_smc(rdev);
902 rv770_stop_smc_clock(rdev);
903
904 ret = rv770_load_smc_ucode(rdev, pi->sram_end);
905 if (ret)
906 return ret;
907
908 return 0;
909 }
910
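/*
 * Build the ACPI (lowest power) state from the initial state: the
 * memory DLLs and both memory PLLs are held in reset, the SPLL is
 * bypassed, and VDDC is set to the ACPI level if one exists,
 * otherwise to the minimum value from the voltage table.
 */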
911 static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
912 RV770_SMC_STATETABLE *table)
913 {
914 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
915
916 u32 mpll_ad_func_cntl =
917 pi->clk_regs.rv770.mpll_ad_func_cntl;
918 u32 mpll_ad_func_cntl_2 =
919 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
920 u32 mpll_dq_func_cntl =
921 pi->clk_regs.rv770.mpll_dq_func_cntl;
922 u32 mpll_dq_func_cntl_2 =
923 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
924 u32 spll_func_cntl =
925 pi->clk_regs.rv770.cg_spll_func_cntl;
926 u32 spll_func_cntl_2 =
927 pi->clk_regs.rv770.cg_spll_func_cntl_2;
928 u32 spll_func_cntl_3 =
929 pi->clk_regs.rv770.cg_spll_func_cntl_3;
930 u32 mclk_pwrmgt_cntl;
931 u32 dll_cntl;
932
933 table->ACPIState = table->initialState;
934
935 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
936
937 if (pi->acpi_vddc) {
938 rv770_populate_vddc_value(rdev, pi->acpi_vddc,
939 &table->ACPIState.levels[0].vddc);
940 if (pi->pcie_gen2) {
941 if (pi->acpi_pcie_gen2)
942 table->ACPIState.levels[0].gen2PCIE = 1;
943 else
944 table->ACPIState.levels[0].gen2PCIE = 0;
945 } else
946 table->ACPIState.levels[0].gen2PCIE = 0;
947 if (pi->acpi_pcie_gen2)
948 table->ACPIState.levels[0].gen2XSP = 1;
949 else
950 table->ACPIState.levels[0].gen2XSP = 0;
951 } else {
952 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
953 &table->ACPIState.levels[0].vddc);
954 table->ACPIState.levels[0].gen2PCIE = 0;
955 }
956
957
958 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
959
960 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
961
962 mclk_pwrmgt_cntl = (MRDCKA0_RESET |
963 MRDCKA1_RESET |
964 MRDCKB0_RESET |
965 MRDCKB1_RESET |
966 MRDCKC0_RESET |
967 MRDCKC1_RESET |
968 MRDCKD0_RESET |
969 MRDCKD1_RESET);
970
971 dll_cntl = 0xff000000;
972
973 spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
974
975 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
976 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
977
978 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
979 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
980 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
981 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
982
983 table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
984 table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
985
986 table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
987
988 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
989 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
990 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
991
992 table->ACPIState.levels[0].sclk.sclk_value = 0;
993
994 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
995
996 table->ACPIState.levels[1] = table->ACPIState.levels[0];
997 table->ACPIState.levels[2] = table->ACPIState.levels[0];
998
999 return 0;
1000 }
1001
1002 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
1003 RV770_SMC_VOLTAGE_VALUE *voltage)
1004 {
1005 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1006
1007 if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
1008 (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low) ) {
1009 voltage->index = MVDD_LOW_INDEX;
1010 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1011 } else {
1012 voltage->index = MVDD_HIGH_INDEX;
1013 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1014 }
1015
1016 return 0;
1017 }
1018
1019 static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
1020 struct radeon_ps *radeon_state,
1021 RV770_SMC_STATETABLE *table)
1022 {
1023 struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
1024 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1025 u32 a_t;
1026
1027 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1028 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1029 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1030 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1031 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1032 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1033 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1034 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1035 table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1036 cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1037 table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1038 cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1039
1040 table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1041 cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1042 table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1043 cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1044
1045 table->initialState.levels[0].mclk.mclk770.mclk_value =
1046 cpu_to_be32(initial_state->low.mclk);
1047
1048 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1049 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1050 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1051 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1052 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1053 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1054 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1055 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1056 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1057 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1058
1059 table->initialState.levels[0].sclk.sclk_value =
1060 cpu_to_be32(initial_state->low.sclk);
1061
1062 table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1063
1064 table->initialState.levels[0].seqValue =
1065 rv770_get_seq_value(rdev, &initial_state->low);
1066
1067 rv770_populate_vddc_value(rdev,
1068 initial_state->low.vddc,
1069 &table->initialState.levels[0].vddc);
1070 rv770_populate_initial_mvdd_value(rdev,
1071 &table->initialState.levels[0].mvdd);
1072
1073 a_t = CG_R(0xffff) | CG_L(0);
1074 table->initialState.levels[0].aT = cpu_to_be32(a_t);
1075
1076 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1077
1078 if (pi->boot_in_gen2)
1079 table->initialState.levels[0].gen2PCIE = 1;
1080 else
1081 table->initialState.levels[0].gen2PCIE = 0;
1082 if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1083 table->initialState.levels[0].gen2XSP = 1;
1084 else
1085 table->initialState.levels[0].gen2XSP = 0;
1086
1087 if (rdev->family == CHIP_RV740) {
1088 if (pi->mem_gddr5) {
1089 if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
1090 table->initialState.levels[0].strobeMode =
1091 rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
1092 else
1093 table->initialState.levels[0].strobeMode = 0;
1094
1095 if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
1096 table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1097 else
1098 table->initialState.levels[0].mcFlags = 0;
1099 }
1100 }
1101
1102 table->initialState.levels[1] = table->initialState.levels[0];
1103 table->initialState.levels[2] = table->initialState.levels[0];
1104
1105 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1106
1107 return 0;
1108 }
1109
1110 static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
1111 RV770_SMC_STATETABLE *table)
1112 {
1113 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1114 int i;
1115
1116 for (i = 0; i < pi->valid_vddc_entries; i++) {
1117 table->highSMIO[pi->vddc_table[i].vddc_index] =
1118 pi->vddc_table[i].high_smio;
1119 table->lowSMIO[pi->vddc_table[i].vddc_index] =
1120 cpu_to_be32(pi->vddc_table[i].low_smio);
1121 }
1122
1123 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1124 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1125 cpu_to_be32(pi->vddc_mask_low);
1126
1127 for (i = 0;
1128 ((i < pi->valid_vddc_entries) &&
1129 (pi->max_vddc_in_table >
1130 pi->vddc_table[i].vddc));
1131 i++);
1132
1133 table->maxVDDCIndexInPPTable =
1134 pi->vddc_table[i].vddc_index;
1135
1136 return 0;
1137 }
1138
1139 static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
1140 RV770_SMC_STATETABLE *table)
1141 {
1142 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1143
1144 if (pi->mvdd_control) {
1145 table->lowSMIO[MVDD_HIGH_INDEX] |=
1146 cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
1147 table->lowSMIO[MVDD_LOW_INDEX] |=
1148 cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);
1149
1150 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
1151 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
1152 cpu_to_be32(pi->mvdd_mask_low);
1153 }
1154
1155 return 0;
1156 }
1157
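/*
 * Fill the SMC state table: voltage SMIO tables, thermal protection
 * type, platform capability flags, and the initial and ACPI states
 * (using the rv730/rv740 variants where required), then copy the
 * whole table into SMC SRAM.
 */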
1158 static int rv770_init_smc_table(struct radeon_device *rdev)
1159 {
1160 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1161 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1162 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1163 RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1164 int ret;
1165
1166 memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1167
1168 pi->boot_sclk = boot_state->low.sclk;
1169
1170 rv770_populate_smc_vddc_table(rdev, table);
1171 rv770_populate_smc_mvdd_table(rdev, table);
1172
1173 switch (rdev->pm.int_thermal_type) {
1174 case THERMAL_TYPE_RV770:
1175 case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
1176 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1177 break;
1178 case THERMAL_TYPE_NONE:
1179 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1180 break;
1181 case THERMAL_TYPE_EXTERNAL_GPIO:
1182 default:
1183 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1184 break;
1185 }
1186
1187 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
1188 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1189
1190 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
1191 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;
1192
1193 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
1194 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
1195 }
1196
1197 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1198 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1199
1200 if (pi->mem_gddr5)
1201 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1202
1203 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1204 ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
1205 else
1206 ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
1207 if (ret)
1208 return ret;
1209
1210 if (rdev->family == CHIP_RV740)
1211 ret = rv740_populate_smc_acpi_state(rdev, table);
1212 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1213 ret = rv730_populate_smc_acpi_state(rdev, table);
1214 else
1215 ret = rv770_populate_smc_acpi_state(rdev, table);
1216 if (ret)
1217 return ret;
1218
1219 table->driverState = table->initialState;
1220
1221 return rv770_copy_bytes_to_smc(rdev,
1222 pi->state_table_start,
1223 (const u8 *)table,
1224 sizeof(RV770_SMC_STATETABLE),
1225 pi->sram_end);
1226 }
1227
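/*
 * Enumerate the VDDC steps between the minimum and maximum voltages
 * reported by ATOM and record the SMIO (GPIO) pattern for each step;
 * consecutive steps that share the same pattern share a vddc_index.
 */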
1228 static int rv770_construct_vddc_table(struct radeon_device *rdev)
1229 {
1230 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1231 u16 min, max, step;
1232 u32 steps = 0;
1233 u8 vddc_index = 0;
1234 u32 i;
1235
1236 radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
1237 radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
1238 radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1239
1240 steps = (max - min) / step + 1;
1241
1242 if (steps > MAX_NO_VREG_STEPS)
1243 return -EINVAL;
1244
1245 for (i = 0; i < steps; i++) {
1246 u32 gpio_pins, gpio_mask;
1247
1248 pi->vddc_table[i].vddc = (u16)(min + i * step);
1249 radeon_atom_get_voltage_gpio_settings(rdev,
1250 pi->vddc_table[i].vddc,
1251 SET_VOLTAGE_TYPE_ASIC_VDDC,
1252 &gpio_pins, &gpio_mask);
1253 pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1254 pi->vddc_table[i].high_smio = 0;
1255 pi->vddc_mask_low = gpio_mask;
1256 if (i > 0) {
1257 if ((pi->vddc_table[i].low_smio !=
1258 pi->vddc_table[i - 1].low_smio ) ||
1259 (pi->vddc_table[i].high_smio !=
1260 pi->vddc_table[i - 1].high_smio))
1261 vddc_index++;
1262 }
1263 pi->vddc_table[i].vddc_index = vddc_index;
1264 }
1265
1266 pi->valid_vddc_entries = (u8)steps;
1267
1268 return 0;
1269 }
1270
1271 static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1272 {
1273 if (memory_info->mem_type == MEM_TYPE_GDDR3)
1274 return 30000;
1275
1276 return 0;
1277 }
1278
1279 static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1280 {
1281 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1282 u32 gpio_pins, gpio_mask;
1283
1284 radeon_atom_get_voltage_gpio_settings(rdev,
1285 MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1286 &gpio_pins, &gpio_mask);
1287 pi->mvdd_mask_low = gpio_mask;
1288 pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1289 gpio_pins & gpio_mask;
1290
1291 radeon_atom_get_voltage_gpio_settings(rdev,
1292 MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1293 &gpio_pins, &gpio_mask);
1294 pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1295 gpio_pins & gpio_mask;
1296
1297 return 0;
1298 }
1299
1300 u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1301 {
1302 return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1303 }
1304
1305 static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1306 {
1307 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1308 u8 memory_module_index;
1309 struct atom_memory_info memory_info;
1310
1311 memory_module_index = rv770_get_memory_module_index(rdev);
1312
1313 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1314 pi->mvdd_control = false;
1315 return 0;
1316 }
1317
1318 pi->mvdd_split_frequency =
1319 rv770_get_mclk_split_point(&memory_info);
1320
1321 if (pi->mvdd_split_frequency == 0) {
1322 pi->mvdd_control = false;
1323 return 0;
1324 }
1325
1326 return rv770_get_mvdd_pin_configuration(rdev);
1327 }
1328
1329 void rv770_enable_voltage_control(struct radeon_device *rdev,
1330 bool enable)
1331 {
1332 if (enable)
1333 WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
1334 else
1335 WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
1336 }
1337
1338 static void rv770_program_display_gap(struct radeon_device *rdev)
1339 {
1340 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1341
1342 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1343 if (RREG32(AVIVO_D1CRTC_CONTROL) & AVIVO_CRTC_EN) {
1344 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1345 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1346 } else if (RREG32(AVIVO_D2CRTC_CONTROL) & AVIVO_CRTC_EN) {
1347 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1348 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1349 } else {
1350 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1351 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1352 }
1353 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1354 }
1355
1356 static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
1357 bool enable)
1358 {
1359 rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);
1360
1361 if (enable)
1362 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
1363 else
1364 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
1365 }
1366
1367 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev)
1368 {
1369 struct radeon_ps *radeon_new_state = rdev->pm.dpm.requested_ps;
1370
1371 if ((rdev->family == CHIP_RV730) ||
1372 (rdev->family == CHIP_RV710) ||
1373 (rdev->family == CHIP_RV740))
1374 rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1375 else
1376 rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1377 }
1378
1379 static int rv770_upload_sw_state(struct radeon_device *rdev)
1380 {
1381 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1382 struct radeon_ps *radeon_new_state = rdev->pm.dpm.requested_ps;
1383 u16 address = pi->state_table_start +
1384 offsetof(RV770_SMC_STATETABLE, driverState);
1385 RV770_SMC_SWSTATE state = { 0 };
1386 int ret;
1387
1388 ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1389 if (ret)
1390 return ret;
1391
1392 return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1393 sizeof(RV770_SMC_SWSTATE),
1394 pi->sram_end);
1395 }
1396
1397 int rv770_halt_smc(struct radeon_device *rdev)
1398 {
1399 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1400 return -EINVAL;
1401
1402 if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1403 return -EINVAL;
1404
1405 return 0;
1406 }
1407
1408 int rv770_resume_smc(struct radeon_device *rdev)
1409 {
1410 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1411 return -EINVAL;
1412 return 0;
1413 }
1414
1415 int rv770_set_sw_state(struct radeon_device *rdev)
1416 {
1417 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1418 return -EINVAL;
1419 return 0;
1420 }
1421
1422 int rv770_set_boot_state(struct radeon_device *rdev)
1423 {
1424 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1425 return -EINVAL;
1426 return 0;
1427 }
1428
1429 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev)
1430 {
1431 struct rv7xx_ps *new_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
1432 struct rv7xx_ps *current_state = rv770_get_ps(rdev->pm.dpm.current_ps);
1433
1434 if ((rdev->pm.dpm.requested_ps->vclk == rdev->pm.dpm.current_ps->vclk) &&
1435 (rdev->pm.dpm.requested_ps->dclk == rdev->pm.dpm.current_ps->dclk))
1436 return;
1437
1438 if (new_state->high.sclk >= current_state->high.sclk)
1439 return;
1440
1441 radeon_set_uvd_clocks(rdev, rdev->pm.dpm.requested_ps->vclk,
1442 rdev->pm.dpm.requested_ps->dclk);
1443 }
1444
1445 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev)
1446 {
1447 struct rv7xx_ps *new_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
1448 struct rv7xx_ps *current_state = rv770_get_ps(rdev->pm.dpm.current_ps);
1449
1450 if ((rdev->pm.dpm.requested_ps->vclk == rdev->pm.dpm.current_ps->vclk) &&
1451 (rdev->pm.dpm.requested_ps->dclk == rdev->pm.dpm.current_ps->dclk))
1452 return;
1453
1454 if (new_state->high.sclk < current_state->high.sclk)
1455 return;
1456
1457 radeon_set_uvd_clocks(rdev, rdev->pm.dpm.requested_ps->vclk,
1458 rdev->pm.dpm.requested_ps->dclk);
1459 }
1460
1461 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1462 {
1463 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1464 return -EINVAL;
1465
1466 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1467 return -EINVAL;
1468
1469 return 0;
1470 }
1471
1472 int rv770_unrestrict_performance_levels_after_switch(struct radeon_device *rdev)
1473 {
1474 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1475 return -EINVAL;
1476
1477 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled)) != PPSMC_Result_OK)
1478 return -EINVAL;
1479
1480 return 0;
1481 }
1482
1483 void r7xx_start_smc(struct radeon_device *rdev)
1484 {
1485 rv770_start_smc(rdev);
1486 rv770_start_smc_clock(rdev);
1487 }
1488
1489
1490 void r7xx_stop_smc(struct radeon_device *rdev)
1491 {
1492 rv770_reset_smc(rdev);
1493 rv770_stop_smc_clock(rdev);
1494 }
1495
1496 static void rv770_read_clock_registers(struct radeon_device *rdev)
1497 {
1498 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1499
1500 pi->clk_regs.rv770.cg_spll_func_cntl =
1501 RREG32(CG_SPLL_FUNC_CNTL);
1502 pi->clk_regs.rv770.cg_spll_func_cntl_2 =
1503 RREG32(CG_SPLL_FUNC_CNTL_2);
1504 pi->clk_regs.rv770.cg_spll_func_cntl_3 =
1505 RREG32(CG_SPLL_FUNC_CNTL_3);
1506 pi->clk_regs.rv770.cg_spll_spread_spectrum =
1507 RREG32(CG_SPLL_SPREAD_SPECTRUM);
1508 pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
1509 RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1510 pi->clk_regs.rv770.mpll_ad_func_cntl =
1511 RREG32(MPLL_AD_FUNC_CNTL);
1512 pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
1513 RREG32(MPLL_AD_FUNC_CNTL_2);
1514 pi->clk_regs.rv770.mpll_dq_func_cntl =
1515 RREG32(MPLL_DQ_FUNC_CNTL);
1516 pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
1517 RREG32(MPLL_DQ_FUNC_CNTL_2);
1518 pi->clk_regs.rv770.mclk_pwrmgt_cntl =
1519 RREG32(MCLK_PWRMGT_CNTL);
1520 pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
1521 }
1522
1523 static void r7xx_read_clock_registers(struct radeon_device *rdev)
1524 {
1525 if (rdev->family == CHIP_RV740)
1526 rv740_read_clock_registers(rdev);
1527 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1528 rv730_read_clock_registers(rdev);
1529 else
1530 rv770_read_clock_registers(rdev);
1531 }
1532
1533 void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
1534 {
1535 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1536
1537 pi->s0_vid_lower_smio_cntl =
1538 RREG32(S0_VID_LOWER_SMIO_CNTL);
1539 }
1540
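/*
 * Latch whichever VID SMIO pattern is currently selected by the SW
 * SMIO index into slot 0 and point the index back at slot 0, so that
 * the current voltage selection carries over.
 */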
1541 void rv770_reset_smio_status(struct radeon_device *rdev)
1542 {
1543 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1544 u32 sw_smio_index, vid_smio_cntl;
1545
1546 sw_smio_index =
1547 (RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1548 switch (sw_smio_index) {
1549 case 3:
1550 vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1551 break;
1552 case 2:
1553 vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1554 break;
1555 case 1:
1556 vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1557 break;
1558 case 0:
1559 return;
1560 default:
1561 vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1562 break;
1563 }
1564
1565 WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1566 WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1567 }
1568
1569 void rv770_get_memory_type(struct radeon_device *rdev)
1570 {
1571 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1572 u32 tmp;
1573
1574 tmp = RREG32(MC_SEQ_MISC0);
1575
1576 if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1577 MC_SEQ_MISC0_GDDR5_VALUE)
1578 pi->mem_gddr5 = true;
1579 else
1580 pi->mem_gddr5 = false;
1581
1582 }
1583
1584 void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1585 {
1586 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1587 u32 tmp;
1588
1589 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1590
1591 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1592 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1593 pi->pcie_gen2 = true;
1594 else
1595 pi->pcie_gen2 = false;
1596
1597 if (pi->pcie_gen2) {
1598 if (tmp & LC_CURRENT_DATA_RATE)
1599 pi->boot_in_gen2 = true;
1600 else
1601 pi->boot_in_gen2 = false;
1602 } else
1603 pi->boot_in_gen2 = false;
1604 }
1605
1606 #if 0
1607 static int rv770_enter_ulp_state(struct radeon_device *rdev)
1608 {
1609 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1610
1611 if (pi->gfx_clock_gating) {
1612 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1613 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1614 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1615 RREG32(GB_TILING_CONFIG);
1616 }
1617
1618 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1619 ~HOST_SMC_MSG_MASK);
1620
1621 udelay(7000);
1622
1623 return 0;
1624 }
1625
1626 static int rv770_exit_ulp_state(struct radeon_device *rdev)
1627 {
1628 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1629 int i;
1630
1631 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1632 ~HOST_SMC_MSG_MASK);
1633
1634 udelay(7000);
1635
1636 for (i = 0; i < rdev->usec_timeout; i++) {
1637 if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1638 break;
1639 udelay(1000);
1640 }
1641
1642 if (pi->gfx_clock_gating)
1643 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1644
1645 return 0;
1646 }
1647 #endif
1648
1649 static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1650 {
1651 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1652 u8 memory_module_index;
1653 struct atom_memory_info memory_info;
1654
1655 pi->mclk_odt_threshold = 0;
1656
1657 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1658 memory_module_index = rv770_get_memory_module_index(rdev);
1659
1660 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1661 return;
1662
1663 if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1664 memory_info.mem_type == MEM_TYPE_DDR3)
1665 pi->mclk_odt_threshold = 30000;
1666 }
1667 }
1668
1669 void rv770_get_max_vddc(struct radeon_device *rdev)
1670 {
1671 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1672 u16 vddc;
1673
1674 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1675 pi->max_vddc = 0;
1676 else
1677 pi->max_vddc = vddc;
1678 }
1679
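/*
 * Convert the voltage and backbias response times and the ACPI and
 * vblank timeouts (all in microseconds, with 1 ms defaults where
 * unset) into reference-clock based delay counts and store them in
 * the SMC soft registers.
 */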
1680 void rv770_program_response_times(struct radeon_device *rdev)
1681 {
1682 u32 voltage_response_time, backbias_response_time;
1683 u32 acpi_delay_time, vbi_time_out;
1684 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
1685 u32 reference_clock;
1686
1687 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1688 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1689
1690 if (voltage_response_time == 0)
1691 voltage_response_time = 1000;
1692
1693 if (backbias_response_time == 0)
1694 backbias_response_time = 1000;
1695
1696 acpi_delay_time = 15000;
1697 vbi_time_out = 100000;
1698
1699 reference_clock = radeon_get_xclk(rdev);
1700
1701 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1702 bb_dly = (backbias_response_time * reference_clock) / 1600;
1703 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1704 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1705
1706 rv770_write_smc_soft_register(rdev,
1707 RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1708 rv770_write_smc_soft_register(rdev,
1709 RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1710 rv770_write_smc_soft_register(rdev,
1711 RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1712 rv770_write_smc_soft_register(rdev,
1713 RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1714 #if 0
1715 /* XXX look up hw revision */
1716 if (WEKIVA_A21)
1717 rv770_write_smc_soft_register(rdev,
1718 RV770_SMC_SOFT_REGISTER_baby_step_timer,
1719 0x10);
1720 #endif
1721 }
1722
1723 static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev)
1724 {
1725 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1726 struct radeon_ps *radeon_new_state = rdev->pm.dpm.requested_ps;
1727 struct radeon_ps *radeon_current_state = rdev->pm.dpm.current_ps;
1728 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1729 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1730 bool current_use_dc = false;
1731 bool new_use_dc = false;
1732
1733 if (pi->mclk_odt_threshold == 0)
1734 return;
1735
1736 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1737 current_use_dc = true;
1738
1739 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1740 new_use_dc = true;
1741
1742 if (current_use_dc == new_use_dc)
1743 return;
1744
1745 if (!current_use_dc && new_use_dc)
1746 return;
1747
1748 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1749 rv730_program_dcodt(rdev, new_use_dc);
1750 }
1751
1752 static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev)
1753 {
1754 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1755 struct radeon_ps *radeon_new_state = rdev->pm.dpm.requested_ps;
1756 struct radeon_ps *radeon_current_state = rdev->pm.dpm.current_ps;
1757 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1758 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1759 bool current_use_dc = false;
1760 bool new_use_dc = false;
1761
1762 if (pi->mclk_odt_threshold == 0)
1763 return;
1764
1765 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1766 current_use_dc = true;
1767
1768 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1769 new_use_dc = true;
1770
1771 if (current_use_dc == new_use_dc)
1772 return;
1773
1774 if (current_use_dc && !new_use_dc)
1775 return;
1776
1777 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1778 rv730_program_dcodt(rdev, new_use_dc);
1779 }
1780
1781 static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1782 {
1783 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1784
1785 if (pi->mclk_odt_threshold == 0)
1786 return;
1787
1788 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1789 rv730_get_odt_values(rdev);
1790 }
1791
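/*
 * Select the DPM thermal event source (internal digital sensor,
 * external GPIO, or either) based on the active auto-throttle
 * sources and enable or disable thermal protection accordingly.
 */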
1792 static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
1793 {
1794 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1795 bool want_thermal_protection;
1796 enum radeon_dpm_event_src dpm_event_src;
1797
1798 switch (sources) {
1799 case 0:
1800 default:
1801 want_thermal_protection = false;
1802 break;
1803 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
1804 want_thermal_protection = true;
1805 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
1806 break;
1807
1808 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
1809 want_thermal_protection = true;
1810 dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
1811 break;
1812
1813 case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
1814 (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
1815 want_thermal_protection = true;
1816 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
1817 break;
1818 }
1819
1820 if (want_thermal_protection) {
1821 WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
1822 if (pi->thermal_protection)
1823 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
1824 } else {
1825 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
1826 }
1827 }
1828
1829 void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
1830 enum radeon_dpm_auto_throttle_src source,
1831 bool enable)
1832 {
1833 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1834
1835 if (enable) {
1836 if (!(pi->active_auto_throttle_sources & (1 << source))) {
1837 pi->active_auto_throttle_sources |= 1 << source;
1838 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1839 }
1840 } else {
1841 if (pi->active_auto_throttle_sources & (1 << source)) {
1842 pi->active_auto_throttle_sources &= ~(1 << source);
1843 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1844 }
1845 }
1846 }
1847
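/*
 * Clamp the requested thermal range to 0..255 degrees C, program the
 * thermal interrupt high/low thresholds and the DPM throttle temperature
 * (the registers take whole degrees, the API takes millidegrees), and
 * cache the resulting range in rdev->pm.dpm.thermal.
 */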
1848 int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1849 int min_temp, int max_temp)
1850 {
1851 int low_temp = 0 * 1000;
1852 int high_temp = 255 * 1000;
1853
1854 if (low_temp < min_temp)
1855 low_temp = min_temp;
1856 if (high_temp > max_temp)
1857 high_temp = max_temp;
1858 if (high_temp < low_temp) {
1859 DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1860 return -EINVAL;
1861 }
1862
1863 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1864 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1865 WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1866
1867 rdev->pm.dpm.thermal.min_temp = low_temp;
1868 rdev->pm.dpm.thermal.max_temp = high_temp;
1869
1870 return 0;
1871 }
1872
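/*
 * Bring up dpm: program voltage/ODT/MVDD and backbias as available,
 * enable spread spectrum and thermal protection, set up the clock,
 * timing and display-gap parameters, upload the SMC firmware and state
 * table, start the SMC and dpm, re-enable clock gating, and finally hook
 * up the thermal interrupt and the internal thermal throttle source.
 */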
1873 int rv770_dpm_enable(struct radeon_device *rdev)
1874 {
1875 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1876
1877 if (pi->gfx_clock_gating)
1878 rv770_restore_cgcg(rdev);
1879
1880 if (rv770_dpm_enabled(rdev))
1881 return -EINVAL;
1882
1883 if (pi->voltage_control) {
1884 rv770_enable_voltage_control(rdev, true);
1885 rv770_construct_vddc_table(rdev);
1886 }
1887
1888 if (pi->dcodt)
1889 rv770_retrieve_odt_values(rdev);
1890
1891 if (pi->mvdd_control)
1892 rv770_get_mvdd_configuration(rdev);
1893
1894 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1895 rv770_enable_backbias(rdev, true);
1896
1897 rv770_enable_spread_spectrum(rdev, true);
1898
1899 if (pi->thermal_protection)
1900 rv770_enable_thermal_protection(rdev, true);
1901
1902 rv770_program_mpll_timing_parameters(rdev);
1903 rv770_setup_bsp(rdev);
1904 rv770_program_git(rdev);
1905 rv770_program_tp(rdev);
1906 rv770_program_tpp(rdev);
1907 rv770_program_sstp(rdev);
1908 rv770_program_engine_speed_parameters(rdev);
1909 rv770_enable_display_gap(rdev);
1910 rv770_program_vc(rdev);
1911
1912 if (pi->dynamic_pcie_gen2)
1913 rv770_enable_dynamic_pcie_gen2(rdev, true);
1914
1915 if (rv770_upload_firmware(rdev))
1916 return -EINVAL;
1917 /* get ucode version ? */
1918 if (rv770_init_smc_table(rdev))
1919 return -EINVAL;
1920 rv770_program_response_times(rdev);
1921 r7xx_start_smc(rdev);
1922
1923 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1924 rv730_start_dpm(rdev);
1925 else
1926 rv770_start_dpm(rdev);
1927
1928 if (pi->gfx_clock_gating)
1929 rv770_gfx_clock_gating_enable(rdev, true);
1930
1931 if (pi->mg_clock_gating)
1932 rv770_mg_clock_gating_enable(rdev, true);
1933
1934 if (rdev->irq.installed &&
1935 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1936 PPSMC_Result result;
1937
1938 rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1939 rdev->irq.dpm_thermal = true;
1940 radeon_irq_set(rdev);
1941 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1942
1943 if (result != PPSMC_Result_OK)
1944 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1945 }
1946
1947 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1948
1949 return 0;
1950 }
1951
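/*
 * Tear down dpm in roughly the reverse order of rv770_dpm_enable():
 * undo the vc programming, disable thermal protection, spread spectrum,
 * dynamic PCIe gen2, the thermal interrupt and clock gating, stop dpm
 * and the SMC, and reset the SMIO status.
 */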
1952 void rv770_dpm_disable(struct radeon_device *rdev)
1953 {
1954 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1955
1956 if (!rv770_dpm_enabled(rdev))
1957 return;
1958
1959 rv770_clear_vc(rdev);
1960
1961 if (pi->thermal_protection)
1962 rv770_enable_thermal_protection(rdev, false);
1963
1964 rv770_enable_spread_spectrum(rdev, false);
1965
1966 if (pi->dynamic_pcie_gen2)
1967 rv770_enable_dynamic_pcie_gen2(rdev, false);
1968
1969 if (rdev->irq.installed &&
1970 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1971 rdev->irq.dpm_thermal = false;
1972 radeon_irq_set(rdev);
1973 }
1974
1975 if (pi->gfx_clock_gating)
1976 rv770_gfx_clock_gating_enable(rdev, false);
1977
1978 if (pi->mg_clock_gating)
1979 rv770_mg_clock_gating_enable(rdev, false);
1980
1981 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1982 rv730_stop_dpm(rdev);
1983 else
1984 rv770_stop_dpm(rdev);
1985
1986 r7xx_stop_smc(rdev);
1987 rv770_reset_smio_status(rdev);
1988 }
1989
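/*
 * Switch to the requested power state: temporarily restrict performance
 * levels, adjust UVD clocks around the engine clock change, halt the SMC
 * while the new state, memory timings and ODT (if used) are programmed,
 * then resume the SMC, commit the state and lift the restrictions.
 */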
1990 int rv770_dpm_set_power_state(struct radeon_device *rdev)
1991 {
1992 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1993
1994 rv770_restrict_performance_levels_before_switch(rdev);
1995 rv770_set_uvd_clock_before_set_eng_clock(rdev);
1996 rv770_halt_smc(rdev);
1997 rv770_upload_sw_state(rdev);
1998 r7xx_program_memory_timing_parameters(rdev);
1999 if (pi->dcodt)
2000 rv770_program_dcodt_before_state_switch(rdev);
2001 rv770_resume_smc(rdev);
2002 rv770_set_sw_state(rdev);
2003 if (pi->dcodt)
2004 rv770_program_dcodt_after_state_switch(rdev);
2005 rv770_set_uvd_clock_after_set_eng_clock(rdev);
2006 rv770_unrestrict_performance_levels_after_switch(rdev);
2007
2008 return 0;
2009 }
2010
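/*
 * Force the asic back to the boot state (used on reset), keeping the
 * ODT programming consistent around the switch.
 */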
2011 void rv770_dpm_reset_asic(struct radeon_device *rdev)
2012 {
2013 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2014
2015 rv770_restrict_performance_levels_before_switch(rdev);
2016 if (pi->dcodt)
2017 rv770_program_dcodt_before_state_switch(rdev);
2018 rv770_set_boot_state(rdev);
2019 if (pi->dcodt)
2020 rv770_program_dcodt_after_state_switch(rdev);
2021 }
2022
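/*
 * One-time asic setup: cache the clock and voltage SMIO registers, the
 * memory type, the mclk ODT threshold and the PCIe gen2 status, enable
 * ACPI power management, and enable ASPM L0s/L1 and PLL sleep in L1
 * according to the platform caps reported by the power table.
 */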
2023 void rv770_dpm_setup_asic(struct radeon_device *rdev)
2024 {
2025 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2026
2027 r7xx_read_clock_registers(rdev);
2028 rv770_read_voltage_smio_registers(rdev);
2029 rv770_get_memory_type(rdev);
2030 if (pi->dcodt)
2031 rv770_get_mclk_odt_threshold(rdev);
2032 rv770_get_pcie_gen2_status(rdev);
2033
2034 rv770_enable_acpi_pm(rdev);
2035
2036 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
2037 rv770_enable_l0s(rdev);
2038 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
2039 rv770_enable_l1(rdev);
2040 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
2041 rv770_enable_pll_sleep_in_l1(rdev);
2042 }
2043
2044 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
2045 {
2046 rv770_program_display_gap(rdev);
2047 }
2048
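/*
 * Overlays for the various ATOM PowerPlay table, clock-info and state
 * revisions found in the VBIOS.
 */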
2049 union power_info {
2050 struct _ATOM_POWERPLAY_INFO info;
2051 struct _ATOM_POWERPLAY_INFO_V2 info_2;
2052 struct _ATOM_POWERPLAY_INFO_V3 info_3;
2053 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2054 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2055 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2056 };
2057
2058 union pplib_clock_info {
2059 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2060 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2061 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2062 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2063 };
2064
2065 union pplib_power_state {
2066 struct _ATOM_PPLIB_STATE v1;
2067 struct _ATOM_PPLIB_STATE_V2 v2;
2068 };
2069
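/*
 * Fill in the generic (non-clock) parts of a power state: caps and
 * classification, UVD vclk/dclk (taken from the table on newer revisions,
 * otherwise defaulted for UVD states), and the boot/UVD state pointers.
 */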
2070 static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
2071 struct radeon_ps *rps,
2072 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
2073 u8 table_rev)
2074 {
2075 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
2076 rps->class = le16_to_cpu(non_clock_info->usClassification);
2077 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
2078
2079 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
2080 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
2081 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
2082 } else if (r600_is_uvd_state(rps->class, rps->class2)) {
2083 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
2084 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
2085 } else {
2086 rps->vclk = 0;
2087 rps->dclk = 0;
2088 }
2089
2090 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
2091 rdev->pm.dpm.boot_ps = rps;
2092 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
2093 rdev->pm.dpm.uvd_ps = rps;
2094 }
2095
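/*
 * Decode one clock-info entry into the low/medium/high performance level
 * selected by index, handling both the evergreen and r600 layouts.  Also
 * patches up the leakage VDDC marker (0xff01), records the ACPI-state
 * voltages and PCIe gen2 capability, notes the ULV level on BARTS and
 * newer, tracks the min/max VDDC seen, and overrides the boot state with
 * the default clocks and voltages.
 */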
2096 static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
2097 struct radeon_ps *rps, int index,
2098 union pplib_clock_info *clock_info)
2099 {
2100 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2101 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2102 struct rv7xx_ps *ps = rv770_get_ps(rps);
2103 u32 sclk, mclk;
2104 u16 vddc;
2105 struct rv7xx_pl *pl;
2106
2107 switch (index) {
2108 case 0:
2109 pl = &ps->low;
2110 break;
2111 case 1:
2112 pl = &ps->medium;
2113 break;
2114 case 2:
2115 default:
2116 pl = &ps->high;
2117 break;
2118 }
2119
2120 if (rdev->family >= CHIP_CEDAR) {
2121 sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
2122 sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
2123 mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
2124 mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
2125
2126 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
2127 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
2128 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
2129 } else {
2130 sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
2131 sclk |= clock_info->r600.ucEngineClockHigh << 16;
2132 mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
2133 mclk |= clock_info->r600.ucMemoryClockHigh << 16;
2134
2135 pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
2136 pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
2137 }
2138
2139 pl->mclk = mclk;
2140 pl->sclk = sclk;
2141
2142 /* patch up vddc if necessary */
2143 if (pl->vddc == 0xff01) {
2144 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
2145 pl->vddc = vddc;
2146 }
2147
2148 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
2149 pi->acpi_vddc = pl->vddc;
2150 if (rdev->family >= CHIP_CEDAR)
2151 eg_pi->acpi_vddci = pl->vddci;
2152 if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
2153 pi->acpi_pcie_gen2 = true;
2154 else
2155 pi->acpi_pcie_gen2 = false;
2156 }
2157
2158 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
2159 if (rdev->family >= CHIP_BARTS) {
2160 eg_pi->ulv.supported = true;
2161 eg_pi->ulv.pl = pl;
2162 }
2163 }
2164
2165 if (pi->min_vddc_in_table > pl->vddc)
2166 pi->min_vddc_in_table = pl->vddc;
2167
2168 if (pi->max_vddc_in_table < pl->vddc)
2169 pi->max_vddc_in_table = pl->vddc;
2170
2171 /* patch up boot state */
2172 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
2173 u16 vddc, vddci;
2174 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci);
2175 pl->mclk = rdev->clock.default_mclk;
2176 pl->sclk = rdev->clock.default_sclk;
2177 pl->vddc = vddc;
2178 pl->vddci = vddci;
2179 }
2180 }
2181
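/*
 * Walk the PPLib PowerPlay table in the VBIOS: allocate one radeon_ps per
 * state, record the platform caps and response times, and parse each
 * state's non-clock info plus its clock-info entries (mapped to the
 * low/medium/high levels).
 */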
2182 int rv7xx_parse_power_table(struct radeon_device *rdev)
2183 {
2184 struct radeon_mode_info *mode_info = &rdev->mode_info;
2185 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2186 union pplib_power_state *power_state;
2187 int i, j;
2188 union pplib_clock_info *clock_info;
2189 union power_info *power_info;
2190 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2191 u16 data_offset;
2192 u8 frev, crev;
2193 struct rv7xx_ps *ps;
2194
2195 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2196 &frev, &crev, &data_offset))
2197 return -EINVAL;
2198 power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2199
2200 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
2201 power_info->pplib.ucNumStates, GFP_KERNEL);
2202 if (!rdev->pm.dpm.ps)
2203 return -ENOMEM;
2204 rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
2205 rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
2206 rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
2207
2208 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2209 power_state = (union pplib_power_state *)
2210 (mode_info->atom_context->bios + data_offset +
2211 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2212 i * power_info->pplib.ucStateEntrySize);
2213 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2214 (mode_info->atom_context->bios + data_offset +
2215 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2216 (power_state->v1.ucNonClockStateIndex *
2217 power_info->pplib.ucNonClockSize));
2218 if (power_info->pplib.ucStateEntrySize - 1) {
2219 ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2220 if (ps == NULL) {
2221 kfree(rdev->pm.dpm.ps);
2222 return -ENOMEM;
2223 }
2224 rdev->pm.dpm.ps[i].ps_priv = ps;
2225 rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2226 non_clock_info,
2227 power_info->pplib.ucNonClockSize);
2228 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2229 clock_info = (union pplib_clock_info *)
2230 (mode_info->atom_context->bios + data_offset +
2231 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2232 (power_state->v1.ucClockStateIndices[j] *
2233 power_info->pplib.ucClockInfoSize));
2234 rv7xx_parse_pplib_clock_info(rdev,
2235 &rdev->pm.dpm.ps[i], j,
2236 clock_info);
2237 }
2238 }
2239 }
2240 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2241 return 0;
2242 }
2243
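/*
 * Allocate and populate the rv7xx power info: parse the power table,
 * apply the default response times and engine PLL reference divider,
 * detect GPIO-based VDDC/MVDD control and spread-spectrum support from
 * the internal SS table, and set the default thresholds and feature
 * flags (clock gating, dynamic PCIe gen2, thermal protection, dcodt on
 * mobility parts, etc.).
 */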
2244 int rv770_dpm_init(struct radeon_device *rdev)
2245 {
2246 struct rv7xx_power_info *pi;
2247 int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
2248 uint16_t data_offset, size;
2249 uint8_t frev, crev;
2250 struct atom_clock_dividers dividers;
2251 int ret;
2252
2253 pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2254 if (pi == NULL)
2255 return -ENOMEM;
2256 rdev->pm.dpm.priv = pi;
2257
2258 rv770_get_max_vddc(rdev);
2259
2260 pi->acpi_vddc = 0;
2261 pi->min_vddc_in_table = 0;
2262 pi->max_vddc_in_table = 0;
2263
2264 ret = rv7xx_parse_power_table(rdev);
2265 if (ret)
2266 return ret;
2267
2268 if (rdev->pm.dpm.voltage_response_time == 0)
2269 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2270 if (rdev->pm.dpm.backbias_response_time == 0)
2271 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2272
2273 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2274 0, false, &dividers);
2275 if (ret == 0)
2276 pi->ref_div = dividers.ref_div + 1;
2277 else
2278 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2279
2280 pi->mclk_strobe_mode_threshold = 30000;
2281 pi->mclk_edc_enable_threshold = 30000;
2282
2283 pi->rlp = RV770_RLP_DFLT;
2284 pi->rmp = RV770_RMP_DFLT;
2285 pi->lhp = RV770_LHP_DFLT;
2286 pi->lmp = RV770_LMP_DFLT;
2287
2288 pi->voltage_control =
2289 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC);
2290
2291 pi->mvdd_control =
2292 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC);
2293
2294 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
2295 &frev, &crev, &data_offset)) {
2296 pi->sclk_ss = true;
2297 pi->mclk_ss = true;
2298 pi->dynamic_ss = true;
2299 } else {
2300 pi->sclk_ss = false;
2301 pi->mclk_ss = false;
2302 pi->dynamic_ss = false;
2303 }
2304
2305 pi->asi = RV770_ASI_DFLT;
2306 pi->pasi = RV770_HASI_DFLT;
2307 pi->vrc = RV770_VRC_DFLT;
2308
2309 pi->power_gating = false;
2310
2311 pi->gfx_clock_gating = true;
2312
2313 pi->mg_clock_gating = true;
2314 pi->mgcgtssm = true;
2315
2316 pi->dynamic_pcie_gen2 = true;
2317
2318 if (pi->gfx_clock_gating &&
2319 (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
2320 pi->thermal_protection = true;
2321 else
2322 pi->thermal_protection = false;
2323
2324 pi->display_gap = true;
2325
2326 if (rdev->flags & RADEON_IS_MOBILITY)
2327 pi->dcodt = true;
2328 else
2329 pi->dcodt = false;
2330
2331 pi->ulps = true;
2332
2333 pi->mclk_stutter_mode_threshold = 0;
2334
2335 pi->sram_end = SMC_RAM_END;
2336 pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2337 pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2338
2339 return 0;
2340 }
2341
2342 void rv770_dpm_print_power_state(struct radeon_device *rdev,
2343 struct radeon_ps *rps)
2344 {
2345 struct rv7xx_ps *ps = rv770_get_ps(rps);
2346 struct rv7xx_pl *pl;
2347
2348 r600_dpm_print_class_info(rps->class, rps->class2);
2349 r600_dpm_print_cap_info(rps->caps);
2350 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2351 if (rdev->family >= CHIP_CEDAR) {
2352 pl = &ps->low;
2353 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2354 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2355 pl = &ps->medium;
2356 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2357 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2358 pl = &ps->high;
2359 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2360 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2361 } else {
2362 pl = &ps->low;
2363 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
2364 pl->sclk, pl->mclk, pl->vddc);
2365 pl = &ps->medium;
2366 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
2367 pl->sclk, pl->mclk, pl->vddc);
2368 pl = &ps->high;
2369 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
2370 pl->sclk, pl->mclk, pl->vddc);
2371 }
2372 r600_dpm_print_ps_status(rdev, rps);
2373 }
2374
2375 void rv770_dpm_fini(struct radeon_device *rdev)
2376 {
2377 int i;
2378
2379 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2380 kfree(rdev->pm.dpm.ps[i].ps_priv);
2381 }
2382 kfree(rdev->pm.dpm.ps);
2383 kfree(rdev->pm.dpm.priv);
2384 }
2385
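/*
 * The two helpers below report the engine/memory clock of either the
 * lowest or the highest performance level of the requested power state.
 */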
2386 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2387 {
2388 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2389
2390 if (low)
2391 return requested_state->low.sclk;
2392 else
2393 return requested_state->high.sclk;
2394 }
2395
2396 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2397 {
2398 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2399
2400 if (low)
2401 return requested_state->low.mclk;
2402 else
2403 return requested_state->high.mclk;
2404 }