drm/i915: export error state ref handling
[deliverable/linux.git] / drivers / gpu / drm / radeon / rv770_dpm.c
CommitLineData
66229b20
AD
1/*
2 * Copyright 2011 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24
25#include "drmP.h"
26#include "radeon.h"
27#include "rv770d.h"
28#include "r600_dpm.h"
29#include "rv770_dpm.h"
dc50ba7f 30#include "cypress_dpm.h"
66229b20
AD
31#include "atom.h"
32
33#define MC_CG_ARB_FREQ_F0 0x0a
34#define MC_CG_ARB_FREQ_F1 0x0b
35#define MC_CG_ARB_FREQ_F2 0x0c
36#define MC_CG_ARB_FREQ_F3 0x0d
37
38#define MC_CG_SEQ_DRAMCONF_S0 0x05
39#define MC_CG_SEQ_DRAMCONF_S1 0x06
40
41#define PCIE_BUS_CLK 10000
42#define TCLK (PCIE_BUS_CLK / 10)
43
44#define SMC_RAM_END 0xC000
45
46struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
47{
48 struct rv7xx_ps *ps = rps->ps_priv;
49
50 return ps;
51}
52
53struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
54{
55 struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
56
57 return pi;
58}
59
dc50ba7f
AD
60struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
61{
62 struct evergreen_power_info *pi = rdev->pm.dpm.priv;
63
64 return pi;
65}
66
66229b20
AD
67static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
68 bool enable)
69{
70 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
71 u32 tmp;
72
73 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
74 if (enable) {
75 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
76 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
77 tmp |= LC_GEN2_EN_STRAP;
78 } else {
79 if (!pi->boot_in_gen2) {
80 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
81 tmp &= ~LC_GEN2_EN_STRAP;
82 }
83 }
84 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
85 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
86 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
87
88}
89
90static void rv770_enable_l0s(struct radeon_device *rdev)
91{
92 u32 tmp;
93
94 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
95 tmp |= LC_L0S_INACTIVITY(3);
96 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
97}
98
99static void rv770_enable_l1(struct radeon_device *rdev)
100{
101 u32 tmp;
102
103 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
104 tmp &= ~LC_L1_INACTIVITY_MASK;
105 tmp |= LC_L1_INACTIVITY(4);
106 tmp &= ~LC_PMI_TO_L1_DIS;
107 tmp &= ~LC_ASPM_TO_L1_DIS;
108 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
109}
110
111static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
112{
113 u32 tmp;
114
115 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
116 tmp |= LC_L1_INACTIVITY(8);
117 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
118
119 /* NOTE, this is a PCIE indirect reg, not PCIE PORT */
120 tmp = RREG32_PCIE(PCIE_P_CNTL);
121 tmp |= P_PLL_PWRDN_IN_L1L23;
122 tmp &= ~P_PLL_BUF_PDNB;
123 tmp &= ~P_PLL_PDNB;
124 tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
125 WREG32_PCIE(PCIE_P_CNTL, tmp);
126}
127
128static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
129 bool enable)
130{
131 if (enable)
132 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
133 else {
134 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
135 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
136 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
137 RREG32(GB_TILING_CONFIG);
138 }
139}
140
141static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
142 bool enable)
143{
144 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
145
146 if (enable) {
147 u32 mgcg_cgtt_local0;
148
149 if (rdev->family == CHIP_RV770)
150 mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
151 else
152 mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;
153
154 WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
155 WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));
156
157 if (pi->mgcgtssm)
158 WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
159 } else {
160 WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
161 WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
162 }
163}
164
165void rv770_restore_cgcg(struct radeon_device *rdev)
166{
167 bool dpm_en = false, cg_en = false;
168
169 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
170 dpm_en = true;
171 if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
172 cg_en = true;
173
174 if (dpm_en && !cg_en)
175 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
176}
177
178static void rv770_start_dpm(struct radeon_device *rdev)
179{
180 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
181
182 WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
183
184 WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
185}
186
187void rv770_stop_dpm(struct radeon_device *rdev)
188{
189 PPSMC_Result result;
190
191 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);
192
193 if (result != PPSMC_Result_OK)
194 DRM_ERROR("Could not force DPM to low.\n");
195
196 WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
197
198 WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
199
200 WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
201}
202
203bool rv770_dpm_enabled(struct radeon_device *rdev)
204{
205 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
206 return true;
207 else
208 return false;
209}
210
211void rv770_enable_thermal_protection(struct radeon_device *rdev,
212 bool enable)
213{
214 if (enable)
215 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
216 else
217 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
218}
219
220void rv770_enable_acpi_pm(struct radeon_device *rdev)
221{
222 WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
223}
224
225u8 rv770_get_seq_value(struct radeon_device *rdev,
226 struct rv7xx_pl *pl)
227{
228 return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
229 MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
230}
231
232int rv770_read_smc_soft_register(struct radeon_device *rdev,
233 u16 reg_offset, u32 *value)
234{
235 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
236
237 return rv770_read_smc_sram_dword(rdev,
238 pi->soft_regs_start + reg_offset,
239 value, pi->sram_end);
240}
241
242int rv770_write_smc_soft_register(struct radeon_device *rdev,
243 u16 reg_offset, u32 value)
244{
245 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
246
247 return rv770_write_smc_sram_dword(rdev,
248 pi->soft_regs_start + reg_offset,
249 value, pi->sram_end);
250}
251
252int rv770_populate_smc_t(struct radeon_device *rdev,
253 struct radeon_ps *radeon_state,
254 RV770_SMC_SWSTATE *smc_state)
255{
256 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
257 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
258 int i;
259 int a_n;
260 int a_d;
261 u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
262 u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
263 u32 a_t;
264
265 l[0] = 0;
266 r[2] = 100;
267
f85392bc
AD
268 a_n = (int)state->medium.sclk * pi->lmp +
269 (int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
270 a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
271 (int)state->medium.sclk * pi->lmp;
66229b20 272
f85392bc
AD
273 l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
274 r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);
66229b20 275
f85392bc
AD
276 a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
277 (R600_AH_DFLT - pi->rmp);
278 a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
279 (int)state->high.sclk * pi->lhp;
66229b20 280
f85392bc
AD
281 l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
282 r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);
66229b20
AD
283
284 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
285 a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
286 smc_state->levels[i].aT = cpu_to_be32(a_t);
287 }
288
289 a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
290 CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);
291
292 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
293 cpu_to_be32(a_t);
294
295 return 0;
296}
297
298int rv770_populate_smc_sp(struct radeon_device *rdev,
299 struct radeon_ps *radeon_state,
300 RV770_SMC_SWSTATE *smc_state)
301{
302 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
303 int i;
304
305 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
306 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
307
308 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
309 cpu_to_be32(pi->psp);
310
311 return 0;
312}
313
314static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
315 u32 reference_clock,
316 bool gddr5,
317 struct atom_clock_dividers *dividers,
318 u32 *clkf,
319 u32 *clkfrac)
320{
321 u32 post_divider, reference_divider, feedback_divider8;
322 u32 fyclk;
323
324 if (gddr5)
325 fyclk = (memory_clock * 8) / 2;
326 else
327 fyclk = (memory_clock * 4) / 2;
328
329 post_divider = dividers->post_div;
330 reference_divider = dividers->ref_div;
331
332 feedback_divider8 =
333 (8 * fyclk * reference_divider * post_divider) / reference_clock;
334
335 *clkf = feedback_divider8 / 8;
336 *clkfrac = feedback_divider8 % 8;
337}
338
339static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
340{
341 int ret = 0;
342
343 switch (postdiv) {
344 case 1:
345 *encoded_postdiv = 0;
346 break;
347 case 2:
348 *encoded_postdiv = 1;
349 break;
350 case 4:
351 *encoded_postdiv = 2;
352 break;
353 case 8:
354 *encoded_postdiv = 3;
355 break;
356 case 16:
357 *encoded_postdiv = 4;
358 break;
359 default:
360 ret = -EINVAL;
361 break;
362 }
363
364 return ret;
365}
366
367u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
368{
369 if (clkf <= 0x10)
370 return 0x4B;
371 if (clkf <= 0x19)
372 return 0x5B;
373 if (clkf <= 0x21)
374 return 0x2B;
375 if (clkf <= 0x27)
376 return 0x6C;
377 if (clkf <= 0x31)
378 return 0x9D;
379 return 0xC6;
380}
381
382static int rv770_populate_mclk_value(struct radeon_device *rdev,
383 u32 engine_clock, u32 memory_clock,
384 RV7XX_SMC_MCLK_VALUE *mclk)
385{
386 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
387 u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
388 u32 mpll_ad_func_cntl =
389 pi->clk_regs.rv770.mpll_ad_func_cntl;
390 u32 mpll_ad_func_cntl_2 =
391 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
392 u32 mpll_dq_func_cntl =
393 pi->clk_regs.rv770.mpll_dq_func_cntl;
394 u32 mpll_dq_func_cntl_2 =
395 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
396 u32 mclk_pwrmgt_cntl =
397 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
398 u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
399 struct atom_clock_dividers dividers;
400 u32 reference_clock = rdev->clock.mpll.reference_freq;
401 u32 clkf, clkfrac;
402 u32 postdiv_yclk;
403 u32 ibias;
404 int ret;
405
406 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
407 memory_clock, false, &dividers);
408 if (ret)
409 return ret;
410
411 if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
412 return -EINVAL;
413
414 rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
415 pi->mem_gddr5,
416 &dividers, &clkf, &clkfrac);
417
418 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
419 if (ret)
420 return ret;
421
422 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
423
424 mpll_ad_func_cntl &= ~(CLKR_MASK |
425 YCLK_POST_DIV_MASK |
426 CLKF_MASK |
427 CLKFRAC_MASK |
428 IBIAS_MASK);
429 mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
430 mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
431 mpll_ad_func_cntl |= CLKF(clkf);
432 mpll_ad_func_cntl |= CLKFRAC(clkfrac);
433 mpll_ad_func_cntl |= IBIAS(ibias);
434
435 if (dividers.vco_mode)
436 mpll_ad_func_cntl_2 |= VCO_MODE;
437 else
438 mpll_ad_func_cntl_2 &= ~VCO_MODE;
439
440 if (pi->mem_gddr5) {
441 rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
442 reference_clock,
443 pi->mem_gddr5,
444 &dividers, &clkf, &clkfrac);
445
446 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
447
448 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
449 if (ret)
450 return ret;
451
452 mpll_dq_func_cntl &= ~(CLKR_MASK |
453 YCLK_POST_DIV_MASK |
454 CLKF_MASK |
455 CLKFRAC_MASK |
456 IBIAS_MASK);
457 mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
458 mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
459 mpll_dq_func_cntl |= CLKF(clkf);
460 mpll_dq_func_cntl |= CLKFRAC(clkfrac);
461 mpll_dq_func_cntl |= IBIAS(ibias);
462
463 if (dividers.vco_mode)
464 mpll_dq_func_cntl_2 |= VCO_MODE;
465 else
466 mpll_dq_func_cntl_2 &= ~VCO_MODE;
467 }
468
469 mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
470 mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
471 mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
472 mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
473 mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
474 mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
475 mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
476
477 return 0;
478}
479
480static int rv770_populate_sclk_value(struct radeon_device *rdev,
481 u32 engine_clock,
482 RV770_SMC_SCLK_VALUE *sclk)
483{
484 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
485 struct atom_clock_dividers dividers;
486 u32 spll_func_cntl =
487 pi->clk_regs.rv770.cg_spll_func_cntl;
488 u32 spll_func_cntl_2 =
489 pi->clk_regs.rv770.cg_spll_func_cntl_2;
490 u32 spll_func_cntl_3 =
491 pi->clk_regs.rv770.cg_spll_func_cntl_3;
492 u32 cg_spll_spread_spectrum =
493 pi->clk_regs.rv770.cg_spll_spread_spectrum;
494 u32 cg_spll_spread_spectrum_2 =
495 pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
496 u64 tmp;
497 u32 reference_clock = rdev->clock.spll.reference_freq;
498 u32 reference_divider, post_divider;
499 u32 fbdiv;
500 int ret;
501
502 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
503 engine_clock, false, &dividers);
504 if (ret)
505 return ret;
506
507 reference_divider = 1 + dividers.ref_div;
508
509 if (dividers.enable_post_div)
510 post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
511 else
512 post_divider = 1;
513
514 tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
515 do_div(tmp, reference_clock);
516 fbdiv = (u32) tmp;
517
518 if (dividers.enable_post_div)
519 spll_func_cntl |= SPLL_DIVEN;
520 else
521 spll_func_cntl &= ~SPLL_DIVEN;
522 spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
523 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
524 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
525 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);
526
527 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
528 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
529
530 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
531 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
532 spll_func_cntl_3 |= SPLL_DITHEN;
533
534 if (pi->sclk_ss) {
535 struct radeon_atom_ss ss;
536 u32 vco_freq = engine_clock * post_divider;
537
538 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
539 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
540 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
541 u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);
542
543 cg_spll_spread_spectrum &= ~CLKS_MASK;
544 cg_spll_spread_spectrum |= CLKS(clk_s);
545 cg_spll_spread_spectrum |= SSEN;
546
547 cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
548 cg_spll_spread_spectrum_2 |= CLKV(clk_v);
549 }
550 }
551
552 sclk->sclk_value = cpu_to_be32(engine_clock);
553 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
554 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
555 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
556 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
557 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);
558
559 return 0;
560}
561
562int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
563 RV770_SMC_VOLTAGE_VALUE *voltage)
564{
565 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
566 int i;
567
568 if (!pi->voltage_control) {
569 voltage->index = 0;
570 voltage->value = 0;
571 return 0;
572 }
573
574 for (i = 0; i < pi->valid_vddc_entries; i++) {
575 if (vddc <= pi->vddc_table[i].vddc) {
576 voltage->index = pi->vddc_table[i].vddc_index;
577 voltage->value = cpu_to_be16(vddc);
578 break;
579 }
580 }
581
582 if (i == pi->valid_vddc_entries)
583 return -EINVAL;
584
585 return 0;
586}
587
588int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
589 RV770_SMC_VOLTAGE_VALUE *voltage)
590{
591 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
592
593 if (!pi->mvdd_control) {
594 voltage->index = MVDD_HIGH_INDEX;
595 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
596 return 0;
597 }
598
599 if (mclk <= pi->mvdd_split_frequency) {
600 voltage->index = MVDD_LOW_INDEX;
601 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
602 } else {
603 voltage->index = MVDD_HIGH_INDEX;
604 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
605 }
606
607 return 0;
608}
609
610static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
611 struct rv7xx_pl *pl,
612 RV770_SMC_HW_PERFORMANCE_LEVEL *level,
613 u8 watermark_level)
614{
615 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
616 int ret;
617
618 level->gen2PCIE = pi->pcie_gen2 ?
619 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
620 level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
621 level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
622 level->displayWatermark = watermark_level;
623
624 if (rdev->family == CHIP_RV740)
625 ret = rv740_populate_sclk_value(rdev, pl->sclk,
626 &level->sclk);
627 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
628 ret = rv730_populate_sclk_value(rdev, pl->sclk,
629 &level->sclk);
630 else
631 ret = rv770_populate_sclk_value(rdev, pl->sclk,
632 &level->sclk);
633 if (ret)
634 return ret;
635
636 if (rdev->family == CHIP_RV740) {
637 if (pi->mem_gddr5) {
638 if (pl->mclk <= pi->mclk_strobe_mode_threshold)
639 level->strobeMode =
640 rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
641 else
642 level->strobeMode = 0;
643
644 if (pl->mclk > pi->mclk_edc_enable_threshold)
645 level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
646 else
647 level->mcFlags = 0;
648 }
649 ret = rv740_populate_mclk_value(rdev, pl->sclk,
650 pl->mclk, &level->mclk);
651 } else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
652 ret = rv730_populate_mclk_value(rdev, pl->sclk,
653 pl->mclk, &level->mclk);
654 else
655 ret = rv770_populate_mclk_value(rdev, pl->sclk,
656 pl->mclk, &level->mclk);
657 if (ret)
658 return ret;
659
660 ret = rv770_populate_vddc_value(rdev, pl->vddc,
661 &level->vddc);
662 if (ret)
663 return ret;
664
665 ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
666
667 return ret;
668}
669
670static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
671 struct radeon_ps *radeon_state,
672 RV770_SMC_SWSTATE *smc_state)
673{
674 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
675 int ret;
676
677 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
678 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
679
680 ret = rv770_convert_power_level_to_smc(rdev,
681 &state->low,
682 &smc_state->levels[0],
683 PPSMC_DISPLAY_WATERMARK_LOW);
684 if (ret)
685 return ret;
686
687 ret = rv770_convert_power_level_to_smc(rdev,
688 &state->medium,
689 &smc_state->levels[1],
690 PPSMC_DISPLAY_WATERMARK_LOW);
691 if (ret)
692 return ret;
693
694 ret = rv770_convert_power_level_to_smc(rdev,
695 &state->high,
696 &smc_state->levels[2],
697 PPSMC_DISPLAY_WATERMARK_HIGH);
698 if (ret)
699 return ret;
700
701 smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
702 smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
703 smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
704
705 smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
706 &state->low);
707 smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
708 &state->medium);
709 smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
710 &state->high);
711
712 rv770_populate_smc_sp(rdev, radeon_state, smc_state);
713
714 return rv770_populate_smc_t(rdev, radeon_state, smc_state);
715
716}
717
718u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
719 u32 engine_clock)
720{
721 u32 dram_rows;
722 u32 dram_refresh_rate;
723 u32 mc_arb_rfsh_rate;
724 u32 tmp;
725
726 tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
727 dram_rows = 1 << (tmp + 10);
728 tmp = RREG32(MC_SEQ_MISC0) & 3;
729 dram_refresh_rate = 1 << (tmp + 3);
730 mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
731
732 return mc_arb_rfsh_rate;
733}
734
735static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
736 struct radeon_ps *radeon_state)
737{
738 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
739 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
740 u32 sqm_ratio;
741 u32 arb_refresh_rate;
742 u32 high_clock;
743
744 if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
745 high_clock = state->high.sclk;
746 else
747 high_clock = (state->low.sclk * 0xFF / 0x40);
748
749 radeon_atom_set_engine_dram_timings(rdev, high_clock,
750 state->high.mclk);
751
752 sqm_ratio =
753 STATE0(64 * high_clock / pi->boot_sclk) |
754 STATE1(64 * high_clock / state->low.sclk) |
755 STATE2(64 * high_clock / state->medium.sclk) |
756 STATE3(64 * high_clock / state->high.sclk);
757 WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
758
759 arb_refresh_rate =
760 POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
761 POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
762 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
763 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
764 WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
765}
766
767void rv770_enable_backbias(struct radeon_device *rdev,
768 bool enable)
769{
770 if (enable)
771 WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
772 else
773 WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
774}
775
776static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
777 bool enable)
778{
779 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
780
781 if (enable) {
782 if (pi->sclk_ss)
783 WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
784
785 if (pi->mclk_ss) {
786 if (rdev->family == CHIP_RV740)
787 rv740_enable_mclk_spread_spectrum(rdev, true);
788 }
789 } else {
790 WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
791
792 WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
793
794 WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);
795
796 if (rdev->family == CHIP_RV740)
797 rv740_enable_mclk_spread_spectrum(rdev, false);
798 }
799}
800
801static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
802{
803 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
804
805 if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
806 WREG32(MPLL_TIME,
807 (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
808 MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
809 }
810}
811
812void rv770_setup_bsp(struct radeon_device *rdev)
813{
814 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
815 u32 xclk = radeon_get_xclk(rdev);
816
817 r600_calculate_u_and_p(pi->asi,
818 xclk,
819 16,
820 &pi->bsp,
821 &pi->bsu);
822
823 r600_calculate_u_and_p(pi->pasi,
824 xclk,
825 16,
826 &pi->pbsp,
827 &pi->pbsu);
828
829 pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
830 pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
831
832 WREG32(CG_BSP, pi->dsp);
833
834}
835
836void rv770_program_git(struct radeon_device *rdev)
837{
838 WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
839}
840
841void rv770_program_tp(struct radeon_device *rdev)
842{
843 int i;
844 enum r600_td td = R600_TD_DFLT;
845
846 for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
847 WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
848
849 if (td == R600_TD_AUTO)
850 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
851 else
852 WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
853 if (td == R600_TD_UP)
854 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
855 if (td == R600_TD_DOWN)
856 WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
857}
858
859void rv770_program_tpp(struct radeon_device *rdev)
860{
861 WREG32(CG_TPC, R600_TPC_DFLT);
862}
863
864void rv770_program_sstp(struct radeon_device *rdev)
865{
866 WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
867}
868
869void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
870{
871 WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
872}
873
874static void rv770_enable_display_gap(struct radeon_device *rdev)
875{
876 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
877
878 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
879 tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
880 DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
881 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
882}
883
884void rv770_program_vc(struct radeon_device *rdev)
885{
886 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
887
888 WREG32(CG_FTV, pi->vrc);
889}
890
891void rv770_clear_vc(struct radeon_device *rdev)
892{
893 WREG32(CG_FTV, 0);
894}
895
896int rv770_upload_firmware(struct radeon_device *rdev)
897{
898 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
899 int ret;
900
901 rv770_reset_smc(rdev);
902 rv770_stop_smc_clock(rdev);
903
904 ret = rv770_load_smc_ucode(rdev, pi->sram_end);
905 if (ret)
906 return ret;
907
908 return 0;
909}
910
911static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
912 RV770_SMC_STATETABLE *table)
913{
914 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
915
916 u32 mpll_ad_func_cntl =
917 pi->clk_regs.rv770.mpll_ad_func_cntl;
918 u32 mpll_ad_func_cntl_2 =
919 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
920 u32 mpll_dq_func_cntl =
921 pi->clk_regs.rv770.mpll_dq_func_cntl;
922 u32 mpll_dq_func_cntl_2 =
923 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
924 u32 spll_func_cntl =
925 pi->clk_regs.rv770.cg_spll_func_cntl;
926 u32 spll_func_cntl_2 =
927 pi->clk_regs.rv770.cg_spll_func_cntl_2;
928 u32 spll_func_cntl_3 =
929 pi->clk_regs.rv770.cg_spll_func_cntl_3;
930 u32 mclk_pwrmgt_cntl;
931 u32 dll_cntl;
932
933 table->ACPIState = table->initialState;
934
935 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
936
937 if (pi->acpi_vddc) {
938 rv770_populate_vddc_value(rdev, pi->acpi_vddc,
939 &table->ACPIState.levels[0].vddc);
940 if (pi->pcie_gen2) {
941 if (pi->acpi_pcie_gen2)
942 table->ACPIState.levels[0].gen2PCIE = 1;
943 else
944 table->ACPIState.levels[0].gen2PCIE = 0;
945 } else
946 table->ACPIState.levels[0].gen2PCIE = 0;
947 if (pi->acpi_pcie_gen2)
948 table->ACPIState.levels[0].gen2XSP = 1;
949 else
950 table->ACPIState.levels[0].gen2XSP = 0;
951 } else {
952 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
953 &table->ACPIState.levels[0].vddc);
954 table->ACPIState.levels[0].gen2PCIE = 0;
955 }
956
957
958 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
959
960 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
961
962 mclk_pwrmgt_cntl = (MRDCKA0_RESET |
963 MRDCKA1_RESET |
964 MRDCKB0_RESET |
965 MRDCKB1_RESET |
966 MRDCKC0_RESET |
967 MRDCKC1_RESET |
968 MRDCKD0_RESET |
969 MRDCKD1_RESET);
970
971 dll_cntl = 0xff000000;
972
973 spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
974
975 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
976 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
977
978 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
979 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
980 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
981 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
982
983 table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
984 table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
985
986 table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
987
988 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
989 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
990 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
991
992 table->ACPIState.levels[0].sclk.sclk_value = 0;
993
994 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
995
996 table->ACPIState.levels[1] = table->ACPIState.levels[0];
997 table->ACPIState.levels[2] = table->ACPIState.levels[0];
998
999 return 0;
1000}
1001
1002int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
1003 RV770_SMC_VOLTAGE_VALUE *voltage)
1004{
1005 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1006
1007 if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
1008 (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low) ) {
1009 voltage->index = MVDD_LOW_INDEX;
1010 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1011 } else {
1012 voltage->index = MVDD_HIGH_INDEX;
1013 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1014 }
1015
1016 return 0;
1017}
1018
1019static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
1020 struct radeon_ps *radeon_state,
1021 RV770_SMC_STATETABLE *table)
1022{
1023 struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
1024 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1025 u32 a_t;
1026
1027 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1028 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1029 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1030 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1031 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1032 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1033 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1034 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1035 table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1036 cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1037 table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1038 cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1039
1040 table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1041 cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1042 table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1043 cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1044
1045 table->initialState.levels[0].mclk.mclk770.mclk_value =
1046 cpu_to_be32(initial_state->low.mclk);
1047
1048 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1049 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1050 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1051 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1052 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1053 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1054 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1055 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1056 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1057 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1058
1059 table->initialState.levels[0].sclk.sclk_value =
1060 cpu_to_be32(initial_state->low.sclk);
1061
1062 table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1063
1064 table->initialState.levels[0].seqValue =
1065 rv770_get_seq_value(rdev, &initial_state->low);
1066
1067 rv770_populate_vddc_value(rdev,
1068 initial_state->low.vddc,
1069 &table->initialState.levels[0].vddc);
1070 rv770_populate_initial_mvdd_value(rdev,
1071 &table->initialState.levels[0].mvdd);
1072
1073 a_t = CG_R(0xffff) | CG_L(0);
1074 table->initialState.levels[0].aT = cpu_to_be32(a_t);
1075
1076 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1077
1078 if (pi->boot_in_gen2)
1079 table->initialState.levels[0].gen2PCIE = 1;
1080 else
1081 table->initialState.levels[0].gen2PCIE = 0;
1082 if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1083 table->initialState.levels[0].gen2XSP = 1;
1084 else
1085 table->initialState.levels[0].gen2XSP = 0;
1086
1087 if (rdev->family == CHIP_RV740) {
1088 if (pi->mem_gddr5) {
1089 if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
1090 table->initialState.levels[0].strobeMode =
1091 rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
1092 else
1093 table->initialState.levels[0].strobeMode = 0;
1094
1095 if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
1096 table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1097 else
1098 table->initialState.levels[0].mcFlags = 0;
1099 }
1100 }
1101
1102 table->initialState.levels[1] = table->initialState.levels[0];
1103 table->initialState.levels[2] = table->initialState.levels[0];
1104
1105 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1106
1107 return 0;
1108}
1109
1110static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
1111 RV770_SMC_STATETABLE *table)
1112{
1113 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1114 int i;
1115
1116 for (i = 0; i < pi->valid_vddc_entries; i++) {
1117 table->highSMIO[pi->vddc_table[i].vddc_index] =
1118 pi->vddc_table[i].high_smio;
1119 table->lowSMIO[pi->vddc_table[i].vddc_index] =
1120 cpu_to_be32(pi->vddc_table[i].low_smio);
1121 }
1122
1123 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1124 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1125 cpu_to_be32(pi->vddc_mask_low);
1126
1127 for (i = 0;
1128 ((i < pi->valid_vddc_entries) &&
1129 (pi->max_vddc_in_table >
1130 pi->vddc_table[i].vddc));
1131 i++);
1132
1133 table->maxVDDCIndexInPPTable =
1134 pi->vddc_table[i].vddc_index;
1135
1136 return 0;
1137}
1138
1139static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
1140 RV770_SMC_STATETABLE *table)
1141{
1142 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1143
1144 if (pi->mvdd_control) {
1145 table->lowSMIO[MVDD_HIGH_INDEX] |=
1146 cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
1147 table->lowSMIO[MVDD_LOW_INDEX] |=
1148 cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);
1149
1150 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
1151 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
1152 cpu_to_be32(pi->mvdd_mask_low);
1153 }
1154
1155 return 0;
1156}
1157
5d77d776
AD
1158static int rv770_init_smc_table(struct radeon_device *rdev,
1159 struct radeon_ps *radeon_boot_state)
66229b20
AD
1160{
1161 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
66229b20
AD
1162 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1163 RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1164 int ret;
1165
1166 memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1167
1168 pi->boot_sclk = boot_state->low.sclk;
1169
1170 rv770_populate_smc_vddc_table(rdev, table);
1171 rv770_populate_smc_mvdd_table(rdev, table);
1172
1173 switch (rdev->pm.int_thermal_type) {
1174 case THERMAL_TYPE_RV770:
1175 case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
1176 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1177 break;
1178 case THERMAL_TYPE_NONE:
1179 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1180 break;
1181 case THERMAL_TYPE_EXTERNAL_GPIO:
1182 default:
1183 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1184 break;
1185 }
1186
1187 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
1188 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1189
1190 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
1191 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;
1192
1193 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
1194 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
1195 }
1196
1197 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1198 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1199
1200 if (pi->mem_gddr5)
1201 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1202
1203 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1204 ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
1205 else
1206 ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
1207 if (ret)
1208 return ret;
1209
1210 if (rdev->family == CHIP_RV740)
1211 ret = rv740_populate_smc_acpi_state(rdev, table);
1212 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1213 ret = rv730_populate_smc_acpi_state(rdev, table);
1214 else
1215 ret = rv770_populate_smc_acpi_state(rdev, table);
1216 if (ret)
1217 return ret;
1218
1219 table->driverState = table->initialState;
1220
1221 return rv770_copy_bytes_to_smc(rdev,
1222 pi->state_table_start,
1223 (const u8 *)table,
1224 sizeof(RV770_SMC_STATETABLE),
1225 pi->sram_end);
1226}
1227
1228static int rv770_construct_vddc_table(struct radeon_device *rdev)
1229{
1230 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1231 u16 min, max, step;
1232 u32 steps = 0;
1233 u8 vddc_index = 0;
1234 u32 i;
1235
1236 radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
1237 radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
1238 radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1239
1240 steps = (max - min) / step + 1;
1241
1242 if (steps > MAX_NO_VREG_STEPS)
1243 return -EINVAL;
1244
1245 for (i = 0; i < steps; i++) {
1246 u32 gpio_pins, gpio_mask;
1247
1248 pi->vddc_table[i].vddc = (u16)(min + i * step);
1249 radeon_atom_get_voltage_gpio_settings(rdev,
1250 pi->vddc_table[i].vddc,
1251 SET_VOLTAGE_TYPE_ASIC_VDDC,
1252 &gpio_pins, &gpio_mask);
1253 pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1254 pi->vddc_table[i].high_smio = 0;
1255 pi->vddc_mask_low = gpio_mask;
1256 if (i > 0) {
1257 if ((pi->vddc_table[i].low_smio !=
1258 pi->vddc_table[i - 1].low_smio ) ||
1259 (pi->vddc_table[i].high_smio !=
1260 pi->vddc_table[i - 1].high_smio))
1261 vddc_index++;
1262 }
1263 pi->vddc_table[i].vddc_index = vddc_index;
1264 }
1265
1266 pi->valid_vddc_entries = (u8)steps;
1267
1268 return 0;
1269}
1270
1271static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1272{
1273 if (memory_info->mem_type == MEM_TYPE_GDDR3)
1274 return 30000;
1275
1276 return 0;
1277}
1278
1279static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1280{
1281 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1282 u32 gpio_pins, gpio_mask;
1283
1284 radeon_atom_get_voltage_gpio_settings(rdev,
1285 MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1286 &gpio_pins, &gpio_mask);
1287 pi->mvdd_mask_low = gpio_mask;
1288 pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1289 gpio_pins & gpio_mask;
1290
1291 radeon_atom_get_voltage_gpio_settings(rdev,
1292 MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1293 &gpio_pins, &gpio_mask);
1294 pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1295 gpio_pins & gpio_mask;
1296
1297 return 0;
1298}
1299
1300u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1301{
1302 return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1303}
1304
1305static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1306{
1307 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1308 u8 memory_module_index;
1309 struct atom_memory_info memory_info;
1310
1311 memory_module_index = rv770_get_memory_module_index(rdev);
1312
1313 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1314 pi->mvdd_control = false;
1315 return 0;
1316 }
1317
1318 pi->mvdd_split_frequency =
1319 rv770_get_mclk_split_point(&memory_info);
1320
1321 if (pi->mvdd_split_frequency == 0) {
1322 pi->mvdd_control = false;
1323 return 0;
1324 }
1325
1326 return rv770_get_mvdd_pin_configuration(rdev);
1327}
1328
1329void rv770_enable_voltage_control(struct radeon_device *rdev,
1330 bool enable)
1331{
1332 if (enable)
1333 WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
1334 else
1335 WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
1336}
1337
1338static void rv770_program_display_gap(struct radeon_device *rdev)
1339{
1340 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1341
1342 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1343 if (RREG32(AVIVO_D1CRTC_CONTROL) & AVIVO_CRTC_EN) {
1344 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1345 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1346 } else if (RREG32(AVIVO_D2CRTC_CONTROL) & AVIVO_CRTC_EN) {
1347 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1348 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1349 } else {
1350 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1351 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1352 }
1353 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1354}
1355
1356static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
1357 bool enable)
1358{
1359 rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);
1360
1361 if (enable)
1362 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
1363 else
1364 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
1365}
1366
5d77d776
AD
1367static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1368 struct radeon_ps *radeon_new_state)
66229b20 1369{
66229b20
AD
1370 if ((rdev->family == CHIP_RV730) ||
1371 (rdev->family == CHIP_RV710) ||
1372 (rdev->family == CHIP_RV740))
1373 rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1374 else
1375 rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1376}
1377
5d77d776
AD
1378static int rv770_upload_sw_state(struct radeon_device *rdev,
1379 struct radeon_ps *radeon_new_state)
66229b20
AD
1380{
1381 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
66229b20
AD
1382 u16 address = pi->state_table_start +
1383 offsetof(RV770_SMC_STATETABLE, driverState);
1384 RV770_SMC_SWSTATE state = { 0 };
1385 int ret;
1386
1387 ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1388 if (ret)
1389 return ret;
1390
1391 return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1392 sizeof(RV770_SMC_SWSTATE),
1393 pi->sram_end);
1394}
1395
1396int rv770_halt_smc(struct radeon_device *rdev)
1397{
1398 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1399 return -EINVAL;
1400
1401 if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1402 return -EINVAL;
1403
1404 return 0;
1405}
1406
1407int rv770_resume_smc(struct radeon_device *rdev)
1408{
1409 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1410 return -EINVAL;
1411 return 0;
1412}
1413
1414int rv770_set_sw_state(struct radeon_device *rdev)
1415{
1416 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1417 return -EINVAL;
1418 return 0;
1419}
1420
1421int rv770_set_boot_state(struct radeon_device *rdev)
1422{
1423 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1424 return -EINVAL;
1425 return 0;
1426}
1427
5d77d776
AD
1428void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
1429 struct radeon_ps *new_ps,
1430 struct radeon_ps *old_ps)
7c464f68 1431{
5d77d776
AD
1432 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1433 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
7c464f68 1434
5d77d776
AD
1435 if ((new_ps->vclk == old_ps->vclk) &&
1436 (new_ps->dclk == old_ps->dclk))
7c464f68
AD
1437 return;
1438
1439 if (new_state->high.sclk >= current_state->high.sclk)
1440 return;
1441
e38bb5ae 1442 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
7c464f68
AD
1443}
1444
5d77d776
AD
1445void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
1446 struct radeon_ps *new_ps,
1447 struct radeon_ps *old_ps)
7c464f68 1448{
5d77d776
AD
1449 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1450 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
7c464f68 1451
5d77d776
AD
1452 if ((new_ps->vclk == old_ps->vclk) &&
1453 (new_ps->dclk == old_ps->dclk))
7c464f68
AD
1454 return;
1455
1456 if (new_state->high.sclk < current_state->high.sclk)
1457 return;
1458
5d77d776 1459 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
7c464f68
AD
1460}
1461
66229b20
AD
1462int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1463{
1464 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1465 return -EINVAL;
1466
1467 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1468 return -EINVAL;
1469
1470 return 0;
1471}
1472
1473int rv770_unrestrict_performance_levels_after_switch(struct radeon_device *rdev)
1474{
1475 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1476 return -EINVAL;
1477
1478 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled)) != PPSMC_Result_OK)
1479 return -EINVAL;
1480
1481 return 0;
1482}
1483
1484void r7xx_start_smc(struct radeon_device *rdev)
1485{
1486 rv770_start_smc(rdev);
1487 rv770_start_smc_clock(rdev);
1488}
1489
1490
1491void r7xx_stop_smc(struct radeon_device *rdev)
1492{
1493 rv770_reset_smc(rdev);
1494 rv770_stop_smc_clock(rdev);
1495}
1496
1497static void rv770_read_clock_registers(struct radeon_device *rdev)
1498{
1499 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1500
1501 pi->clk_regs.rv770.cg_spll_func_cntl =
1502 RREG32(CG_SPLL_FUNC_CNTL);
1503 pi->clk_regs.rv770.cg_spll_func_cntl_2 =
1504 RREG32(CG_SPLL_FUNC_CNTL_2);
1505 pi->clk_regs.rv770.cg_spll_func_cntl_3 =
1506 RREG32(CG_SPLL_FUNC_CNTL_3);
1507 pi->clk_regs.rv770.cg_spll_spread_spectrum =
1508 RREG32(CG_SPLL_SPREAD_SPECTRUM);
1509 pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
1510 RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1511 pi->clk_regs.rv770.mpll_ad_func_cntl =
1512 RREG32(MPLL_AD_FUNC_CNTL);
1513 pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
1514 RREG32(MPLL_AD_FUNC_CNTL_2);
1515 pi->clk_regs.rv770.mpll_dq_func_cntl =
1516 RREG32(MPLL_DQ_FUNC_CNTL);
1517 pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
1518 RREG32(MPLL_DQ_FUNC_CNTL_2);
1519 pi->clk_regs.rv770.mclk_pwrmgt_cntl =
1520 RREG32(MCLK_PWRMGT_CNTL);
1521 pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
1522}
1523
1524static void r7xx_read_clock_registers(struct radeon_device *rdev)
1525{
1526 if (rdev->family == CHIP_RV740)
1527 rv740_read_clock_registers(rdev);
1528 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1529 rv730_read_clock_registers(rdev);
1530 else
1531 rv770_read_clock_registers(rdev);
1532}
1533
1534void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
1535{
1536 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1537
1538 pi->s0_vid_lower_smio_cntl =
1539 RREG32(S0_VID_LOWER_SMIO_CNTL);
1540}
1541
1542void rv770_reset_smio_status(struct radeon_device *rdev)
1543{
1544 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1545 u32 sw_smio_index, vid_smio_cntl;
1546
1547 sw_smio_index =
1548 (RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1549 switch (sw_smio_index) {
1550 case 3:
1551 vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1552 break;
1553 case 2:
1554 vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1555 break;
1556 case 1:
1557 vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1558 break;
1559 case 0:
1560 return;
1561 default:
1562 vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1563 break;
1564 }
1565
1566 WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1567 WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1568}
1569
1570void rv770_get_memory_type(struct radeon_device *rdev)
1571{
1572 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1573 u32 tmp;
1574
1575 tmp = RREG32(MC_SEQ_MISC0);
1576
1577 if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1578 MC_SEQ_MISC0_GDDR5_VALUE)
1579 pi->mem_gddr5 = true;
1580 else
1581 pi->mem_gddr5 = false;
1582
1583}
1584
1585void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1586{
1587 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1588 u32 tmp;
1589
1590 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1591
1592 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1593 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1594 pi->pcie_gen2 = true;
1595 else
1596 pi->pcie_gen2 = false;
1597
1598 if (pi->pcie_gen2) {
1599 if (tmp & LC_CURRENT_DATA_RATE)
1600 pi->boot_in_gen2 = true;
1601 else
1602 pi->boot_in_gen2 = false;
1603 } else
1604 pi->boot_in_gen2 = false;
1605}
1606
1607#if 0
1608static int rv770_enter_ulp_state(struct radeon_device *rdev)
1609{
1610 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1611
1612 if (pi->gfx_clock_gating) {
1613 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1614 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1615 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1616 RREG32(GB_TILING_CONFIG);
1617 }
1618
1619 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1620 ~HOST_SMC_MSG_MASK);
1621
1622 udelay(7000);
1623
1624 return 0;
1625}
1626
1627static int rv770_exit_ulp_state(struct radeon_device *rdev)
1628{
1629 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1630 int i;
1631
1632 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1633 ~HOST_SMC_MSG_MASK);
1634
1635 udelay(7000);
1636
1637 for (i = 0; i < rdev->usec_timeout; i++) {
1638 if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1639 break;
1640 udelay(1000);
1641 }
1642
1643 if (pi->gfx_clock_gating)
1644 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1645
1646 return 0;
1647}
1648#endif
1649
1650static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1651{
1652 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1653 u8 memory_module_index;
1654 struct atom_memory_info memory_info;
1655
1656 pi->mclk_odt_threshold = 0;
1657
1658 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1659 memory_module_index = rv770_get_memory_module_index(rdev);
1660
1661 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1662 return;
1663
1664 if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1665 memory_info.mem_type == MEM_TYPE_DDR3)
1666 pi->mclk_odt_threshold = 30000;
1667 }
1668}
1669
1670void rv770_get_max_vddc(struct radeon_device *rdev)
1671{
1672 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1673 u16 vddc;
1674
1675 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1676 pi->max_vddc = 0;
1677 else
1678 pi->max_vddc = vddc;
1679}
1680
1681void rv770_program_response_times(struct radeon_device *rdev)
1682{
1683 u32 voltage_response_time, backbias_response_time;
1684 u32 acpi_delay_time, vbi_time_out;
1685 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
1686 u32 reference_clock;
1687
1688 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1689 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1690
1691 if (voltage_response_time == 0)
1692 voltage_response_time = 1000;
1693
1694 if (backbias_response_time == 0)
1695 backbias_response_time = 1000;
1696
1697 acpi_delay_time = 15000;
1698 vbi_time_out = 100000;
1699
1700 reference_clock = radeon_get_xclk(rdev);
1701
1702 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1703 bb_dly = (backbias_response_time * reference_clock) / 1600;
1704 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1705 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1706
1707 rv770_write_smc_soft_register(rdev,
1708 RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1709 rv770_write_smc_soft_register(rdev,
1710 RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1711 rv770_write_smc_soft_register(rdev,
1712 RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1713 rv770_write_smc_soft_register(rdev,
1714 RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1715#if 0
1716 /* XXX look up hw revision */
1717 if (WEKIVA_A21)
1718 rv770_write_smc_soft_register(rdev,
1719 RV770_SMC_SOFT_REGISTER_baby_step_timer,
1720 0x10);
1721#endif
1722}
1723
5d77d776
AD
1724static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
1725 struct radeon_ps *radeon_new_state,
1726 struct radeon_ps *radeon_current_state)
66229b20
AD
1727{
1728 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
66229b20
AD
1729 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1730 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1731 bool current_use_dc = false;
1732 bool new_use_dc = false;
1733
1734 if (pi->mclk_odt_threshold == 0)
1735 return;
1736
1737 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1738 current_use_dc = true;
1739
1740 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1741 new_use_dc = true;
1742
1743 if (current_use_dc == new_use_dc)
1744 return;
1745
1746 if (!current_use_dc && new_use_dc)
1747 return;
1748
1749 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1750 rv730_program_dcodt(rdev, new_use_dc);
1751}
1752
5d77d776
AD
1753static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
1754 struct radeon_ps *radeon_new_state,
1755 struct radeon_ps *radeon_current_state)
66229b20
AD
1756{
1757 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
66229b20
AD
1758 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1759 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1760 bool current_use_dc = false;
1761 bool new_use_dc = false;
1762
1763 if (pi->mclk_odt_threshold == 0)
1764 return;
1765
1766 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1767 current_use_dc = true;
1768
1769 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1770 new_use_dc = true;
1771
1772 if (current_use_dc == new_use_dc)
1773 return;
1774
1775 if (current_use_dc && !new_use_dc)
1776 return;
1777
1778 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1779 rv730_program_dcodt(rdev, new_use_dc);
1780}
1781
1782static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1783{
1784 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1785
1786 if (pi->mclk_odt_threshold == 0)
1787 return;
1788
1789 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1790 rv730_get_odt_values(rdev);
1791}
1792
1793static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
1794{
1795 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1796 bool want_thermal_protection;
1797 enum radeon_dpm_event_src dpm_event_src;
1798
1799 switch (sources) {
1800 case 0:
1801 default:
1802 want_thermal_protection = false;
1803 break;
1804 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
1805 want_thermal_protection = true;
1806 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
1807 break;
1808
1809 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
1810 want_thermal_protection = true;
1811 dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
1812 break;
1813
1814 case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
1815 (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
1816 want_thermal_protection = true;
1817 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
1818 break;
1819 }
1820
1821 if (want_thermal_protection) {
1822 WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
1823 if (pi->thermal_protection)
1824 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
1825 } else {
1826 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
1827 }
1828}
1829
1830void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
1831 enum radeon_dpm_auto_throttle_src source,
1832 bool enable)
1833{
1834 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1835
1836 if (enable) {
1837 if (!(pi->active_auto_throttle_sources & (1 << source))) {
1838 pi->active_auto_throttle_sources |= 1 << source;
1839 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1840 }
1841 } else {
1842 if (pi->active_auto_throttle_sources & (1 << source)) {
1843 pi->active_auto_throttle_sources &= ~(1 << source);
1844 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1845 }
1846 }
1847}
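/*
 * Callers toggle individual throttle sources through this helper; the
 * event-source registers are only rewritten when the accumulated mask
 * really changes.  A minimal usage sketch (the enable call mirrors
 * rv770_dpm_enable() below, the disable call is illustrative only):
 *
 *	rv770_enable_auto_throttle_source(rdev,
 *					  RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL,
 *					  true);
 *	...
 *	rv770_enable_auto_throttle_source(rdev,
 *					  RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL,
 *					  false);
 */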
1848
dc50ba7f
AD
1849int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1850 int min_temp, int max_temp)
66229b20
AD
1851{
1852 int low_temp = 0 * 1000;
1853 int high_temp = 255 * 1000;
1854
1855 if (low_temp < min_temp)
1856 low_temp = min_temp;
1857 if (high_temp > max_temp)
1858 high_temp = max_temp;
1859 if (high_temp < low_temp) {
1860 DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1861 return -EINVAL;
1862 }
1863
1864 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1865 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1866 WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1867
1868 rdev->pm.dpm.thermal.min_temp = low_temp;
1869 rdev->pm.dpm.thermal.max_temp = high_temp;
1870
1871 return 0;
1872}
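/*
 * Temperatures are passed in millidegrees C and clamped to the 0..255 C
 * window the thermal block can represent; the interrupt and DPM threshold
 * fields themselves are programmed in whole degrees.  Example (request
 * values illustrative): min_temp = 0, max_temp = 120000 ends up as
 * DIG_THERM_INTL = 0, DIG_THERM_INTH = 120 and DIG_THERM_DPM = 120, with
 * the millidegree limits cached in rdev->pm.dpm.thermal.
 */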
1873
1874int rv770_dpm_enable(struct radeon_device *rdev)
1875{
1876 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
5d77d776 1877 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
b97721f3 1878 int ret;
66229b20
AD
1879
1880 if (pi->gfx_clock_gating)
1881 rv770_restore_cgcg(rdev);
1882
1883 if (rv770_dpm_enabled(rdev))
1884 return -EINVAL;
1885
1886 if (pi->voltage_control) {
1887 rv770_enable_voltage_control(rdev, true);
b97721f3 1888 ret = rv770_construct_vddc_table(rdev);
fa4b5471
AD
1889 if (ret) {
1890 DRM_ERROR("rv770_construct_vddc_table failed\n");
b97721f3 1891 return ret;
fa4b5471 1892 }
66229b20
AD
1893 }
1894
1895 if (pi->dcodt)
1896 rv770_retrieve_odt_values(rdev);
1897
b97721f3
AD
1898 if (pi->mvdd_control) {
1899 ret = rv770_get_mvdd_configuration(rdev);
fa4b5471
AD
1900 if (ret) {
1901 DRM_ERROR("rv770_get_mvdd_configuration failed\n");
b97721f3 1902 return ret;
fa4b5471 1903 }
b97721f3 1904 }
66229b20
AD
1905
1906 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1907 rv770_enable_backbias(rdev, true);
1908
1909 rv770_enable_spread_spectrum(rdev, true);
1910
1911 if (pi->thermal_protection)
1912 rv770_enable_thermal_protection(rdev, true);
1913
1914 rv770_program_mpll_timing_parameters(rdev);
1915 rv770_setup_bsp(rdev);
1916 rv770_program_git(rdev);
1917 rv770_program_tp(rdev);
1918 rv770_program_tpp(rdev);
1919 rv770_program_sstp(rdev);
1920 rv770_program_engine_speed_parameters(rdev);
1921 rv770_enable_display_gap(rdev);
1922 rv770_program_vc(rdev);
1923
1924 if (pi->dynamic_pcie_gen2)
1925 rv770_enable_dynamic_pcie_gen2(rdev, true);
1926
b97721f3 1927 ret = rv770_upload_firmware(rdev);
fa4b5471
AD
1928 if (ret) {
1929 DRM_ERROR("rv770_upload_firmware failed\n");
b97721f3 1930 return ret;
fa4b5471 1931 }
b97721f3 1932 ret = rv770_init_smc_table(rdev, boot_ps);
fa4b5471
AD
1933 if (ret) {
1934 DRM_ERROR("rv770_init_smc_table failed\n");
b97721f3 1935 return ret;
fa4b5471 1936 }
b97721f3 1937
66229b20
AD
1938 rv770_program_response_times(rdev);
1939 r7xx_start_smc(rdev);
1940
1941 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1942 rv730_start_dpm(rdev);
1943 else
1944 rv770_start_dpm(rdev);
1945
1946 if (pi->gfx_clock_gating)
1947 rv770_gfx_clock_gating_enable(rdev, true);
1948
1949 if (pi->mg_clock_gating)
1950 rv770_mg_clock_gating_enable(rdev, true);
1951
1952 if (rdev->irq.installed &&
1953 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1954 PPSMC_Result result;
1955
b97721f3
AD
1956 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1957 if (ret)
1958 return ret;
66229b20
AD
1959 rdev->irq.dpm_thermal = true;
1960 radeon_irq_set(rdev);
1961 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1962
1963 if (result != PPSMC_Result_OK)
1964 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1965 }
1966
1967 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1968
1969 return 0;
1970}
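/*
 * The enable path is strictly ordered: voltage/MVDD tables and ODT values
 * are gathered first, the static power-management registers (spread
 * spectrum, BSP, timing, display gap, voltage controller) are programmed
 * next, then the SMC firmware is uploaded and its state table initialised
 * from the boot power state, and only after that is the SMC started and
 * DPM switched on (RV730/RV710 use their own start routine).  Clock gating
 * and the thermal interrupt come last, so an early failure returns with
 * the hardware still largely in its boot configuration.
 */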
1971
1972void rv770_dpm_disable(struct radeon_device *rdev)
1973{
1974 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1975
1976 if (!rv770_dpm_enabled(rdev))
1977 return;
1978
1979 rv770_clear_vc(rdev);
1980
1981 if (pi->thermal_protection)
1982 rv770_enable_thermal_protection(rdev, false);
1983
1984 rv770_enable_spread_spectrum(rdev, false);
1985
1986 if (pi->dynamic_pcie_gen2)
1987 rv770_enable_dynamic_pcie_gen2(rdev, false);
1988
1989 if (rdev->irq.installed &&
1990 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1991 rdev->irq.dpm_thermal = false;
1992 radeon_irq_set(rdev);
1993 }
1994
1995 if (pi->gfx_clock_gating)
1996 rv770_gfx_clock_gating_enable(rdev, false);
1997
1998 if (pi->mg_clock_gating)
1999 rv770_mg_clock_gating_enable(rdev, false);
2000
2001 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
2002 rv730_stop_dpm(rdev);
2003 else
2004 rv770_stop_dpm(rdev);
2005
2006 r7xx_stop_smc(rdev);
2007 rv770_reset_smio_status(rdev);
2008}
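/*
 * Teardown mirrors the enable path in reverse: the voltage controller,
 * thermal protection, spread spectrum, PCIe gen2 switching and the thermal
 * interrupt are dropped first, clock gating is disabled, and DPM and the
 * SMC are stopped last before the SMIO status is reset for the next enable.
 */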
2009
2010int rv770_dpm_set_power_state(struct radeon_device *rdev)
2011{
2012 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
5d77d776
AD
2013 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
2014 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
b97721f3 2015 int ret;
66229b20 2016
b97721f3 2017 ret = rv770_restrict_performance_levels_before_switch(rdev);
72dd2c54
AD
2018 if (ret) {
2019 DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
b97721f3 2020 return ret;
72dd2c54 2021 }
5d77d776 2022 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
b97721f3 2023 ret = rv770_halt_smc(rdev);
72dd2c54
AD
2024 if (ret) {
2025 DRM_ERROR("rv770_halt_smc failed\n");
b97721f3 2026 return ret;
72dd2c54 2027 }
b97721f3 2028 ret = rv770_upload_sw_state(rdev, new_ps);
72dd2c54
AD
2029 if (ret) {
2030 DRM_ERROR("rv770_upload_sw_state failed\n");
b97721f3 2031 return ret;
72dd2c54 2032 }
5d77d776 2033 r7xx_program_memory_timing_parameters(rdev, new_ps);
66229b20 2034 if (pi->dcodt)
5d77d776 2035 rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
b97721f3 2036 ret = rv770_resume_smc(rdev);
72dd2c54
AD
2037 if (ret) {
2038 DRM_ERROR("rv770_resume_smc failed\n");
b97721f3 2039 return ret;
72dd2c54 2040 }
b97721f3 2041 ret = rv770_set_sw_state(rdev);
72dd2c54
AD
2042 if (ret) {
2043 DRM_ERROR("rv770_set_sw_state failed\n");
b97721f3 2044 return ret;
72dd2c54 2045 }
66229b20 2046 if (pi->dcodt)
5d77d776
AD
2047 rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
2048 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
b97721f3 2049 ret = rv770_unrestrict_performance_levels_after_switch(rdev);
173dbb0e
AD
2050 if (ret) {
2051 DRM_ERROR("rv770_unrestrict_performance_levels_after_switch failed\n");
b97721f3 2052 return ret;
173dbb0e 2053 }
66229b20
AD
2054
2055 return 0;
2056}
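/*
 * A state switch follows the usual r7xx sequence: restrict the allowed
 * performance levels, halt the SMC, upload the new software state and
 * memory timing parameters (plus dcodt on RV730/RV710), resume the SMC and
 * ask it to apply the new state, then lift the performance-level
 * restriction.  The UVD clock updates are split around the engine-clock
 * change so VCLK/DCLK are adjusted on the safe side of that transition.
 */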
2057
2058void rv770_dpm_reset_asic(struct radeon_device *rdev)
2059{
2060 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
5d77d776 2061 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
66229b20
AD
2062
2063 rv770_restrict_performance_levels_before_switch(rdev);
2064 if (pi->dcodt)
5d77d776 2065 rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
66229b20
AD
2066 rv770_set_boot_state(rdev);
2067 if (pi->dcodt)
5d77d776 2068 rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
66229b20
AD
2069}
2070
2071void rv770_dpm_setup_asic(struct radeon_device *rdev)
2072{
2073 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2074
2075 r7xx_read_clock_registers(rdev);
2076 rv770_read_voltage_smio_registers(rdev);
2077 rv770_get_memory_type(rdev);
2078 if (pi->dcodt)
2079 rv770_get_mclk_odt_threshold(rdev);
2080 rv770_get_pcie_gen2_status(rdev);
2081
2082 rv770_enable_acpi_pm(rdev);
2083
2084 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
2085 rv770_enable_l0s(rdev);
2086 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
2087 rv770_enable_l1(rdev);
2088 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
2089 rv770_enable_pll_sleep_in_l1(rdev);
2090}
2091
2092void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
2093{
2094 rv770_program_display_gap(rdev);
2095}
2096
2097union power_info {
2098 struct _ATOM_POWERPLAY_INFO info;
2099 struct _ATOM_POWERPLAY_INFO_V2 info_2;
2100 struct _ATOM_POWERPLAY_INFO_V3 info_3;
2101 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2102 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2103 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2104};
2105
2106union pplib_clock_info {
2107 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2108 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2109 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2110 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2111};
2112
2113union pplib_power_state {
2114 struct _ATOM_PPLIB_STATE v1;
2115 struct _ATOM_PPLIB_STATE_V2 v2;
2116};
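/*
 * These unions exist purely for parsing: the PowerPlay tables in the VBIOS
 * come in several revisions with different record layouts, so the parser
 * below overlays whichever _ATOM_PPLIB_* structure matches the table
 * revision and ASIC family on top of the raw BIOS bytes and reads the
 * fields through it.
 */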
2117
2118static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
2119 struct radeon_ps *rps,
2120 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
2121 u8 table_rev)
2122{
2123 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
2124 rps->class = le16_to_cpu(non_clock_info->usClassification);
2125 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
2126
2127 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
2128 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
2129 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
2130 } else if (r600_is_uvd_state(rps->class, rps->class2)) {
2131 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
2132 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
2133 } else {
2134 rps->vclk = 0;
2135 rps->dclk = 0;
2136 }
2137
2138 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
2139 rdev->pm.dpm.boot_ps = rps;
2140 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
2141 rdev->pm.dpm.uvd_ps = rps;
2142}
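/*
 * The non-clock record carries the state classification and capability
 * flags plus, on newer table revisions, explicit UVD clocks; older tables
 * fall back to the RV770_DEFAULT_VCLK/DCLK_FREQ constants for UVD states.
 * Boot and UVD states are remembered in rdev->pm.dpm so later code can
 * reach them without re-walking the table.
 */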
2143
2144static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
2145 struct radeon_ps *rps, int index,
2146 union pplib_clock_info *clock_info)
2147{
2148 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
dc50ba7f 2149 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
66229b20
AD
2150 struct rv7xx_ps *ps = rv770_get_ps(rps);
2151 u32 sclk, mclk;
2152 u16 vddc;
2153 struct rv7xx_pl *pl;
2154
2155 switch (index) {
2156 case 0:
2157 pl = &ps->low;
2158 break;
2159 case 1:
2160 pl = &ps->medium;
2161 break;
2162 case 2:
2163 default:
2164 pl = &ps->high;
2165 break;
2166 }
2167
dc50ba7f
AD
2168 if (rdev->family >= CHIP_CEDAR) {
2169 sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
2170 sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
2171 mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
2172 mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
66229b20 2173
dc50ba7f
AD
2174 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
2175 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
2176 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
2177 } else {
2178 sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
2179 sclk |= clock_info->r600.ucEngineClockHigh << 16;
2180 mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
2181 mclk |= clock_info->r600.ucMemoryClockHigh << 16;
2182
2183 pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
2184 pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
2185 }
66229b20
AD
2186
2187 pl->mclk = mclk;
2188 pl->sclk = sclk;
2189
2190 /* patch up vddc if necessary */
2191 if (pl->vddc == 0xff01) {
2192 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
2193 pl->vddc = vddc;
2194 }
2195
2196 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
2197 pi->acpi_vddc = pl->vddc;
dc50ba7f
AD
2198 if (rdev->family >= CHIP_CEDAR)
2199 eg_pi->acpi_vddci = pl->vddci;
66229b20
AD
2200 if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
2201 pi->acpi_pcie_gen2 = true;
2202 else
2203 pi->acpi_pcie_gen2 = false;
2204 }
2205
dc50ba7f
AD
2206 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
2207 if (rdev->family >= CHIP_BARTS) {
2208 eg_pi->ulv.supported = true;
2209 eg_pi->ulv.pl = pl;
2210 }
2211 }
2212
66229b20
AD
2213 if (pi->min_vddc_in_table > pl->vddc)
2214 pi->min_vddc_in_table = pl->vddc;
2215
2216 if (pi->max_vddc_in_table < pl->vddc)
2217 pi->max_vddc_in_table = pl->vddc;
2218
2219 /* patch up boot state */
2220 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
2abba66e
AD
2221 u16 vddc, vddci, mvdd;
2222 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
66229b20
AD
2223 pl->mclk = rdev->clock.default_mclk;
2224 pl->sclk = rdev->clock.default_sclk;
2225 pl->vddc = vddc;
2226 pl->vddci = vddci;
2227 }
d22b7e40
AD
2228
2229 if (rdev->family >= CHIP_BARTS) {
2230 if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
2231 ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
2232 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
2233 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
2234 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
2235 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
2236 }
2237 }
66229b20
AD
2238}
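/*
 * Each clock-info record stores sclk/mclk as a little-endian u16 low part
 * plus a u8 high byte, so the 24-bit value is reassembled by hand, e.g.
 * (taken from the evergreen branch above):
 *
 *	sclk  = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
 *	sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
 *
 * A VDDC value of 0xff01 appears to be an ATOM marker for "use the board
 * maximum", which is why it is patched up via radeon_atom_get_max_vddc(),
 * and the boot-classified state is overridden with the default clocks and
 * voltages the VBIOS actually booted with.
 */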
2239
2240int rv7xx_parse_power_table(struct radeon_device *rdev)
2241{
2242 struct radeon_mode_info *mode_info = &rdev->mode_info;
2243 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2244 union pplib_power_state *power_state;
2245 int i, j;
2246 union pplib_clock_info *clock_info;
2247 union power_info *power_info;
2248 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2249 u16 data_offset;
2250 u8 frev, crev;
2251 struct rv7xx_ps *ps;
2252
2253 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2254 &frev, &crev, &data_offset))
2255 return -EINVAL;
2256 power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2257
2258 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
2259 power_info->pplib.ucNumStates, GFP_KERNEL);
2260 if (!rdev->pm.dpm.ps)
2261 return -ENOMEM;
2262 rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
2263 rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
2264 rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
2265
2266 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2267 power_state = (union pplib_power_state *)
2268 (mode_info->atom_context->bios + data_offset +
2269 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2270 i * power_info->pplib.ucStateEntrySize);
2271 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2272 (mode_info->atom_context->bios + data_offset +
2273 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2274 (power_state->v1.ucNonClockStateIndex *
2275 power_info->pplib.ucNonClockSize));
2276 if (power_info->pplib.ucStateEntrySize - 1) {
2277 ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2278 if (ps == NULL) {
2279 kfree(rdev->pm.dpm.ps);
2280 return -ENOMEM;
2281 }
2282 rdev->pm.dpm.ps[i].ps_priv = ps;
2283 rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2284 non_clock_info,
2285 power_info->pplib.ucNonClockSize);
2286 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2287 clock_info = (union pplib_clock_info *)
2288 (mode_info->atom_context->bios + data_offset +
2289 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2290 (power_state->v1.ucClockStateIndices[j] *
2291 power_info->pplib.ucClockInfoSize));
2292 rv7xx_parse_pplib_clock_info(rdev,
2293 &rdev->pm.dpm.ps[i], j,
2294 clock_info);
2295 }
2296 }
2297 }
2298 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2299 return 0;
2300}
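/*
 * Table walk in short: ucNumStates radeon_ps entries are allocated, and for
 * each state the shared non-clock record (selected by ucNonClockStateIndex)
 * plus up to ucStateEntrySize - 1 clock-info records (the low/medium/high
 * performance levels) are decoded into a private rv7xx_ps.  All offsets are
 * relative to data_offset inside the BIOS image and all multi-byte fields
 * are little-endian.
 */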
2301
2302int rv770_dpm_init(struct radeon_device *rdev)
2303{
2304 struct rv7xx_power_info *pi;
2305 int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
2306 uint16_t data_offset, size;
2307 uint8_t frev, crev;
2308 struct atom_clock_dividers dividers;
2309 int ret;
2310
2311 pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2312 if (pi == NULL)
2313 return -ENOMEM;
2314 rdev->pm.dpm.priv = pi;
2315
2316 rv770_get_max_vddc(rdev);
2317
2318 pi->acpi_vddc = 0;
2319 pi->min_vddc_in_table = 0;
2320 pi->max_vddc_in_table = 0;
2321
2322 ret = rv7xx_parse_power_table(rdev);
2323 if (ret)
2324 return ret;
2325
2326 if (rdev->pm.dpm.voltage_response_time == 0)
2327 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2328 if (rdev->pm.dpm.backbias_response_time == 0)
2329 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2330
2331 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2332 0, false, &dividers);
2333 if (ret)
2334 pi->ref_div = dividers.ref_div + 1;
2335 else
2336 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2337
2338 pi->mclk_strobe_mode_threshold = 30000;
2339 pi->mclk_edc_enable_threshold = 30000;
2340
f85392bc
AD
2341 pi->rlp = RV770_RLP_DFLT;
2342 pi->rmp = RV770_RMP_DFLT;
2343 pi->lhp = RV770_LHP_DFLT;
2344 pi->lmp = RV770_LMP_DFLT;
2345
66229b20 2346 pi->voltage_control =
58653abd 2347 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
66229b20
AD
2348
2349 pi->mvdd_control =
58653abd 2350 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
66229b20
AD
2351
2352 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
2353 &frev, &crev, &data_offset)) {
2354 pi->sclk_ss = true;
2355 pi->mclk_ss = true;
2356 pi->dynamic_ss = true;
2357 } else {
2358 pi->sclk_ss = false;
2359 pi->mclk_ss = false;
2360 pi->dynamic_ss = false;
2361 }
2362
2363 pi->asi = RV770_ASI_DFLT;
2364 pi->pasi = RV770_HASI_DFLT;
2365 pi->vrc = RV770_VRC_DFLT;
2366
2367 pi->power_gating = false;
2368
2369 pi->gfx_clock_gating = true;
2370
2371 pi->mg_clock_gating = true;
2372 pi->mgcgtssm = true;
2373
2374 pi->dynamic_pcie_gen2 = true;
2375
2376 if (pi->gfx_clock_gating &&
2377 (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
2378 pi->thermal_protection = true;
2379 else
2380 pi->thermal_protection = false;
2381
2382 pi->display_gap = true;
2383
2384 if (rdev->flags & RADEON_IS_MOBILITY)
2385 pi->dcodt = true;
2386 else
2387 pi->dcodt = false;
2388
2389 pi->ulps = true;
2390
2391 pi->mclk_stutter_mode_threshold = 0;
2392
2393 pi->sram_end = SMC_RAM_END;
2394 pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2395 pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2396
2397 return 0;
2398}
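/*
 * Init only gathers static configuration: the parsed power table, default
 * response times, the PLL reference divider, spread-spectrum and voltage
 * control capabilities, and a set of rv7xx policy defaults (clock gating,
 * dynamic PCIe gen2, dcodt on mobility parts).  The power-management
 * hardware itself is not touched until rv770_dpm_setup_asic() and
 * rv770_dpm_enable() run.
 */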
2399
2400void rv770_dpm_print_power_state(struct radeon_device *rdev,
2401 struct radeon_ps *rps)
2402{
2403 struct rv7xx_ps *ps = rv770_get_ps(rps);
2404 struct rv7xx_pl *pl;
2405
2406 r600_dpm_print_class_info(rps->class, rps->class2);
2407 r600_dpm_print_cap_info(rps->caps);
2408 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2409 if (rdev->family >= CHIP_CEDAR) {
2410 pl = &ps->low;
2411 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2412 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2413 pl = &ps->medium;
2414 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2415 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2416 pl = &ps->high;
2417 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2418 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2419 } else {
2420 pl = &ps->low;
2421 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
2422 pl->sclk, pl->mclk, pl->vddc);
2423 pl = &ps->medium;
2424 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
2425 pl->sclk, pl->mclk, pl->vddc);
2426 pl = &ps->high;
2427 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
2428 pl->sclk, pl->mclk, pl->vddc);
2429 }
2430 r600_dpm_print_ps_status(rdev, rps);
2431}
2432
2433void rv770_dpm_fini(struct radeon_device *rdev)
2434{
2435 int i;
2436
2437 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2438 kfree(rdev->pm.dpm.ps[i].ps_priv);
2439 }
2440 kfree(rdev->pm.dpm.ps);
2441 kfree(rdev->pm.dpm.priv);
2442}
2443
2444u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2445{
2446 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2447
2448 if (low)
2449 return requested_state->low.sclk;
2450 else
2451 return requested_state->high.sclk;
2452}
2453
2454u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2455{
2456 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2457
2458 if (low)
2459 return requested_state->low.mclk;
2460 else
2461 return requested_state->high.mclk;
2462}