1 /*
2 * Copyright 2011 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24
25 #include "radeon.h"
26 #include "radeon_asic.h"
27 #include "rv770d.h"
28 #include "r600_dpm.h"
29 #include "rv770_dpm.h"
30 #include "cypress_dpm.h"
31 #include "atom.h"
32 #include <linux/seq_file.h>
33
34 #define MC_CG_ARB_FREQ_F0 0x0a
35 #define MC_CG_ARB_FREQ_F1 0x0b
36 #define MC_CG_ARB_FREQ_F2 0x0c
37 #define MC_CG_ARB_FREQ_F3 0x0d
38
39 #define MC_CG_SEQ_DRAMCONF_S0 0x05
40 #define MC_CG_SEQ_DRAMCONF_S1 0x06
41
42 #define PCIE_BUS_CLK 10000
43 #define TCLK (PCIE_BUS_CLK / 10)
44
45 #define SMC_RAM_END 0xC000
46
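/* Accessors (here and below) for the driver-private data hung off the
 * generic radeon power-state and power-management structures. */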
47 struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
48 {
49 struct rv7xx_ps *ps = rps->ps_priv;
50
51 return ps;
52 }
53
54 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
55 {
56 struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
57
58 return pi;
59 }
60
61 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
62 {
63 struct evergreen_power_info *pi = rdev->pm.dpm.priv;
64
65 return pi;
66 }
67
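/*
 * Enable/disable hardware (BIF) controlled dynamic PCIe gen2 switching.
 * The speed control register is only written if the link partner has
 * advertised or used gen2, and the gen2 strap is left set on disable if
 * the board booted in gen2 mode.
 */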
68 static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
69 bool enable)
70 {
71 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
72 u32 tmp;
73
74 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
75 if (enable) {
76 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
77 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
78 tmp |= LC_GEN2_EN_STRAP;
79 } else {
80 if (!pi->boot_in_gen2) {
81 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
82 tmp &= ~LC_GEN2_EN_STRAP;
83 }
84 }
85 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
86 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
87 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
88
89 }
90
91 static void rv770_enable_l0s(struct radeon_device *rdev)
92 {
93 u32 tmp;
94
95 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
96 tmp |= LC_L0S_INACTIVITY(3);
97 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
98 }
99
100 static void rv770_enable_l1(struct radeon_device *rdev)
101 {
102 u32 tmp;
103
104 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
105 tmp &= ~LC_L1_INACTIVITY_MASK;
106 tmp |= LC_L1_INACTIVITY(4);
107 tmp &= ~LC_PMI_TO_L1_DIS;
108 tmp &= ~LC_ASPM_TO_L1_DIS;
109 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
110 }
111
112 static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
113 {
114 u32 tmp;
115
116 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
117 tmp |= LC_L1_INACTIVITY(8);
118 WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
119
120 /* NOTE, this is a PCIE indirect reg, not PCIE PORT */
121 tmp = RREG32_PCIE(PCIE_P_CNTL);
122 tmp |= P_PLL_PWRDN_IN_L1L23;
123 tmp &= ~P_PLL_BUF_PDNB;
124 tmp &= ~P_PLL_PDNB;
125 tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
126 WREG32_PCIE(PCIE_P_CNTL, tmp);
127 }
128
129 static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
130 bool enable)
131 {
132 if (enable)
133 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
134 else {
135 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
136 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
137 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
138 RREG32(GB_TILING_CONFIG);
139 }
140 }
141
142 static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
143 bool enable)
144 {
145 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
146
147 if (enable) {
148 u32 mgcg_cgtt_local0;
149
150 if (rdev->family == CHIP_RV770)
151 mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
152 else
153 mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;
154
155 WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
156 WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));
157
158 if (pi->mgcgtssm)
159 WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
160 } else {
161 WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
162 WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
163 }
164 }
165
166 void rv770_restore_cgcg(struct radeon_device *rdev)
167 {
168 bool dpm_en = false, cg_en = false;
169
170 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
171 dpm_en = true;
172 if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
173 cg_en = true;
174
175 if (dpm_en && !cg_en)
176 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
177 }
178
179 static void rv770_start_dpm(struct radeon_device *rdev)
180 {
181 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
182
183 WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
184
185 WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
186 }
187
188 void rv770_stop_dpm(struct radeon_device *rdev)
189 {
190 PPSMC_Result result;
191
192 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);
193
194 if (result != PPSMC_Result_OK)
195 DRM_DEBUG("Could not force DPM to low.\n");
196
197 WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
198
199 WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
200
201 WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
202 }
203
204 bool rv770_dpm_enabled(struct radeon_device *rdev)
205 {
206 if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
207 return true;
208 else
209 return false;
210 }
211
212 void rv770_enable_thermal_protection(struct radeon_device *rdev,
213 bool enable)
214 {
215 if (enable)
216 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
217 else
218 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
219 }
220
221 void rv770_enable_acpi_pm(struct radeon_device *rdev)
222 {
223 WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
224 }
225
226 u8 rv770_get_seq_value(struct radeon_device *rdev,
227 struct rv7xx_pl *pl)
228 {
229 return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
230 MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
231 }
232
233 #if 0
234 int rv770_read_smc_soft_register(struct radeon_device *rdev,
235 u16 reg_offset, u32 *value)
236 {
237 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
238
239 return rv770_read_smc_sram_dword(rdev,
240 pi->soft_regs_start + reg_offset,
241 value, pi->sram_end);
242 }
243 #endif
244
245 int rv770_write_smc_soft_register(struct radeon_device *rdev,
246 u16 reg_offset, u32 value)
247 {
248 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
249
250 return rv770_write_smc_sram_dword(rdev,
251 pi->soft_regs_start + reg_offset,
252 value, pi->sram_end);
253 }
254
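/*
 * Fill in the per-level aT transition thresholds of an SMC software
 * state: the low/medium and medium/high boundaries are interpolated from
 * the level sclks and the lmp/rlp/lhp/rmp percentages, then scaled by the
 * bsp (pbsp for the top level) state period and packed with CG_R/CG_L.
 */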
255 int rv770_populate_smc_t(struct radeon_device *rdev,
256 struct radeon_ps *radeon_state,
257 RV770_SMC_SWSTATE *smc_state)
258 {
259 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
260 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
261 int i;
262 int a_n;
263 int a_d;
264 u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
265 u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
266 u32 a_t;
267
268 l[0] = 0;
269 r[2] = 100;
270
271 a_n = (int)state->medium.sclk * pi->lmp +
272 (int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
273 a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
274 (int)state->medium.sclk * pi->lmp;
275
276 l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
277 r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);
278
279 a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
280 (R600_AH_DFLT - pi->rmp);
281 a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
282 (int)state->high.sclk * pi->lhp;
283
284 l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
285 r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);
286
287 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
288 a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
289 smc_state->levels[i].aT = cpu_to_be32(a_t);
290 }
291
292 a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
293 CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);
294
295 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
296 cpu_to_be32(a_t);
297
298 return 0;
299 }
300
301 int rv770_populate_smc_sp(struct radeon_device *rdev,
302 struct radeon_ps *radeon_state,
303 RV770_SMC_SWSTATE *smc_state)
304 {
305 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
306 int i;
307
308 for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
309 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
310
311 smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
312 cpu_to_be32(pi->psp);
313
314 return 0;
315 }
316
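/*
 * Derive the MPLL feedback divider for a target memory clock: fyclk is
 * 4x the memory clock for GDDR5 and 2x otherwise, and the divider is
 * returned as an integer part (clkf) plus a fraction in 1/8 steps
 * (clkfrac).
 */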
317 static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
318 u32 reference_clock,
319 bool gddr5,
320 struct atom_clock_dividers *dividers,
321 u32 *clkf,
322 u32 *clkfrac)
323 {
324 u32 post_divider, reference_divider, feedback_divider8;
325 u32 fyclk;
326
327 if (gddr5)
328 fyclk = (memory_clock * 8) / 2;
329 else
330 fyclk = (memory_clock * 4) / 2;
331
332 post_divider = dividers->post_div;
333 reference_divider = dividers->ref_div;
334
335 feedback_divider8 =
336 (8 * fyclk * reference_divider * post_divider) / reference_clock;
337
338 *clkf = feedback_divider8 / 8;
339 *clkfrac = feedback_divider8 % 8;
340 }
341
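/* Encode a YCLK post divider for the MPLL registers: 1/2/4/8/16 map to
 * 0..4, anything else is rejected with -EINVAL. */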
342 static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
343 {
344 int ret = 0;
345
346 switch (postdiv) {
347 case 1:
348 *encoded_postdiv = 0;
349 break;
350 case 2:
351 *encoded_postdiv = 1;
352 break;
353 case 4:
354 *encoded_postdiv = 2;
355 break;
356 case 8:
357 *encoded_postdiv = 3;
358 break;
359 case 16:
360 *encoded_postdiv = 4;
361 break;
362 default:
363 ret = -EINVAL;
364 break;
365 }
366
367 return ret;
368 }
369
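/* Map an MPLL feedback divider value to the matching IBIAS code. */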
370 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
371 {
372 if (clkf <= 0x10)
373 return 0x4B;
374 if (clkf <= 0x19)
375 return 0x5B;
376 if (clkf <= 0x21)
377 return 0x2B;
378 if (clkf <= 0x27)
379 return 0x6C;
380 if (clkf <= 0x31)
381 return 0x9D;
382 return 0xC6;
383 }
384
385 static int rv770_populate_mclk_value(struct radeon_device *rdev,
386 u32 engine_clock, u32 memory_clock,
387 RV7XX_SMC_MCLK_VALUE *mclk)
388 {
389 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
390 u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
391 u32 mpll_ad_func_cntl =
392 pi->clk_regs.rv770.mpll_ad_func_cntl;
393 u32 mpll_ad_func_cntl_2 =
394 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
395 u32 mpll_dq_func_cntl =
396 pi->clk_regs.rv770.mpll_dq_func_cntl;
397 u32 mpll_dq_func_cntl_2 =
398 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
399 u32 mclk_pwrmgt_cntl =
400 pi->clk_regs.rv770.mclk_pwrmgt_cntl;
401 u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
402 struct atom_clock_dividers dividers;
403 u32 reference_clock = rdev->clock.mpll.reference_freq;
404 u32 clkf, clkfrac;
405 u32 postdiv_yclk;
406 u32 ibias;
407 int ret;
408
409 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
410 memory_clock, false, &dividers);
411 if (ret)
412 return ret;
413
414 if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
415 return -EINVAL;
416
417 rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
418 pi->mem_gddr5,
419 &dividers, &clkf, &clkfrac);
420
421 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
422 if (ret)
423 return ret;
424
425 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
426
427 mpll_ad_func_cntl &= ~(CLKR_MASK |
428 YCLK_POST_DIV_MASK |
429 CLKF_MASK |
430 CLKFRAC_MASK |
431 IBIAS_MASK);
432 mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
433 mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
434 mpll_ad_func_cntl |= CLKF(clkf);
435 mpll_ad_func_cntl |= CLKFRAC(clkfrac);
436 mpll_ad_func_cntl |= IBIAS(ibias);
437
438 if (dividers.vco_mode)
439 mpll_ad_func_cntl_2 |= VCO_MODE;
440 else
441 mpll_ad_func_cntl_2 &= ~VCO_MODE;
442
443 if (pi->mem_gddr5) {
444 rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
445 reference_clock,
446 pi->mem_gddr5,
447 &dividers, &clkf, &clkfrac);
448
449 ibias = rv770_map_clkf_to_ibias(rdev, clkf);
450
451 ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
452 if (ret)
453 return ret;
454
455 mpll_dq_func_cntl &= ~(CLKR_MASK |
456 YCLK_POST_DIV_MASK |
457 CLKF_MASK |
458 CLKFRAC_MASK |
459 IBIAS_MASK);
460 mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
461 mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
462 mpll_dq_func_cntl |= CLKF(clkf);
463 mpll_dq_func_cntl |= CLKFRAC(clkfrac);
464 mpll_dq_func_cntl |= IBIAS(ibias);
465
466 if (dividers.vco_mode)
467 mpll_dq_func_cntl_2 |= VCO_MODE;
468 else
469 mpll_dq_func_cntl_2 &= ~VCO_MODE;
470 }
471
472 mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
473 mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
474 mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
475 mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
476 mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
477 mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
478 mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
479
480 return 0;
481 }
482
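/*
 * Build the SMC SCLK register set for a target engine clock: fetch the
 * SPLL reference/post dividers from ATOM, compute a fractional feedback
 * divider (in 1/16384 units), and, when engine spread spectrum is
 * enabled, the matching CLK_S/CLK_V spread-spectrum parameters.
 */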
483 static int rv770_populate_sclk_value(struct radeon_device *rdev,
484 u32 engine_clock,
485 RV770_SMC_SCLK_VALUE *sclk)
486 {
487 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
488 struct atom_clock_dividers dividers;
489 u32 spll_func_cntl =
490 pi->clk_regs.rv770.cg_spll_func_cntl;
491 u32 spll_func_cntl_2 =
492 pi->clk_regs.rv770.cg_spll_func_cntl_2;
493 u32 spll_func_cntl_3 =
494 pi->clk_regs.rv770.cg_spll_func_cntl_3;
495 u32 cg_spll_spread_spectrum =
496 pi->clk_regs.rv770.cg_spll_spread_spectrum;
497 u32 cg_spll_spread_spectrum_2 =
498 pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
499 u64 tmp;
500 u32 reference_clock = rdev->clock.spll.reference_freq;
501 u32 reference_divider, post_divider;
502 u32 fbdiv;
503 int ret;
504
505 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
506 engine_clock, false, &dividers);
507 if (ret)
508 return ret;
509
510 reference_divider = 1 + dividers.ref_div;
511
512 if (dividers.enable_post_div)
513 post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
514 else
515 post_divider = 1;
516
517 tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
518 do_div(tmp, reference_clock);
519 fbdiv = (u32) tmp;
520
521 if (dividers.enable_post_div)
522 spll_func_cntl |= SPLL_DIVEN;
523 else
524 spll_func_cntl &= ~SPLL_DIVEN;
525 spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
526 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
527 spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
528 spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);
529
530 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
531 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
532
533 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
534 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
535 spll_func_cntl_3 |= SPLL_DITHEN;
536
537 if (pi->sclk_ss) {
538 struct radeon_atom_ss ss;
539 u32 vco_freq = engine_clock * post_divider;
540
541 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
542 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
543 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
544 u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);
545
546 cg_spll_spread_spectrum &= ~CLKS_MASK;
547 cg_spll_spread_spectrum |= CLKS(clk_s);
548 cg_spll_spread_spectrum |= SSEN;
549
550 cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
551 cg_spll_spread_spectrum_2 |= CLKV(clk_v);
552 }
553 }
554
555 sclk->sclk_value = cpu_to_be32(engine_clock);
556 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
557 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
558 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
559 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
560 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);
561
562 return 0;
563 }
564
565 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
566 RV770_SMC_VOLTAGE_VALUE *voltage)
567 {
568 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
569 int i;
570
571 if (!pi->voltage_control) {
572 voltage->index = 0;
573 voltage->value = 0;
574 return 0;
575 }
576
577 for (i = 0; i < pi->valid_vddc_entries; i++) {
578 if (vddc <= pi->vddc_table[i].vddc) {
579 voltage->index = pi->vddc_table[i].vddc_index;
580 voltage->value = cpu_to_be16(vddc);
581 break;
582 }
583 }
584
585 if (i == pi->valid_vddc_entries)
586 return -EINVAL;
587
588 return 0;
589 }
590
591 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
592 RV770_SMC_VOLTAGE_VALUE *voltage)
593 {
594 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
595
596 if (!pi->mvdd_control) {
597 voltage->index = MVDD_HIGH_INDEX;
598 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
599 return 0;
600 }
601
602 if (mclk <= pi->mvdd_split_frequency) {
603 voltage->index = MVDD_LOW_INDEX;
604 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
605 } else {
606 voltage->index = MVDD_HIGH_INDEX;
607 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
608 }
609
610 return 0;
611 }
612
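/*
 * Translate one driver performance level into its SMC form: PCIe gen2
 * flags, display watermark, the sclk/mclk register sets (dispatched per
 * ASIC family), and the VDDC/MVDD voltage entries.
 */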
613 static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
614 struct rv7xx_pl *pl,
615 RV770_SMC_HW_PERFORMANCE_LEVEL *level,
616 u8 watermark_level)
617 {
618 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
619 int ret;
620
621 level->gen2PCIE = pi->pcie_gen2 ?
622 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
623 level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
624 level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
625 level->displayWatermark = watermark_level;
626
627 if (rdev->family == CHIP_RV740)
628 ret = rv740_populate_sclk_value(rdev, pl->sclk,
629 &level->sclk);
630 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
631 ret = rv730_populate_sclk_value(rdev, pl->sclk,
632 &level->sclk);
633 else
634 ret = rv770_populate_sclk_value(rdev, pl->sclk,
635 &level->sclk);
636 if (ret)
637 return ret;
638
639 if (rdev->family == CHIP_RV740) {
640 if (pi->mem_gddr5) {
641 if (pl->mclk <= pi->mclk_strobe_mode_threshold)
642 level->strobeMode =
643 rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
644 else
645 level->strobeMode = 0;
646
647 if (pl->mclk > pi->mclk_edc_enable_threshold)
648 level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
649 else
650 level->mcFlags = 0;
651 }
652 ret = rv740_populate_mclk_value(rdev, pl->sclk,
653 pl->mclk, &level->mclk);
654 } else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
655 ret = rv730_populate_mclk_value(rdev, pl->sclk,
656 pl->mclk, &level->mclk);
657 else
658 ret = rv770_populate_mclk_value(rdev, pl->sclk,
659 pl->mclk, &level->mclk);
660 if (ret)
661 return ret;
662
663 ret = rv770_populate_vddc_value(rdev, pl->vddc,
664 &level->vddc);
665 if (ret)
666 return ret;
667
668 ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
669
670 return ret;
671 }
672
673 static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
674 struct radeon_ps *radeon_state,
675 RV770_SMC_SWSTATE *smc_state)
676 {
677 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
678 int ret;
679
680 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
681 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
682
683 ret = rv770_convert_power_level_to_smc(rdev,
684 &state->low,
685 &smc_state->levels[0],
686 PPSMC_DISPLAY_WATERMARK_LOW);
687 if (ret)
688 return ret;
689
690 ret = rv770_convert_power_level_to_smc(rdev,
691 &state->medium,
692 &smc_state->levels[1],
693 PPSMC_DISPLAY_WATERMARK_LOW);
694 if (ret)
695 return ret;
696
697 ret = rv770_convert_power_level_to_smc(rdev,
698 &state->high,
699 &smc_state->levels[2],
700 PPSMC_DISPLAY_WATERMARK_HIGH);
701 if (ret)
702 return ret;
703
704 smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
705 smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
706 smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;
707
708 smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
709 &state->low);
710 smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
711 &state->medium);
712 smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
713 &state->high);
714
715 rv770_populate_smc_sp(rdev, radeon_state, smc_state);
716
717 return rv770_populate_smc_t(rdev, radeon_state, smc_state);
718
719 }
720
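/*
 * Compute the MC arbiter refresh rate for a given engine clock from the
 * DRAM row count (MC_ARB_RAMCFG) and refresh interval (MC_SEQ_MISC0):
 * rate = (sclk * 10 * refresh / rows - 32) / 64.
 */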
721 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
722 u32 engine_clock)
723 {
724 u32 dram_rows;
725 u32 dram_refresh_rate;
726 u32 mc_arb_rfsh_rate;
727 u32 tmp;
728
729 tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
730 dram_rows = 1 << (tmp + 10);
731 tmp = RREG32(MC_SEQ_MISC0) & 3;
732 dram_refresh_rate = 1 << (tmp + 3);
733 mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
734
735 return mc_arb_rfsh_rate;
736 }
737
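/*
 * Program the MC arbiter for the new state: set the engine/DRAM timings
 * for the highest usable sclk (clamped to 255/64 of the low sclk), then
 * derive per-level SQM ratios and refresh rates for the boot, low,
 * medium and high levels.
 */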
738 static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
739 struct radeon_ps *radeon_state)
740 {
741 struct rv7xx_ps *state = rv770_get_ps(radeon_state);
742 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
743 u32 sqm_ratio;
744 u32 arb_refresh_rate;
745 u32 high_clock;
746
747 if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
748 high_clock = state->high.sclk;
749 else
750 high_clock = (state->low.sclk * 0xFF / 0x40);
751
752 radeon_atom_set_engine_dram_timings(rdev, high_clock,
753 state->high.mclk);
754
755 sqm_ratio =
756 STATE0(64 * high_clock / pi->boot_sclk) |
757 STATE1(64 * high_clock / state->low.sclk) |
758 STATE2(64 * high_clock / state->medium.sclk) |
759 STATE3(64 * high_clock / state->high.sclk);
760 WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
761
762 arb_refresh_rate =
763 POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
764 POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
765 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
766 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
767 WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
768 }
769
770 void rv770_enable_backbias(struct radeon_device *rdev,
771 bool enable)
772 {
773 if (enable)
774 WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
775 else
776 WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
777 }
778
779 static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
780 bool enable)
781 {
782 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
783
784 if (enable) {
785 if (pi->sclk_ss)
786 WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
787
788 if (pi->mclk_ss) {
789 if (rdev->family == CHIP_RV740)
790 rv740_enable_mclk_spread_spectrum(rdev, true);
791 }
792 } else {
793 WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
794
795 WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
796
797 WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);
798
799 if (rdev->family == CHIP_RV740)
800 rv740_enable_mclk_spread_spectrum(rdev, false);
801 }
802 }
803
804 static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
805 {
806 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
807
808 if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
809 WREG32(MPLL_TIME,
810 (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
811 MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
812 }
813 }
814
815 void rv770_setup_bsp(struct radeon_device *rdev)
816 {
817 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
818 u32 xclk = radeon_get_xclk(rdev);
819
820 r600_calculate_u_and_p(pi->asi,
821 xclk,
822 16,
823 &pi->bsp,
824 &pi->bsu);
825
826 r600_calculate_u_and_p(pi->pasi,
827 xclk,
828 16,
829 &pi->pbsp,
830 &pi->pbsu);
831
832 pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
833 pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
834
835 WREG32(CG_BSP, pi->dsp);
836
837 }
838
839 void rv770_program_git(struct radeon_device *rdev)
840 {
841 WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
842 }
843
844 void rv770_program_tp(struct radeon_device *rdev)
845 {
846 int i;
847 enum r600_td td = R600_TD_DFLT;
848
849 for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
850 WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
851
852 if (td == R600_TD_AUTO)
853 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
854 else
855 WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
856 if (td == R600_TD_UP)
857 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
858 if (td == R600_TD_DOWN)
859 WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
860 }
861
862 void rv770_program_tpp(struct radeon_device *rdev)
863 {
864 WREG32(CG_TPC, R600_TPC_DFLT);
865 }
866
867 void rv770_program_sstp(struct radeon_device *rdev)
868 {
869 WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
870 }
871
872 void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
873 {
874 WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
875 }
876
877 static void rv770_enable_display_gap(struct radeon_device *rdev)
878 {
879 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
880
881 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
882 tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
883 DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
884 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
885 }
886
887 void rv770_program_vc(struct radeon_device *rdev)
888 {
889 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
890
891 WREG32(CG_FTV, pi->vrc);
892 }
893
894 void rv770_clear_vc(struct radeon_device *rdev)
895 {
896 WREG32(CG_FTV, 0);
897 }
898
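/* Hold the SMC in reset with its clock stopped while loading the SMC
 * microcode into SRAM. */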
899 int rv770_upload_firmware(struct radeon_device *rdev)
900 {
901 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
902 int ret;
903
904 rv770_reset_smc(rdev);
905 rv770_stop_smc_clock(rdev);
906
907 ret = rv770_load_smc_ucode(rdev, pi->sram_end);
908 if (ret)
909 return ret;
910
911 return 0;
912 }
913
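/*
 * Derive the ACPI state from the initial state: use the ACPI VDDC if one
 * is provided, drop to PCIe gen1 unless the ACPI state supports gen2,
 * hold the memory PLLs and MRDCK pads in reset, bypass the SPLL, and
 * leave the clock values at zero.
 */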
914 static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
915 RV770_SMC_STATETABLE *table)
916 {
917 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
918
919 u32 mpll_ad_func_cntl =
920 pi->clk_regs.rv770.mpll_ad_func_cntl;
921 u32 mpll_ad_func_cntl_2 =
922 pi->clk_regs.rv770.mpll_ad_func_cntl_2;
923 u32 mpll_dq_func_cntl =
924 pi->clk_regs.rv770.mpll_dq_func_cntl;
925 u32 mpll_dq_func_cntl_2 =
926 pi->clk_regs.rv770.mpll_dq_func_cntl_2;
927 u32 spll_func_cntl =
928 pi->clk_regs.rv770.cg_spll_func_cntl;
929 u32 spll_func_cntl_2 =
930 pi->clk_regs.rv770.cg_spll_func_cntl_2;
931 u32 spll_func_cntl_3 =
932 pi->clk_regs.rv770.cg_spll_func_cntl_3;
933 u32 mclk_pwrmgt_cntl;
934 u32 dll_cntl;
935
936 table->ACPIState = table->initialState;
937
938 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
939
940 if (pi->acpi_vddc) {
941 rv770_populate_vddc_value(rdev, pi->acpi_vddc,
942 &table->ACPIState.levels[0].vddc);
943 if (pi->pcie_gen2) {
944 if (pi->acpi_pcie_gen2)
945 table->ACPIState.levels[0].gen2PCIE = 1;
946 else
947 table->ACPIState.levels[0].gen2PCIE = 0;
948 } else
949 table->ACPIState.levels[0].gen2PCIE = 0;
950 if (pi->acpi_pcie_gen2)
951 table->ACPIState.levels[0].gen2XSP = 1;
952 else
953 table->ACPIState.levels[0].gen2XSP = 0;
954 } else {
955 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
956 &table->ACPIState.levels[0].vddc);
957 table->ACPIState.levels[0].gen2PCIE = 0;
958 }
959
960
961 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
962
963 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
964
965 mclk_pwrmgt_cntl = (MRDCKA0_RESET |
966 MRDCKA1_RESET |
967 MRDCKB0_RESET |
968 MRDCKB1_RESET |
969 MRDCKC0_RESET |
970 MRDCKC1_RESET |
971 MRDCKD0_RESET |
972 MRDCKD1_RESET);
973
974 dll_cntl = 0xff000000;
975
976 spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
977
978 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
979 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
980
981 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
982 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
983 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
984 table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
985
986 table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
987 table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
988
989 table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
990
991 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
992 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
993 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
994
995 table->ACPIState.levels[0].sclk.sclk_value = 0;
996
997 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
998
999 table->ACPIState.levels[1] = table->ACPIState.levels[0];
1000 table->ACPIState.levels[2] = table->ACPIState.levels[0];
1001
1002 return 0;
1003 }
1004
1005 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
1006 RV770_SMC_VOLTAGE_VALUE *voltage)
1007 {
1008 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1009
1010 if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
1011 (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low) ) {
1012 voltage->index = MVDD_LOW_INDEX;
1013 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1014 } else {
1015 voltage->index = MVDD_HIGH_INDEX;
1016 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1017 }
1018
1019 return 0;
1020 }
1021
1022 static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
1023 struct radeon_ps *radeon_state,
1024 RV770_SMC_STATETABLE *table)
1025 {
1026 struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
1027 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1028 u32 a_t;
1029
1030 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1031 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1032 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1033 cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1034 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1035 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1036 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1037 cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1038 table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1039 cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1040 table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1041 cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1042
1043 table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1044 cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1045 table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1046 cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1047
1048 table->initialState.levels[0].mclk.mclk770.mclk_value =
1049 cpu_to_be32(initial_state->low.mclk);
1050
1051 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1052 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1053 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1054 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1055 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1056 cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1057 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1058 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1059 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1060 cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1061
1062 table->initialState.levels[0].sclk.sclk_value =
1063 cpu_to_be32(initial_state->low.sclk);
1064
1065 table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1066
1067 table->initialState.levels[0].seqValue =
1068 rv770_get_seq_value(rdev, &initial_state->low);
1069
1070 rv770_populate_vddc_value(rdev,
1071 initial_state->low.vddc,
1072 &table->initialState.levels[0].vddc);
1073 rv770_populate_initial_mvdd_value(rdev,
1074 &table->initialState.levels[0].mvdd);
1075
1076 a_t = CG_R(0xffff) | CG_L(0);
1077 table->initialState.levels[0].aT = cpu_to_be32(a_t);
1078
1079 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1080
1081 if (pi->boot_in_gen2)
1082 table->initialState.levels[0].gen2PCIE = 1;
1083 else
1084 table->initialState.levels[0].gen2PCIE = 0;
1085 if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1086 table->initialState.levels[0].gen2XSP = 1;
1087 else
1088 table->initialState.levels[0].gen2XSP = 0;
1089
1090 if (rdev->family == CHIP_RV740) {
1091 if (pi->mem_gddr5) {
1092 if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
1093 table->initialState.levels[0].strobeMode =
1094 rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
1095 else
1096 table->initialState.levels[0].strobeMode = 0;
1097
1098 if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
1099 table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1100 else
1101 table->initialState.levels[0].mcFlags = 0;
1102 }
1103 }
1104
1105 table->initialState.levels[1] = table->initialState.levels[0];
1106 table->initialState.levels[2] = table->initialState.levels[0];
1107
1108 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1109
1110 return 0;
1111 }
1112
1113 static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
1114 RV770_SMC_STATETABLE *table)
1115 {
1116 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1117 int i;
1118
1119 for (i = 0; i < pi->valid_vddc_entries; i++) {
1120 table->highSMIO[pi->vddc_table[i].vddc_index] =
1121 pi->vddc_table[i].high_smio;
1122 table->lowSMIO[pi->vddc_table[i].vddc_index] =
1123 cpu_to_be32(pi->vddc_table[i].low_smio);
1124 }
1125
1126 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1127 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1128 cpu_to_be32(pi->vddc_mask_low);
1129
1130 for (i = 0;
1131 ((i < pi->valid_vddc_entries) &&
1132 (pi->max_vddc_in_table >
1133 pi->vddc_table[i].vddc));
1134 i++);
1135
1136 table->maxVDDCIndexInPPTable =
1137 pi->vddc_table[i].vddc_index;
1138
1139 return 0;
1140 }
1141
1142 static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
1143 RV770_SMC_STATETABLE *table)
1144 {
1145 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1146
1147 if (pi->mvdd_control) {
1148 table->lowSMIO[MVDD_HIGH_INDEX] |=
1149 cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
1150 table->lowSMIO[MVDD_LOW_INDEX] |=
1151 cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);
1152
1153 table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
1154 table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
1155 cpu_to_be32(pi->mvdd_mask_low);
1156 }
1157
1158 return 0;
1159 }
1160
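/*
 * Build the complete SMC state table (SMIO voltage tables, thermal
 * protection type, platform flags, initial and ACPI states) and upload
 * it to SMC SRAM at state_table_start.
 */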
1161 static int rv770_init_smc_table(struct radeon_device *rdev,
1162 struct radeon_ps *radeon_boot_state)
1163 {
1164 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1165 struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1166 RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1167 int ret;
1168
1169 memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1170
1171 pi->boot_sclk = boot_state->low.sclk;
1172
1173 rv770_populate_smc_vddc_table(rdev, table);
1174 rv770_populate_smc_mvdd_table(rdev, table);
1175
1176 switch (rdev->pm.int_thermal_type) {
1177 case THERMAL_TYPE_RV770:
1178 case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
1179 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1180 break;
1181 case THERMAL_TYPE_NONE:
1182 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1183 break;
1184 case THERMAL_TYPE_EXTERNAL_GPIO:
1185 default:
1186 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1187 break;
1188 }
1189
1190 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
1191 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1192
1193 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
1194 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;
1195
1196 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
1197 table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
1198 }
1199
1200 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1201 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1202
1203 if (pi->mem_gddr5)
1204 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1205
1206 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1207 ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
1208 else
1209 ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
1210 if (ret)
1211 return ret;
1212
1213 if (rdev->family == CHIP_RV740)
1214 ret = rv740_populate_smc_acpi_state(rdev, table);
1215 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1216 ret = rv730_populate_smc_acpi_state(rdev, table);
1217 else
1218 ret = rv770_populate_smc_acpi_state(rdev, table);
1219 if (ret)
1220 return ret;
1221
1222 table->driverState = table->initialState;
1223
1224 return rv770_copy_bytes_to_smc(rdev,
1225 pi->state_table_start,
1226 (const u8 *)table,
1227 sizeof(RV770_SMC_STATETABLE),
1228 pi->sram_end);
1229 }
1230
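/*
 * Walk the VDDC range reported by ATOM in regulator steps, recording the
 * SMIO (GPIO) pattern for each step; consecutive entries that program
 * the same pattern share a vddc_index.
 */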
1231 static int rv770_construct_vddc_table(struct radeon_device *rdev)
1232 {
1233 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1234 u16 min, max, step;
1235 u32 steps = 0;
1236 u8 vddc_index = 0;
1237 u32 i;
1238
1239 radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
1240 radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
1241 radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);
1242
1243 steps = (max - min) / step + 1;
1244
1245 if (steps > MAX_NO_VREG_STEPS)
1246 return -EINVAL;
1247
1248 for (i = 0; i < steps; i++) {
1249 u32 gpio_pins, gpio_mask;
1250
1251 pi->vddc_table[i].vddc = (u16)(min + i * step);
1252 radeon_atom_get_voltage_gpio_settings(rdev,
1253 pi->vddc_table[i].vddc,
1254 SET_VOLTAGE_TYPE_ASIC_VDDC,
1255 &gpio_pins, &gpio_mask);
1256 pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
1257 pi->vddc_table[i].high_smio = 0;
1258 pi->vddc_mask_low = gpio_mask;
1259 if (i > 0) {
1260 if ((pi->vddc_table[i].low_smio !=
1261 pi->vddc_table[i - 1].low_smio ) ||
1262 (pi->vddc_table[i].high_smio !=
1263 pi->vddc_table[i - 1].high_smio))
1264 vddc_index++;
1265 }
1266 pi->vddc_table[i].vddc_index = vddc_index;
1267 }
1268
1269 pi->valid_vddc_entries = (u8)steps;
1270
1271 return 0;
1272 }
1273
1274 static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1275 {
1276 if (memory_info->mem_type == MEM_TYPE_GDDR3)
1277 return 30000;
1278
1279 return 0;
1280 }
1281
1282 static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1283 {
1284 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1285 u32 gpio_pins, gpio_mask;
1286
1287 radeon_atom_get_voltage_gpio_settings(rdev,
1288 MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1289 &gpio_pins, &gpio_mask);
1290 pi->mvdd_mask_low = gpio_mask;
1291 pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1292 gpio_pins & gpio_mask;
1293
1294 radeon_atom_get_voltage_gpio_settings(rdev,
1295 MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1296 &gpio_pins, &gpio_mask);
1297 pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1298 gpio_pins & gpio_mask;
1299
1300 return 0;
1301 }
1302
1303 u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1304 {
1305 return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1306 }
1307
1308 static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1309 {
1310 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1311 u8 memory_module_index;
1312 struct atom_memory_info memory_info;
1313
1314 memory_module_index = rv770_get_memory_module_index(rdev);
1315
1316 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1317 pi->mvdd_control = false;
1318 return 0;
1319 }
1320
1321 pi->mvdd_split_frequency =
1322 rv770_get_mclk_split_point(&memory_info);
1323
1324 if (pi->mvdd_split_frequency == 0) {
1325 pi->mvdd_control = false;
1326 return 0;
1327 }
1328
1329 return rv770_get_mvdd_pin_configuration(rdev);
1330 }
1331
1332 void rv770_enable_voltage_control(struct radeon_device *rdev,
1333 bool enable)
1334 {
1335 if (enable)
1336 WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
1337 else
1338 WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
1339 }
1340
1341 static void rv770_program_display_gap(struct radeon_device *rdev)
1342 {
1343 u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1344
1345 tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1346 if (rdev->pm.dpm.new_active_crtcs & 1) {
1347 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1348 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1349 } else if (rdev->pm.dpm.new_active_crtcs & 2) {
1350 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1351 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1352 } else {
1353 tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1354 tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1355 }
1356 WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1357 }
1358
1359 static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
1360 bool enable)
1361 {
1362 rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);
1363
1364 if (enable)
1365 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
1366 else
1367 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
1368 }
1369
1370 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1371 struct radeon_ps *radeon_new_state)
1372 {
1373 if ((rdev->family == CHIP_RV730) ||
1374 (rdev->family == CHIP_RV710) ||
1375 (rdev->family == CHIP_RV740))
1376 rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1377 else
1378 rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1379 }
1380
1381 static int rv770_upload_sw_state(struct radeon_device *rdev,
1382 struct radeon_ps *radeon_new_state)
1383 {
1384 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1385 u16 address = pi->state_table_start +
1386 offsetof(RV770_SMC_STATETABLE, driverState);
1387 RV770_SMC_SWSTATE state = { 0 };
1388 int ret;
1389
1390 ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1391 if (ret)
1392 return ret;
1393
1394 return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1395 sizeof(RV770_SMC_SWSTATE),
1396 pi->sram_end);
1397 }
1398
1399 int rv770_halt_smc(struct radeon_device *rdev)
1400 {
1401 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1402 return -EINVAL;
1403
1404 if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1405 return -EINVAL;
1406
1407 return 0;
1408 }
1409
1410 int rv770_resume_smc(struct radeon_device *rdev)
1411 {
1412 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1413 return -EINVAL;
1414 return 0;
1415 }
1416
1417 int rv770_set_sw_state(struct radeon_device *rdev)
1418 {
1419 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1420 DRM_DEBUG("rv770_set_sw_state failed\n");
1421 return 0;
1422 }
1423
1424 int rv770_set_boot_state(struct radeon_device *rdev)
1425 {
1426 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1427 return -EINVAL;
1428 return 0;
1429 }
1430
1431 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
1432 struct radeon_ps *new_ps,
1433 struct radeon_ps *old_ps)
1434 {
1435 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1436 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1437
1438 if ((new_ps->vclk == old_ps->vclk) &&
1439 (new_ps->dclk == old_ps->dclk))
1440 return;
1441
1442 if (new_state->high.sclk >= current_state->high.sclk)
1443 return;
1444
1445 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1446 }
1447
1448 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
1449 struct radeon_ps *new_ps,
1450 struct radeon_ps *old_ps)
1451 {
1452 struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1453 struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1454
1455 if ((new_ps->vclk == old_ps->vclk) &&
1456 (new_ps->dclk == old_ps->dclk))
1457 return;
1458
1459 if (new_state->high.sclk < current_state->high.sclk)
1460 return;
1461
1462 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1463 }
1464
1465 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1466 {
1467 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1468 return -EINVAL;
1469
1470 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1471 return -EINVAL;
1472
1473 return 0;
1474 }
1475
1476 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
1477 enum radeon_dpm_forced_level level)
1478 {
1479 PPSMC_Msg msg;
1480
1481 if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1482 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
1483 return -EINVAL;
1484 msg = PPSMC_MSG_ForceHigh;
1485 } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1486 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1487 return -EINVAL;
1488 msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
1489 } else {
1490 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1491 return -EINVAL;
1492 msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
1493 }
1494
1495 if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
1496 return -EINVAL;
1497
1498 rdev->pm.dpm.forced_level = level;
1499
1500 return 0;
1501 }
1502
1503 void r7xx_start_smc(struct radeon_device *rdev)
1504 {
1505 rv770_start_smc(rdev);
1506 rv770_start_smc_clock(rdev);
1507 }
1508
1509
1510 void r7xx_stop_smc(struct radeon_device *rdev)
1511 {
1512 rv770_reset_smc(rdev);
1513 rv770_stop_smc_clock(rdev);
1514 }
1515
1516 static void rv770_read_clock_registers(struct radeon_device *rdev)
1517 {
1518 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1519
1520 pi->clk_regs.rv770.cg_spll_func_cntl =
1521 RREG32(CG_SPLL_FUNC_CNTL);
1522 pi->clk_regs.rv770.cg_spll_func_cntl_2 =
1523 RREG32(CG_SPLL_FUNC_CNTL_2);
1524 pi->clk_regs.rv770.cg_spll_func_cntl_3 =
1525 RREG32(CG_SPLL_FUNC_CNTL_3);
1526 pi->clk_regs.rv770.cg_spll_spread_spectrum =
1527 RREG32(CG_SPLL_SPREAD_SPECTRUM);
1528 pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
1529 RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1530 pi->clk_regs.rv770.mpll_ad_func_cntl =
1531 RREG32(MPLL_AD_FUNC_CNTL);
1532 pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
1533 RREG32(MPLL_AD_FUNC_CNTL_2);
1534 pi->clk_regs.rv770.mpll_dq_func_cntl =
1535 RREG32(MPLL_DQ_FUNC_CNTL);
1536 pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
1537 RREG32(MPLL_DQ_FUNC_CNTL_2);
1538 pi->clk_regs.rv770.mclk_pwrmgt_cntl =
1539 RREG32(MCLK_PWRMGT_CNTL);
1540 pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
1541 }
1542
1543 static void r7xx_read_clock_registers(struct radeon_device *rdev)
1544 {
1545 if (rdev->family == CHIP_RV740)
1546 rv740_read_clock_registers(rdev);
1547 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1548 rv730_read_clock_registers(rdev);
1549 else
1550 rv770_read_clock_registers(rdev);
1551 }
1552
1553 void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
1554 {
1555 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1556
1557 pi->s0_vid_lower_smio_cntl =
1558 RREG32(S0_VID_LOWER_SMIO_CNTL);
1559 }
1560
1561 void rv770_reset_smio_status(struct radeon_device *rdev)
1562 {
1563 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1564 u32 sw_smio_index, vid_smio_cntl;
1565
1566 sw_smio_index =
1567 (RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1568 switch (sw_smio_index) {
1569 case 3:
1570 vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1571 break;
1572 case 2:
1573 vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1574 break;
1575 case 1:
1576 vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1577 break;
1578 case 0:
1579 return;
1580 default:
1581 vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1582 break;
1583 }
1584
1585 WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1586 WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1587 }
1588
1589 void rv770_get_memory_type(struct radeon_device *rdev)
1590 {
1591 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1592 u32 tmp;
1593
1594 tmp = RREG32(MC_SEQ_MISC0);
1595
1596 if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1597 MC_SEQ_MISC0_GDDR5_VALUE)
1598 pi->mem_gddr5 = true;
1599 else
1600 pi->mem_gddr5 = false;
1601
1602 }
1603
1604 void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1605 {
1606 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1607 u32 tmp;
1608
1609 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1610
1611 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1612 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1613 pi->pcie_gen2 = true;
1614 else
1615 pi->pcie_gen2 = false;
1616
1617 if (pi->pcie_gen2) {
1618 if (tmp & LC_CURRENT_DATA_RATE)
1619 pi->boot_in_gen2 = true;
1620 else
1621 pi->boot_in_gen2 = false;
1622 } else
1623 pi->boot_in_gen2 = false;
1624 }
1625
1626 #if 0
1627 static int rv770_enter_ulp_state(struct radeon_device *rdev)
1628 {
1629 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1630
1631 if (pi->gfx_clock_gating) {
1632 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1633 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1634 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1635 RREG32(GB_TILING_CONFIG);
1636 }
1637
1638 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1639 ~HOST_SMC_MSG_MASK);
1640
1641 udelay(7000);
1642
1643 return 0;
1644 }
1645
1646 static int rv770_exit_ulp_state(struct radeon_device *rdev)
1647 {
1648 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1649 int i;
1650
1651 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1652 ~HOST_SMC_MSG_MASK);
1653
1654 udelay(7000);
1655
1656 for (i = 0; i < rdev->usec_timeout; i++) {
1657 if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1658 break;
1659 udelay(1000);
1660 }
1661
1662 if (pi->gfx_clock_gating)
1663 WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1664
1665 return 0;
1666 }
1667 #endif
1668
1669 static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1670 {
1671 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1672 u8 memory_module_index;
1673 struct atom_memory_info memory_info;
1674
1675 pi->mclk_odt_threshold = 0;
1676
1677 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1678 memory_module_index = rv770_get_memory_module_index(rdev);
1679
1680 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1681 return;
1682
1683 if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1684 memory_info.mem_type == MEM_TYPE_DDR3)
1685 pi->mclk_odt_threshold = 30000;
1686 }
1687 }
1688
1689 void rv770_get_max_vddc(struct radeon_device *rdev)
1690 {
1691 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1692 u16 vddc;
1693
1694 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1695 pi->max_vddc = 0;
1696 else
1697 pi->max_vddc = vddc;
1698 }
1699
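/*
 * Convert the voltage/backbias response times and the ACPI/VBI timeouts
 * into delay counts derived from the reference clock and hand them to
 * the SMC via its soft registers.
 */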
1700 void rv770_program_response_times(struct radeon_device *rdev)
1701 {
1702 u32 voltage_response_time, backbias_response_time;
1703 u32 acpi_delay_time, vbi_time_out;
1704 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
1705 u32 reference_clock;
1706
1707 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1708 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1709
1710 if (voltage_response_time == 0)
1711 voltage_response_time = 1000;
1712
1713 if (backbias_response_time == 0)
1714 backbias_response_time = 1000;
1715
1716 acpi_delay_time = 15000;
1717 vbi_time_out = 100000;
1718
1719 reference_clock = radeon_get_xclk(rdev);
1720
1721 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1722 bb_dly = (backbias_response_time * reference_clock) / 1600;
1723 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1724 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1725
1726 rv770_write_smc_soft_register(rdev,
1727 RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1728 rv770_write_smc_soft_register(rdev,
1729 RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1730 rv770_write_smc_soft_register(rdev,
1731 RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1732 rv770_write_smc_soft_register(rdev,
1733 RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1734 #if 0
1735 /* XXX look up hw revision */
1736 if (WEKIVA_A21)
1737 rv770_write_smc_soft_register(rdev,
1738 RV770_SMC_SOFT_REGISTER_baby_step_timer,
1739 0x10);
1740 #endif
1741 }
1742
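/*
 * Dynamic on-die termination (RV730/RV710 only): ODT is reprogrammed
 * before the state switch only when mclk is about to rise above the
 * threshold, and after the switch only when it has dropped below it.
 */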
1743 static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
1744 struct radeon_ps *radeon_new_state,
1745 struct radeon_ps *radeon_current_state)
1746 {
1747 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1748 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1749 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1750 bool current_use_dc = false;
1751 bool new_use_dc = false;
1752
1753 if (pi->mclk_odt_threshold == 0)
1754 return;
1755
1756 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1757 current_use_dc = true;
1758
1759 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1760 new_use_dc = true;
1761
1762 if (current_use_dc == new_use_dc)
1763 return;
1764
1765 if (!current_use_dc && new_use_dc)
1766 return;
1767
1768 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1769 rv730_program_dcodt(rdev, new_use_dc);
1770 }
1771
1772 static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
1773 struct radeon_ps *radeon_new_state,
1774 struct radeon_ps *radeon_current_state)
1775 {
1776 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1777 struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1778 struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1779 bool current_use_dc = false;
1780 bool new_use_dc = false;
1781
1782 if (pi->mclk_odt_threshold == 0)
1783 return;
1784
1785 if (current_state->high.mclk <= pi->mclk_odt_threshold)
1786 current_use_dc = true;
1787
1788 if (new_state->high.mclk <= pi->mclk_odt_threshold)
1789 new_use_dc = true;
1790
1791 if (current_use_dc == new_use_dc)
1792 return;
1793
1794 if (current_use_dc && !new_use_dc)
1795 return;
1796
1797 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1798 rv730_program_dcodt(rdev, new_use_dc);
1799 }
1800
1801 static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1802 {
1803 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1804
1805 if (pi->mclk_odt_threshold == 0)
1806 return;
1807
1808 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1809 rv730_get_odt_values(rdev);
1810 }
1811
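/*
 * Select which throttle sources (internal digital sensor, external, or
 * either) drive the DPM thermal event and gate thermal protection on it.
 */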
1812 static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
1813 {
1814 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1815 bool want_thermal_protection;
1816 enum radeon_dpm_event_src dpm_event_src;
1817
1818 switch (sources) {
1819 case 0:
1820 default:
1821 want_thermal_protection = false;
1822 break;
1823 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
1824 want_thermal_protection = true;
1825 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
1826 break;
1827
1828 case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
1829 want_thermal_protection = true;
1830 dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
1831 break;
1832
1833 case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
1834 (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
1835 want_thermal_protection = true;
1836 dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
1837 break;
1838 }
1839
1840 if (want_thermal_protection) {
1841 WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
1842 if (pi->thermal_protection)
1843 WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
1844 } else {
1845 WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
1846 }
1847 }
1848
1849 void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
1850 enum radeon_dpm_auto_throttle_src source,
1851 bool enable)
1852 {
1853 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1854
1855 if (enable) {
1856 if (!(pi->active_auto_throttle_sources & (1 << source))) {
1857 pi->active_auto_throttle_sources |= 1 << source;
1858 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1859 }
1860 } else {
1861 if (pi->active_auto_throttle_sources & (1 << source)) {
1862 pi->active_auto_throttle_sources &= ~(1 << source);
1863 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
1864 }
1865 }
1866 }
1867
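/*
 * Clamp the requested range to 0..255 C, program the thermal interrupt
 * and DPM thresholds in degrees C, and store the clamped range back in
 * the dpm thermal limits.
 */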
1868 static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1869 int min_temp, int max_temp)
1870 {
1871 int low_temp = 0 * 1000;
1872 int high_temp = 255 * 1000;
1873
1874 if (low_temp < min_temp)
1875 low_temp = min_temp;
1876 if (high_temp > max_temp)
1877 high_temp = max_temp;
1878 if (high_temp < low_temp) {
1879 DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1880 return -EINVAL;
1881 }
1882
1883 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1884 WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1885 WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1886
1887 rdev->pm.dpm.thermal.min_temp = low_temp;
1888 rdev->pm.dpm.thermal.max_temp = high_temp;
1889
1890 return 0;
1891 }
1892
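/*
 * Full dpm bring-up: set up the voltage/mvdd tables, clock gating, spread
 * spectrum and thermal protection, upload the SMC firmware and state
 * table, then start the SMC and the dpm state machine.
 */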
1893 int rv770_dpm_enable(struct radeon_device *rdev)
1894 {
1895 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1896 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1897 int ret;
1898
1899 if (pi->gfx_clock_gating)
1900 rv770_restore_cgcg(rdev);
1901
1902 if (rv770_dpm_enabled(rdev))
1903 return -EINVAL;
1904
1905 if (pi->voltage_control) {
1906 rv770_enable_voltage_control(rdev, true);
1907 ret = rv770_construct_vddc_table(rdev);
1908 if (ret) {
1909 DRM_ERROR("rv770_construct_vddc_table failed\n");
1910 return ret;
1911 }
1912 }
1913
1914 if (pi->dcodt)
1915 rv770_retrieve_odt_values(rdev);
1916
1917 if (pi->mvdd_control) {
1918 ret = rv770_get_mvdd_configuration(rdev);
1919 if (ret) {
1920 DRM_ERROR("rv770_get_mvdd_configuration failed\n");
1921 return ret;
1922 }
1923 }
1924
1925 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1926 rv770_enable_backbias(rdev, true);
1927
1928 rv770_enable_spread_spectrum(rdev, true);
1929
1930 if (pi->thermal_protection)
1931 rv770_enable_thermal_protection(rdev, true);
1932
1933 rv770_program_mpll_timing_parameters(rdev);
1934 rv770_setup_bsp(rdev);
1935 rv770_program_git(rdev);
1936 rv770_program_tp(rdev);
1937 rv770_program_tpp(rdev);
1938 rv770_program_sstp(rdev);
1939 rv770_program_engine_speed_parameters(rdev);
1940 rv770_enable_display_gap(rdev);
1941 rv770_program_vc(rdev);
1942
1943 if (pi->dynamic_pcie_gen2)
1944 rv770_enable_dynamic_pcie_gen2(rdev, true);
1945
1946 ret = rv770_upload_firmware(rdev);
1947 if (ret) {
1948 DRM_ERROR("rv770_upload_firmware failed\n");
1949 return ret;
1950 }
1951 ret = rv770_init_smc_table(rdev, boot_ps);
1952 if (ret) {
1953 DRM_ERROR("rv770_init_smc_table failed\n");
1954 return ret;
1955 }
1956
1957 rv770_program_response_times(rdev);
1958 r7xx_start_smc(rdev);
1959
1960 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1961 rv730_start_dpm(rdev);
1962 else
1963 rv770_start_dpm(rdev);
1964
1965 if (pi->gfx_clock_gating)
1966 rv770_gfx_clock_gating_enable(rdev, true);
1967
1968 if (pi->mg_clock_gating)
1969 rv770_mg_clock_gating_enable(rdev, true);
1970
1971 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1972
1973 return 0;
1974 }
1975
1976 int rv770_dpm_late_enable(struct radeon_device *rdev)
1977 {
1978 int ret;
1979
1980 if (rdev->irq.installed &&
1981 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1982 PPSMC_Result result;
1983
1984 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1985 if (ret)
1986 return ret;
1987 rdev->irq.dpm_thermal = true;
1988 radeon_irq_set(rdev);
1989 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1990
1991 if (result != PPSMC_Result_OK)
1992 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1993 }
1994
1995 return 0;
1996 }
1997
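/* Tear down dpm in roughly the reverse order of rv770_dpm_enable(). */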
1998 void rv770_dpm_disable(struct radeon_device *rdev)
1999 {
2000 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2001
2002 if (!rv770_dpm_enabled(rdev))
2003 return;
2004
2005 rv770_clear_vc(rdev);
2006
2007 if (pi->thermal_protection)
2008 rv770_enable_thermal_protection(rdev, false);
2009
2010 rv770_enable_spread_spectrum(rdev, false);
2011
2012 if (pi->dynamic_pcie_gen2)
2013 rv770_enable_dynamic_pcie_gen2(rdev, false);
2014
2015 if (rdev->irq.installed &&
2016 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
2017 rdev->irq.dpm_thermal = false;
2018 radeon_irq_set(rdev);
2019 }
2020
2021 if (pi->gfx_clock_gating)
2022 rv770_gfx_clock_gating_enable(rdev, false);
2023
2024 if (pi->mg_clock_gating)
2025 rv770_mg_clock_gating_enable(rdev, false);
2026
2027 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
2028 rv730_stop_dpm(rdev);
2029 else
2030 rv770_stop_dpm(rdev);
2031
2032 r7xx_stop_smc(rdev);
2033 rv770_reset_smio_status(rdev);
2034 }
2035
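/*
 * Switch to the requested power state: halt the SMC, upload the new
 * software state and memory timings, then resume the SMC and commit the
 * switch, with dcodt and UVD clock fixups around it.
 */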
2036 int rv770_dpm_set_power_state(struct radeon_device *rdev)
2037 {
2038 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2039 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
2040 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
2041 int ret;
2042
2043 ret = rv770_restrict_performance_levels_before_switch(rdev);
2044 if (ret) {
2045 DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
2046 return ret;
2047 }
2048 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
2049 ret = rv770_halt_smc(rdev);
2050 if (ret) {
2051 DRM_ERROR("rv770_halt_smc failed\n");
2052 return ret;
2053 }
2054 ret = rv770_upload_sw_state(rdev, new_ps);
2055 if (ret) {
2056 DRM_ERROR("rv770_upload_sw_state failed\n");
2057 return ret;
2058 }
2059 r7xx_program_memory_timing_parameters(rdev, new_ps);
2060 if (pi->dcodt)
2061 rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
2062 ret = rv770_resume_smc(rdev);
2063 if (ret) {
2064 DRM_ERROR("rv770_resume_smc failed\n");
2065 return ret;
2066 }
2067 ret = rv770_set_sw_state(rdev);
2068 if (ret) {
2069 DRM_ERROR("rv770_set_sw_state failed\n");
2070 return ret;
2071 }
2072 if (pi->dcodt)
2073 rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
2074 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2075
2076 return 0;
2077 }
2078
2079 #if 0
2080 void rv770_dpm_reset_asic(struct radeon_device *rdev)
2081 {
2082 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2083 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
2084
2085 rv770_restrict_performance_levels_before_switch(rdev);
2086 if (pi->dcodt)
2087 rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
2088 rv770_set_boot_state(rdev);
2089 if (pi->dcodt)
2090 rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
2091 }
2092 #endif
2093
2094 void rv770_dpm_setup_asic(struct radeon_device *rdev)
2095 {
2096 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2097
2098 r7xx_read_clock_registers(rdev);
2099 rv770_read_voltage_smio_registers(rdev);
2100 rv770_get_memory_type(rdev);
2101 if (pi->dcodt)
2102 rv770_get_mclk_odt_threshold(rdev);
2103 rv770_get_pcie_gen2_status(rdev);
2104
2105 rv770_enable_acpi_pm(rdev);
2106
2107 if (radeon_aspm != 0) {
2108 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
2109 rv770_enable_l0s(rdev);
2110 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
2111 rv770_enable_l1(rdev);
2112 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
2113 rv770_enable_pll_sleep_in_l1(rdev);
2114 }
2115 }
2116
2117 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
2118 {
2119 rv770_program_display_gap(rdev);
2120 }
2121
2122 union power_info {
2123 struct _ATOM_POWERPLAY_INFO info;
2124 struct _ATOM_POWERPLAY_INFO_V2 info_2;
2125 struct _ATOM_POWERPLAY_INFO_V3 info_3;
2126 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2127 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2128 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2129 };
2130
2131 union pplib_clock_info {
2132 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2133 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2134 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2135 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2136 };
2137
2138 union pplib_power_state {
2139 struct _ATOM_PPLIB_STATE v1;
2140 struct _ATOM_PPLIB_STATE_V2 v2;
2141 };
2142
2143 static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
2144 struct radeon_ps *rps,
2145 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
2146 u8 table_rev)
2147 {
2148 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
2149 rps->class = le16_to_cpu(non_clock_info->usClassification);
2150 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
2151
2152 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
2153 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
2154 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
2155 } else {
2156 rps->vclk = 0;
2157 rps->dclk = 0;
2158 }
2159
2160 if (r600_is_uvd_state(rps->class, rps->class2)) {
2161 if ((rps->vclk == 0) || (rps->dclk == 0)) {
2162 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
2163 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
2164 }
2165 }
2166
2167 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
2168 rdev->pm.dpm.boot_ps = rps;
2169 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
2170 rdev->pm.dpm.uvd_ps = rps;
2171 }
2172
2173 static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
2174 struct radeon_ps *rps, int index,
2175 union pplib_clock_info *clock_info)
2176 {
2177 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2178 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2179 struct rv7xx_ps *ps = rv770_get_ps(rps);
2180 u32 sclk, mclk;
2181 struct rv7xx_pl *pl;
2182
2183 switch (index) {
2184 case 0:
2185 pl = &ps->low;
2186 break;
2187 case 1:
2188 pl = &ps->medium;
2189 break;
2190 case 2:
2191 default:
2192 pl = &ps->high;
2193 break;
2194 }
2195
2196 if (rdev->family >= CHIP_CEDAR) {
2197 sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
2198 sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
2199 mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
2200 mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
2201
2202 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
2203 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
2204 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
2205 } else {
2206 sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
2207 sclk |= clock_info->r600.ucEngineClockHigh << 16;
2208 mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
2209 mclk |= clock_info->r600.ucMemoryClockHigh << 16;
2210
2211 pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
2212 pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
2213 }
2214
2215 pl->mclk = mclk;
2216 pl->sclk = sclk;
2217
2218 /* patch up vddc if necessary */
2219 if (pl->vddc == 0xff01) {
2220 if (pi->max_vddc)
2221 pl->vddc = pi->max_vddc;
2222 }
2223
2224 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
2225 pi->acpi_vddc = pl->vddc;
2226 if (rdev->family >= CHIP_CEDAR)
2227 eg_pi->acpi_vddci = pl->vddci;
2228 if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
2229 pi->acpi_pcie_gen2 = true;
2230 else
2231 pi->acpi_pcie_gen2 = false;
2232 }
2233
2234 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
2235 if (rdev->family >= CHIP_BARTS) {
2236 eg_pi->ulv.supported = true;
2237 eg_pi->ulv.pl = pl;
2238 }
2239 }
2240
2241 if (pi->min_vddc_in_table > pl->vddc)
2242 pi->min_vddc_in_table = pl->vddc;
2243
2244 if (pi->max_vddc_in_table < pl->vddc)
2245 pi->max_vddc_in_table = pl->vddc;
2246
2247 /* patch up boot state */
2248 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
2249 u16 vddc, vddci, mvdd;
2250 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
2251 pl->mclk = rdev->clock.default_mclk;
2252 pl->sclk = rdev->clock.default_sclk;
2253 pl->vddc = vddc;
2254 pl->vddci = vddci;
2255 }
2256
2257 if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
2258 ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
2259 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
2260 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
2261 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
2262 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
2263 }
2264 }
2265
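/*
 * Walk the ATOM PowerPlay state array: allocate an rv7xx_ps per state and
 * parse its non-clock info and its performance levels (low/medium/high).
 */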
2266 int rv7xx_parse_power_table(struct radeon_device *rdev)
2267 {
2268 struct radeon_mode_info *mode_info = &rdev->mode_info;
2269 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2270 union pplib_power_state *power_state;
2271 int i, j;
2272 union pplib_clock_info *clock_info;
2273 union power_info *power_info;
2274 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2275 u16 data_offset;
2276 u8 frev, crev;
2277 struct rv7xx_ps *ps;
2278
2279 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2280 &frev, &crev, &data_offset))
2281 return -EINVAL;
2282 power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2283
2284 rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates,
2285 sizeof(struct radeon_ps),
2286 GFP_KERNEL);
2287 if (!rdev->pm.dpm.ps)
2288 return -ENOMEM;
2289
2290 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2291 power_state = (union pplib_power_state *)
2292 (mode_info->atom_context->bios + data_offset +
2293 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2294 i * power_info->pplib.ucStateEntrySize);
2295 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2296 (mode_info->atom_context->bios + data_offset +
2297 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2298 (power_state->v1.ucNonClockStateIndex *
2299 power_info->pplib.ucNonClockSize));
2300 if (power_info->pplib.ucStateEntrySize - 1) {
2301 u8 *idx;
2302 ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2303 if (ps == NULL) {
2304 kfree(rdev->pm.dpm.ps);
2305 return -ENOMEM;
2306 }
2307 rdev->pm.dpm.ps[i].ps_priv = ps;
2308 rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2309 non_clock_info,
2310 power_info->pplib.ucNonClockSize);
2311 idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
2312 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2313 clock_info = (union pplib_clock_info *)
2314 (mode_info->atom_context->bios + data_offset +
2315 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2316 (idx[j] * power_info->pplib.ucClockInfoSize));
2317 rv7xx_parse_pplib_clock_info(rdev,
2318 &rdev->pm.dpm.ps[i], j,
2319 clock_info);
2320 }
2321 }
2322 }
2323 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2324 return 0;
2325 }
2326
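/* Check the vbios for engine/memory spread spectrum support. */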
2327 void rv770_get_engine_memory_ss(struct radeon_device *rdev)
2328 {
2329 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2330 struct radeon_atom_ss ss;
2331
2332 pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2333 ASIC_INTERNAL_ENGINE_SS, 0);
2334 pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2335 ASIC_INTERNAL_MEMORY_SS, 0);
2336
2337 if (pi->sclk_ss || pi->mclk_ss)
2338 pi->dynamic_ss = true;
2339 else
2340 pi->dynamic_ss = false;
2341 }
2342
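/*
 * One-time dpm setup: allocate the power info struct, parse the vbios
 * power table and fill in the rv7xx defaults and feature flags.
 */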
2343 int rv770_dpm_init(struct radeon_device *rdev)
2344 {
2345 struct rv7xx_power_info *pi;
2346 struct atom_clock_dividers dividers;
2347 int ret;
2348
2349 pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2350 if (pi == NULL)
2351 return -ENOMEM;
2352 rdev->pm.dpm.priv = pi;
2353
2354 rv770_get_max_vddc(rdev);
2355
2356 pi->acpi_vddc = 0;
2357 pi->min_vddc_in_table = 0;
2358 pi->max_vddc_in_table = 0;
2359
2360 ret = r600_get_platform_caps(rdev);
2361 if (ret)
2362 return ret;
2363
2364 ret = rv7xx_parse_power_table(rdev);
2365 if (ret)
2366 return ret;
2367
2368 if (rdev->pm.dpm.voltage_response_time == 0)
2369 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2370 if (rdev->pm.dpm.backbias_response_time == 0)
2371 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2372
2373 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2374 0, false, &dividers);
2375 if (ret)
2376 pi->ref_div = dividers.ref_div + 1;
2377 else
2378 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2379
2380 pi->mclk_strobe_mode_threshold = 30000;
2381 pi->mclk_edc_enable_threshold = 30000;
2382
2383 pi->rlp = RV770_RLP_DFLT;
2384 pi->rmp = RV770_RMP_DFLT;
2385 pi->lhp = RV770_LHP_DFLT;
2386 pi->lmp = RV770_LMP_DFLT;
2387
2388 pi->voltage_control =
2389 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2390
2391 pi->mvdd_control =
2392 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2393
2394 rv770_get_engine_memory_ss(rdev);
2395
2396 pi->asi = RV770_ASI_DFLT;
2397 pi->pasi = RV770_HASI_DFLT;
2398 pi->vrc = RV770_VRC_DFLT;
2399
2400 pi->power_gating = false;
2401
2402 pi->gfx_clock_gating = true;
2403
2404 pi->mg_clock_gating = true;
2405 pi->mgcgtssm = true;
2406
2407 pi->dynamic_pcie_gen2 = true;
2408
2409 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2410 pi->thermal_protection = true;
2411 else
2412 pi->thermal_protection = false;
2413
2414 pi->display_gap = true;
2415
2416 if (rdev->flags & RADEON_IS_MOBILITY)
2417 pi->dcodt = true;
2418 else
2419 pi->dcodt = false;
2420
2421 pi->ulps = true;
2422
2423 pi->mclk_stutter_mode_threshold = 0;
2424
2425 pi->sram_end = SMC_RAM_END;
2426 pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2427 pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2428
2429 return 0;
2430 }
2431
2432 void rv770_dpm_print_power_state(struct radeon_device *rdev,
2433 struct radeon_ps *rps)
2434 {
2435 struct rv7xx_ps *ps = rv770_get_ps(rps);
2436 struct rv7xx_pl *pl;
2437
2438 r600_dpm_print_class_info(rps->class, rps->class2);
2439 r600_dpm_print_cap_info(rps->caps);
2440 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2441 if (rdev->family >= CHIP_CEDAR) {
2442 pl = &ps->low;
2443 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2444 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2445 pl = &ps->medium;
2446 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2447 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2448 pl = &ps->high;
2449 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
2450 pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2451 } else {
2452 pl = &ps->low;
2453 printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
2454 pl->sclk, pl->mclk, pl->vddc);
2455 pl = &ps->medium;
2456 printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
2457 pl->sclk, pl->mclk, pl->vddc);
2458 pl = &ps->high;
2459 printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
2460 pl->sclk, pl->mclk, pl->vddc);
2461 }
2462 r600_dpm_print_ps_status(rdev, rps);
2463 }
2464
2465 void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
2466 struct seq_file *m)
2467 {
2468 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2469 struct rv7xx_ps *ps = rv770_get_ps(rps);
2470 struct rv7xx_pl *pl;
2471 u32 current_index =
2472 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2473 CURRENT_PROFILE_INDEX_SHIFT;
2474
2475 if (current_index > 2) {
2476 seq_printf(m, "invalid dpm profile %d\n", current_index);
2477 } else {
2478 if (current_index == 0)
2479 pl = &ps->low;
2480 else if (current_index == 1)
2481 pl = &ps->medium;
2482 else /* current_index == 2 */
2483 pl = &ps->high;
2484 seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2485 if (rdev->family >= CHIP_CEDAR) {
2486 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
2487 current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2488 } else {
2489 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u\n",
2490 current_index, pl->sclk, pl->mclk, pl->vddc);
2491 }
2492 }
2493 }
2494
2495 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev)
2496 {
2497 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2498 struct rv7xx_ps *ps = rv770_get_ps(rps);
2499 struct rv7xx_pl *pl;
2500 u32 current_index =
2501 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2502 CURRENT_PROFILE_INDEX_SHIFT;
2503
2504 if (current_index > 2) {
2505 return 0;
2506 } else {
2507 if (current_index == 0)
2508 pl = &ps->low;
2509 else if (current_index == 1)
2510 pl = &ps->medium;
2511 else /* current_index == 2 */
2512 pl = &ps->high;
2513 return pl->sclk;
2514 }
2515 }
2516
2517 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev)
2518 {
2519 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2520 struct rv7xx_ps *ps = rv770_get_ps(rps);
2521 struct rv7xx_pl *pl;
2522 u32 current_index =
2523 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2524 CURRENT_PROFILE_INDEX_SHIFT;
2525
2526 if (current_index > 2) {
2527 return 0;
2528 } else {
2529 if (current_index == 0)
2530 pl = &ps->low;
2531 else if (current_index == 1)
2532 pl = &ps->medium;
2533 else /* current_index == 2 */
2534 pl = &ps->high;
2535 return pl->mclk;
2536 }
2537 }
2538
2539 void rv770_dpm_fini(struct radeon_device *rdev)
2540 {
2541 int i;
2542
2543 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2544 kfree(rdev->pm.dpm.ps[i].ps_priv);
2545 }
2546 kfree(rdev->pm.dpm.ps);
2547 kfree(rdev->pm.dpm.priv);
2548 }
2549
2550 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2551 {
2552 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2553
2554 if (low)
2555 return requested_state->low.sclk;
2556 else
2557 return requested_state->high.sclk;
2558 }
2559
2560 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2561 {
2562 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2563
2564 if (low)
2565 return requested_state->low.mclk;
2566 else
2567 return requested_state->high.mclk;
2568 }
2569
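/*
 * mclk switching has to fit inside the vblank period; report "too short"
 * when the measured vblank time is below the switch limit (and always on
 * desktop RV770, where mclk switching is unreliable).
 */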
2570 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
2571 {
2572 u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2573 u32 switch_limit = 200; /* 300 */
2574
2575 /* RV770 */
2576 /* mclk switching doesn't seem to work reliably on desktop RV770s */
2577 if ((rdev->family == CHIP_RV770) &&
2578 !(rdev->flags & RADEON_IS_MOBILITY))
2579 switch_limit = 0xffffffff; /* disable mclk switching */
2580
2581 if (vblank_time < switch_limit)
2582 return true;
2583 else
2584 return false;
2585
2586 }
2587