1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun * Copyright 2011 Advanced Micro Devices, Inc.
3*4882a593Smuzhiyun *
4*4882a593Smuzhiyun * Permission is hereby granted, free of charge, to any person obtaining a
5*4882a593Smuzhiyun * copy of this software and associated documentation files (the "Software"),
6*4882a593Smuzhiyun * to deal in the Software without restriction, including without limitation
7*4882a593Smuzhiyun * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8*4882a593Smuzhiyun * and/or sell copies of the Software, and to permit persons to whom the
9*4882a593Smuzhiyun * Software is furnished to do so, subject to the following conditions:
10*4882a593Smuzhiyun *
11*4882a593Smuzhiyun * The above copyright notice and this permission notice shall be included in
12*4882a593Smuzhiyun * all copies or substantial portions of the Software.
13*4882a593Smuzhiyun *
14*4882a593Smuzhiyun * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15*4882a593Smuzhiyun * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16*4882a593Smuzhiyun * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17*4882a593Smuzhiyun * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18*4882a593Smuzhiyun * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19*4882a593Smuzhiyun * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20*4882a593Smuzhiyun * OTHER DEALINGS IN THE SOFTWARE.
21*4882a593Smuzhiyun *
22*4882a593Smuzhiyun * Authors: Alex Deucher
23*4882a593Smuzhiyun */
24*4882a593Smuzhiyun
25*4882a593Smuzhiyun #include "radeon.h"
26*4882a593Smuzhiyun #include "radeon_asic.h"
27*4882a593Smuzhiyun #include "rv770d.h"
28*4882a593Smuzhiyun #include "r600_dpm.h"
29*4882a593Smuzhiyun #include "rv770_dpm.h"
30*4882a593Smuzhiyun #include "cypress_dpm.h"
31*4882a593Smuzhiyun #include "atom.h"
32*4882a593Smuzhiyun #include <linux/seq_file.h>
33*4882a593Smuzhiyun
34*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F0 0x0a
35*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F1 0x0b
36*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F2 0x0c
37*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F3 0x0d
38*4882a593Smuzhiyun
39*4882a593Smuzhiyun #define MC_CG_SEQ_DRAMCONF_S0 0x05
40*4882a593Smuzhiyun #define MC_CG_SEQ_DRAMCONF_S1 0x06
41*4882a593Smuzhiyun
42*4882a593Smuzhiyun #define PCIE_BUS_CLK 10000
43*4882a593Smuzhiyun #define TCLK (PCIE_BUS_CLK / 10)
44*4882a593Smuzhiyun
45*4882a593Smuzhiyun #define SMC_RAM_END 0xC000
46*4882a593Smuzhiyun
rv770_get_ps(struct radeon_ps * rps)47*4882a593Smuzhiyun struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
48*4882a593Smuzhiyun {
49*4882a593Smuzhiyun struct rv7xx_ps *ps = rps->ps_priv;
50*4882a593Smuzhiyun
51*4882a593Smuzhiyun return ps;
52*4882a593Smuzhiyun }
53*4882a593Smuzhiyun
rv770_get_pi(struct radeon_device * rdev)54*4882a593Smuzhiyun struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
55*4882a593Smuzhiyun {
56*4882a593Smuzhiyun struct rv7xx_power_info *pi = rdev->pm.dpm.priv;
57*4882a593Smuzhiyun
58*4882a593Smuzhiyun return pi;
59*4882a593Smuzhiyun }
60*4882a593Smuzhiyun
evergreen_get_pi(struct radeon_device * rdev)61*4882a593Smuzhiyun struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
62*4882a593Smuzhiyun {
63*4882a593Smuzhiyun struct evergreen_power_info *pi = rdev->pm.dpm.priv;
64*4882a593Smuzhiyun
65*4882a593Smuzhiyun return pi;
66*4882a593Smuzhiyun }
67*4882a593Smuzhiyun
/*
 * Enable or disable hardware-driven dynamic PCIE gen2 speed switching
 * in the bus interface (BIF).
 *
 * Read-modify-write of PCIE_LC_SPEED_CNTL: when enabling, hand voltage
 * interface control to hardware and strap gen2 on; when disabling, only
 * back the strap out if the board did not boot in gen2 mode.  The write
 * is skipped entirely unless the link partner has demonstrated gen2
 * capability (either side ever sent/advertised gen2).
 */
static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		/* let HW control the voltage interface (mode 1) */
		tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
		tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
		tmp |= LC_GEN2_EN_STRAP;
	} else {
		/* leave gen2 strapped on if that is how we booted */
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
	}
	/* only commit if the other end of the link supports gen2 */
	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);

}
90*4882a593Smuzhiyun
rv770_enable_l0s(struct radeon_device * rdev)91*4882a593Smuzhiyun static void rv770_enable_l0s(struct radeon_device *rdev)
92*4882a593Smuzhiyun {
93*4882a593Smuzhiyun u32 tmp;
94*4882a593Smuzhiyun
95*4882a593Smuzhiyun tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
96*4882a593Smuzhiyun tmp |= LC_L0S_INACTIVITY(3);
97*4882a593Smuzhiyun WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
98*4882a593Smuzhiyun }
99*4882a593Smuzhiyun
rv770_enable_l1(struct radeon_device * rdev)100*4882a593Smuzhiyun static void rv770_enable_l1(struct radeon_device *rdev)
101*4882a593Smuzhiyun {
102*4882a593Smuzhiyun u32 tmp;
103*4882a593Smuzhiyun
104*4882a593Smuzhiyun tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
105*4882a593Smuzhiyun tmp &= ~LC_L1_INACTIVITY_MASK;
106*4882a593Smuzhiyun tmp |= LC_L1_INACTIVITY(4);
107*4882a593Smuzhiyun tmp &= ~LC_PMI_TO_L1_DIS;
108*4882a593Smuzhiyun tmp &= ~LC_ASPM_TO_L1_DIS;
109*4882a593Smuzhiyun WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
110*4882a593Smuzhiyun }
111*4882a593Smuzhiyun
/*
 * Allow the PCIE PLL to power down while the link sits in L1/L23.
 *
 * First raises the L1 inactivity field in PCIE_LC_CNTL (value 8), then
 * configures PCIE_P_CNTL so the PLL and its buffers shut off in
 * L1/L23 and the PRX front end may be shut off as well.
 */
static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(8);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);

	/* NOTE, this is a PCIE indirect reg, not PCIE PORT */
	tmp = RREG32_PCIE(PCIE_P_CNTL);
	tmp |= P_PLL_PWRDN_IN_L1L23;
	tmp &= ~P_PLL_BUF_PDNB;
	tmp &= ~P_PLL_PDNB;
	tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
	WREG32_PCIE(PCIE_P_CNTL, tmp);
}
128*4882a593Smuzhiyun
/*
 * Enable or disable dynamic GFX clock gating.
 *
 * On disable, the write sequence matters: turn dynamic gating off,
 * pulse GFX_CLK_FORCE_ON to make sure the clock is running, then a
 * read of GB_TILING_CONFIG to flush the posted writes.
 */
static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
					  bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		/* force the gfx clock on, then release the force */
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_TILING_CONFIG); /* readback to flush */
	}
}
141*4882a593Smuzhiyun
/*
 * Enable or disable medium-grain (MG) clock gating.
 *
 * Enabling loads the chip-specific CGTT local-0 default (RV770 has its
 * own value; other rv7xx parts share RV7XX_MGCGTTLOCAL0_DFLT) plus a
 * masked local-1 default, and programs the CGTS SM control register when
 * the platform supports it (pi->mgcgtssm).  Disabling writes all-ones to
 * both CGTT locals, which turns the gating overrides off.
 */
static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		u32 mgcg_cgtt_local0;

		if (rdev->family == CHIP_RV770)
			mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
		else
			mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;

		WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
		WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
	} else {
		WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
	}
}
165*4882a593Smuzhiyun
rv770_restore_cgcg(struct radeon_device * rdev)166*4882a593Smuzhiyun void rv770_restore_cgcg(struct radeon_device *rdev)
167*4882a593Smuzhiyun {
168*4882a593Smuzhiyun bool dpm_en = false, cg_en = false;
169*4882a593Smuzhiyun
170*4882a593Smuzhiyun if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
171*4882a593Smuzhiyun dpm_en = true;
172*4882a593Smuzhiyun if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
173*4882a593Smuzhiyun cg_en = true;
174*4882a593Smuzhiyun
175*4882a593Smuzhiyun if (dpm_en && !cg_en)
176*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
177*4882a593Smuzhiyun }
178*4882a593Smuzhiyun
/*
 * Start dynamic power management: un-gate SCLK and MPLL power
 * management, then flip the global power management enable bit.
 * The global enable is written last so the clock PM blocks are
 * already live when it takes effect.
 */
static void rv770_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);

	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}
187*4882a593Smuzhiyun
/*
 * Stop dynamic power management.
 *
 * Asks the SMC to drop to the low (two-levels-disabled) state first;
 * a failure there is only logged since we are shutting DPM down
 * anyway.  Then the global enable is cleared and SCLK/MPLL power
 * management are gated off, mirroring rv770_start_dpm() in reverse.
 */
void rv770_stop_dpm(struct radeon_device *rdev)
{
	PPSMC_Result result;

	result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);

	if (result != PPSMC_Result_OK)
		DRM_DEBUG("Could not force DPM to low.\n");

	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);

	WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}
203*4882a593Smuzhiyun
rv770_dpm_enabled(struct radeon_device * rdev)204*4882a593Smuzhiyun bool rv770_dpm_enabled(struct radeon_device *rdev)
205*4882a593Smuzhiyun {
206*4882a593Smuzhiyun if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
207*4882a593Smuzhiyun return true;
208*4882a593Smuzhiyun else
209*4882a593Smuzhiyun return false;
210*4882a593Smuzhiyun }
211*4882a593Smuzhiyun
/*
 * Toggle thermal protection.  The hardware bit is a *disable* flag,
 * so enabling protection means clearing THERMAL_PROTECTION_DIS.
 */
void rv770_enable_thermal_protection(struct radeon_device *rdev,
				     bool enable)
{
	if (!enable)
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS,
			 ~THERMAL_PROTECTION_DIS);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
}
220*4882a593Smuzhiyun
/* Enable static power management (used for the ACPI/static state). */
void rv770_enable_acpi_pm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
}
225*4882a593Smuzhiyun
rv770_get_seq_value(struct radeon_device * rdev,struct rv7xx_pl * pl)226*4882a593Smuzhiyun u8 rv770_get_seq_value(struct radeon_device *rdev,
227*4882a593Smuzhiyun struct rv7xx_pl *pl)
228*4882a593Smuzhiyun {
229*4882a593Smuzhiyun return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
230*4882a593Smuzhiyun MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
231*4882a593Smuzhiyun }
232*4882a593Smuzhiyun
233*4882a593Smuzhiyun #if 0
234*4882a593Smuzhiyun int rv770_read_smc_soft_register(struct radeon_device *rdev,
235*4882a593Smuzhiyun u16 reg_offset, u32 *value)
236*4882a593Smuzhiyun {
237*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
238*4882a593Smuzhiyun
239*4882a593Smuzhiyun return rv770_read_smc_sram_dword(rdev,
240*4882a593Smuzhiyun pi->soft_regs_start + reg_offset,
241*4882a593Smuzhiyun value, pi->sram_end);
242*4882a593Smuzhiyun }
243*4882a593Smuzhiyun #endif
244*4882a593Smuzhiyun
/*
 * Write one 32-bit soft register in SMC SRAM.  The register lives at
 * reg_offset bytes past the soft-register base recorded in the power
 * info; bounds checking against pi->sram_end is done by the SRAM
 * write helper.
 */
int rv770_write_smc_soft_register(struct radeon_device *rdev,
				  u16 reg_offset, u32 value)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 addr = pi->soft_regs_start + reg_offset;

	return rv770_write_smc_sram_dword(rdev, addr, value, pi->sram_end);
}
254*4882a593Smuzhiyun
/*
 * Populate the per-level aT (arbitration/transition timing) fields of
 * an SMC software state.
 *
 * For each adjacent pair of performance levels (low->medium,
 * medium->high) an interpolation ratio a_n/a_d is computed from the
 * levels' sclks and the left/right hysteresis percentages (pi->lmp,
 * pi->rlp, pi->lhp, pi->rmp), yielding l[] (left) and r[] (right)
 * thresholds in percent.  These are scaled by the boot state period
 * (pi->bsp) for all but the last level, and by the performance state
 * period (pi->pbsp) for the last, then packed via CG_R/CG_L into the
 * big-endian aT word.  Integer division order is significant for
 * rounding — do not re-associate these expressions.
 *
 * Always returns 0.
 */
int rv770_populate_smc_t(struct radeon_device *rdev,
			 struct radeon_ps *radeon_state,
			 RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;
	int a_n;
	int a_d;
	u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u32 a_t;

	/* fixed endpoints: lowest left threshold 0%, highest right 100% */
	l[0] = 0;
	r[2] = 100;

	/* low <-> medium interpolation ratio */
	a_n = (int)state->medium.sclk * pi->lmp +
		(int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
	a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
		(int)state->medium.sclk * pi->lmp;

	l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
	r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);

	/* medium <-> high interpolation ratio */
	a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
		(R600_AH_DFLT - pi->rmp);
	a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
		(int)state->high.sclk * pi->lhp;

	l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
	r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);

	/* all levels but the last scale by the boot state period */
	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
		a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
		smc_state->levels[i].aT = cpu_to_be32(a_t);
	}

	/* last level scales by the performance state period instead */
	a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
		CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);

	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
		cpu_to_be32(a_t);

	return 0;
}
300*4882a593Smuzhiyun
/*
 * Populate the per-level bSP (switch period) fields of an SMC software
 * state: every level uses the dynamic switch period (pi->dsp) except
 * the last, which uses the performance switch period (pi->psp).
 * Always returns 0.
 */
int rv770_populate_smc_sp(struct radeon_device *rdev,
			  struct radeon_ps *radeon_state,
			  RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	for (i = 0; i < RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE; i++) {
		bool last = (i == RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1);

		smc_state->levels[i].bSP = cpu_to_be32(last ? pi->psp : pi->dsp);
	}

	return 0;
}
316*4882a593Smuzhiyun
/*
 * Compute the fractional MPLL feedback divider for a target memory
 * clock: the divider is produced in eighths (feedback_divider8), then
 * split into the integer part (*clkf) and the fractional eighths
 * (*clkfrac).  GDDR5 runs the yclk at twice the rate of other DRAM
 * types (8x vs 4x memory_clock, halved).
 */
static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
							     u32 reference_clock,
							     bool gddr5,
							     struct atom_clock_dividers *dividers,
							     u32 *clkf,
							     u32 *clkfrac)
{
	u32 fyclk = ((gddr5 ? 8 : 4) * memory_clock) / 2;
	u32 feedback_divider8 =
		(8 * fyclk * dividers->ref_div * dividers->post_div) /
		reference_clock;

	*clkf = feedback_divider8 / 8;
	*clkfrac = feedback_divider8 % 8;
}
341*4882a593Smuzhiyun
/*
 * Encode a yclk post divider (must be a power of two, 1..16) as its
 * register field value (0..4).  Returns -EINVAL and leaves
 * *encoded_postdiv untouched for any other divider.
 */
static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
{
	static const u32 valid_postdivs[] = { 1, 2, 4, 8, 16 };
	u32 i;

	for (i = 0; i < sizeof(valid_postdivs) / sizeof(valid_postdivs[0]); i++) {
		if (postdiv == valid_postdivs[i]) {
			*encoded_postdiv = i;
			return 0;
		}
	}

	return -EINVAL;
}
369*4882a593Smuzhiyun
/*
 * Map an MPLL feedback divider (clkf) to the matching IBIAS register
 * value.  The thresholds are inclusive upper bounds; anything above
 * the largest threshold gets 0xC6.
 */
u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	static const struct {
		u32 max_clkf;
		u32 ibias;
	} ibias_table[] = {
		{ 0x10, 0x4B },
		{ 0x19, 0x5B },
		{ 0x21, 0x2B },
		{ 0x27, 0x6C },
		{ 0x31, 0x9D },
	};
	u32 i;

	for (i = 0; i < sizeof(ibias_table) / sizeof(ibias_table[0]); i++) {
		if (clkf <= ibias_table[i].max_clkf)
			return ibias_table[i].ibias;
	}

	return 0xC6;
}
384*4882a593Smuzhiyun
/*
 * Build the SMC MCLK register block for a target memory clock.
 *
 * Fetches the memory PLL dividers from the ATOM tables, derives the
 * fractional feedback divider and IBIAS, and patches those fields into
 * cached copies of the MPLL AD (address) function control registers.
 * For GDDR5 the DQ (data) path registers are programmed as well (the
 * recomputation with the same inputs mirrors the original flow).  All
 * values are stored big-endian for the SMC.
 *
 * Returns 0 on success, -EINVAL for an out-of-range reference divider,
 * or the error from the ATOM divider lookup / post-div encoding.
 */
static int rv770_populate_mclk_value(struct radeon_device *rdev,
				     u32 engine_clock, u32 memory_clock,
				     RV7XX_SMC_MCLK_VALUE *mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	/* maps ATOM ref_div 1..5 to the CLKR field encoding */
	u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl =
		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
	struct atom_clock_dividers dividers;
	u32 reference_clock = rdev->clock.mpll.reference_freq;
	u32 clkf, clkfrac;
	u32 postdiv_yclk;
	u32 ibias;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, false, &dividers);
	if (ret)
		return ret;

	/* the encoded_reference_dividers table only covers ref_div 1..5 */
	if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
		return -EINVAL;

	rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
							 pi->mem_gddr5,
							 &dividers, &clkf, &clkfrac);

	ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
	if (ret)
		return ret;

	ibias = rv770_map_clkf_to_ibias(rdev, clkf);

	/* program the address-path MPLL control fields */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
	mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
	mpll_ad_func_cntl |= CLKF(clkf);
	mpll_ad_func_cntl |= CLKFRAC(clkfrac);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		/* GDDR5 also drives the data-path (DQ) MPLL */
		rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
								 reference_clock,
								 pi->mem_gddr5,
								 &dividers, &clkf, &clkfrac);

		ibias = rv770_map_clkf_to_ibias(rdev, clkf);

		ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
		if (ret)
			return ret;

		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
		mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
		mpll_dq_func_cntl |= CLKF(clkf);
		mpll_dq_func_cntl |= CLKFRAC(clkfrac);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	/* SMC consumes these big-endian */
	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	return 0;
}
482*4882a593Smuzhiyun
/*
 * Build the SMC SCLK register block for a target engine clock.
 *
 * Fetches the engine PLL dividers from the ATOM tables, computes the
 * 14.14 fixed-point feedback divider (fbdiv) with 64-bit math, and
 * patches the cached SPLL function control registers.  If engine
 * spread spectrum is enabled and the ATOM tables carry SS info for
 * the resulting VCO frequency, CLKS/CLKV are programmed as well.
 * All values are stored big-endian for the SMC.
 *
 * Returns 0 on success or the error from the ATOM divider lookup.
 */
static int rv770_populate_sclk_value(struct radeon_device *rdev,
				     u32 engine_clock,
				     RV770_SMC_SCLK_VALUE *sclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct atom_clock_dividers dividers;
	u32 spll_func_cntl =
		pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 =
		pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 =
		pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 cg_spll_spread_spectrum =
		pi->clk_regs.rv770.cg_spll_spread_spectrum;
	u32 cg_spll_spread_spectrum_2 =
		pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
	u64 tmp;
	u32 reference_clock = rdev->clock.spll.reference_freq;
	u32 reference_divider, post_divider;
	u32 fbdiv;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     engine_clock, false, &dividers);
	if (ret)
		return ret;

	reference_divider = 1 + dividers.ref_div;

	/* post_div packs hi/lo nibbles; effective divider is hi+lo+2 */
	if (dividers.enable_post_div)
		post_divider = (0x0f & (dividers.post_div >> 4)) + (0x0f & dividers.post_div) + 2;
	else
		post_divider = 1;

	/* fbdiv = engine_clock * refdiv * postdiv / refclk, in 1/16384ths */
	tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
	do_div(tmp, reference_clock);
	fbdiv = (u32) tmp;

	if (dividers.enable_post_div)
		spll_func_cntl |= SPLL_DIVEN;
	else
		spll_func_cntl &= ~SPLL_DIVEN;
	spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
	spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
	spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(2);

	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
	spll_func_cntl_3 |= SPLL_DITHEN;

	if (pi->sclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = engine_clock * post_divider;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
			/* spread-spectrum step (clk_s) and delta (clk_v) */
			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
			u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);

			cg_spll_spread_spectrum &= ~CLKS_MASK;
			cg_spll_spread_spectrum |= CLKS(clk_s);
			cg_spll_spread_spectrum |= SSEN;

			cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
			cg_spll_spread_spectrum_2 |= CLKV(clk_v);
		}
	}

	/* SMC consumes these big-endian */
	sclk->sclk_value = cpu_to_be32(engine_clock);
	sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);

	return 0;
}
564*4882a593Smuzhiyun
/*
 * Fill in the SMC VDDC voltage entry for a requested voltage (mV).
 *
 * Without voltage control the entry is zeroed.  Otherwise the first
 * table entry that can supply at least the requested voltage is used
 * (the table is assumed sorted ascending — TODO confirm against the
 * table construction code).  Returns -EINVAL if no entry is high
 * enough.
 */
int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	if (!pi->voltage_control) {
		voltage->index = 0;
		voltage->value = 0;
		return 0;
	}

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		if (vddc <= pi->vddc_table[i].vddc) {
			voltage->index = pi->vddc_table[i].vddc_index;
			voltage->value = cpu_to_be16(vddc);
			return 0;
		}
	}

	return -EINVAL;
}
590*4882a593Smuzhiyun
/*
 * Fill in the SMC MVDD (memory voltage) entry for a memory clock.
 *
 * The high setting is used when MVDD control is unavailable or when
 * the clock exceeds the split frequency; otherwise the low setting is
 * used.  Always returns 0.
 */
int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool use_high = !pi->mvdd_control || (mclk > pi->mvdd_split_frequency);

	if (use_high) {
		voltage->index = MVDD_HIGH_INDEX;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	} else {
		voltage->index = MVDD_LOW_INDEX;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	}

	return 0;
}
612*4882a593Smuzhiyun
/*
 * Convert one driver performance level into its SMC hardware level.
 *
 * Sets the PCIE gen2 / backbias flags and the display watermark, then
 * dispatches the sclk/mclk register programming to the chip-family
 * specific helper (RV740, RV730/RV710, or the RV770 default).  RV740
 * with GDDR5 additionally selects strobe mode below the strobe
 * threshold and enables EDC read/write flags above the EDC threshold.
 * Finishes by populating the VDDC and MVDD voltage entries.
 *
 * Returns 0 on success or the first helper error encountered.
 */
static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
					    struct rv7xx_pl *pl,
					    RV770_SMC_HW_PERFORMANCE_LEVEL *level,
					    u8 watermark_level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int ret;

	/* gen2PCIE only if both the asic and this level support gen2 */
	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
	level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
	level->displayWatermark = watermark_level;

	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	else
		ret = rv770_populate_sclk_value(rdev, pl->sclk,
						&level->sclk);
	if (ret)
		return ret;

	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			/* strobe mode below threshold; EDC above its threshold */
			if (pl->mclk <= pi->mclk_strobe_mode_threshold)
				level->strobeMode =
					rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
			else
				level->strobeMode = 0;

			if (pl->mclk > pi->mclk_edc_enable_threshold)
				level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				level->mcFlags = 0;
		}
		ret = rv740_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	} else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	else
		ret = rv770_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	if (ret)
		return ret;

	ret = rv770_populate_vddc_value(rdev, pl->vddc,
					&level->vddc);
	if (ret)
		return ret;

	ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
672*4882a593Smuzhiyun
/**
 * rv770_convert_power_state_to_smc - build a full SMC software state
 * @rdev: radeon device
 * @radeon_state: driver power state with low/medium/high levels
 * @smc_state: SMC software state to populate
 *
 * Converts the three performance levels, assigns per-level arbitration and
 * sequencer values, then fills the state's sampling period and transition
 * parameters.  Returns 0 on success or the first error from a helper.
 */
static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	int ret;

	/* mark the state as usable on DC (battery) unless explicitly disallowed */
	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->low,
					       &smc_state->levels[0],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	/* note: the medium level shares the LOW display watermark */
	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->medium,
					       &smc_state->levels[1],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->high,
					       &smc_state->levels[2],
					       PPSMC_DISPLAY_WATERMARK_HIGH);
	if (ret)
		return ret;

	/* each level gets its own MC arbitration register set (F1..F3) */
	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;

	smc_state->levels[0].seqValue = rv770_get_seq_value(rdev,
							    &state->low);
	smc_state->levels[1].seqValue = rv770_get_seq_value(rdev,
							    &state->medium);
	smc_state->levels[2].seqValue = rv770_get_seq_value(rdev,
							    &state->high);

	rv770_populate_smc_sp(rdev, radeon_state, smc_state);

	return rv770_populate_smc_t(rdev, radeon_state, smc_state);

}
720*4882a593Smuzhiyun
/**
 * rv770_calculate_memory_refresh_rate - derive the MC arbiter refresh rate
 * @rdev: radeon device
 * @engine_clock: engine clock in 10 kHz units
 *
 * Reads the DRAM row count and refresh-rate fields from the memory
 * controller configuration registers and scales them against the engine
 * clock to produce the value programmed into MC_ARB_RFSH_RATE.
 */
u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
					u32 engine_clock)
{
	u32 rows_field = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
	u32 dram_rows = 1 << (rows_field + 10);	/* encoded as log2(rows) - 10 */
	u32 refresh_field = RREG32(MC_SEQ_MISC0) & 3;
	u32 dram_refresh_rate = 1 << (refresh_field + 3);

	return ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
}
737*4882a593Smuzhiyun
rv770_program_memory_timing_parameters(struct radeon_device * rdev,struct radeon_ps * radeon_state)738*4882a593Smuzhiyun static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
739*4882a593Smuzhiyun struct radeon_ps *radeon_state)
740*4882a593Smuzhiyun {
741*4882a593Smuzhiyun struct rv7xx_ps *state = rv770_get_ps(radeon_state);
742*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
743*4882a593Smuzhiyun u32 sqm_ratio;
744*4882a593Smuzhiyun u32 arb_refresh_rate;
745*4882a593Smuzhiyun u32 high_clock;
746*4882a593Smuzhiyun
747*4882a593Smuzhiyun if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
748*4882a593Smuzhiyun high_clock = state->high.sclk;
749*4882a593Smuzhiyun else
750*4882a593Smuzhiyun high_clock = (state->low.sclk * 0xFF / 0x40);
751*4882a593Smuzhiyun
752*4882a593Smuzhiyun radeon_atom_set_engine_dram_timings(rdev, high_clock,
753*4882a593Smuzhiyun state->high.mclk);
754*4882a593Smuzhiyun
755*4882a593Smuzhiyun sqm_ratio =
756*4882a593Smuzhiyun STATE0(64 * high_clock / pi->boot_sclk) |
757*4882a593Smuzhiyun STATE1(64 * high_clock / state->low.sclk) |
758*4882a593Smuzhiyun STATE2(64 * high_clock / state->medium.sclk) |
759*4882a593Smuzhiyun STATE3(64 * high_clock / state->high.sclk);
760*4882a593Smuzhiyun WREG32(MC_ARB_SQM_RATIO, sqm_ratio);
761*4882a593Smuzhiyun
762*4882a593Smuzhiyun arb_refresh_rate =
763*4882a593Smuzhiyun POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
764*4882a593Smuzhiyun POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
765*4882a593Smuzhiyun POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
766*4882a593Smuzhiyun POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
767*4882a593Smuzhiyun WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
768*4882a593Smuzhiyun }
769*4882a593Smuzhiyun
/**
 * rv770_enable_backbias - enable or disable the backbias pad
 * @rdev: radeon device
 * @enable: true to enable the backbias pad
 *
 * On disable, both the pad enable and the backbias value bits are cleared.
 */
void rv770_enable_backbias(struct radeon_device *rdev,
			   bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
}
778*4882a593Smuzhiyun
/**
 * rv770_enable_spread_spectrum - toggle engine/memory clock spread spectrum
 * @rdev: radeon device
 * @enable: true to enable spread spectrum where configured
 *
 * On enable, only the features configured in the power info (sclk_ss,
 * mclk_ss) are turned on; memory spread spectrum is handled by a dedicated
 * helper on RV740 only.  On disable, all spread-spectrum enables are
 * cleared unconditionally.
 */
static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss) {
			if (rdev->family == CHIP_RV740)
				rv740_enable_mclk_spread_spectrum(rdev, true);
		}
	} else {
		/* disable path clears everything, regardless of pi configuration */
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);

		WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		if (rdev->family == CHIP_RV740)
			rv740_enable_mclk_spread_spectrum(rdev, false);
	}
}
803*4882a593Smuzhiyun
rv770_program_mpll_timing_parameters(struct radeon_device * rdev)804*4882a593Smuzhiyun static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
805*4882a593Smuzhiyun {
806*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
807*4882a593Smuzhiyun
808*4882a593Smuzhiyun if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
809*4882a593Smuzhiyun WREG32(MPLL_TIME,
810*4882a593Smuzhiyun (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
811*4882a593Smuzhiyun MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
812*4882a593Smuzhiyun }
813*4882a593Smuzhiyun }
814*4882a593Smuzhiyun
rv770_setup_bsp(struct radeon_device * rdev)815*4882a593Smuzhiyun void rv770_setup_bsp(struct radeon_device *rdev)
816*4882a593Smuzhiyun {
817*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
818*4882a593Smuzhiyun u32 xclk = radeon_get_xclk(rdev);
819*4882a593Smuzhiyun
820*4882a593Smuzhiyun r600_calculate_u_and_p(pi->asi,
821*4882a593Smuzhiyun xclk,
822*4882a593Smuzhiyun 16,
823*4882a593Smuzhiyun &pi->bsp,
824*4882a593Smuzhiyun &pi->bsu);
825*4882a593Smuzhiyun
826*4882a593Smuzhiyun r600_calculate_u_and_p(pi->pasi,
827*4882a593Smuzhiyun xclk,
828*4882a593Smuzhiyun 16,
829*4882a593Smuzhiyun &pi->pbsp,
830*4882a593Smuzhiyun &pi->pbsu);
831*4882a593Smuzhiyun
832*4882a593Smuzhiyun pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
833*4882a593Smuzhiyun pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
834*4882a593Smuzhiyun
835*4882a593Smuzhiyun WREG32(CG_BSP, pi->dsp);
836*4882a593Smuzhiyun
837*4882a593Smuzhiyun }
838*4882a593Smuzhiyun
/**
 * rv770_program_git - program the GUI idle check sample time
 * @rdev: radeon device
 */
void rv770_program_git(struct radeon_device *rdev)
{
	WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
}
843*4882a593Smuzhiyun
rv770_program_tp(struct radeon_device * rdev)844*4882a593Smuzhiyun void rv770_program_tp(struct radeon_device *rdev)
845*4882a593Smuzhiyun {
846*4882a593Smuzhiyun int i;
847*4882a593Smuzhiyun enum r600_td td = R600_TD_DFLT;
848*4882a593Smuzhiyun
849*4882a593Smuzhiyun for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
850*4882a593Smuzhiyun WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
851*4882a593Smuzhiyun
852*4882a593Smuzhiyun if (td == R600_TD_AUTO)
853*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
854*4882a593Smuzhiyun else
855*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
856*4882a593Smuzhiyun if (td == R600_TD_UP)
857*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
858*4882a593Smuzhiyun if (td == R600_TD_DOWN)
859*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
860*4882a593Smuzhiyun }
861*4882a593Smuzhiyun
/**
 * rv770_program_tpp - program the default thermal protection period
 * @rdev: radeon device
 */
void rv770_program_tpp(struct radeon_device *rdev)
{
	WREG32(CG_TPC, R600_TPC_DFLT);
}
866*4882a593Smuzhiyun
/**
 * rv770_program_sstp - program the default static state sampling period
 * @rdev: radeon device
 */
void rv770_program_sstp(struct radeon_device *rdev)
{
	WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
}
871*4882a593Smuzhiyun
/**
 * rv770_program_engine_speed_parameters - sync SPLL dividers to the engine clock
 * @rdev: radeon device
 */
void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
{
	WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
}
876*4882a593Smuzhiyun
rv770_enable_display_gap(struct radeon_device * rdev)877*4882a593Smuzhiyun static void rv770_enable_display_gap(struct radeon_device *rdev)
878*4882a593Smuzhiyun {
879*4882a593Smuzhiyun u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
880*4882a593Smuzhiyun
881*4882a593Smuzhiyun tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
882*4882a593Smuzhiyun tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
883*4882a593Smuzhiyun DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
884*4882a593Smuzhiyun WREG32(CG_DISPLAY_GAP_CNTL, tmp);
885*4882a593Smuzhiyun }
886*4882a593Smuzhiyun
/**
 * rv770_program_vc - program the voltage transition control value
 * @rdev: radeon device
 *
 * Writes the cached vrc value; rv770_clear_vc() is the inverse.
 */
void rv770_program_vc(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	WREG32(CG_FTV, pi->vrc);
}
893*4882a593Smuzhiyun
/**
 * rv770_clear_vc - clear the voltage transition control register
 * @rdev: radeon device
 */
void rv770_clear_vc(struct radeon_device *rdev)
{
	WREG32(CG_FTV, 0);
}
898*4882a593Smuzhiyun
rv770_upload_firmware(struct radeon_device * rdev)899*4882a593Smuzhiyun int rv770_upload_firmware(struct radeon_device *rdev)
900*4882a593Smuzhiyun {
901*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
902*4882a593Smuzhiyun int ret;
903*4882a593Smuzhiyun
904*4882a593Smuzhiyun rv770_reset_smc(rdev);
905*4882a593Smuzhiyun rv770_stop_smc_clock(rdev);
906*4882a593Smuzhiyun
907*4882a593Smuzhiyun ret = rv770_load_smc_ucode(rdev, pi->sram_end);
908*4882a593Smuzhiyun if (ret)
909*4882a593Smuzhiyun return ret;
910*4882a593Smuzhiyun
911*4882a593Smuzhiyun return 0;
912*4882a593Smuzhiyun }
913*4882a593Smuzhiyun
/**
 * rv770_populate_smc_acpi_state - build the SMC state used in the ACPI state
 * @rdev: radeon device
 * @table: SMC state table whose ACPIState member is filled
 *
 * Starts from a copy of the initial state and overrides the clock register
 * images so that the engine PLL is bypassed/reset/slept and the memory
 * DLLs/MPLL are held in reset - the lowest-power configuration the SMC can
 * switch to.  Voltage comes from the ACPI vddc if the table provided one,
 * otherwise from the minimum table entry.  Always returns 0.
 */
static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	/* start from the cached boot-time clock register images */
	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 spll_func_cntl =
		pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 =
		pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 =
		pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 mclk_pwrmgt_cntl;
	u32 dll_cntl;

	table->ACPIState = table->initialState;

	/* the ACPI state is never a DC (battery) state */
	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		rv770_populate_vddc_value(rdev, pi->acpi_vddc,
					  &table->ACPIState.levels[0].vddc);
		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else
			table->ACPIState.levels[0].gen2PCIE = 0;
		if (pi->acpi_pcie_gen2)
			table->ACPIState.levels[0].gen2XSP = 1;
		else
			table->ACPIState.levels[0].gen2XSP = 0;
	} else {
		/* no ACPI voltage provided: fall back to the table minimum */
		rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
					  &table->ACPIState.levels[0].vddc);
		table->ACPIState.levels[0].gen2PCIE = 0;
	}


	/* hold the MPLL address/data paths in reset */
	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	/* reset every memory DLL */
	mclk_pwrmgt_cntl = (MRDCKA0_RESET |
			    MRDCKA1_RESET |
			    MRDCKB0_RESET |
			    MRDCKB1_RESET |
			    MRDCKC0_RESET |
			    MRDCKC1_RESET |
			    MRDCKD0_RESET |
			    MRDCKD1_RESET);

	dll_cntl = 0xff000000;

	/* engine PLL: reset, sleep and bypass */
	spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	/* SMC register images are big-endian */
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);

	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	/* all three levels of the ACPI state are identical */
	table->ACPIState.levels[1] = table->ACPIState.levels[0];
	table->ACPIState.levels[2] = table->ACPIState.levels[0];

	return 0;
}
1004*4882a593Smuzhiyun
/**
 * rv770_populate_initial_mvdd_value - pick the boot-time MVDD level
 * @rdev: radeon device
 * @voltage: SMC voltage value to fill
 *
 * Compares the boot SMIO state against the low-MVDD SMIO pattern (under the
 * MVDD mask) to decide whether the board came up at the low or high MVDD
 * level.  Always returns 0.
 */
int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
				      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool boot_is_low =
		(pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
		(pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low);

	voltage->index = boot_is_low ? MVDD_LOW_INDEX : MVDD_HIGH_INDEX;
	voltage->value = cpu_to_be16(boot_is_low ? MVDD_LOW_VALUE : MVDD_HIGH_VALUE);

	return 0;
}
1021*4882a593Smuzhiyun
/**
 * rv770_populate_smc_initial_state - build the SMC state matching boot clocks
 * @rdev: radeon device
 * @radeon_state: the boot power state
 * @table: SMC state table whose initialState member is filled
 *
 * Copies the cached boot-time clock register images and the boot state's
 * low-level clocks/voltages into the initial SMC state, so the first SMC
 * transition starts from what the hardware is actually running.  Always
 * returns 0.
 */
static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_STATETABLE *table)
{
	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 a_t;

	/* SMC register images are big-endian */
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);

	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);

	table->initialState.levels[0].mclk.mclk770.mclk_value =
		cpu_to_be32(initial_state->low.mclk);

	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);

	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->low.sclk);

	/* boot arbitration set; driver states use F1..F3 */
	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;

	table->initialState.levels[0].seqValue =
		rv770_get_seq_value(rdev, &initial_state->low);

	rv770_populate_vddc_value(rdev,
				  initial_state->low.vddc,
				  &table->initialState.levels[0].vddc);
	rv770_populate_initial_mvdd_value(rdev,
					  &table->initialState.levels[0].mvdd);

	a_t = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(a_t);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;
	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		table->initialState.levels[0].gen2XSP = 1;
	else
		table->initialState.levels[0].gen2XSP = 0;

	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
				table->initialState.levels[0].strobeMode =
					rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
			else
				table->initialState.levels[0].strobeMode = 0;

			/* NOTE(review): >= here vs > in the per-level conversion -
			 * TODO confirm the asymmetry is intentional */
			if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
				table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				table->initialState.levels[0].mcFlags = 0;
		}
	}

	/* all three levels of the initial state are identical */
	table->initialState.levels[1] = table->initialState.levels[0];
	table->initialState.levels[2] = table->initialState.levels[0];

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	return 0;
}
1112*4882a593Smuzhiyun
/**
 * rv770_populate_smc_vddc_table - export the VDDC SMIO table to the SMC
 * @rdev: radeon device
 * @table: SMC state table to fill
 *
 * Copies the per-voltage SMIO patterns built by rv770_construct_vddc_table()
 * into the SMC table, records the VDDC SMIO mask, and finds the table index
 * of the maximum usable VDDC.  Always returns 0.
 */
static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		/* high_smio is stored as-is; low_smio is byte-swapped for the SMC */
		table->highSMIO[pi->vddc_table[i].vddc_index] =
			pi->vddc_table[i].high_smio;
		table->lowSMIO[pi->vddc_table[i].vddc_index] =
			cpu_to_be32(pi->vddc_table[i].low_smio);
	}

	table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
	table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
		cpu_to_be32(pi->vddc_mask_low);

	/* first entry whose vddc is >= max_vddc_in_table */
	for (i = 0;
	     ((i < pi->valid_vddc_entries) &&
	      (pi->max_vddc_in_table >
	       pi->vddc_table[i].vddc));
	     i++);

	/* NOTE(review): if every entry is below max_vddc_in_table, i ends up
	 * equal to valid_vddc_entries and this reads one slot past the valid
	 * entries (still within the fixed-size array) - verify upstream intent */
	table->maxVDDCIndexInPPTable =
		pi->vddc_table[i].vddc_index;

	return 0;
}
1141*4882a593Smuzhiyun
/**
 * rv770_populate_smc_mvdd_table - export the MVDD SMIO settings to the SMC
 * @rdev: radeon device
 * @table: SMC state table to fill
 *
 * No-op when MVDD control is not available.  Always returns 0.
 */
static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (!pi->mvdd_control)
		return 0;

	/* OR into lowSMIO so the VDDC patterns already there are preserved */
	table->lowSMIO[MVDD_HIGH_INDEX] |=
		cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
	table->lowSMIO[MVDD_LOW_INDEX] |=
		cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);

	table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
	table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
		cpu_to_be32(pi->mvdd_mask_low);

	return 0;
}
1160*4882a593Smuzhiyun
/**
 * rv770_init_smc_table - build and upload the full SMC state table
 * @rdev: radeon device
 * @radeon_boot_state: the boot power state
 *
 * Populates the voltage tables, thermal/platform capability flags, the
 * initial and ACPI states (using family-specific helpers where needed),
 * then copies the finished table into SMC SRAM.  Returns 0 on success or
 * a negative error code.
 */
static int rv770_init_smc_table(struct radeon_device *rdev,
				struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
	int ret;

	memset(table, 0, sizeof(RV770_SMC_STATETABLE));

	pi->boot_sclk = boot_state->low.sclk;

	rv770_populate_smc_vddc_table(rdev, table);
	rv770_populate_smc_mvdd_table(rdev, table);

	/* map the platform's thermal sensor type onto the SMC protection mode */
	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_RV770:
	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	case THERMAL_TYPE_EXTERNAL_GPIO:
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	/* AC/DC transition behaviour, only relevant with a hardware DC GPIO */
	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	/* RV730/RV710 have a different clock register layout */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
	else
		ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_smc_acpi_state(rdev, table);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_acpi_state(rdev, table);
	else
		ret = rv770_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	/* the driver state starts out identical to the initial state */
	table->driverState = table->initialState;

	return rv770_copy_bytes_to_smc(rdev,
				       pi->state_table_start,
				       (const u8 *)table,
				       sizeof(RV770_SMC_STATETABLE),
				       pi->sram_end);
}
1230*4882a593Smuzhiyun
/**
 * rv770_construct_vddc_table - build the VDDC voltage/SMIO lookup table
 * @rdev: radeon device
 *
 * Walks the voltage range reported by the atom tables in fixed steps,
 * records the GPIO (SMIO) pattern for each voltage, and assigns a
 * monotonically increasing index that only advances when the SMIO pattern
 * actually changes.  Returns 0 on success, -EINVAL if the range needs more
 * steps than the table can hold.
 */
static int rv770_construct_vddc_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 min, max, step;
	u32 steps = 0;
	u8 vddc_index = 0;
	u32 i;

	/* NOTE(review): return values of these atom queries are not checked,
	 * and step == 0 would divide by zero below - presumably the vbios
	 * always provides sane values; TODO confirm */
	radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
	radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
	radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);

	steps = (max - min) / step + 1;

	if (steps > MAX_NO_VREG_STEPS)
		return -EINVAL;

	for (i = 0; i < steps; i++) {
		u32 gpio_pins, gpio_mask;

		pi->vddc_table[i].vddc = (u16)(min + i * step);
		radeon_atom_get_voltage_gpio_settings(rdev,
						      pi->vddc_table[i].vddc,
						      SET_VOLTAGE_TYPE_ASIC_VDDC,
						      &gpio_pins, &gpio_mask);
		pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
		pi->vddc_table[i].high_smio = 0;
		/* the mask is identical for every entry; last write wins */
		pi->vddc_mask_low = gpio_mask;
		if (i > 0) {
			/* new index only when the SMIO pattern changed */
			if ((pi->vddc_table[i].low_smio !=
			     pi->vddc_table[i - 1].low_smio ) ||
			    (pi->vddc_table[i].high_smio !=
			     pi->vddc_table[i - 1].high_smio))
				vddc_index++;
		}
		pi->vddc_table[i].vddc_index = vddc_index;
	}

	pi->valid_vddc_entries = (u8)steps;

	return 0;
}
1273*4882a593Smuzhiyun
rv770_get_mclk_split_point(struct atom_memory_info * memory_info)1274*4882a593Smuzhiyun static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
1275*4882a593Smuzhiyun {
1276*4882a593Smuzhiyun if (memory_info->mem_type == MEM_TYPE_GDDR3)
1277*4882a593Smuzhiyun return 30000;
1278*4882a593Smuzhiyun
1279*4882a593Smuzhiyun return 0;
1280*4882a593Smuzhiyun }
1281*4882a593Smuzhiyun
rv770_get_mvdd_pin_configuration(struct radeon_device * rdev)1282*4882a593Smuzhiyun static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
1283*4882a593Smuzhiyun {
1284*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1285*4882a593Smuzhiyun u32 gpio_pins, gpio_mask;
1286*4882a593Smuzhiyun
1287*4882a593Smuzhiyun radeon_atom_get_voltage_gpio_settings(rdev,
1288*4882a593Smuzhiyun MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1289*4882a593Smuzhiyun &gpio_pins, &gpio_mask);
1290*4882a593Smuzhiyun pi->mvdd_mask_low = gpio_mask;
1291*4882a593Smuzhiyun pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
1292*4882a593Smuzhiyun gpio_pins & gpio_mask;
1293*4882a593Smuzhiyun
1294*4882a593Smuzhiyun radeon_atom_get_voltage_gpio_settings(rdev,
1295*4882a593Smuzhiyun MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
1296*4882a593Smuzhiyun &gpio_pins, &gpio_mask);
1297*4882a593Smuzhiyun pi->mvdd_low_smio[MVDD_LOW_INDEX] =
1298*4882a593Smuzhiyun gpio_pins & gpio_mask;
1299*4882a593Smuzhiyun
1300*4882a593Smuzhiyun return 0;
1301*4882a593Smuzhiyun }
1302*4882a593Smuzhiyun
rv770_get_memory_module_index(struct radeon_device * rdev)1303*4882a593Smuzhiyun u8 rv770_get_memory_module_index(struct radeon_device *rdev)
1304*4882a593Smuzhiyun {
1305*4882a593Smuzhiyun return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
1306*4882a593Smuzhiyun }
1307*4882a593Smuzhiyun
rv770_get_mvdd_configuration(struct radeon_device * rdev)1308*4882a593Smuzhiyun static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
1309*4882a593Smuzhiyun {
1310*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1311*4882a593Smuzhiyun u8 memory_module_index;
1312*4882a593Smuzhiyun struct atom_memory_info memory_info;
1313*4882a593Smuzhiyun
1314*4882a593Smuzhiyun memory_module_index = rv770_get_memory_module_index(rdev);
1315*4882a593Smuzhiyun
1316*4882a593Smuzhiyun if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
1317*4882a593Smuzhiyun pi->mvdd_control = false;
1318*4882a593Smuzhiyun return 0;
1319*4882a593Smuzhiyun }
1320*4882a593Smuzhiyun
1321*4882a593Smuzhiyun pi->mvdd_split_frequency =
1322*4882a593Smuzhiyun rv770_get_mclk_split_point(&memory_info);
1323*4882a593Smuzhiyun
1324*4882a593Smuzhiyun if (pi->mvdd_split_frequency == 0) {
1325*4882a593Smuzhiyun pi->mvdd_control = false;
1326*4882a593Smuzhiyun return 0;
1327*4882a593Smuzhiyun }
1328*4882a593Smuzhiyun
1329*4882a593Smuzhiyun return rv770_get_mvdd_pin_configuration(rdev);
1330*4882a593Smuzhiyun }
1331*4882a593Smuzhiyun
/* Toggle the VOLT_PWRMGT_EN bit in GENERAL_PWRMGT. */
void rv770_enable_voltage_control(struct radeon_device *rdev,
				  bool enable)
{
	WREG32_P(GENERAL_PWRMGT,
		 enable ? VOLT_PWRMGT_EN : 0,
		 ~VOLT_PWRMGT_EN);
}
1340*4882a593Smuzhiyun
rv770_program_display_gap(struct radeon_device * rdev)1341*4882a593Smuzhiyun static void rv770_program_display_gap(struct radeon_device *rdev)
1342*4882a593Smuzhiyun {
1343*4882a593Smuzhiyun u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1344*4882a593Smuzhiyun
1345*4882a593Smuzhiyun tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1346*4882a593Smuzhiyun if (rdev->pm.dpm.new_active_crtcs & 1) {
1347*4882a593Smuzhiyun tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1348*4882a593Smuzhiyun tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1349*4882a593Smuzhiyun } else if (rdev->pm.dpm.new_active_crtcs & 2) {
1350*4882a593Smuzhiyun tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1351*4882a593Smuzhiyun tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
1352*4882a593Smuzhiyun } else {
1353*4882a593Smuzhiyun tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1354*4882a593Smuzhiyun tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
1355*4882a593Smuzhiyun }
1356*4882a593Smuzhiyun WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1357*4882a593Smuzhiyun }
1358*4882a593Smuzhiyun
/* Enable/disable dynamic PCIE gen2 in the BIF and in GENERAL_PWRMGT. */
static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					   bool enable)
{
	rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);

	WREG32_P(GENERAL_PWRMGT,
		 enable ? ENABLE_GEN2PCIE : 0,
		 ~ENABLE_GEN2PCIE);
}
1369*4882a593Smuzhiyun
r7xx_program_memory_timing_parameters(struct radeon_device * rdev,struct radeon_ps * radeon_new_state)1370*4882a593Smuzhiyun static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
1371*4882a593Smuzhiyun struct radeon_ps *radeon_new_state)
1372*4882a593Smuzhiyun {
1373*4882a593Smuzhiyun if ((rdev->family == CHIP_RV730) ||
1374*4882a593Smuzhiyun (rdev->family == CHIP_RV710) ||
1375*4882a593Smuzhiyun (rdev->family == CHIP_RV740))
1376*4882a593Smuzhiyun rv730_program_memory_timing_parameters(rdev, radeon_new_state);
1377*4882a593Smuzhiyun else
1378*4882a593Smuzhiyun rv770_program_memory_timing_parameters(rdev, radeon_new_state);
1379*4882a593Smuzhiyun }
1380*4882a593Smuzhiyun
rv770_upload_sw_state(struct radeon_device * rdev,struct radeon_ps * radeon_new_state)1381*4882a593Smuzhiyun static int rv770_upload_sw_state(struct radeon_device *rdev,
1382*4882a593Smuzhiyun struct radeon_ps *radeon_new_state)
1383*4882a593Smuzhiyun {
1384*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1385*4882a593Smuzhiyun u16 address = pi->state_table_start +
1386*4882a593Smuzhiyun offsetof(RV770_SMC_STATETABLE, driverState);
1387*4882a593Smuzhiyun RV770_SMC_SWSTATE state = { 0 };
1388*4882a593Smuzhiyun int ret;
1389*4882a593Smuzhiyun
1390*4882a593Smuzhiyun ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
1391*4882a593Smuzhiyun if (ret)
1392*4882a593Smuzhiyun return ret;
1393*4882a593Smuzhiyun
1394*4882a593Smuzhiyun return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
1395*4882a593Smuzhiyun sizeof(RV770_SMC_SWSTATE),
1396*4882a593Smuzhiyun pi->sram_end);
1397*4882a593Smuzhiyun }
1398*4882a593Smuzhiyun
rv770_halt_smc(struct radeon_device * rdev)1399*4882a593Smuzhiyun int rv770_halt_smc(struct radeon_device *rdev)
1400*4882a593Smuzhiyun {
1401*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
1402*4882a593Smuzhiyun return -EINVAL;
1403*4882a593Smuzhiyun
1404*4882a593Smuzhiyun if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
1405*4882a593Smuzhiyun return -EINVAL;
1406*4882a593Smuzhiyun
1407*4882a593Smuzhiyun return 0;
1408*4882a593Smuzhiyun }
1409*4882a593Smuzhiyun
rv770_resume_smc(struct radeon_device * rdev)1410*4882a593Smuzhiyun int rv770_resume_smc(struct radeon_device *rdev)
1411*4882a593Smuzhiyun {
1412*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
1413*4882a593Smuzhiyun return -EINVAL;
1414*4882a593Smuzhiyun return 0;
1415*4882a593Smuzhiyun }
1416*4882a593Smuzhiyun
rv770_set_sw_state(struct radeon_device * rdev)1417*4882a593Smuzhiyun int rv770_set_sw_state(struct radeon_device *rdev)
1418*4882a593Smuzhiyun {
1419*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
1420*4882a593Smuzhiyun DRM_DEBUG("rv770_set_sw_state failed\n");
1421*4882a593Smuzhiyun return 0;
1422*4882a593Smuzhiyun }
1423*4882a593Smuzhiyun
rv770_set_boot_state(struct radeon_device * rdev)1424*4882a593Smuzhiyun int rv770_set_boot_state(struct radeon_device *rdev)
1425*4882a593Smuzhiyun {
1426*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
1427*4882a593Smuzhiyun return -EINVAL;
1428*4882a593Smuzhiyun return 0;
1429*4882a593Smuzhiyun }
1430*4882a593Smuzhiyun
rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)1431*4882a593Smuzhiyun void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
1432*4882a593Smuzhiyun struct radeon_ps *new_ps,
1433*4882a593Smuzhiyun struct radeon_ps *old_ps)
1434*4882a593Smuzhiyun {
1435*4882a593Smuzhiyun struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1436*4882a593Smuzhiyun struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1437*4882a593Smuzhiyun
1438*4882a593Smuzhiyun if ((new_ps->vclk == old_ps->vclk) &&
1439*4882a593Smuzhiyun (new_ps->dclk == old_ps->dclk))
1440*4882a593Smuzhiyun return;
1441*4882a593Smuzhiyun
1442*4882a593Smuzhiyun if (new_state->high.sclk >= current_state->high.sclk)
1443*4882a593Smuzhiyun return;
1444*4882a593Smuzhiyun
1445*4882a593Smuzhiyun radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1446*4882a593Smuzhiyun }
1447*4882a593Smuzhiyun
rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)1448*4882a593Smuzhiyun void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
1449*4882a593Smuzhiyun struct radeon_ps *new_ps,
1450*4882a593Smuzhiyun struct radeon_ps *old_ps)
1451*4882a593Smuzhiyun {
1452*4882a593Smuzhiyun struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
1453*4882a593Smuzhiyun struct rv7xx_ps *current_state = rv770_get_ps(old_ps);
1454*4882a593Smuzhiyun
1455*4882a593Smuzhiyun if ((new_ps->vclk == old_ps->vclk) &&
1456*4882a593Smuzhiyun (new_ps->dclk == old_ps->dclk))
1457*4882a593Smuzhiyun return;
1458*4882a593Smuzhiyun
1459*4882a593Smuzhiyun if (new_state->high.sclk < current_state->high.sclk)
1460*4882a593Smuzhiyun return;
1461*4882a593Smuzhiyun
1462*4882a593Smuzhiyun radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
1463*4882a593Smuzhiyun }
1464*4882a593Smuzhiyun
rv770_restrict_performance_levels_before_switch(struct radeon_device * rdev)1465*4882a593Smuzhiyun int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1466*4882a593Smuzhiyun {
1467*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
1468*4882a593Smuzhiyun return -EINVAL;
1469*4882a593Smuzhiyun
1470*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
1471*4882a593Smuzhiyun return -EINVAL;
1472*4882a593Smuzhiyun
1473*4882a593Smuzhiyun return 0;
1474*4882a593Smuzhiyun }
1475*4882a593Smuzhiyun
rv770_dpm_force_performance_level(struct radeon_device * rdev,enum radeon_dpm_forced_level level)1476*4882a593Smuzhiyun int rv770_dpm_force_performance_level(struct radeon_device *rdev,
1477*4882a593Smuzhiyun enum radeon_dpm_forced_level level)
1478*4882a593Smuzhiyun {
1479*4882a593Smuzhiyun PPSMC_Msg msg;
1480*4882a593Smuzhiyun
1481*4882a593Smuzhiyun if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1482*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
1483*4882a593Smuzhiyun return -EINVAL;
1484*4882a593Smuzhiyun msg = PPSMC_MSG_ForceHigh;
1485*4882a593Smuzhiyun } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1486*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1487*4882a593Smuzhiyun return -EINVAL;
1488*4882a593Smuzhiyun msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
1489*4882a593Smuzhiyun } else {
1490*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1491*4882a593Smuzhiyun return -EINVAL;
1492*4882a593Smuzhiyun msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
1493*4882a593Smuzhiyun }
1494*4882a593Smuzhiyun
1495*4882a593Smuzhiyun if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
1496*4882a593Smuzhiyun return -EINVAL;
1497*4882a593Smuzhiyun
1498*4882a593Smuzhiyun rdev->pm.dpm.forced_level = level;
1499*4882a593Smuzhiyun
1500*4882a593Smuzhiyun return 0;
1501*4882a593Smuzhiyun }
1502*4882a593Smuzhiyun
/* Bring the SMC out of reset and start its clock. */
void r7xx_start_smc(struct radeon_device *rdev)
{
	rv770_start_smc(rdev);
	rv770_start_smc_clock(rdev);
}
1508*4882a593Smuzhiyun
1509*4882a593Smuzhiyun
/* Put the SMC back into reset and stop its clock. */
void r7xx_stop_smc(struct radeon_device *rdev)
{
	rv770_reset_smc(rdev);
	rv770_stop_smc_clock(rdev);
}
1515*4882a593Smuzhiyun
/*
 * Snapshot the current SPLL (engine clock) and MPLL (memory clock)
 * control registers into the rv770 power info so their settings can
 * be reused later.
 */
static void rv770_read_clock_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->clk_regs.rv770.cg_spll_func_cntl =
		RREG32(CG_SPLL_FUNC_CNTL);
	pi->clk_regs.rv770.cg_spll_func_cntl_2 =
		RREG32(CG_SPLL_FUNC_CNTL_2);
	pi->clk_regs.rv770.cg_spll_func_cntl_3 =
		RREG32(CG_SPLL_FUNC_CNTL_3);
	pi->clk_regs.rv770.cg_spll_spread_spectrum =
		RREG32(CG_SPLL_SPREAD_SPECTRUM);
	pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
		RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	pi->clk_regs.rv770.mpll_ad_func_cntl =
		RREG32(MPLL_AD_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
		RREG32(MPLL_AD_FUNC_CNTL_2);
	pi->clk_regs.rv770.mpll_dq_func_cntl =
		RREG32(MPLL_DQ_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
		RREG32(MPLL_DQ_FUNC_CNTL_2);
	pi->clk_regs.rv770.mclk_pwrmgt_cntl =
		RREG32(MCLK_PWRMGT_CNTL);
	pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
}
1542*4882a593Smuzhiyun
r7xx_read_clock_registers(struct radeon_device * rdev)1543*4882a593Smuzhiyun static void r7xx_read_clock_registers(struct radeon_device *rdev)
1544*4882a593Smuzhiyun {
1545*4882a593Smuzhiyun if (rdev->family == CHIP_RV740)
1546*4882a593Smuzhiyun rv740_read_clock_registers(rdev);
1547*4882a593Smuzhiyun else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1548*4882a593Smuzhiyun rv730_read_clock_registers(rdev);
1549*4882a593Smuzhiyun else
1550*4882a593Smuzhiyun rv770_read_clock_registers(rdev);
1551*4882a593Smuzhiyun }
1552*4882a593Smuzhiyun
/* Cache the S0 VID SMIO control register for later restoration. */
void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->s0_vid_lower_smio_cntl =
		RREG32(S0_VID_LOWER_SMIO_CNTL);
}
1560*4882a593Smuzhiyun
rv770_reset_smio_status(struct radeon_device * rdev)1561*4882a593Smuzhiyun void rv770_reset_smio_status(struct radeon_device *rdev)
1562*4882a593Smuzhiyun {
1563*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1564*4882a593Smuzhiyun u32 sw_smio_index, vid_smio_cntl;
1565*4882a593Smuzhiyun
1566*4882a593Smuzhiyun sw_smio_index =
1567*4882a593Smuzhiyun (RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
1568*4882a593Smuzhiyun switch (sw_smio_index) {
1569*4882a593Smuzhiyun case 3:
1570*4882a593Smuzhiyun vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
1571*4882a593Smuzhiyun break;
1572*4882a593Smuzhiyun case 2:
1573*4882a593Smuzhiyun vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
1574*4882a593Smuzhiyun break;
1575*4882a593Smuzhiyun case 1:
1576*4882a593Smuzhiyun vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
1577*4882a593Smuzhiyun break;
1578*4882a593Smuzhiyun case 0:
1579*4882a593Smuzhiyun return;
1580*4882a593Smuzhiyun default:
1581*4882a593Smuzhiyun vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
1582*4882a593Smuzhiyun break;
1583*4882a593Smuzhiyun }
1584*4882a593Smuzhiyun
1585*4882a593Smuzhiyun WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
1586*4882a593Smuzhiyun WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
1587*4882a593Smuzhiyun }
1588*4882a593Smuzhiyun
rv770_get_memory_type(struct radeon_device * rdev)1589*4882a593Smuzhiyun void rv770_get_memory_type(struct radeon_device *rdev)
1590*4882a593Smuzhiyun {
1591*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1592*4882a593Smuzhiyun u32 tmp;
1593*4882a593Smuzhiyun
1594*4882a593Smuzhiyun tmp = RREG32(MC_SEQ_MISC0);
1595*4882a593Smuzhiyun
1596*4882a593Smuzhiyun if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
1597*4882a593Smuzhiyun MC_SEQ_MISC0_GDDR5_VALUE)
1598*4882a593Smuzhiyun pi->mem_gddr5 = true;
1599*4882a593Smuzhiyun else
1600*4882a593Smuzhiyun pi->mem_gddr5 = false;
1601*4882a593Smuzhiyun
1602*4882a593Smuzhiyun }
1603*4882a593Smuzhiyun
rv770_get_pcie_gen2_status(struct radeon_device * rdev)1604*4882a593Smuzhiyun void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
1605*4882a593Smuzhiyun {
1606*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1607*4882a593Smuzhiyun u32 tmp;
1608*4882a593Smuzhiyun
1609*4882a593Smuzhiyun tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
1610*4882a593Smuzhiyun
1611*4882a593Smuzhiyun if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
1612*4882a593Smuzhiyun (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
1613*4882a593Smuzhiyun pi->pcie_gen2 = true;
1614*4882a593Smuzhiyun else
1615*4882a593Smuzhiyun pi->pcie_gen2 = false;
1616*4882a593Smuzhiyun
1617*4882a593Smuzhiyun if (pi->pcie_gen2) {
1618*4882a593Smuzhiyun if (tmp & LC_CURRENT_DATA_RATE)
1619*4882a593Smuzhiyun pi->boot_in_gen2 = true;
1620*4882a593Smuzhiyun else
1621*4882a593Smuzhiyun pi->boot_in_gen2 = false;
1622*4882a593Smuzhiyun } else
1623*4882a593Smuzhiyun pi->boot_in_gen2 = false;
1624*4882a593Smuzhiyun }
1625*4882a593Smuzhiyun
1626*4882a593Smuzhiyun #if 0
1627*4882a593Smuzhiyun static int rv770_enter_ulp_state(struct radeon_device *rdev)
1628*4882a593Smuzhiyun {
1629*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1630*4882a593Smuzhiyun
1631*4882a593Smuzhiyun if (pi->gfx_clock_gating) {
1632*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1633*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1634*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1635*4882a593Smuzhiyun RREG32(GB_TILING_CONFIG);
1636*4882a593Smuzhiyun }
1637*4882a593Smuzhiyun
1638*4882a593Smuzhiyun WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1639*4882a593Smuzhiyun ~HOST_SMC_MSG_MASK);
1640*4882a593Smuzhiyun
1641*4882a593Smuzhiyun udelay(7000);
1642*4882a593Smuzhiyun
1643*4882a593Smuzhiyun return 0;
1644*4882a593Smuzhiyun }
1645*4882a593Smuzhiyun
1646*4882a593Smuzhiyun static int rv770_exit_ulp_state(struct radeon_device *rdev)
1647*4882a593Smuzhiyun {
1648*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1649*4882a593Smuzhiyun int i;
1650*4882a593Smuzhiyun
1651*4882a593Smuzhiyun WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
1652*4882a593Smuzhiyun ~HOST_SMC_MSG_MASK);
1653*4882a593Smuzhiyun
1654*4882a593Smuzhiyun udelay(7000);
1655*4882a593Smuzhiyun
1656*4882a593Smuzhiyun for (i = 0; i < rdev->usec_timeout; i++) {
1657*4882a593Smuzhiyun if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
1658*4882a593Smuzhiyun break;
1659*4882a593Smuzhiyun udelay(1000);
1660*4882a593Smuzhiyun }
1661*4882a593Smuzhiyun
1662*4882a593Smuzhiyun if (pi->gfx_clock_gating)
1663*4882a593Smuzhiyun WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
1664*4882a593Smuzhiyun
1665*4882a593Smuzhiyun return 0;
1666*4882a593Smuzhiyun }
1667*4882a593Smuzhiyun #endif
1668*4882a593Smuzhiyun
rv770_get_mclk_odt_threshold(struct radeon_device * rdev)1669*4882a593Smuzhiyun static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
1670*4882a593Smuzhiyun {
1671*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1672*4882a593Smuzhiyun u8 memory_module_index;
1673*4882a593Smuzhiyun struct atom_memory_info memory_info;
1674*4882a593Smuzhiyun
1675*4882a593Smuzhiyun pi->mclk_odt_threshold = 0;
1676*4882a593Smuzhiyun
1677*4882a593Smuzhiyun if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
1678*4882a593Smuzhiyun memory_module_index = rv770_get_memory_module_index(rdev);
1679*4882a593Smuzhiyun
1680*4882a593Smuzhiyun if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
1681*4882a593Smuzhiyun return;
1682*4882a593Smuzhiyun
1683*4882a593Smuzhiyun if (memory_info.mem_type == MEM_TYPE_DDR2 ||
1684*4882a593Smuzhiyun memory_info.mem_type == MEM_TYPE_DDR3)
1685*4882a593Smuzhiyun pi->mclk_odt_threshold = 30000;
1686*4882a593Smuzhiyun }
1687*4882a593Smuzhiyun }
1688*4882a593Smuzhiyun
rv770_get_max_vddc(struct radeon_device * rdev)1689*4882a593Smuzhiyun void rv770_get_max_vddc(struct radeon_device *rdev)
1690*4882a593Smuzhiyun {
1691*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1692*4882a593Smuzhiyun u16 vddc;
1693*4882a593Smuzhiyun
1694*4882a593Smuzhiyun if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
1695*4882a593Smuzhiyun pi->max_vddc = 0;
1696*4882a593Smuzhiyun else
1697*4882a593Smuzhiyun pi->max_vddc = vddc;
1698*4882a593Smuzhiyun }
1699*4882a593Smuzhiyun
/*
 * Program the SMC soft registers that control how long the firmware
 * waits for voltage/backbias changes, ACPI transitions, and mclk
 * changes.  Times are converted to reference-clock ticks before
 * being written.
 */
void rv770_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time;
	u32 acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
	u32 reference_clock;

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	/* fall back to a default when the vbios provides no value */
	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	/* NOTE(review): the /1600 scale presumably converts the time
	 * values (us?) and reference clock (10 kHz units?) into SMC
	 * ticks — TODO confirm against SMC firmware docs */
	vddc_dly = (voltage_response_time * reference_clock) / 1600;
	bb_dly = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly = (vbi_time_out * reference_clock) / 1600;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
#if 0
	/* XXX look up hw revision */
	if (WEKIVA_A21)
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_baby_step_timer,
					      0x10);
#endif
}
1742*4882a593Smuzhiyun
rv770_program_dcodt_before_state_switch(struct radeon_device * rdev,struct radeon_ps * radeon_new_state,struct radeon_ps * radeon_current_state)1743*4882a593Smuzhiyun static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
1744*4882a593Smuzhiyun struct radeon_ps *radeon_new_state,
1745*4882a593Smuzhiyun struct radeon_ps *radeon_current_state)
1746*4882a593Smuzhiyun {
1747*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1748*4882a593Smuzhiyun struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1749*4882a593Smuzhiyun struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1750*4882a593Smuzhiyun bool current_use_dc = false;
1751*4882a593Smuzhiyun bool new_use_dc = false;
1752*4882a593Smuzhiyun
1753*4882a593Smuzhiyun if (pi->mclk_odt_threshold == 0)
1754*4882a593Smuzhiyun return;
1755*4882a593Smuzhiyun
1756*4882a593Smuzhiyun if (current_state->high.mclk <= pi->mclk_odt_threshold)
1757*4882a593Smuzhiyun current_use_dc = true;
1758*4882a593Smuzhiyun
1759*4882a593Smuzhiyun if (new_state->high.mclk <= pi->mclk_odt_threshold)
1760*4882a593Smuzhiyun new_use_dc = true;
1761*4882a593Smuzhiyun
1762*4882a593Smuzhiyun if (current_use_dc == new_use_dc)
1763*4882a593Smuzhiyun return;
1764*4882a593Smuzhiyun
1765*4882a593Smuzhiyun if (!current_use_dc && new_use_dc)
1766*4882a593Smuzhiyun return;
1767*4882a593Smuzhiyun
1768*4882a593Smuzhiyun if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1769*4882a593Smuzhiyun rv730_program_dcodt(rdev, new_use_dc);
1770*4882a593Smuzhiyun }
1771*4882a593Smuzhiyun
rv770_program_dcodt_after_state_switch(struct radeon_device * rdev,struct radeon_ps * radeon_new_state,struct radeon_ps * radeon_current_state)1772*4882a593Smuzhiyun static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
1773*4882a593Smuzhiyun struct radeon_ps *radeon_new_state,
1774*4882a593Smuzhiyun struct radeon_ps *radeon_current_state)
1775*4882a593Smuzhiyun {
1776*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1777*4882a593Smuzhiyun struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
1778*4882a593Smuzhiyun struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
1779*4882a593Smuzhiyun bool current_use_dc = false;
1780*4882a593Smuzhiyun bool new_use_dc = false;
1781*4882a593Smuzhiyun
1782*4882a593Smuzhiyun if (pi->mclk_odt_threshold == 0)
1783*4882a593Smuzhiyun return;
1784*4882a593Smuzhiyun
1785*4882a593Smuzhiyun if (current_state->high.mclk <= pi->mclk_odt_threshold)
1786*4882a593Smuzhiyun current_use_dc = true;
1787*4882a593Smuzhiyun
1788*4882a593Smuzhiyun if (new_state->high.mclk <= pi->mclk_odt_threshold)
1789*4882a593Smuzhiyun new_use_dc = true;
1790*4882a593Smuzhiyun
1791*4882a593Smuzhiyun if (current_use_dc == new_use_dc)
1792*4882a593Smuzhiyun return;
1793*4882a593Smuzhiyun
1794*4882a593Smuzhiyun if (current_use_dc && !new_use_dc)
1795*4882a593Smuzhiyun return;
1796*4882a593Smuzhiyun
1797*4882a593Smuzhiyun if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1798*4882a593Smuzhiyun rv730_program_dcodt(rdev, new_use_dc);
1799*4882a593Smuzhiyun }
1800*4882a593Smuzhiyun
rv770_retrieve_odt_values(struct radeon_device * rdev)1801*4882a593Smuzhiyun static void rv770_retrieve_odt_values(struct radeon_device *rdev)
1802*4882a593Smuzhiyun {
1803*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1804*4882a593Smuzhiyun
1805*4882a593Smuzhiyun if (pi->mclk_odt_threshold == 0)
1806*4882a593Smuzhiyun return;
1807*4882a593Smuzhiyun
1808*4882a593Smuzhiyun if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
1809*4882a593Smuzhiyun rv730_get_odt_values(rdev);
1810*4882a593Smuzhiyun }
1811*4882a593Smuzhiyun
/*
 * Configure which throttle sources (internal thermal sensor and/or
 * external sensor) trigger DPM events, and enable or disable thermal
 * protection accordingly.
 *
 * Fix: dpm_event_src was left uninitialized on the sources == 0 /
 * default path; it is unused there today (guarded by
 * want_thermal_protection), but initializing it silences the
 * compiler warning and keeps the function safe if the guard ever
 * changes.
 */
static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool want_thermal_protection;
	enum radeon_dpm_event_src dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;

	switch (sources) {
	case 0:
	default:
		want_thermal_protection = false;
		break;
	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
		break;

	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
		break;

	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
		break;
	}

	if (want_thermal_protection) {
		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
		/* only clear the disable bit when the platform wants protection */
		if (pi->thermal_protection)
			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	} else {
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
	}
}
1848*4882a593Smuzhiyun
/*
 * Add or remove an auto-throttle source from the active set and
 * reprogram the DPM event sources if the set changed.
 */
void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
				       enum radeon_dpm_auto_throttle_src source,
				       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 bit = 1 << source;
	u32 updated = enable ? (pi->active_auto_throttle_sources | bit) :
			       (pi->active_auto_throttle_sources & ~bit);

	/* only touch the hardware when the source set actually changes */
	if (updated != pi->active_auto_throttle_sources) {
		pi->active_auto_throttle_sources = updated;
		rv770_set_dpm_event_sources(rdev, updated);
	}
}
1867*4882a593Smuzhiyun
rv770_set_thermal_temperature_range(struct radeon_device * rdev,int min_temp,int max_temp)1868*4882a593Smuzhiyun static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
1869*4882a593Smuzhiyun int min_temp, int max_temp)
1870*4882a593Smuzhiyun {
1871*4882a593Smuzhiyun int low_temp = 0 * 1000;
1872*4882a593Smuzhiyun int high_temp = 255 * 1000;
1873*4882a593Smuzhiyun
1874*4882a593Smuzhiyun if (low_temp < min_temp)
1875*4882a593Smuzhiyun low_temp = min_temp;
1876*4882a593Smuzhiyun if (high_temp > max_temp)
1877*4882a593Smuzhiyun high_temp = max_temp;
1878*4882a593Smuzhiyun if (high_temp < low_temp) {
1879*4882a593Smuzhiyun DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
1880*4882a593Smuzhiyun return -EINVAL;
1881*4882a593Smuzhiyun }
1882*4882a593Smuzhiyun
1883*4882a593Smuzhiyun WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
1884*4882a593Smuzhiyun WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
1885*4882a593Smuzhiyun WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
1886*4882a593Smuzhiyun
1887*4882a593Smuzhiyun rdev->pm.dpm.thermal.min_temp = low_temp;
1888*4882a593Smuzhiyun rdev->pm.dpm.thermal.max_temp = high_temp;
1889*4882a593Smuzhiyun
1890*4882a593Smuzhiyun return 0;
1891*4882a593Smuzhiyun }
1892*4882a593Smuzhiyun
/**
 * rv770_dpm_enable - bring up dynamic power management on RV770-class parts
 * @rdev: radeon device
 *
 * Programs voltage, memory and engine parameters, uploads the SMC firmware
 * and the boot-state table, then starts the SMC and the DPM state machine.
 * The ordering of the hardware programming below is significant; do not
 * reorder without a reason.
 *
 * Returns 0 on success, negative error code on failure.
 */
int rv770_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		rv770_restore_cgcg(rdev);

	/* Refuse to enable DPM twice. */
	if (rv770_dpm_enabled(rdev))
		return -EINVAL;

	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = rv770_construct_vddc_table(rdev);
		if (ret) {
			DRM_ERROR("rv770_construct_vddc_table failed\n");
			return ret;
		}
	}

	/* Mobility parts with dcodt need the current ODT settings cached. */
	if (pi->dcodt)
		rv770_retrieve_odt_values(rdev);

	if (pi->mvdd_control) {
		ret = rv770_get_mvdd_configuration(rdev);
		if (ret) {
			DRM_ERROR("rv770_get_mvdd_configuration failed\n");
			return ret;
		}
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv770_enable_backbias(rdev, true);

	rv770_enable_spread_spectrum(rdev, true);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);

	/* Static clock/timing setup must precede firmware upload. */
	rv770_program_mpll_timing_parameters(rdev);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	rv770_program_engine_speed_parameters(rdev);
	rv770_enable_display_gap(rdev);
	rv770_program_vc(rdev);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, true);

	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = rv770_init_smc_table(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("rv770_init_smc_table failed\n");
		return ret;
	}

	rv770_program_response_times(rdev);
	r7xx_start_smc(rdev);

	/* RV730/RV710 use a different DPM start sequence. */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_start_dpm(rdev);
	else
		rv770_start_dpm(rdev);

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, true);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	return 0;
}
1975*4882a593Smuzhiyun
rv770_dpm_late_enable(struct radeon_device * rdev)1976*4882a593Smuzhiyun int rv770_dpm_late_enable(struct radeon_device *rdev)
1977*4882a593Smuzhiyun {
1978*4882a593Smuzhiyun int ret;
1979*4882a593Smuzhiyun
1980*4882a593Smuzhiyun if (rdev->irq.installed &&
1981*4882a593Smuzhiyun r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1982*4882a593Smuzhiyun PPSMC_Result result;
1983*4882a593Smuzhiyun
1984*4882a593Smuzhiyun ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
1985*4882a593Smuzhiyun if (ret)
1986*4882a593Smuzhiyun return ret;
1987*4882a593Smuzhiyun rdev->irq.dpm_thermal = true;
1988*4882a593Smuzhiyun radeon_irq_set(rdev);
1989*4882a593Smuzhiyun result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
1990*4882a593Smuzhiyun
1991*4882a593Smuzhiyun if (result != PPSMC_Result_OK)
1992*4882a593Smuzhiyun DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
1993*4882a593Smuzhiyun }
1994*4882a593Smuzhiyun
1995*4882a593Smuzhiyun return 0;
1996*4882a593Smuzhiyun }
1997*4882a593Smuzhiyun
/**
 * rv770_dpm_disable - tear down dynamic power management
 * @rdev: radeon device
 *
 * Reverses the enable sequence: clock gating and optional features are
 * turned off first, then the DPM state machine and the SMC are stopped.
 * No-op if DPM is not currently enabled.
 */
void rv770_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (!rv770_dpm_enabled(rdev))
		return;

	rv770_clear_vc(rdev);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);

	rv770_enable_spread_spectrum(rdev, false);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, false);

	/* Stop delivering thermal interrupts before the SMC goes away. */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, false);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, false);

	/* RV730/RV710 use a different DPM stop sequence. */
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_stop_dpm(rdev);
	else
		rv770_stop_dpm(rdev);

	r7xx_stop_smc(rdev);
	rv770_reset_smio_status(rdev);
}
2035*4882a593Smuzhiyun
/**
 * rv770_dpm_set_power_state - switch the hardware to the requested power state
 * @rdev: radeon device
 *
 * Uploads the requested state to the SMC while it is halted, then resumes
 * the SMC and commits the switch.  UVD clocks and (on mobility parts)
 * dcodt are reprogrammed around the switch; the halt/upload/resume
 * ordering is required by the SMC.
 *
 * Returns 0 on success, negative error code on failure.
 */
int rv770_dpm_set_power_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	ret = rv770_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	/* The SMC is halted here; safe to rewrite its state table. */
	ret = rv770_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("rv770_upload_sw_state failed\n");
		return ret;
	}
	r7xx_program_memory_timing_parameters(rdev, new_ps);
	if (pi->dcodt)
		rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	if (pi->dcodt)
		rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	return 0;
}
2078*4882a593Smuzhiyun
2079*4882a593Smuzhiyun #if 0
2080*4882a593Smuzhiyun void rv770_dpm_reset_asic(struct radeon_device *rdev)
2081*4882a593Smuzhiyun {
2082*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2083*4882a593Smuzhiyun struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
2084*4882a593Smuzhiyun
2085*4882a593Smuzhiyun rv770_restrict_performance_levels_before_switch(rdev);
2086*4882a593Smuzhiyun if (pi->dcodt)
2087*4882a593Smuzhiyun rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
2088*4882a593Smuzhiyun rv770_set_boot_state(rdev);
2089*4882a593Smuzhiyun if (pi->dcodt)
2090*4882a593Smuzhiyun rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
2091*4882a593Smuzhiyun }
2092*4882a593Smuzhiyun #endif
2093*4882a593Smuzhiyun
/**
 * rv770_dpm_setup_asic - one-time ASIC setup before DPM is enabled
 * @rdev: radeon device
 *
 * Caches clock/voltage register state, detects memory type and PCIe gen2
 * status, enables ACPI power management, and (unless ASPM is disabled via
 * the radeon_aspm module parameter) enables the ASPM features advertised
 * by the platform capability flags.
 */
void rv770_dpm_setup_asic(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	r7xx_read_clock_registers(rdev);
	rv770_read_voltage_smio_registers(rdev);
	rv770_get_memory_type(rdev);
	if (pi->dcodt)
		rv770_get_mclk_odt_threshold(rdev);
	rv770_get_pcie_gen2_status(rdev);

	rv770_enable_acpi_pm(rdev);

	/* radeon_aspm == 0 means ASPM is globally disabled by the user. */
	if (radeon_aspm != 0) {
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
			rv770_enable_l0s(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
			rv770_enable_l1(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
			rv770_enable_pll_sleep_in_l1(rdev);
	}
}
2116*4882a593Smuzhiyun
/**
 * rv770_dpm_display_configuration_changed - react to a display change
 * @rdev: radeon device
 *
 * Reprograms the display gap so DPM timing matches the new display setup.
 */
void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	rv770_program_display_gap(rdev);
}
2121*4882a593Smuzhiyun
2122*4882a593Smuzhiyun union power_info {
2123*4882a593Smuzhiyun struct _ATOM_POWERPLAY_INFO info;
2124*4882a593Smuzhiyun struct _ATOM_POWERPLAY_INFO_V2 info_2;
2125*4882a593Smuzhiyun struct _ATOM_POWERPLAY_INFO_V3 info_3;
2126*4882a593Smuzhiyun struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
2127*4882a593Smuzhiyun struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
2128*4882a593Smuzhiyun struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
2129*4882a593Smuzhiyun };
2130*4882a593Smuzhiyun
2131*4882a593Smuzhiyun union pplib_clock_info {
2132*4882a593Smuzhiyun struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
2133*4882a593Smuzhiyun struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
2134*4882a593Smuzhiyun struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
2135*4882a593Smuzhiyun struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
2136*4882a593Smuzhiyun };
2137*4882a593Smuzhiyun
2138*4882a593Smuzhiyun union pplib_power_state {
2139*4882a593Smuzhiyun struct _ATOM_PPLIB_STATE v1;
2140*4882a593Smuzhiyun struct _ATOM_PPLIB_STATE_V2 v2;
2141*4882a593Smuzhiyun };
2142*4882a593Smuzhiyun
/**
 * rv7xx_parse_pplib_non_clock_info - fill a radeon_ps from a non-clock entry
 * @rdev: radeon device
 * @rps: power state being populated
 * @non_clock_info: ATOM non-clock info entry for this state
 * @table_rev: non-clock info table revision
 *
 * Copies the capability/classification fields, reads the UVD clocks when
 * the table revision carries them (rev 2+), and records the boot and UVD
 * states on the device.
 */
static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
					     struct radeon_ps *rps,
					     struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					     u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	/* UVD clocks only exist in revision 2+ non-clock tables. */
	rps->vclk = 0;
	rps->dclk = 0;
	if (table_rev > ATOM_PPLIB_NONCLOCKINFO_VER1) {
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	}

	/* A UVD state must carry usable UVD clocks; substitute defaults. */
	if (r600_is_uvd_state(rps->class, rps->class2) &&
	    ((rps->vclk == 0) || (rps->dclk == 0))) {
		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}
2172*4882a593Smuzhiyun
/**
 * rv7xx_parse_pplib_clock_info - fill one performance level from a clock entry
 * @rdev: radeon device
 * @rps: power state the level belongs to
 * @index: performance level index (0 = low, 1 = medium, 2+ = high)
 * @clock_info: ATOM clock info entry (layout depends on ASIC family)
 *
 * Decodes the engine/memory clocks and voltages for one performance level,
 * then patches up special values (leakage vddc, ACPI state, ULV, boot
 * state) and updates device-wide voltage/clock bookkeeping.
 */
static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
					 struct radeon_ps *rps, int index,
					 union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	u32 sclk, mclk;
	struct rv7xx_pl *pl;

	switch (index) {
	case 0:
		pl = &ps->low;
		break;
	case 1:
		pl = &ps->medium;
		break;
	case 2:
	default:
		pl = &ps->high;
		break;
	}

	/* Evergreen+ entries carry vddci and use a different layout. */
	if (rdev->family >= CHIP_CEDAR) {
		sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
		sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
		mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
		pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
		pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
	} else {
		sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
		sclk |= clock_info->r600.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
		mclk |= clock_info->r600.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
		pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
	}

	pl->mclk = mclk;
	pl->sclk = sclk;

	/* patch up vddc if necessary (0xff01 is the "use max" marker) */
	if (pl->vddc == 0xff01) {
		if (pi->max_vddc)
			pl->vddc = pi->max_vddc;
	}

	/* Remember the ACPI state's voltages and PCIe gen2 capability. */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		if (rdev->family >= CHIP_CEDAR)
			eg_pi->acpi_vddci = pl->vddci;
		if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	/* ULV (ultra-low voltage) support only exists on BARTS+. */
	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		if (rdev->family >= CHIP_BARTS) {
			eg_pi->ulv.supported = true;
			eg_pi->ulv.pl = pl;
		}
	}

	/* Track the overall vddc range seen across the table. */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state: use the firmware defaults, not table values */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* Record the AC performance-state maximums for later limiting. */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
2265*4882a593Smuzhiyun
rv7xx_parse_power_table(struct radeon_device * rdev)2266*4882a593Smuzhiyun int rv7xx_parse_power_table(struct radeon_device *rdev)
2267*4882a593Smuzhiyun {
2268*4882a593Smuzhiyun struct radeon_mode_info *mode_info = &rdev->mode_info;
2269*4882a593Smuzhiyun struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
2270*4882a593Smuzhiyun union pplib_power_state *power_state;
2271*4882a593Smuzhiyun int i, j;
2272*4882a593Smuzhiyun union pplib_clock_info *clock_info;
2273*4882a593Smuzhiyun union power_info *power_info;
2274*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
2275*4882a593Smuzhiyun u16 data_offset;
2276*4882a593Smuzhiyun u8 frev, crev;
2277*4882a593Smuzhiyun struct rv7xx_ps *ps;
2278*4882a593Smuzhiyun
2279*4882a593Smuzhiyun if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
2280*4882a593Smuzhiyun &frev, &crev, &data_offset))
2281*4882a593Smuzhiyun return -EINVAL;
2282*4882a593Smuzhiyun power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
2283*4882a593Smuzhiyun
2284*4882a593Smuzhiyun rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates,
2285*4882a593Smuzhiyun sizeof(struct radeon_ps),
2286*4882a593Smuzhiyun GFP_KERNEL);
2287*4882a593Smuzhiyun if (!rdev->pm.dpm.ps)
2288*4882a593Smuzhiyun return -ENOMEM;
2289*4882a593Smuzhiyun
2290*4882a593Smuzhiyun for (i = 0; i < power_info->pplib.ucNumStates; i++) {
2291*4882a593Smuzhiyun power_state = (union pplib_power_state *)
2292*4882a593Smuzhiyun (mode_info->atom_context->bios + data_offset +
2293*4882a593Smuzhiyun le16_to_cpu(power_info->pplib.usStateArrayOffset) +
2294*4882a593Smuzhiyun i * power_info->pplib.ucStateEntrySize);
2295*4882a593Smuzhiyun non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
2296*4882a593Smuzhiyun (mode_info->atom_context->bios + data_offset +
2297*4882a593Smuzhiyun le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
2298*4882a593Smuzhiyun (power_state->v1.ucNonClockStateIndex *
2299*4882a593Smuzhiyun power_info->pplib.ucNonClockSize));
2300*4882a593Smuzhiyun if (power_info->pplib.ucStateEntrySize - 1) {
2301*4882a593Smuzhiyun u8 *idx;
2302*4882a593Smuzhiyun ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
2303*4882a593Smuzhiyun if (ps == NULL) {
2304*4882a593Smuzhiyun kfree(rdev->pm.dpm.ps);
2305*4882a593Smuzhiyun return -ENOMEM;
2306*4882a593Smuzhiyun }
2307*4882a593Smuzhiyun rdev->pm.dpm.ps[i].ps_priv = ps;
2308*4882a593Smuzhiyun rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
2309*4882a593Smuzhiyun non_clock_info,
2310*4882a593Smuzhiyun power_info->pplib.ucNonClockSize);
2311*4882a593Smuzhiyun idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
2312*4882a593Smuzhiyun for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
2313*4882a593Smuzhiyun clock_info = (union pplib_clock_info *)
2314*4882a593Smuzhiyun (mode_info->atom_context->bios + data_offset +
2315*4882a593Smuzhiyun le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
2316*4882a593Smuzhiyun (idx[j] * power_info->pplib.ucClockInfoSize));
2317*4882a593Smuzhiyun rv7xx_parse_pplib_clock_info(rdev,
2318*4882a593Smuzhiyun &rdev->pm.dpm.ps[i], j,
2319*4882a593Smuzhiyun clock_info);
2320*4882a593Smuzhiyun }
2321*4882a593Smuzhiyun }
2322*4882a593Smuzhiyun }
2323*4882a593Smuzhiyun rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
2324*4882a593Smuzhiyun return 0;
2325*4882a593Smuzhiyun }
2326*4882a593Smuzhiyun
rv770_get_engine_memory_ss(struct radeon_device * rdev)2327*4882a593Smuzhiyun void rv770_get_engine_memory_ss(struct radeon_device *rdev)
2328*4882a593Smuzhiyun {
2329*4882a593Smuzhiyun struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2330*4882a593Smuzhiyun struct radeon_atom_ss ss;
2331*4882a593Smuzhiyun
2332*4882a593Smuzhiyun pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2333*4882a593Smuzhiyun ASIC_INTERNAL_ENGINE_SS, 0);
2334*4882a593Smuzhiyun pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
2335*4882a593Smuzhiyun ASIC_INTERNAL_MEMORY_SS, 0);
2336*4882a593Smuzhiyun
2337*4882a593Smuzhiyun if (pi->sclk_ss || pi->mclk_ss)
2338*4882a593Smuzhiyun pi->dynamic_ss = true;
2339*4882a593Smuzhiyun else
2340*4882a593Smuzhiyun pi->dynamic_ss = false;
2341*4882a593Smuzhiyun }
2342*4882a593Smuzhiyun
rv770_dpm_init(struct radeon_device * rdev)2343*4882a593Smuzhiyun int rv770_dpm_init(struct radeon_device *rdev)
2344*4882a593Smuzhiyun {
2345*4882a593Smuzhiyun struct rv7xx_power_info *pi;
2346*4882a593Smuzhiyun struct atom_clock_dividers dividers;
2347*4882a593Smuzhiyun int ret;
2348*4882a593Smuzhiyun
2349*4882a593Smuzhiyun pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
2350*4882a593Smuzhiyun if (pi == NULL)
2351*4882a593Smuzhiyun return -ENOMEM;
2352*4882a593Smuzhiyun rdev->pm.dpm.priv = pi;
2353*4882a593Smuzhiyun
2354*4882a593Smuzhiyun rv770_get_max_vddc(rdev);
2355*4882a593Smuzhiyun
2356*4882a593Smuzhiyun pi->acpi_vddc = 0;
2357*4882a593Smuzhiyun pi->min_vddc_in_table = 0;
2358*4882a593Smuzhiyun pi->max_vddc_in_table = 0;
2359*4882a593Smuzhiyun
2360*4882a593Smuzhiyun ret = r600_get_platform_caps(rdev);
2361*4882a593Smuzhiyun if (ret)
2362*4882a593Smuzhiyun return ret;
2363*4882a593Smuzhiyun
2364*4882a593Smuzhiyun ret = rv7xx_parse_power_table(rdev);
2365*4882a593Smuzhiyun if (ret)
2366*4882a593Smuzhiyun return ret;
2367*4882a593Smuzhiyun
2368*4882a593Smuzhiyun if (rdev->pm.dpm.voltage_response_time == 0)
2369*4882a593Smuzhiyun rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2370*4882a593Smuzhiyun if (rdev->pm.dpm.backbias_response_time == 0)
2371*4882a593Smuzhiyun rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2372*4882a593Smuzhiyun
2373*4882a593Smuzhiyun ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2374*4882a593Smuzhiyun 0, false, ÷rs);
2375*4882a593Smuzhiyun if (ret)
2376*4882a593Smuzhiyun pi->ref_div = dividers.ref_div + 1;
2377*4882a593Smuzhiyun else
2378*4882a593Smuzhiyun pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2379*4882a593Smuzhiyun
2380*4882a593Smuzhiyun pi->mclk_strobe_mode_threshold = 30000;
2381*4882a593Smuzhiyun pi->mclk_edc_enable_threshold = 30000;
2382*4882a593Smuzhiyun
2383*4882a593Smuzhiyun pi->rlp = RV770_RLP_DFLT;
2384*4882a593Smuzhiyun pi->rmp = RV770_RMP_DFLT;
2385*4882a593Smuzhiyun pi->lhp = RV770_LHP_DFLT;
2386*4882a593Smuzhiyun pi->lmp = RV770_LMP_DFLT;
2387*4882a593Smuzhiyun
2388*4882a593Smuzhiyun pi->voltage_control =
2389*4882a593Smuzhiyun radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2390*4882a593Smuzhiyun
2391*4882a593Smuzhiyun pi->mvdd_control =
2392*4882a593Smuzhiyun radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2393*4882a593Smuzhiyun
2394*4882a593Smuzhiyun rv770_get_engine_memory_ss(rdev);
2395*4882a593Smuzhiyun
2396*4882a593Smuzhiyun pi->asi = RV770_ASI_DFLT;
2397*4882a593Smuzhiyun pi->pasi = RV770_HASI_DFLT;
2398*4882a593Smuzhiyun pi->vrc = RV770_VRC_DFLT;
2399*4882a593Smuzhiyun
2400*4882a593Smuzhiyun pi->power_gating = false;
2401*4882a593Smuzhiyun
2402*4882a593Smuzhiyun pi->gfx_clock_gating = true;
2403*4882a593Smuzhiyun
2404*4882a593Smuzhiyun pi->mg_clock_gating = true;
2405*4882a593Smuzhiyun pi->mgcgtssm = true;
2406*4882a593Smuzhiyun
2407*4882a593Smuzhiyun pi->dynamic_pcie_gen2 = true;
2408*4882a593Smuzhiyun
2409*4882a593Smuzhiyun if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2410*4882a593Smuzhiyun pi->thermal_protection = true;
2411*4882a593Smuzhiyun else
2412*4882a593Smuzhiyun pi->thermal_protection = false;
2413*4882a593Smuzhiyun
2414*4882a593Smuzhiyun pi->display_gap = true;
2415*4882a593Smuzhiyun
2416*4882a593Smuzhiyun if (rdev->flags & RADEON_IS_MOBILITY)
2417*4882a593Smuzhiyun pi->dcodt = true;
2418*4882a593Smuzhiyun else
2419*4882a593Smuzhiyun pi->dcodt = false;
2420*4882a593Smuzhiyun
2421*4882a593Smuzhiyun pi->ulps = true;
2422*4882a593Smuzhiyun
2423*4882a593Smuzhiyun pi->mclk_stutter_mode_threshold = 0;
2424*4882a593Smuzhiyun
2425*4882a593Smuzhiyun pi->sram_end = SMC_RAM_END;
2426*4882a593Smuzhiyun pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
2427*4882a593Smuzhiyun pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;
2428*4882a593Smuzhiyun
2429*4882a593Smuzhiyun return 0;
2430*4882a593Smuzhiyun }
2431*4882a593Smuzhiyun
/**
 * rv770_dpm_print_power_state - dump a power state to the kernel log
 * @rdev: radeon device
 * @rps: power state to print
 *
 * Prints classification, capabilities, UVD clocks and the three
 * performance levels.  Evergreen+ parts also print vddci.
 */
void rv770_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *rps)
{
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	struct rv7xx_pl *pl;

	r600_dpm_print_class_info(rps->class, rps->class2);
	r600_dpm_print_cap_info(rps->caps);
	printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
	if (rdev->family >= CHIP_CEDAR) {
		pl = &ps->low;
		printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->medium;
		printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->high;
		printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
	} else {
		pl = &ps->low;
		printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->medium;
		printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->high;
		printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
	}
	r600_dpm_print_ps_status(rdev, rps);
}
2464*4882a593Smuzhiyun
rv770_dpm_debugfs_print_current_performance_level(struct radeon_device * rdev,struct seq_file * m)2465*4882a593Smuzhiyun void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
2466*4882a593Smuzhiyun struct seq_file *m)
2467*4882a593Smuzhiyun {
2468*4882a593Smuzhiyun struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2469*4882a593Smuzhiyun struct rv7xx_ps *ps = rv770_get_ps(rps);
2470*4882a593Smuzhiyun struct rv7xx_pl *pl;
2471*4882a593Smuzhiyun u32 current_index =
2472*4882a593Smuzhiyun (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2473*4882a593Smuzhiyun CURRENT_PROFILE_INDEX_SHIFT;
2474*4882a593Smuzhiyun
2475*4882a593Smuzhiyun if (current_index > 2) {
2476*4882a593Smuzhiyun seq_printf(m, "invalid dpm profile %d\n", current_index);
2477*4882a593Smuzhiyun } else {
2478*4882a593Smuzhiyun if (current_index == 0)
2479*4882a593Smuzhiyun pl = &ps->low;
2480*4882a593Smuzhiyun else if (current_index == 1)
2481*4882a593Smuzhiyun pl = &ps->medium;
2482*4882a593Smuzhiyun else /* current_index == 2 */
2483*4882a593Smuzhiyun pl = &ps->high;
2484*4882a593Smuzhiyun seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
2485*4882a593Smuzhiyun if (rdev->family >= CHIP_CEDAR) {
2486*4882a593Smuzhiyun seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
2487*4882a593Smuzhiyun current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
2488*4882a593Smuzhiyun } else {
2489*4882a593Smuzhiyun seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u\n",
2490*4882a593Smuzhiyun current_index, pl->sclk, pl->mclk, pl->vddc);
2491*4882a593Smuzhiyun }
2492*4882a593Smuzhiyun }
2493*4882a593Smuzhiyun }
2494*4882a593Smuzhiyun
rv770_dpm_get_current_sclk(struct radeon_device * rdev)2495*4882a593Smuzhiyun u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev)
2496*4882a593Smuzhiyun {
2497*4882a593Smuzhiyun struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2498*4882a593Smuzhiyun struct rv7xx_ps *ps = rv770_get_ps(rps);
2499*4882a593Smuzhiyun struct rv7xx_pl *pl;
2500*4882a593Smuzhiyun u32 current_index =
2501*4882a593Smuzhiyun (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2502*4882a593Smuzhiyun CURRENT_PROFILE_INDEX_SHIFT;
2503*4882a593Smuzhiyun
2504*4882a593Smuzhiyun if (current_index > 2) {
2505*4882a593Smuzhiyun return 0;
2506*4882a593Smuzhiyun } else {
2507*4882a593Smuzhiyun if (current_index == 0)
2508*4882a593Smuzhiyun pl = &ps->low;
2509*4882a593Smuzhiyun else if (current_index == 1)
2510*4882a593Smuzhiyun pl = &ps->medium;
2511*4882a593Smuzhiyun else /* current_index == 2 */
2512*4882a593Smuzhiyun pl = &ps->high;
2513*4882a593Smuzhiyun return pl->sclk;
2514*4882a593Smuzhiyun }
2515*4882a593Smuzhiyun }
2516*4882a593Smuzhiyun
rv770_dpm_get_current_mclk(struct radeon_device * rdev)2517*4882a593Smuzhiyun u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev)
2518*4882a593Smuzhiyun {
2519*4882a593Smuzhiyun struct radeon_ps *rps = rdev->pm.dpm.current_ps;
2520*4882a593Smuzhiyun struct rv7xx_ps *ps = rv770_get_ps(rps);
2521*4882a593Smuzhiyun struct rv7xx_pl *pl;
2522*4882a593Smuzhiyun u32 current_index =
2523*4882a593Smuzhiyun (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
2524*4882a593Smuzhiyun CURRENT_PROFILE_INDEX_SHIFT;
2525*4882a593Smuzhiyun
2526*4882a593Smuzhiyun if (current_index > 2) {
2527*4882a593Smuzhiyun return 0;
2528*4882a593Smuzhiyun } else {
2529*4882a593Smuzhiyun if (current_index == 0)
2530*4882a593Smuzhiyun pl = &ps->low;
2531*4882a593Smuzhiyun else if (current_index == 1)
2532*4882a593Smuzhiyun pl = &ps->medium;
2533*4882a593Smuzhiyun else /* current_index == 2 */
2534*4882a593Smuzhiyun pl = &ps->high;
2535*4882a593Smuzhiyun return pl->mclk;
2536*4882a593Smuzhiyun }
2537*4882a593Smuzhiyun }
2538*4882a593Smuzhiyun
rv770_dpm_fini(struct radeon_device * rdev)2539*4882a593Smuzhiyun void rv770_dpm_fini(struct radeon_device *rdev)
2540*4882a593Smuzhiyun {
2541*4882a593Smuzhiyun int i;
2542*4882a593Smuzhiyun
2543*4882a593Smuzhiyun for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2544*4882a593Smuzhiyun kfree(rdev->pm.dpm.ps[i].ps_priv);
2545*4882a593Smuzhiyun }
2546*4882a593Smuzhiyun kfree(rdev->pm.dpm.ps);
2547*4882a593Smuzhiyun kfree(rdev->pm.dpm.priv);
2548*4882a593Smuzhiyun }
2549*4882a593Smuzhiyun
rv770_dpm_get_sclk(struct radeon_device * rdev,bool low)2550*4882a593Smuzhiyun u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
2551*4882a593Smuzhiyun {
2552*4882a593Smuzhiyun struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2553*4882a593Smuzhiyun
2554*4882a593Smuzhiyun if (low)
2555*4882a593Smuzhiyun return requested_state->low.sclk;
2556*4882a593Smuzhiyun else
2557*4882a593Smuzhiyun return requested_state->high.sclk;
2558*4882a593Smuzhiyun }
2559*4882a593Smuzhiyun
rv770_dpm_get_mclk(struct radeon_device * rdev,bool low)2560*4882a593Smuzhiyun u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
2561*4882a593Smuzhiyun {
2562*4882a593Smuzhiyun struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);
2563*4882a593Smuzhiyun
2564*4882a593Smuzhiyun if (low)
2565*4882a593Smuzhiyun return requested_state->low.mclk;
2566*4882a593Smuzhiyun else
2567*4882a593Smuzhiyun return requested_state->high.mclk;
2568*4882a593Smuzhiyun }
2569*4882a593Smuzhiyun
rv770_dpm_vblank_too_short(struct radeon_device * rdev)2570*4882a593Smuzhiyun bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
2571*4882a593Smuzhiyun {
2572*4882a593Smuzhiyun u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2573*4882a593Smuzhiyun u32 switch_limit = 200; /* 300 */
2574*4882a593Smuzhiyun
2575*4882a593Smuzhiyun /* RV770 */
2576*4882a593Smuzhiyun /* mclk switching doesn't seem to work reliably on desktop RV770s */
2577*4882a593Smuzhiyun if ((rdev->family == CHIP_RV770) &&
2578*4882a593Smuzhiyun !(rdev->flags & RADEON_IS_MOBILITY))
2579*4882a593Smuzhiyun switch_limit = 0xffffffff; /* disable mclk switching */
2580*4882a593Smuzhiyun
2581*4882a593Smuzhiyun if (vblank_time < switch_limit)
2582*4882a593Smuzhiyun return true;
2583*4882a593Smuzhiyun else
2584*4882a593Smuzhiyun return false;
2585*4882a593Smuzhiyun
2586*4882a593Smuzhiyun }
2587