/*
 * Copyright 2012 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <linux/math64.h>
#include <linux/pci.h>
#include <linux/seq_file.h>

#include "atom.h"
#include "ni_dpm.h"
#include "nid.h"
#include "r600_dpm.h"
#include "radeon.h"
#include "radeon_asic.h"
34*4882a593Smuzhiyun 
/* MC (memory controller) arbiter register-set slots F0..F3; the DPM code
 * copies arbitration settings between these slots when switching memory
 * timing sets. */
#define MC_CG_ARB_FREQ_F0           0x0a
#define MC_CG_ARB_FREQ_F1           0x0b
#define MC_CG_ARB_FREQ_F2           0x0c
#define MC_CG_ARB_FREQ_F3           0x0d

/* One past the last valid SMC RAM address used when bounds-checking
 * SMC reads/writes. */
#define SMC_RAM_END 0xC000
41*4882a593Smuzhiyun 
42*4882a593Smuzhiyun static const struct ni_cac_weights cac_weights_cayman_xt =
43*4882a593Smuzhiyun {
44*4882a593Smuzhiyun 	0x15,
45*4882a593Smuzhiyun 	0x2,
46*4882a593Smuzhiyun 	0x19,
47*4882a593Smuzhiyun 	0x2,
48*4882a593Smuzhiyun 	0x8,
49*4882a593Smuzhiyun 	0x14,
50*4882a593Smuzhiyun 	0x2,
51*4882a593Smuzhiyun 	0x16,
52*4882a593Smuzhiyun 	0xE,
53*4882a593Smuzhiyun 	0x17,
54*4882a593Smuzhiyun 	0x13,
55*4882a593Smuzhiyun 	0x2B,
56*4882a593Smuzhiyun 	0x10,
57*4882a593Smuzhiyun 	0x7,
58*4882a593Smuzhiyun 	0x5,
59*4882a593Smuzhiyun 	0x5,
60*4882a593Smuzhiyun 	0x5,
61*4882a593Smuzhiyun 	0x2,
62*4882a593Smuzhiyun 	0x3,
63*4882a593Smuzhiyun 	0x9,
64*4882a593Smuzhiyun 	0x10,
65*4882a593Smuzhiyun 	0x10,
66*4882a593Smuzhiyun 	0x2B,
67*4882a593Smuzhiyun 	0xA,
68*4882a593Smuzhiyun 	0x9,
69*4882a593Smuzhiyun 	0x4,
70*4882a593Smuzhiyun 	0xD,
71*4882a593Smuzhiyun 	0xD,
72*4882a593Smuzhiyun 	0x3E,
73*4882a593Smuzhiyun 	0x18,
74*4882a593Smuzhiyun 	0x14,
75*4882a593Smuzhiyun 	0,
76*4882a593Smuzhiyun 	0x3,
77*4882a593Smuzhiyun 	0x3,
78*4882a593Smuzhiyun 	0x5,
79*4882a593Smuzhiyun 	0,
80*4882a593Smuzhiyun 	0x2,
81*4882a593Smuzhiyun 	0,
82*4882a593Smuzhiyun 	0,
83*4882a593Smuzhiyun 	0,
84*4882a593Smuzhiyun 	0,
85*4882a593Smuzhiyun 	0,
86*4882a593Smuzhiyun 	0,
87*4882a593Smuzhiyun 	0,
88*4882a593Smuzhiyun 	0,
89*4882a593Smuzhiyun 	0,
90*4882a593Smuzhiyun 	0x1CC,
91*4882a593Smuzhiyun 	0,
92*4882a593Smuzhiyun 	0x164,
93*4882a593Smuzhiyun 	1,
94*4882a593Smuzhiyun 	1,
95*4882a593Smuzhiyun 	1,
96*4882a593Smuzhiyun 	1,
97*4882a593Smuzhiyun 	12,
98*4882a593Smuzhiyun 	12,
99*4882a593Smuzhiyun 	12,
100*4882a593Smuzhiyun 	0x12,
101*4882a593Smuzhiyun 	0x1F,
102*4882a593Smuzhiyun 	132,
103*4882a593Smuzhiyun 	5,
104*4882a593Smuzhiyun 	7,
105*4882a593Smuzhiyun 	0,
106*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0 },
107*4882a593Smuzhiyun 	{ 0, 0, 0, 0 },
108*4882a593Smuzhiyun 	true
109*4882a593Smuzhiyun };
110*4882a593Smuzhiyun 
111*4882a593Smuzhiyun static const struct ni_cac_weights cac_weights_cayman_pro =
112*4882a593Smuzhiyun {
113*4882a593Smuzhiyun 	0x16,
114*4882a593Smuzhiyun 	0x4,
115*4882a593Smuzhiyun 	0x10,
116*4882a593Smuzhiyun 	0x2,
117*4882a593Smuzhiyun 	0xA,
118*4882a593Smuzhiyun 	0x16,
119*4882a593Smuzhiyun 	0x2,
120*4882a593Smuzhiyun 	0x18,
121*4882a593Smuzhiyun 	0x10,
122*4882a593Smuzhiyun 	0x1A,
123*4882a593Smuzhiyun 	0x16,
124*4882a593Smuzhiyun 	0x2D,
125*4882a593Smuzhiyun 	0x12,
126*4882a593Smuzhiyun 	0xA,
127*4882a593Smuzhiyun 	0x6,
128*4882a593Smuzhiyun 	0x6,
129*4882a593Smuzhiyun 	0x6,
130*4882a593Smuzhiyun 	0x2,
131*4882a593Smuzhiyun 	0x4,
132*4882a593Smuzhiyun 	0xB,
133*4882a593Smuzhiyun 	0x11,
134*4882a593Smuzhiyun 	0x11,
135*4882a593Smuzhiyun 	0x2D,
136*4882a593Smuzhiyun 	0xC,
137*4882a593Smuzhiyun 	0xC,
138*4882a593Smuzhiyun 	0x7,
139*4882a593Smuzhiyun 	0x10,
140*4882a593Smuzhiyun 	0x10,
141*4882a593Smuzhiyun 	0x3F,
142*4882a593Smuzhiyun 	0x1A,
143*4882a593Smuzhiyun 	0x16,
144*4882a593Smuzhiyun 	0,
145*4882a593Smuzhiyun 	0x7,
146*4882a593Smuzhiyun 	0x4,
147*4882a593Smuzhiyun 	0x6,
148*4882a593Smuzhiyun 	1,
149*4882a593Smuzhiyun 	0x2,
150*4882a593Smuzhiyun 	0x1,
151*4882a593Smuzhiyun 	0,
152*4882a593Smuzhiyun 	0,
153*4882a593Smuzhiyun 	0,
154*4882a593Smuzhiyun 	0,
155*4882a593Smuzhiyun 	0,
156*4882a593Smuzhiyun 	0,
157*4882a593Smuzhiyun 	0x30,
158*4882a593Smuzhiyun 	0,
159*4882a593Smuzhiyun 	0x1CF,
160*4882a593Smuzhiyun 	0,
161*4882a593Smuzhiyun 	0x166,
162*4882a593Smuzhiyun 	1,
163*4882a593Smuzhiyun 	1,
164*4882a593Smuzhiyun 	1,
165*4882a593Smuzhiyun 	1,
166*4882a593Smuzhiyun 	12,
167*4882a593Smuzhiyun 	12,
168*4882a593Smuzhiyun 	12,
169*4882a593Smuzhiyun 	0x15,
170*4882a593Smuzhiyun 	0x1F,
171*4882a593Smuzhiyun 	132,
172*4882a593Smuzhiyun 	6,
173*4882a593Smuzhiyun 	6,
174*4882a593Smuzhiyun 	0,
175*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0 },
176*4882a593Smuzhiyun 	{ 0, 0, 0, 0 },
177*4882a593Smuzhiyun 	true
178*4882a593Smuzhiyun };
179*4882a593Smuzhiyun 
180*4882a593Smuzhiyun static const struct ni_cac_weights cac_weights_cayman_le =
181*4882a593Smuzhiyun {
182*4882a593Smuzhiyun 	0x7,
183*4882a593Smuzhiyun 	0xE,
184*4882a593Smuzhiyun 	0x1,
185*4882a593Smuzhiyun 	0xA,
186*4882a593Smuzhiyun 	0x1,
187*4882a593Smuzhiyun 	0x3F,
188*4882a593Smuzhiyun 	0x2,
189*4882a593Smuzhiyun 	0x18,
190*4882a593Smuzhiyun 	0x10,
191*4882a593Smuzhiyun 	0x1A,
192*4882a593Smuzhiyun 	0x1,
193*4882a593Smuzhiyun 	0x3F,
194*4882a593Smuzhiyun 	0x1,
195*4882a593Smuzhiyun 	0xE,
196*4882a593Smuzhiyun 	0x6,
197*4882a593Smuzhiyun 	0x6,
198*4882a593Smuzhiyun 	0x6,
199*4882a593Smuzhiyun 	0x2,
200*4882a593Smuzhiyun 	0x4,
201*4882a593Smuzhiyun 	0x9,
202*4882a593Smuzhiyun 	0x1A,
203*4882a593Smuzhiyun 	0x1A,
204*4882a593Smuzhiyun 	0x2C,
205*4882a593Smuzhiyun 	0xA,
206*4882a593Smuzhiyun 	0x11,
207*4882a593Smuzhiyun 	0x8,
208*4882a593Smuzhiyun 	0x19,
209*4882a593Smuzhiyun 	0x19,
210*4882a593Smuzhiyun 	0x1,
211*4882a593Smuzhiyun 	0x1,
212*4882a593Smuzhiyun 	0x1A,
213*4882a593Smuzhiyun 	0,
214*4882a593Smuzhiyun 	0x8,
215*4882a593Smuzhiyun 	0x5,
216*4882a593Smuzhiyun 	0x8,
217*4882a593Smuzhiyun 	0x1,
218*4882a593Smuzhiyun 	0x3,
219*4882a593Smuzhiyun 	0x1,
220*4882a593Smuzhiyun 	0,
221*4882a593Smuzhiyun 	0,
222*4882a593Smuzhiyun 	0,
223*4882a593Smuzhiyun 	0,
224*4882a593Smuzhiyun 	0,
225*4882a593Smuzhiyun 	0,
226*4882a593Smuzhiyun 	0x38,
227*4882a593Smuzhiyun 	0x38,
228*4882a593Smuzhiyun 	0x239,
229*4882a593Smuzhiyun 	0x3,
230*4882a593Smuzhiyun 	0x18A,
231*4882a593Smuzhiyun 	1,
232*4882a593Smuzhiyun 	1,
233*4882a593Smuzhiyun 	1,
234*4882a593Smuzhiyun 	1,
235*4882a593Smuzhiyun 	12,
236*4882a593Smuzhiyun 	12,
237*4882a593Smuzhiyun 	12,
238*4882a593Smuzhiyun 	0x15,
239*4882a593Smuzhiyun 	0x22,
240*4882a593Smuzhiyun 	132,
241*4882a593Smuzhiyun 	6,
242*4882a593Smuzhiyun 	6,
243*4882a593Smuzhiyun 	0,
244*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0 },
245*4882a593Smuzhiyun 	{ 0, 0, 0, 0 },
246*4882a593Smuzhiyun 	true
247*4882a593Smuzhiyun };
248*4882a593Smuzhiyun 
249*4882a593Smuzhiyun #define NISLANDS_MGCG_SEQUENCE  300
250*4882a593Smuzhiyun 
251*4882a593Smuzhiyun static const u32 cayman_cgcg_cgls_default[] =
252*4882a593Smuzhiyun {
253*4882a593Smuzhiyun 	0x000008f8, 0x00000010, 0xffffffff,
254*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
255*4882a593Smuzhiyun 	0x000008f8, 0x00000011, 0xffffffff,
256*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
257*4882a593Smuzhiyun 	0x000008f8, 0x00000012, 0xffffffff,
258*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
259*4882a593Smuzhiyun 	0x000008f8, 0x00000013, 0xffffffff,
260*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
261*4882a593Smuzhiyun 	0x000008f8, 0x00000014, 0xffffffff,
262*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
263*4882a593Smuzhiyun 	0x000008f8, 0x00000015, 0xffffffff,
264*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
265*4882a593Smuzhiyun 	0x000008f8, 0x00000016, 0xffffffff,
266*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
267*4882a593Smuzhiyun 	0x000008f8, 0x00000017, 0xffffffff,
268*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
269*4882a593Smuzhiyun 	0x000008f8, 0x00000018, 0xffffffff,
270*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
271*4882a593Smuzhiyun 	0x000008f8, 0x00000019, 0xffffffff,
272*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
273*4882a593Smuzhiyun 	0x000008f8, 0x0000001a, 0xffffffff,
274*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
275*4882a593Smuzhiyun 	0x000008f8, 0x0000001b, 0xffffffff,
276*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
277*4882a593Smuzhiyun 	0x000008f8, 0x00000020, 0xffffffff,
278*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
279*4882a593Smuzhiyun 	0x000008f8, 0x00000021, 0xffffffff,
280*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
281*4882a593Smuzhiyun 	0x000008f8, 0x00000022, 0xffffffff,
282*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
283*4882a593Smuzhiyun 	0x000008f8, 0x00000023, 0xffffffff,
284*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
285*4882a593Smuzhiyun 	0x000008f8, 0x00000024, 0xffffffff,
286*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
287*4882a593Smuzhiyun 	0x000008f8, 0x00000025, 0xffffffff,
288*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
289*4882a593Smuzhiyun 	0x000008f8, 0x00000026, 0xffffffff,
290*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
291*4882a593Smuzhiyun 	0x000008f8, 0x00000027, 0xffffffff,
292*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
293*4882a593Smuzhiyun 	0x000008f8, 0x00000028, 0xffffffff,
294*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
295*4882a593Smuzhiyun 	0x000008f8, 0x00000029, 0xffffffff,
296*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
297*4882a593Smuzhiyun 	0x000008f8, 0x0000002a, 0xffffffff,
298*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
299*4882a593Smuzhiyun 	0x000008f8, 0x0000002b, 0xffffffff,
300*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff
301*4882a593Smuzhiyun };
302*4882a593Smuzhiyun #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
303*4882a593Smuzhiyun 
304*4882a593Smuzhiyun static const u32 cayman_cgcg_cgls_disable[] =
305*4882a593Smuzhiyun {
306*4882a593Smuzhiyun 	0x000008f8, 0x00000010, 0xffffffff,
307*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
308*4882a593Smuzhiyun 	0x000008f8, 0x00000011, 0xffffffff,
309*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
310*4882a593Smuzhiyun 	0x000008f8, 0x00000012, 0xffffffff,
311*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
312*4882a593Smuzhiyun 	0x000008f8, 0x00000013, 0xffffffff,
313*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
314*4882a593Smuzhiyun 	0x000008f8, 0x00000014, 0xffffffff,
315*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
316*4882a593Smuzhiyun 	0x000008f8, 0x00000015, 0xffffffff,
317*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
318*4882a593Smuzhiyun 	0x000008f8, 0x00000016, 0xffffffff,
319*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
320*4882a593Smuzhiyun 	0x000008f8, 0x00000017, 0xffffffff,
321*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
322*4882a593Smuzhiyun 	0x000008f8, 0x00000018, 0xffffffff,
323*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
324*4882a593Smuzhiyun 	0x000008f8, 0x00000019, 0xffffffff,
325*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
326*4882a593Smuzhiyun 	0x000008f8, 0x0000001a, 0xffffffff,
327*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
328*4882a593Smuzhiyun 	0x000008f8, 0x0000001b, 0xffffffff,
329*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
330*4882a593Smuzhiyun 	0x000008f8, 0x00000020, 0xffffffff,
331*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
332*4882a593Smuzhiyun 	0x000008f8, 0x00000021, 0xffffffff,
333*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
334*4882a593Smuzhiyun 	0x000008f8, 0x00000022, 0xffffffff,
335*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
336*4882a593Smuzhiyun 	0x000008f8, 0x00000023, 0xffffffff,
337*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
338*4882a593Smuzhiyun 	0x000008f8, 0x00000024, 0xffffffff,
339*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
340*4882a593Smuzhiyun 	0x000008f8, 0x00000025, 0xffffffff,
341*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
342*4882a593Smuzhiyun 	0x000008f8, 0x00000026, 0xffffffff,
343*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
344*4882a593Smuzhiyun 	0x000008f8, 0x00000027, 0xffffffff,
345*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
346*4882a593Smuzhiyun 	0x000008f8, 0x00000028, 0xffffffff,
347*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
348*4882a593Smuzhiyun 	0x000008f8, 0x00000029, 0xffffffff,
349*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
350*4882a593Smuzhiyun 	0x000008f8, 0x0000002a, 0xffffffff,
351*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
352*4882a593Smuzhiyun 	0x000008f8, 0x0000002b, 0xffffffff,
353*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
354*4882a593Smuzhiyun 	0x00000644, 0x000f7902, 0x001f4180,
355*4882a593Smuzhiyun 	0x00000644, 0x000f3802, 0x001f4180
356*4882a593Smuzhiyun };
357*4882a593Smuzhiyun #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
358*4882a593Smuzhiyun 
359*4882a593Smuzhiyun static const u32 cayman_cgcg_cgls_enable[] =
360*4882a593Smuzhiyun {
361*4882a593Smuzhiyun 	0x00000644, 0x000f7882, 0x001f4080,
362*4882a593Smuzhiyun 	0x000008f8, 0x00000010, 0xffffffff,
363*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
364*4882a593Smuzhiyun 	0x000008f8, 0x00000011, 0xffffffff,
365*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
366*4882a593Smuzhiyun 	0x000008f8, 0x00000012, 0xffffffff,
367*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
368*4882a593Smuzhiyun 	0x000008f8, 0x00000013, 0xffffffff,
369*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
370*4882a593Smuzhiyun 	0x000008f8, 0x00000014, 0xffffffff,
371*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
372*4882a593Smuzhiyun 	0x000008f8, 0x00000015, 0xffffffff,
373*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
374*4882a593Smuzhiyun 	0x000008f8, 0x00000016, 0xffffffff,
375*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
376*4882a593Smuzhiyun 	0x000008f8, 0x00000017, 0xffffffff,
377*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
378*4882a593Smuzhiyun 	0x000008f8, 0x00000018, 0xffffffff,
379*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
380*4882a593Smuzhiyun 	0x000008f8, 0x00000019, 0xffffffff,
381*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
382*4882a593Smuzhiyun 	0x000008f8, 0x0000001a, 0xffffffff,
383*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
384*4882a593Smuzhiyun 	0x000008f8, 0x0000001b, 0xffffffff,
385*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
386*4882a593Smuzhiyun 	0x000008f8, 0x00000020, 0xffffffff,
387*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
388*4882a593Smuzhiyun 	0x000008f8, 0x00000021, 0xffffffff,
389*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
390*4882a593Smuzhiyun 	0x000008f8, 0x00000022, 0xffffffff,
391*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
392*4882a593Smuzhiyun 	0x000008f8, 0x00000023, 0xffffffff,
393*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
394*4882a593Smuzhiyun 	0x000008f8, 0x00000024, 0xffffffff,
395*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
396*4882a593Smuzhiyun 	0x000008f8, 0x00000025, 0xffffffff,
397*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
398*4882a593Smuzhiyun 	0x000008f8, 0x00000026, 0xffffffff,
399*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
400*4882a593Smuzhiyun 	0x000008f8, 0x00000027, 0xffffffff,
401*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
402*4882a593Smuzhiyun 	0x000008f8, 0x00000028, 0xffffffff,
403*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
404*4882a593Smuzhiyun 	0x000008f8, 0x00000029, 0xffffffff,
405*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
406*4882a593Smuzhiyun 	0x000008f8, 0x0000002a, 0xffffffff,
407*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
408*4882a593Smuzhiyun 	0x000008f8, 0x0000002b, 0xffffffff,
409*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff
410*4882a593Smuzhiyun };
411*4882a593Smuzhiyun #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
412*4882a593Smuzhiyun 
413*4882a593Smuzhiyun static const u32 cayman_mgcg_default[] =
414*4882a593Smuzhiyun {
415*4882a593Smuzhiyun 	0x0000802c, 0xc0000000, 0xffffffff,
416*4882a593Smuzhiyun 	0x00003fc4, 0xc0000000, 0xffffffff,
417*4882a593Smuzhiyun 	0x00005448, 0x00000100, 0xffffffff,
418*4882a593Smuzhiyun 	0x000055e4, 0x00000100, 0xffffffff,
419*4882a593Smuzhiyun 	0x0000160c, 0x00000100, 0xffffffff,
420*4882a593Smuzhiyun 	0x00008984, 0x06000100, 0xffffffff,
421*4882a593Smuzhiyun 	0x0000c164, 0x00000100, 0xffffffff,
422*4882a593Smuzhiyun 	0x00008a18, 0x00000100, 0xffffffff,
423*4882a593Smuzhiyun 	0x0000897c, 0x06000100, 0xffffffff,
424*4882a593Smuzhiyun 	0x00008b28, 0x00000100, 0xffffffff,
425*4882a593Smuzhiyun 	0x00009144, 0x00800200, 0xffffffff,
426*4882a593Smuzhiyun 	0x00009a60, 0x00000100, 0xffffffff,
427*4882a593Smuzhiyun 	0x00009868, 0x00000100, 0xffffffff,
428*4882a593Smuzhiyun 	0x00008d58, 0x00000100, 0xffffffff,
429*4882a593Smuzhiyun 	0x00009510, 0x00000100, 0xffffffff,
430*4882a593Smuzhiyun 	0x0000949c, 0x00000100, 0xffffffff,
431*4882a593Smuzhiyun 	0x00009654, 0x00000100, 0xffffffff,
432*4882a593Smuzhiyun 	0x00009030, 0x00000100, 0xffffffff,
433*4882a593Smuzhiyun 	0x00009034, 0x00000100, 0xffffffff,
434*4882a593Smuzhiyun 	0x00009038, 0x00000100, 0xffffffff,
435*4882a593Smuzhiyun 	0x0000903c, 0x00000100, 0xffffffff,
436*4882a593Smuzhiyun 	0x00009040, 0x00000100, 0xffffffff,
437*4882a593Smuzhiyun 	0x0000a200, 0x00000100, 0xffffffff,
438*4882a593Smuzhiyun 	0x0000a204, 0x00000100, 0xffffffff,
439*4882a593Smuzhiyun 	0x0000a208, 0x00000100, 0xffffffff,
440*4882a593Smuzhiyun 	0x0000a20c, 0x00000100, 0xffffffff,
441*4882a593Smuzhiyun 	0x00009744, 0x00000100, 0xffffffff,
442*4882a593Smuzhiyun 	0x00003f80, 0x00000100, 0xffffffff,
443*4882a593Smuzhiyun 	0x0000a210, 0x00000100, 0xffffffff,
444*4882a593Smuzhiyun 	0x0000a214, 0x00000100, 0xffffffff,
445*4882a593Smuzhiyun 	0x000004d8, 0x00000100, 0xffffffff,
446*4882a593Smuzhiyun 	0x00009664, 0x00000100, 0xffffffff,
447*4882a593Smuzhiyun 	0x00009698, 0x00000100, 0xffffffff,
448*4882a593Smuzhiyun 	0x000004d4, 0x00000200, 0xffffffff,
449*4882a593Smuzhiyun 	0x000004d0, 0x00000000, 0xffffffff,
450*4882a593Smuzhiyun 	0x000030cc, 0x00000104, 0xffffffff,
451*4882a593Smuzhiyun 	0x0000d0c0, 0x00000100, 0xffffffff,
452*4882a593Smuzhiyun 	0x0000d8c0, 0x00000100, 0xffffffff,
453*4882a593Smuzhiyun 	0x0000802c, 0x40000000, 0xffffffff,
454*4882a593Smuzhiyun 	0x00003fc4, 0x40000000, 0xffffffff,
455*4882a593Smuzhiyun 	0x0000915c, 0x00010000, 0xffffffff,
456*4882a593Smuzhiyun 	0x00009160, 0x00030002, 0xffffffff,
457*4882a593Smuzhiyun 	0x00009164, 0x00050004, 0xffffffff,
458*4882a593Smuzhiyun 	0x00009168, 0x00070006, 0xffffffff,
459*4882a593Smuzhiyun 	0x00009178, 0x00070000, 0xffffffff,
460*4882a593Smuzhiyun 	0x0000917c, 0x00030002, 0xffffffff,
461*4882a593Smuzhiyun 	0x00009180, 0x00050004, 0xffffffff,
462*4882a593Smuzhiyun 	0x0000918c, 0x00010006, 0xffffffff,
463*4882a593Smuzhiyun 	0x00009190, 0x00090008, 0xffffffff,
464*4882a593Smuzhiyun 	0x00009194, 0x00070000, 0xffffffff,
465*4882a593Smuzhiyun 	0x00009198, 0x00030002, 0xffffffff,
466*4882a593Smuzhiyun 	0x0000919c, 0x00050004, 0xffffffff,
467*4882a593Smuzhiyun 	0x000091a8, 0x00010006, 0xffffffff,
468*4882a593Smuzhiyun 	0x000091ac, 0x00090008, 0xffffffff,
469*4882a593Smuzhiyun 	0x000091b0, 0x00070000, 0xffffffff,
470*4882a593Smuzhiyun 	0x000091b4, 0x00030002, 0xffffffff,
471*4882a593Smuzhiyun 	0x000091b8, 0x00050004, 0xffffffff,
472*4882a593Smuzhiyun 	0x000091c4, 0x00010006, 0xffffffff,
473*4882a593Smuzhiyun 	0x000091c8, 0x00090008, 0xffffffff,
474*4882a593Smuzhiyun 	0x000091cc, 0x00070000, 0xffffffff,
475*4882a593Smuzhiyun 	0x000091d0, 0x00030002, 0xffffffff,
476*4882a593Smuzhiyun 	0x000091d4, 0x00050004, 0xffffffff,
477*4882a593Smuzhiyun 	0x000091e0, 0x00010006, 0xffffffff,
478*4882a593Smuzhiyun 	0x000091e4, 0x00090008, 0xffffffff,
479*4882a593Smuzhiyun 	0x000091e8, 0x00000000, 0xffffffff,
480*4882a593Smuzhiyun 	0x000091ec, 0x00070000, 0xffffffff,
481*4882a593Smuzhiyun 	0x000091f0, 0x00030002, 0xffffffff,
482*4882a593Smuzhiyun 	0x000091f4, 0x00050004, 0xffffffff,
483*4882a593Smuzhiyun 	0x00009200, 0x00010006, 0xffffffff,
484*4882a593Smuzhiyun 	0x00009204, 0x00090008, 0xffffffff,
485*4882a593Smuzhiyun 	0x00009208, 0x00070000, 0xffffffff,
486*4882a593Smuzhiyun 	0x0000920c, 0x00030002, 0xffffffff,
487*4882a593Smuzhiyun 	0x00009210, 0x00050004, 0xffffffff,
488*4882a593Smuzhiyun 	0x0000921c, 0x00010006, 0xffffffff,
489*4882a593Smuzhiyun 	0x00009220, 0x00090008, 0xffffffff,
490*4882a593Smuzhiyun 	0x00009224, 0x00070000, 0xffffffff,
491*4882a593Smuzhiyun 	0x00009228, 0x00030002, 0xffffffff,
492*4882a593Smuzhiyun 	0x0000922c, 0x00050004, 0xffffffff,
493*4882a593Smuzhiyun 	0x00009238, 0x00010006, 0xffffffff,
494*4882a593Smuzhiyun 	0x0000923c, 0x00090008, 0xffffffff,
495*4882a593Smuzhiyun 	0x00009240, 0x00070000, 0xffffffff,
496*4882a593Smuzhiyun 	0x00009244, 0x00030002, 0xffffffff,
497*4882a593Smuzhiyun 	0x00009248, 0x00050004, 0xffffffff,
498*4882a593Smuzhiyun 	0x00009254, 0x00010006, 0xffffffff,
499*4882a593Smuzhiyun 	0x00009258, 0x00090008, 0xffffffff,
500*4882a593Smuzhiyun 	0x0000925c, 0x00070000, 0xffffffff,
501*4882a593Smuzhiyun 	0x00009260, 0x00030002, 0xffffffff,
502*4882a593Smuzhiyun 	0x00009264, 0x00050004, 0xffffffff,
503*4882a593Smuzhiyun 	0x00009270, 0x00010006, 0xffffffff,
504*4882a593Smuzhiyun 	0x00009274, 0x00090008, 0xffffffff,
505*4882a593Smuzhiyun 	0x00009278, 0x00070000, 0xffffffff,
506*4882a593Smuzhiyun 	0x0000927c, 0x00030002, 0xffffffff,
507*4882a593Smuzhiyun 	0x00009280, 0x00050004, 0xffffffff,
508*4882a593Smuzhiyun 	0x0000928c, 0x00010006, 0xffffffff,
509*4882a593Smuzhiyun 	0x00009290, 0x00090008, 0xffffffff,
510*4882a593Smuzhiyun 	0x000092a8, 0x00070000, 0xffffffff,
511*4882a593Smuzhiyun 	0x000092ac, 0x00030002, 0xffffffff,
512*4882a593Smuzhiyun 	0x000092b0, 0x00050004, 0xffffffff,
513*4882a593Smuzhiyun 	0x000092bc, 0x00010006, 0xffffffff,
514*4882a593Smuzhiyun 	0x000092c0, 0x00090008, 0xffffffff,
515*4882a593Smuzhiyun 	0x000092c4, 0x00070000, 0xffffffff,
516*4882a593Smuzhiyun 	0x000092c8, 0x00030002, 0xffffffff,
517*4882a593Smuzhiyun 	0x000092cc, 0x00050004, 0xffffffff,
518*4882a593Smuzhiyun 	0x000092d8, 0x00010006, 0xffffffff,
519*4882a593Smuzhiyun 	0x000092dc, 0x00090008, 0xffffffff,
520*4882a593Smuzhiyun 	0x00009294, 0x00000000, 0xffffffff,
521*4882a593Smuzhiyun 	0x0000802c, 0x40010000, 0xffffffff,
522*4882a593Smuzhiyun 	0x00003fc4, 0x40010000, 0xffffffff,
523*4882a593Smuzhiyun 	0x0000915c, 0x00010000, 0xffffffff,
524*4882a593Smuzhiyun 	0x00009160, 0x00030002, 0xffffffff,
525*4882a593Smuzhiyun 	0x00009164, 0x00050004, 0xffffffff,
526*4882a593Smuzhiyun 	0x00009168, 0x00070006, 0xffffffff,
527*4882a593Smuzhiyun 	0x00009178, 0x00070000, 0xffffffff,
528*4882a593Smuzhiyun 	0x0000917c, 0x00030002, 0xffffffff,
529*4882a593Smuzhiyun 	0x00009180, 0x00050004, 0xffffffff,
530*4882a593Smuzhiyun 	0x0000918c, 0x00010006, 0xffffffff,
531*4882a593Smuzhiyun 	0x00009190, 0x00090008, 0xffffffff,
532*4882a593Smuzhiyun 	0x00009194, 0x00070000, 0xffffffff,
533*4882a593Smuzhiyun 	0x00009198, 0x00030002, 0xffffffff,
534*4882a593Smuzhiyun 	0x0000919c, 0x00050004, 0xffffffff,
535*4882a593Smuzhiyun 	0x000091a8, 0x00010006, 0xffffffff,
536*4882a593Smuzhiyun 	0x000091ac, 0x00090008, 0xffffffff,
537*4882a593Smuzhiyun 	0x000091b0, 0x00070000, 0xffffffff,
538*4882a593Smuzhiyun 	0x000091b4, 0x00030002, 0xffffffff,
539*4882a593Smuzhiyun 	0x000091b8, 0x00050004, 0xffffffff,
540*4882a593Smuzhiyun 	0x000091c4, 0x00010006, 0xffffffff,
541*4882a593Smuzhiyun 	0x000091c8, 0x00090008, 0xffffffff,
542*4882a593Smuzhiyun 	0x000091cc, 0x00070000, 0xffffffff,
543*4882a593Smuzhiyun 	0x000091d0, 0x00030002, 0xffffffff,
544*4882a593Smuzhiyun 	0x000091d4, 0x00050004, 0xffffffff,
545*4882a593Smuzhiyun 	0x000091e0, 0x00010006, 0xffffffff,
546*4882a593Smuzhiyun 	0x000091e4, 0x00090008, 0xffffffff,
547*4882a593Smuzhiyun 	0x000091e8, 0x00000000, 0xffffffff,
548*4882a593Smuzhiyun 	0x000091ec, 0x00070000, 0xffffffff,
549*4882a593Smuzhiyun 	0x000091f0, 0x00030002, 0xffffffff,
550*4882a593Smuzhiyun 	0x000091f4, 0x00050004, 0xffffffff,
551*4882a593Smuzhiyun 	0x00009200, 0x00010006, 0xffffffff,
552*4882a593Smuzhiyun 	0x00009204, 0x00090008, 0xffffffff,
553*4882a593Smuzhiyun 	0x00009208, 0x00070000, 0xffffffff,
554*4882a593Smuzhiyun 	0x0000920c, 0x00030002, 0xffffffff,
555*4882a593Smuzhiyun 	0x00009210, 0x00050004, 0xffffffff,
556*4882a593Smuzhiyun 	0x0000921c, 0x00010006, 0xffffffff,
557*4882a593Smuzhiyun 	0x00009220, 0x00090008, 0xffffffff,
558*4882a593Smuzhiyun 	0x00009224, 0x00070000, 0xffffffff,
559*4882a593Smuzhiyun 	0x00009228, 0x00030002, 0xffffffff,
560*4882a593Smuzhiyun 	0x0000922c, 0x00050004, 0xffffffff,
561*4882a593Smuzhiyun 	0x00009238, 0x00010006, 0xffffffff,
562*4882a593Smuzhiyun 	0x0000923c, 0x00090008, 0xffffffff,
563*4882a593Smuzhiyun 	0x00009240, 0x00070000, 0xffffffff,
564*4882a593Smuzhiyun 	0x00009244, 0x00030002, 0xffffffff,
565*4882a593Smuzhiyun 	0x00009248, 0x00050004, 0xffffffff,
566*4882a593Smuzhiyun 	0x00009254, 0x00010006, 0xffffffff,
567*4882a593Smuzhiyun 	0x00009258, 0x00090008, 0xffffffff,
568*4882a593Smuzhiyun 	0x0000925c, 0x00070000, 0xffffffff,
569*4882a593Smuzhiyun 	0x00009260, 0x00030002, 0xffffffff,
570*4882a593Smuzhiyun 	0x00009264, 0x00050004, 0xffffffff,
571*4882a593Smuzhiyun 	0x00009270, 0x00010006, 0xffffffff,
572*4882a593Smuzhiyun 	0x00009274, 0x00090008, 0xffffffff,
573*4882a593Smuzhiyun 	0x00009278, 0x00070000, 0xffffffff,
574*4882a593Smuzhiyun 	0x0000927c, 0x00030002, 0xffffffff,
575*4882a593Smuzhiyun 	0x00009280, 0x00050004, 0xffffffff,
576*4882a593Smuzhiyun 	0x0000928c, 0x00010006, 0xffffffff,
577*4882a593Smuzhiyun 	0x00009290, 0x00090008, 0xffffffff,
578*4882a593Smuzhiyun 	0x000092a8, 0x00070000, 0xffffffff,
579*4882a593Smuzhiyun 	0x000092ac, 0x00030002, 0xffffffff,
580*4882a593Smuzhiyun 	0x000092b0, 0x00050004, 0xffffffff,
581*4882a593Smuzhiyun 	0x000092bc, 0x00010006, 0xffffffff,
582*4882a593Smuzhiyun 	0x000092c0, 0x00090008, 0xffffffff,
583*4882a593Smuzhiyun 	0x000092c4, 0x00070000, 0xffffffff,
584*4882a593Smuzhiyun 	0x000092c8, 0x00030002, 0xffffffff,
585*4882a593Smuzhiyun 	0x000092cc, 0x00050004, 0xffffffff,
586*4882a593Smuzhiyun 	0x000092d8, 0x00010006, 0xffffffff,
587*4882a593Smuzhiyun 	0x000092dc, 0x00090008, 0xffffffff,
588*4882a593Smuzhiyun 	0x00009294, 0x00000000, 0xffffffff,
589*4882a593Smuzhiyun 	0x0000802c, 0xc0000000, 0xffffffff,
590*4882a593Smuzhiyun 	0x00003fc4, 0xc0000000, 0xffffffff,
591*4882a593Smuzhiyun 	0x000008f8, 0x00000010, 0xffffffff,
592*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
593*4882a593Smuzhiyun 	0x000008f8, 0x00000011, 0xffffffff,
594*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
595*4882a593Smuzhiyun 	0x000008f8, 0x00000012, 0xffffffff,
596*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
597*4882a593Smuzhiyun 	0x000008f8, 0x00000013, 0xffffffff,
598*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
599*4882a593Smuzhiyun 	0x000008f8, 0x00000014, 0xffffffff,
600*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
601*4882a593Smuzhiyun 	0x000008f8, 0x00000015, 0xffffffff,
602*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
603*4882a593Smuzhiyun 	0x000008f8, 0x00000016, 0xffffffff,
604*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
605*4882a593Smuzhiyun 	0x000008f8, 0x00000017, 0xffffffff,
606*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
607*4882a593Smuzhiyun 	0x000008f8, 0x00000018, 0xffffffff,
608*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
609*4882a593Smuzhiyun 	0x000008f8, 0x00000019, 0xffffffff,
610*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
611*4882a593Smuzhiyun 	0x000008f8, 0x0000001a, 0xffffffff,
612*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
613*4882a593Smuzhiyun 	0x000008f8, 0x0000001b, 0xffffffff,
614*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff
615*4882a593Smuzhiyun };
616*4882a593Smuzhiyun #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
617*4882a593Smuzhiyun 
618*4882a593Smuzhiyun static const u32 cayman_mgcg_disable[] =
619*4882a593Smuzhiyun {
620*4882a593Smuzhiyun 	0x0000802c, 0xc0000000, 0xffffffff,
621*4882a593Smuzhiyun 	0x000008f8, 0x00000000, 0xffffffff,
622*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
623*4882a593Smuzhiyun 	0x000008f8, 0x00000001, 0xffffffff,
624*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
625*4882a593Smuzhiyun 	0x000008f8, 0x00000002, 0xffffffff,
626*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
627*4882a593Smuzhiyun 	0x000008f8, 0x00000003, 0xffffffff,
628*4882a593Smuzhiyun 	0x000008fc, 0xffffffff, 0xffffffff,
629*4882a593Smuzhiyun 	0x00009150, 0x00600000, 0xffffffff
630*4882a593Smuzhiyun };
631*4882a593Smuzhiyun #define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
632*4882a593Smuzhiyun 
633*4882a593Smuzhiyun static const u32 cayman_mgcg_enable[] =
634*4882a593Smuzhiyun {
635*4882a593Smuzhiyun 	0x0000802c, 0xc0000000, 0xffffffff,
636*4882a593Smuzhiyun 	0x000008f8, 0x00000000, 0xffffffff,
637*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
638*4882a593Smuzhiyun 	0x000008f8, 0x00000001, 0xffffffff,
639*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
640*4882a593Smuzhiyun 	0x000008f8, 0x00000002, 0xffffffff,
641*4882a593Smuzhiyun 	0x000008fc, 0x00600000, 0xffffffff,
642*4882a593Smuzhiyun 	0x000008f8, 0x00000003, 0xffffffff,
643*4882a593Smuzhiyun 	0x000008fc, 0x00000000, 0xffffffff,
644*4882a593Smuzhiyun 	0x00009150, 0x96944200, 0xffffffff
645*4882a593Smuzhiyun };
646*4882a593Smuzhiyun 
647*4882a593Smuzhiyun #define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
648*4882a593Smuzhiyun 
649*4882a593Smuzhiyun #define NISLANDS_SYSLS_SEQUENCE  100
650*4882a593Smuzhiyun 
651*4882a593Smuzhiyun static const u32 cayman_sysls_default[] =
652*4882a593Smuzhiyun {
653*4882a593Smuzhiyun 	/* Register,   Value,     Mask bits */
654*4882a593Smuzhiyun 	0x000055e8, 0x00000000, 0xffffffff,
655*4882a593Smuzhiyun 	0x0000d0bc, 0x00000000, 0xffffffff,
656*4882a593Smuzhiyun 	0x0000d8bc, 0x00000000, 0xffffffff,
657*4882a593Smuzhiyun 	0x000015c0, 0x000c1401, 0xffffffff,
658*4882a593Smuzhiyun 	0x0000264c, 0x000c0400, 0xffffffff,
659*4882a593Smuzhiyun 	0x00002648, 0x000c0400, 0xffffffff,
660*4882a593Smuzhiyun 	0x00002650, 0x000c0400, 0xffffffff,
661*4882a593Smuzhiyun 	0x000020b8, 0x000c0400, 0xffffffff,
662*4882a593Smuzhiyun 	0x000020bc, 0x000c0400, 0xffffffff,
663*4882a593Smuzhiyun 	0x000020c0, 0x000c0c80, 0xffffffff,
664*4882a593Smuzhiyun 	0x0000f4a0, 0x000000c0, 0xffffffff,
665*4882a593Smuzhiyun 	0x0000f4a4, 0x00680fff, 0xffffffff,
666*4882a593Smuzhiyun 	0x00002f50, 0x00000404, 0xffffffff,
667*4882a593Smuzhiyun 	0x000004c8, 0x00000001, 0xffffffff,
668*4882a593Smuzhiyun 	0x000064ec, 0x00000000, 0xffffffff,
669*4882a593Smuzhiyun 	0x00000c7c, 0x00000000, 0xffffffff,
670*4882a593Smuzhiyun 	0x00008dfc, 0x00000000, 0xffffffff
671*4882a593Smuzhiyun };
672*4882a593Smuzhiyun #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
673*4882a593Smuzhiyun 
674*4882a593Smuzhiyun static const u32 cayman_sysls_disable[] =
675*4882a593Smuzhiyun {
676*4882a593Smuzhiyun 	/* Register,   Value,     Mask bits */
677*4882a593Smuzhiyun 	0x0000d0c0, 0x00000000, 0xffffffff,
678*4882a593Smuzhiyun 	0x0000d8c0, 0x00000000, 0xffffffff,
679*4882a593Smuzhiyun 	0x000055e8, 0x00000000, 0xffffffff,
680*4882a593Smuzhiyun 	0x0000d0bc, 0x00000000, 0xffffffff,
681*4882a593Smuzhiyun 	0x0000d8bc, 0x00000000, 0xffffffff,
682*4882a593Smuzhiyun 	0x000015c0, 0x00041401, 0xffffffff,
683*4882a593Smuzhiyun 	0x0000264c, 0x00040400, 0xffffffff,
684*4882a593Smuzhiyun 	0x00002648, 0x00040400, 0xffffffff,
685*4882a593Smuzhiyun 	0x00002650, 0x00040400, 0xffffffff,
686*4882a593Smuzhiyun 	0x000020b8, 0x00040400, 0xffffffff,
687*4882a593Smuzhiyun 	0x000020bc, 0x00040400, 0xffffffff,
688*4882a593Smuzhiyun 	0x000020c0, 0x00040c80, 0xffffffff,
689*4882a593Smuzhiyun 	0x0000f4a0, 0x000000c0, 0xffffffff,
690*4882a593Smuzhiyun 	0x0000f4a4, 0x00680000, 0xffffffff,
691*4882a593Smuzhiyun 	0x00002f50, 0x00000404, 0xffffffff,
692*4882a593Smuzhiyun 	0x000004c8, 0x00000001, 0xffffffff,
693*4882a593Smuzhiyun 	0x000064ec, 0x00007ffd, 0xffffffff,
694*4882a593Smuzhiyun 	0x00000c7c, 0x0000ff00, 0xffffffff,
695*4882a593Smuzhiyun 	0x00008dfc, 0x0000007f, 0xffffffff
696*4882a593Smuzhiyun };
697*4882a593Smuzhiyun #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
698*4882a593Smuzhiyun 
699*4882a593Smuzhiyun static const u32 cayman_sysls_enable[] =
700*4882a593Smuzhiyun {
701*4882a593Smuzhiyun 	/* Register,   Value,     Mask bits */
702*4882a593Smuzhiyun 	0x000055e8, 0x00000001, 0xffffffff,
703*4882a593Smuzhiyun 	0x0000d0bc, 0x00000100, 0xffffffff,
704*4882a593Smuzhiyun 	0x0000d8bc, 0x00000100, 0xffffffff,
705*4882a593Smuzhiyun 	0x000015c0, 0x000c1401, 0xffffffff,
706*4882a593Smuzhiyun 	0x0000264c, 0x000c0400, 0xffffffff,
707*4882a593Smuzhiyun 	0x00002648, 0x000c0400, 0xffffffff,
708*4882a593Smuzhiyun 	0x00002650, 0x000c0400, 0xffffffff,
709*4882a593Smuzhiyun 	0x000020b8, 0x000c0400, 0xffffffff,
710*4882a593Smuzhiyun 	0x000020bc, 0x000c0400, 0xffffffff,
711*4882a593Smuzhiyun 	0x000020c0, 0x000c0c80, 0xffffffff,
712*4882a593Smuzhiyun 	0x0000f4a0, 0x000000c0, 0xffffffff,
713*4882a593Smuzhiyun 	0x0000f4a4, 0x00680fff, 0xffffffff,
714*4882a593Smuzhiyun 	0x00002f50, 0x00000903, 0xffffffff,
715*4882a593Smuzhiyun 	0x000004c8, 0x00000000, 0xffffffff,
716*4882a593Smuzhiyun 	0x000064ec, 0x00000000, 0xffffffff,
717*4882a593Smuzhiyun 	0x00000c7c, 0x00000000, 0xffffffff,
718*4882a593Smuzhiyun 	0x00008dfc, 0x00000000, 0xffffffff
719*4882a593Smuzhiyun };
720*4882a593Smuzhiyun #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
721*4882a593Smuzhiyun 
722*4882a593Smuzhiyun struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
723*4882a593Smuzhiyun struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
724*4882a593Smuzhiyun 
725*4882a593Smuzhiyun extern int ni_mc_load_microcode(struct radeon_device *rdev);
726*4882a593Smuzhiyun 
ni_get_pi(struct radeon_device * rdev)727*4882a593Smuzhiyun struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
728*4882a593Smuzhiyun {
729*4882a593Smuzhiyun 	struct ni_power_info *pi = rdev->pm.dpm.priv;
730*4882a593Smuzhiyun 
731*4882a593Smuzhiyun 	return pi;
732*4882a593Smuzhiyun }
733*4882a593Smuzhiyun 
ni_get_ps(struct radeon_ps * rps)734*4882a593Smuzhiyun struct ni_ps *ni_get_ps(struct radeon_ps *rps)
735*4882a593Smuzhiyun {
736*4882a593Smuzhiyun 	struct ni_ps *ps = rps->ps_priv;
737*4882a593Smuzhiyun 
738*4882a593Smuzhiyun 	return ps;
739*4882a593Smuzhiyun }
740*4882a593Smuzhiyun 
/*
 * Evaluate the leakage-power formula in drm 32.32 fixed point:
 *
 *   leakage = i_leakage * kt * kv * vddc
 *     kt = (at/1000) * exp((bt/1000) * t)
 *     kv = (av/1000) * exp((bv/1000) * v)
 *
 * @coeff:    per-chip leakage coefficients (at/bt/av/bv), scaled by 1000
 * @v:        voltage (presumably mV — divided by 1000 below; TODO confirm)
 * @t:        temperature, same x1000 scaling as @v
 * @ileakage: leakage current, same x1000 scaling
 * @leakage:  out — computed leakage, scaled back up by 1000 on conversion
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	/* convert the milli-unit integer inputs into fixed point */
	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	/* temperature- and voltage-dependent exponential factors */
	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	/* multiply by 1000 before truncation to preserve the milli scaling */
	*leakage = drm_fixp2int(leakage_w * 1000);
}
761*4882a593Smuzhiyun 
/*
 * Thin wrapper around ni_calculate_leakage_for_v_and_t_formula().
 * @rdev is accepted for signature symmetry with other dpm helpers but is
 * not used by the calculation.
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
771*4882a593Smuzhiyun 
ni_dpm_vblank_too_short(struct radeon_device * rdev)772*4882a593Smuzhiyun bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
773*4882a593Smuzhiyun {
774*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
775*4882a593Smuzhiyun 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
776*4882a593Smuzhiyun 	/* we never hit the non-gddr5 limit so disable it */
777*4882a593Smuzhiyun 	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
778*4882a593Smuzhiyun 
779*4882a593Smuzhiyun 	if (vblank_time < switch_limit)
780*4882a593Smuzhiyun 		return true;
781*4882a593Smuzhiyun 	else
782*4882a593Smuzhiyun 		return false;
783*4882a593Smuzhiyun 
784*4882a593Smuzhiyun }
785*4882a593Smuzhiyun 
/*
 * Clamp and massage a requested power state so every performance level
 * satisfies the active clock/voltage limits, dependency tables and
 * mclk-switching constraints before the state is programmed.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk;
	u16 vddci;
	int i;

	/* forbid mclk switching when multiple CRTCs are active or the
	 * vblank window is too short to hide the switch */
	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
	    ni_dpm_vblank_too_short(rdev))
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	/* select the AC or DC (battery) limit set */
	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on DC power, hard-clamp every level to the DC limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	/* adjust low state: with mclk switching disabled, the lowest level
	 * must already run at the top level's mclk/vddci */
	if (disable_mclk_switching) {
		ps->performance_levels[0].mclk =
			ps->performance_levels[ps->performance_level_count - 1].mclk;
		ps->performance_levels[0].vddci =
			ps->performance_levels[ps->performance_level_count - 1].vddci;
	}

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* keep sclk and vddc monotonically non-decreasing across levels */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	/* adjust remaining states */
	if (disable_mclk_switching) {
		/* force a single mclk/vddci — the maximum across all levels —
		 * onto every level so no switch is ever needed */
		mclk = ps->performance_levels[0].mclk;
		vddci = ps->performance_levels[0].vddci;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
			if (vddci < ps->performance_levels[i].vddci)
				vddci = ps->performance_levels[i].vddci;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* otherwise just keep mclk/vddci non-decreasing as well */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	/* re-check the remaining levels against the clock blacklist */
	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* raise voltages to satisfy the clock/voltage dependency tables */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
	}

	/* enforce the allowed vddc/vddci delta on each level */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	/* the state is DC-compatible only if no level exceeds the DC vddc
	 * limit; also drop PCIe gen2 on levels whose vddc is below the
	 * minimum required for it */
	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
904*4882a593Smuzhiyun 
ni_cg_clockgating_default(struct radeon_device * rdev)905*4882a593Smuzhiyun static void ni_cg_clockgating_default(struct radeon_device *rdev)
906*4882a593Smuzhiyun {
907*4882a593Smuzhiyun 	u32 count;
908*4882a593Smuzhiyun 	const u32 *ps = NULL;
909*4882a593Smuzhiyun 
910*4882a593Smuzhiyun 	ps = (const u32 *)&cayman_cgcg_cgls_default;
911*4882a593Smuzhiyun 	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
912*4882a593Smuzhiyun 
913*4882a593Smuzhiyun 	btc_program_mgcg_hw_sequence(rdev, ps, count);
914*4882a593Smuzhiyun }
915*4882a593Smuzhiyun 
/* Enable or disable gfx coarse-grain clockgating (CGCG/CGLS) by programming
 * the matching register sequence. */
static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
				      bool enable)
{
	const u32 *seq = enable ? (const u32 *)&cayman_cgcg_cgls_enable
				: (const u32 *)&cayman_cgcg_cgls_disable;
	u32 len = enable ? CAYMAN_CGCG_CGLS_ENABLE_LENGTH
			 : CAYMAN_CGCG_CGLS_DISABLE_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, seq, len);
}
932*4882a593Smuzhiyun 
ni_mg_clockgating_default(struct radeon_device * rdev)933*4882a593Smuzhiyun static void ni_mg_clockgating_default(struct radeon_device *rdev)
934*4882a593Smuzhiyun {
935*4882a593Smuzhiyun 	u32 count;
936*4882a593Smuzhiyun 	const u32 *ps = NULL;
937*4882a593Smuzhiyun 
938*4882a593Smuzhiyun 	ps = (const u32 *)&cayman_mgcg_default;
939*4882a593Smuzhiyun 	count = CAYMAN_MGCG_DEFAULT_LENGTH;
940*4882a593Smuzhiyun 
941*4882a593Smuzhiyun 	btc_program_mgcg_hw_sequence(rdev, ps, count);
942*4882a593Smuzhiyun }
943*4882a593Smuzhiyun 
/* Enable or disable medium-grain clockgating (MGCG) by programming the
 * matching register sequence. */
static void ni_mg_clockgating_enable(struct radeon_device *rdev,
				     bool enable)
{
	const u32 *seq = enable ? (const u32 *)&cayman_mgcg_enable
				: (const u32 *)&cayman_mgcg_disable;
	u32 len = enable ? CAYMAN_MGCG_ENABLE_LENGTH
			 : CAYMAN_MGCG_DISABLE_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, seq, len);
}
960*4882a593Smuzhiyun 
ni_ls_clockgating_default(struct radeon_device * rdev)961*4882a593Smuzhiyun static void ni_ls_clockgating_default(struct radeon_device *rdev)
962*4882a593Smuzhiyun {
963*4882a593Smuzhiyun 	u32 count;
964*4882a593Smuzhiyun 	const u32 *ps = NULL;
965*4882a593Smuzhiyun 
966*4882a593Smuzhiyun 	ps = (const u32 *)&cayman_sysls_default;
967*4882a593Smuzhiyun 	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
968*4882a593Smuzhiyun 
969*4882a593Smuzhiyun 	btc_program_mgcg_hw_sequence(rdev, ps, count);
970*4882a593Smuzhiyun }
971*4882a593Smuzhiyun 
/* Enable or disable system light-sleep (LS) by programming the matching
 * register sequence. */
static void ni_ls_clockgating_enable(struct radeon_device *rdev,
				     bool enable)
{
	const u32 *seq = enable ? (const u32 *)&cayman_sysls_enable
				: (const u32 *)&cayman_sysls_disable;
	u32 len = enable ? CAYMAN_SYSLS_ENABLE_LENGTH
			 : CAYMAN_SYSLS_DISABLE_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, seq, len);
}
989*4882a593Smuzhiyun 
ni_patch_single_dependency_table_based_on_leakage(struct radeon_device * rdev,struct radeon_clock_voltage_dependency_table * table)990*4882a593Smuzhiyun static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
991*4882a593Smuzhiyun 							     struct radeon_clock_voltage_dependency_table *table)
992*4882a593Smuzhiyun {
993*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
994*4882a593Smuzhiyun 	u32 i;
995*4882a593Smuzhiyun 
996*4882a593Smuzhiyun 	if (table) {
997*4882a593Smuzhiyun 		for (i = 0; i < table->count; i++) {
998*4882a593Smuzhiyun 			if (0xff01 == table->entries[i].v) {
999*4882a593Smuzhiyun 				if (pi->max_vddc == 0)
1000*4882a593Smuzhiyun 					return -EINVAL;
1001*4882a593Smuzhiyun 				table->entries[i].v = pi->max_vddc;
1002*4882a593Smuzhiyun 			}
1003*4882a593Smuzhiyun 		}
1004*4882a593Smuzhiyun 	}
1005*4882a593Smuzhiyun 	return 0;
1006*4882a593Smuzhiyun }
1007*4882a593Smuzhiyun 
ni_patch_dependency_tables_based_on_leakage(struct radeon_device * rdev)1008*4882a593Smuzhiyun static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1009*4882a593Smuzhiyun {
1010*4882a593Smuzhiyun 	int ret = 0;
1011*4882a593Smuzhiyun 
1012*4882a593Smuzhiyun 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1013*4882a593Smuzhiyun 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1014*4882a593Smuzhiyun 
1015*4882a593Smuzhiyun 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1016*4882a593Smuzhiyun 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1017*4882a593Smuzhiyun 	return ret;
1018*4882a593Smuzhiyun }
1019*4882a593Smuzhiyun 
/* Stop dynamic power management by clearing GLOBAL_PWRMGT_EN in
 * GENERAL_PWRMGT (read-modify-write, all other bits preserved). */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
1024*4882a593Smuzhiyun 
#if 0
/* Currently unused (compiled out): tell the SMC we are running on AC power.
 * Only the AC case sends a message; DC is a no-op success. */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
#endif
1036*4882a593Smuzhiyun 
/* Send an SMC message that carries a parameter: the parameter is placed in
 * SMC_SCRATCH0 first, then the message itself is issued. */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
1043*4882a593Smuzhiyun 
ni_restrict_performance_levels_before_switch(struct radeon_device * rdev)1044*4882a593Smuzhiyun static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1045*4882a593Smuzhiyun {
1046*4882a593Smuzhiyun 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1047*4882a593Smuzhiyun 		return -EINVAL;
1048*4882a593Smuzhiyun 
1049*4882a593Smuzhiyun 	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1050*4882a593Smuzhiyun 		0 : -EINVAL;
1051*4882a593Smuzhiyun }
1052*4882a593Smuzhiyun 
ni_dpm_force_performance_level(struct radeon_device * rdev,enum radeon_dpm_forced_level level)1053*4882a593Smuzhiyun int ni_dpm_force_performance_level(struct radeon_device *rdev,
1054*4882a593Smuzhiyun 				   enum radeon_dpm_forced_level level)
1055*4882a593Smuzhiyun {
1056*4882a593Smuzhiyun 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1057*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1058*4882a593Smuzhiyun 			return -EINVAL;
1059*4882a593Smuzhiyun 
1060*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1061*4882a593Smuzhiyun 			return -EINVAL;
1062*4882a593Smuzhiyun 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1063*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1064*4882a593Smuzhiyun 			return -EINVAL;
1065*4882a593Smuzhiyun 
1066*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1067*4882a593Smuzhiyun 			return -EINVAL;
1068*4882a593Smuzhiyun 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1069*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1070*4882a593Smuzhiyun 			return -EINVAL;
1071*4882a593Smuzhiyun 
1072*4882a593Smuzhiyun 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1073*4882a593Smuzhiyun 			return -EINVAL;
1074*4882a593Smuzhiyun 	}
1075*4882a593Smuzhiyun 
1076*4882a593Smuzhiyun 	rdev->pm.dpm.forced_level = level;
1077*4882a593Smuzhiyun 
1078*4882a593Smuzhiyun 	return 0;
1079*4882a593Smuzhiyun }
1080*4882a593Smuzhiyun 
/*
 * Halt the SMC: poll (up to rdev->usec_timeout iterations, 1us apart)
 * until the LB_SYNC_RESET_SEL field leaves value 1, settle for 100us,
 * then stop the SMC via the r7xx helper.
 */
static void ni_stop_smc(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	/* extra settling time before actually stopping the SMC */
	udelay(100);

	r7xx_stop_smc(rdev);
}
1097*4882a593Smuzhiyun 
ni_process_firmware_header(struct radeon_device * rdev)1098*4882a593Smuzhiyun static int ni_process_firmware_header(struct radeon_device *rdev)
1099*4882a593Smuzhiyun {
1100*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1101*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1102*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1103*4882a593Smuzhiyun 	u32 tmp;
1104*4882a593Smuzhiyun 	int ret;
1105*4882a593Smuzhiyun 
1106*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1107*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1108*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1109*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1110*4882a593Smuzhiyun 
1111*4882a593Smuzhiyun 	if (ret)
1112*4882a593Smuzhiyun 		return ret;
1113*4882a593Smuzhiyun 
1114*4882a593Smuzhiyun 	pi->state_table_start = (u16)tmp;
1115*4882a593Smuzhiyun 
1116*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1117*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1118*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1119*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1120*4882a593Smuzhiyun 
1121*4882a593Smuzhiyun 	if (ret)
1122*4882a593Smuzhiyun 		return ret;
1123*4882a593Smuzhiyun 
1124*4882a593Smuzhiyun 	pi->soft_regs_start = (u16)tmp;
1125*4882a593Smuzhiyun 
1126*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1127*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1128*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1129*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1130*4882a593Smuzhiyun 
1131*4882a593Smuzhiyun 	if (ret)
1132*4882a593Smuzhiyun 		return ret;
1133*4882a593Smuzhiyun 
1134*4882a593Smuzhiyun 	eg_pi->mc_reg_table_start = (u16)tmp;
1135*4882a593Smuzhiyun 
1136*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1137*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1138*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1139*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1140*4882a593Smuzhiyun 
1141*4882a593Smuzhiyun 	if (ret)
1142*4882a593Smuzhiyun 		return ret;
1143*4882a593Smuzhiyun 
1144*4882a593Smuzhiyun 	ni_pi->fan_table_start = (u16)tmp;
1145*4882a593Smuzhiyun 
1146*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1147*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1148*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1149*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1150*4882a593Smuzhiyun 
1151*4882a593Smuzhiyun 	if (ret)
1152*4882a593Smuzhiyun 		return ret;
1153*4882a593Smuzhiyun 
1154*4882a593Smuzhiyun 	ni_pi->arb_table_start = (u16)tmp;
1155*4882a593Smuzhiyun 
1156*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1157*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1158*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1159*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1160*4882a593Smuzhiyun 
1161*4882a593Smuzhiyun 	if (ret)
1162*4882a593Smuzhiyun 		return ret;
1163*4882a593Smuzhiyun 
1164*4882a593Smuzhiyun 	ni_pi->cac_table_start = (u16)tmp;
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev,
1167*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1168*4882a593Smuzhiyun 					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1169*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1170*4882a593Smuzhiyun 
1171*4882a593Smuzhiyun 	if (ret)
1172*4882a593Smuzhiyun 		return ret;
1173*4882a593Smuzhiyun 
1174*4882a593Smuzhiyun 	ni_pi->spll_table_start = (u16)tmp;
1175*4882a593Smuzhiyun 
1176*4882a593Smuzhiyun 
1177*4882a593Smuzhiyun 	return ret;
1178*4882a593Smuzhiyun }
1179*4882a593Smuzhiyun 
/*
 * Snapshot the current SPLL/MPLL/MCLK clock-control registers into
 * ni_pi->clock_registers so later state programming can start from the
 * hardware defaults.
 */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}
1199*4882a593Smuzhiyun 
#if 0
/* Currently unused (compiled out): ask the SMC to drop into its
 * minimum-power (ULP) state. */
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		/* briefly force the gfx clock on, then release it; the
		 * register readback flushes the posted writes */
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	/* give the SMC time to act on the message */
	udelay(25000);

	return 0;
}
#endif
1220*4882a593Smuzhiyun 
/*
 * Program the SMC soft-register delay/timeout values that pace voltage
 * changes, backbias changes, ACPI transitions and mclk switches.  The
 * time-based values are converted into reference-clock (xclk) ticks
 * before being written.
 */
static void ni_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
	u32 reference_clock;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	/* fall back to defaults when the vbios did not provide values */
	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	/* convert to reference-clock ticks (divisor per SMC convention) */
	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
	bb_dly   = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly  = (vbi_time_out * reference_clock) / 1600;

	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
}
1257*4882a593Smuzhiyun 
/*
 * ni_populate_smc_voltage_table - copy one ATOM voltage table's SMIO
 * patterns into the SMC state table.
 *
 * The high SMIO word is unused on NI and cleared; the low word accumulates
 * (ORs in) each entry's smio_low pattern in big-endian form so both the
 * VDDC and VDDCI tables can share the same slots.
 */
static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
					  struct atom_voltage_table *voltage_table,
					  NISLANDS_SMC_STATETABLE *table)
{
	unsigned int idx;

	for (idx = 0; idx < voltage_table->count; idx++) {
		table->highSMIO[idx] = 0;
		table->lowSMIO[idx] |= cpu_to_be32(voltage_table->entries[idx].smio_low);
	}
}
1269*4882a593Smuzhiyun 
/*
 * ni_populate_smc_voltage_tables - fill in the VDDC and VDDCI voltage
 * tables and their SMIO masks in the SMC state table.
 *
 * Also records the index of the first VDDC entry at or above the
 * power-play table's maximum VDDC, which the SMC uses as an upper bound.
 */
static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
					   NISLANDS_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	unsigned char i;

	if (eg_pi->vddc_voltage_table.count) {
		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);

		/* Entries are ordered by increasing voltage; pick the first
		 * one that satisfies max_vddc_in_table.
		 */
		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
				table->maxVDDCIndexInPPTable = i;
				break;
			}
		}
	}

	if (eg_pi->vddci_voltage_table.count) {
		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);

		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
	}
}
1299*4882a593Smuzhiyun 
/*
 * ni_populate_voltage_value - translate a requested voltage into an SMC
 * voltage table entry.
 *
 * Entries are ordered by increasing voltage, so the first entry whose value
 * is >= the request is the best fit; its index and (big-endian) value are
 * written into @voltage.
 *
 * Returns 0 on success, -EINVAL if the request exceeds every table entry.
 */
static int ni_populate_voltage_value(struct radeon_device *rdev,
				     struct atom_voltage_table *table,
				     u16 value,
				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	unsigned int idx;

	for (idx = 0; idx < table->count; idx++) {
		if (value > table->entries[idx].value)
			continue;

		voltage->index = (u8)idx;
		voltage->value = cpu_to_be16(table->entries[idx].value);
		return 0;
	}

	return -EINVAL;
}
1320*4882a593Smuzhiyun 
/*
 * ni_populate_mvdd_value - pick the MVDD level for a given memory clock.
 *
 * Without MVDD control the high level is the only valid choice; otherwise
 * clocks at or below the split frequency use the low level, and faster
 * clocks use the high level.
 */
static void ni_populate_mvdd_value(struct radeon_device *rdev,
				   u32 mclk,
				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	bool use_low_mvdd;

	use_low_mvdd = pi->mvdd_control && (mclk <= pi->mvdd_split_frequency);

	if (use_low_mvdd) {
		voltage->index = eg_pi->mvdd_low_index;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = eg_pi->mvdd_high_index;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}
}
1342*4882a593Smuzhiyun 
/*
 * ni_get_std_voltage_value - look up the standardized VDDC for an SMC
 * voltage entry.
 *
 * Prefers the leakage-corrected value from the CAC leakage table when one
 * exists for the entry's index; otherwise falls back to the raw voltage
 * stored (big-endian) in the entry itself.
 *
 * Always returns 0.
 */
static int ni_get_std_voltage_value(struct radeon_device *rdev,
				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
				    u16 *std_voltage)
{
	/* No usable leakage table entry -> use the raw SMC value. */
	if (!rdev->pm.dpm.dyn_state.cac_leakage_table.entries ||
	    ((u32)voltage->index >= rdev->pm.dpm.dyn_state.cac_leakage_table.count)) {
		*std_voltage = be16_to_cpu(voltage->value);
		return 0;
	}

	*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
	return 0;
}
1355*4882a593Smuzhiyun 
/*
 * ni_populate_std_voltage_value - store an index/voltage pair in an SMC
 * voltage entry, converting the value to the SMC's big-endian layout.
 */
static void ni_populate_std_voltage_value(struct radeon_device *rdev,
					  u16 value, u8 index,
					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	voltage->index = index;
	voltage->value = cpu_to_be16(value);
}
1363*4882a593Smuzhiyun 
/*
 * ni_get_smc_power_scaling_factor - derive the SMC power scaling factor
 * from the CAC TID count and the reference clock period.
 *
 * NOTE(review): the two-step division is deliberate — (1e9 / xclk) first,
 * then / 10000 — collapsing it into one divide would change the integer
 * truncation and thus the result. Units of xclk presumably 10 kHz (radeon
 * reference clock convention) — TODO confirm.
 */
static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
{
	u32 xclk_period;
	u32 xclk = radeon_get_xclk(rdev);
	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;

	xclk_period = (1000000000UL / xclk);
	xclk_period /= 10000UL;

	return tmp * xclk_period;
}
1375*4882a593Smuzhiyun 
/*
 * ni_scale_power_for_smc - convert a power value in watts to the SMC's
 * internal fixed-point representation (scaled, then multiplied by 4 via
 * the left shift).
 */
static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
{
	return (power_in_watts * scaling_factor) << 2;
}
1380*4882a593Smuzhiyun 
/*
 * ni_calculate_power_boost_limit - compute the DPM2 power boost limit for
 * a state.
 *
 * The boost limit scales the near-TDP limit by the squared ratio of the
 * second-highest level's standardized VDDC to the highest level's
 * (times 90/100):
 *
 *   boost = near_tdp * (std_vddc_med^2 * 90) / (std_vddc_high^2 * 100)
 *
 * Returns 0 (no boost) when power containment/boost is disabled, when the
 * state has fewer than three performance levels, when any voltage lookup
 * fails, or when the 64-bit result would overflow 32 bits.
 */
static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
					  struct radeon_ps *radeon_state,
					  u32 near_tdp_limit)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 power_boost_limit = 0;
	int ret;

	if (ni_pi->enable_power_containment &&
	    ni_pi->use_power_boost_limit) {
		NISLANDS_SMC_VOLTAGE_VALUE vddc;
		u16 std_vddc_med;
		u16 std_vddc_high;
		u64 tmp, n, d;

		/* Need at least three levels to have a "medium" level. */
		if (state->performance_level_count < 3)
			return 0;

		/* Standardized VDDC of the second-highest level. */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 2].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
		if (ret)
			return 0;

		/* Standardized VDDC of the highest level. */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 1].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
		if (ret)
			return 0;

		/* 64-bit math to avoid overflow in the voltage-squared terms. */
		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
		tmp = div64_u64(n, d);

		/* Result must fit in 32 bits. */
		if (tmp >> 32)
			return 0;
		power_boost_limit = (u32)tmp;
	}

	return power_boost_limit;
}
1432*4882a593Smuzhiyun 
/*
 * ni_calculate_adjusted_tdp_limits - apply a percentage overdrive
 * adjustment to the TDP and near-TDP limits.
 *
 * @adjust_polarity: true scales the limit up, false scales it down.
 * @tdp_adjustment:  adjustment in percent; must not exceed tdp_od_limit.
 *
 * The near-TDP limit is shifted by the same absolute delta as the TDP
 * limit. With u32 modular arithmetic, "near + (new - old)" is exact for
 * both polarities, so a single expression covers both branches.
 *
 * Returns 0 on success, -EINVAL if the adjustment exceeds the OD cap.
 */
static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
					    bool adjust_polarity,
					    u32 tdp_adjustment,
					    u32 *tdp_limit,
					    u32 *near_tdp_limit)
{
	u32 pct;

	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
		return -EINVAL;

	pct = adjust_polarity ? (100 + tdp_adjustment) : (100 - tdp_adjustment);
	*tdp_limit = (pct * rdev->pm.dpm.tdp_limit) / 100;
	*near_tdp_limit = rdev->pm.dpm.near_tdp_limit +
			  (*tdp_limit - rdev->pm.dpm.tdp_limit);

	return 0;
}
1452*4882a593Smuzhiyun 
/*
 * ni_populate_smc_tdp_limits - compute the DPM2 TDP/near-TDP/safe/boost
 * limits for a state and upload them to SMC SRAM.
 *
 * Only active when power containment is enabled. The four consecutive u32
 * fields starting at TDPLimit in PP_NIslands_DPM2Parameters are written in
 * one SMC copy (hence sizeof(u32) * 4).
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
				      struct radeon_ps *radeon_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	if (ni_pi->enable_power_containment) {
		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
		u32 tdp_limit;
		u32 near_tdp_limit;
		u32 power_boost_limit;
		int ret;

		/* A zero scaling factor would make every limit zero. */
		if (scaling_factor == 0)
			return -EINVAL;

		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));

		ret = ni_calculate_adjusted_tdp_limits(rdev,
						       false, /* ??? polarity assumed "scale down" — TODO confirm */
						       rdev->pm.dpm.tdp_adjustment,
						       &tdp_limit,
						       &near_tdp_limit);
		if (ret)
			return ret;

		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
								   near_tdp_limit);

		/* All limits go to the SMC big-endian, in its scaled units. */
		smc_table->dpm2Params.TDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
		smc_table->dpm2Params.NearTDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
		smc_table->dpm2Params.SafePowerLimit =
			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
							   scaling_factor));
		smc_table->dpm2Params.PowerBoostLimit =
			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));

		/* Copy TDPLimit..PowerBoostLimit (4 contiguous u32s) to SRAM. */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
					      sizeof(u32) * 4, pi->sram_end);
		if (ret)
			return ret;
	}

	return 0;
}
1504*4882a593Smuzhiyun 
/*
 * ni_copy_and_switch_arb_sets - copy one MC arbitration register set to
 * another and make the destination set active.
 *
 * Each of the four sets (F0..F3) has its own DRAM timing registers plus a
 * burst-time field packed into MC_ARB_BURST_TIME. The source set's values
 * are captured, written into the destination set's registers, and then the
 * MC is told to switch to the destination set.
 *
 * Returns 0 on success, -EINVAL for an unknown source or destination set.
 */
int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
				u32 arb_freq_src, u32 arb_freq_dest)
{
	u32 mc_arb_dram_timing;
	u32 mc_arb_dram_timing2;
	u32 burst_time;
	u32 mc_cg_config;

	/* Capture the source set's timing and burst-time values. */
	switch (arb_freq_src) {
	case MC_CG_ARB_FREQ_F0:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
		break;
	case MC_CG_ARB_FREQ_F1:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
		break;
	case MC_CG_ARB_FREQ_F2:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
		break;
	case MC_CG_ARB_FREQ_F3:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	/* Program the captured values into the destination set. */
	switch (arb_freq_dest) {
	case MC_CG_ARB_FREQ_F0:
		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
		break;
	case MC_CG_ARB_FREQ_F1:
		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
		break;
	case MC_CG_ARB_FREQ_F2:
		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
		break;
	case MC_CG_ARB_FREQ_F3:
		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
		break;
	default:
		return -EINVAL;
	}

	/* Enable the low 4 MC_CG_CONFIG bits and request the new arb set. */
	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
	WREG32(MC_CG_CONFIG, mc_cg_config);
	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);

	return 0;
}
1569*4882a593Smuzhiyun 
ni_init_arb_table_index(struct radeon_device * rdev)1570*4882a593Smuzhiyun static int ni_init_arb_table_index(struct radeon_device *rdev)
1571*4882a593Smuzhiyun {
1572*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1573*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1574*4882a593Smuzhiyun 	u32 tmp;
1575*4882a593Smuzhiyun 	int ret;
1576*4882a593Smuzhiyun 
1577*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1578*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1579*4882a593Smuzhiyun 	if (ret)
1580*4882a593Smuzhiyun 		return ret;
1581*4882a593Smuzhiyun 
1582*4882a593Smuzhiyun 	tmp &= 0x00FFFFFF;
1583*4882a593Smuzhiyun 	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1584*4882a593Smuzhiyun 
1585*4882a593Smuzhiyun 	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1586*4882a593Smuzhiyun 					  tmp, pi->sram_end);
1587*4882a593Smuzhiyun }
1588*4882a593Smuzhiyun 
/*
 * ni_initial_switch_from_arb_f0_to_f1 - copy the boot-time (F0) MC
 * arbitration set to F1 and make F1 active.
 */
static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
{
	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
}
1593*4882a593Smuzhiyun 
ni_force_switch_to_arb_f0(struct radeon_device * rdev)1594*4882a593Smuzhiyun static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1595*4882a593Smuzhiyun {
1596*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1597*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1598*4882a593Smuzhiyun 	u32 tmp;
1599*4882a593Smuzhiyun 	int ret;
1600*4882a593Smuzhiyun 
1601*4882a593Smuzhiyun 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1602*4882a593Smuzhiyun 					&tmp, pi->sram_end);
1603*4882a593Smuzhiyun 	if (ret)
1604*4882a593Smuzhiyun 		return ret;
1605*4882a593Smuzhiyun 
1606*4882a593Smuzhiyun 	tmp = (tmp >> 24) & 0xff;
1607*4882a593Smuzhiyun 
1608*4882a593Smuzhiyun 	if (tmp == MC_CG_ARB_FREQ_F0)
1609*4882a593Smuzhiyun 		return 0;
1610*4882a593Smuzhiyun 
1611*4882a593Smuzhiyun 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1612*4882a593Smuzhiyun }
1613*4882a593Smuzhiyun 
/*
 * ni_populate_memory_timing_parameters - fill an SMC arb register set for
 * one performance level.
 *
 * The ATOM call programs MC_ARB_DRAM_TIMING/TIMING2 for the level's
 * sclk/mclk pair; those registers are then read back and stored big-endian
 * in @arb_regs along with the computed refresh rate.
 *
 * Always returns 0.
 */
static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
						struct rv7xx_pl *pl,
						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
{
	u32 dram_timing;
	u32 dram_timing2;

	arb_regs->mc_arb_rfsh_rate =
		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);


	/* Have the VBIOS program the timing registers for this clock pair. */
	radeon_atom_set_engine_dram_timings(rdev, pl->sclk, pl->mclk);

	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);

	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);

	return 0;
}
1635*4882a593Smuzhiyun 
/*
 * ni_do_program_memory_timing_parameters - upload one arb register set per
 * performance level into SMC SRAM.
 *
 * @first_arb_set: index of the first arb-table slot to use; level i goes
 * into slot (first_arb_set + i).
 *
 * Returns 0 on success or the first error from populate/copy.
 */
static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
						  struct radeon_ps *radeon_state,
						  unsigned int first_arb_set)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
	int i, ret = 0;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
		if (ret)
			break;

		/* Destination: data[first_arb_set + i] within the SRAM arb table. */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(ni_pi->arb_table_start +
						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
					      (u8 *)&arb_regs,
					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
					      pi->sram_end);
		if (ret)
			break;
	}
	return ret;
}
1663*4882a593Smuzhiyun 
/*
 * ni_program_memory_timing_parameters - upload the new state's arb
 * register sets starting at the driver-state slot.
 */
static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
					       struct radeon_ps *radeon_new_state)
{
	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
						      NISLANDS_DRIVER_STATE_ARB_INDEX);
}
1670*4882a593Smuzhiyun 
/*
 * ni_populate_initial_mvdd_value - the initial state always uses the high
 * MVDD level (the boot-safe choice regardless of mvdd_control).
 */
static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	voltage->index = eg_pi->mvdd_high_index;
	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
}
1679*4882a593Smuzhiyun 
/*
 * ni_populate_smc_initial_state - build the single-level "initial state"
 * entry of the SMC state table from the boot-time performance level.
 *
 * Copies the cached MPLL/SPLL/mclk clock register values (big-endian),
 * fills in the boot level's clocks, voltages, PCIe gen and GDDR5 strobe/EDC
 * flags, and neutralizes the DPM2 throttling fields so the initial state
 * never power-throttles.
 *
 * Always returns 0; voltage-lookup failures silently leave the
 * corresponding fields unpopulated.
 */
static int ni_populate_smc_initial_state(struct radeon_device *rdev,
					 struct radeon_ps *radeon_initial_state,
					 NISLANDS_SMC_STATETABLE *table)
{
	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;
	int ret;

	/* Memory PLL / mclk registers, from the values cached at init. */
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.vDLL_CNTL =
		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
	table->initialState.levels[0].mclk.vMPLL_SS =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
	table->initialState.levels[0].mclk.vMPLL_SS2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
	table->initialState.levels[0].mclk.mclk_value =
		cpu_to_be32(initial_state->performance_levels[0].mclk);

	/* Engine PLL / sclk registers. */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->performance_levels[0].sclk);
	table->initialState.levels[0].arbRefreshState =
		NISLANDS_INITIAL_STATE_ARB_INDEX;

	table->initialState.levels[0].ACIndex = 0;

	/* VDDC: raw value plus its standardized (leakage-corrected) form. */
	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					initial_state->performance_levels[0].vddc,
					&table->initialState.levels[0].vddc);
	if (!ret) {
		u16 std_vddc;

		ret = ni_get_std_voltage_value(rdev,
					       &table->initialState.levels[0].vddc,
					       &std_vddc);
		if (!ret)
			ni_populate_std_voltage_value(rdev, std_vddc,
						      table->initialState.levels[0].vddc.index,
						      &table->initialState.levels[0].std_vddc);
	}

	if (eg_pi->vddci_control)
		ni_populate_voltage_value(rdev,
					  &eg_pi->vddci_voltage_table,
					  initial_state->performance_levels[0].vddci,
					  &table->initialState.levels[0].vddci);

	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);

	/* aT: CG_R full-scale, CG_L zero. */
	reg = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(reg);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;

	/* GDDR5-only: strobe mode and (above the threshold) read/write EDC. */
	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->performance_levels[0].mclk);

		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags =  0;
	}

	table->initialState.levelCount = 1;

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	/* Disable DPM2 power throttling for the initial state. */
	table->initialState.levels[0].dpm2.MaxPS = 0;
	table->initialState.levels[0].dpm2.NearTDPDec = 0;
	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
	table->initialState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1790*4882a593Smuzhiyun 
/*
 * Program the ACPI (lowest-power) entry of the SMC state table.
 *
 * Starts from a copy of the already-populated initial state, then
 * overrides the voltages and forces the memory PLL into reset/bypass
 * and the engine PLL onto a slow mux input, so the SMC can drop the
 * chip to its quietest safe configuration when this state is selected.
 *
 * Always returns 0; voltage-table lookup failures are tolerated and
 * simply leave the values copied from the initial state in place.
 */
static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
				      NISLANDS_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	/* Start from the boot-time register snapshot and modify below. */
	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
	u32 reg;
	int ret;

	table->ACPIState = table->initialState;

	/* The ACPI state is an AC state; drop the DC flag inherited above. */
	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc, &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}

		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else {
			table->ACPIState.levels[0].gen2PCIE = 0;
		}
	} else {
		/* No ACPI vddc from the vbios: fall back to the table minimum. */
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->min_vddc_in_table,
						&table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc,
						       &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}
		table->ACPIState.levels[0].gen2PCIE = 0;
	}

	if (eg_pi->acpi_vddci) {
		if (eg_pi->vddci_control)
			ni_populate_voltage_value(rdev,
						  &eg_pi->vddci_voltage_table,
						  eg_pi->acpi_vddci,
						  &table->ACPIState.levels[0].vddci);
	}


	/* Power down / reset the memory PLL (AD and, on GDDR5, DQ sides). */
	mpll_ad_func_cntl &= ~PDNB;

	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	if (pi->mem_gddr5)
		mpll_dq_func_cntl &= ~PDNB;
	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;


	/* Hold all memory read DLLs in reset and power them down... */
	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
			     MRDCKA1_RESET |
			     MRDCKB0_RESET |
			     MRDCKB1_RESET |
			     MRDCKC0_RESET |
			     MRDCKC1_RESET |
			     MRDCKD0_RESET |
			     MRDCKD1_RESET);

	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
			      MRDCKA1_PDNB |
			      MRDCKB0_PDNB |
			      MRDCKB1_PDNB |
			      MRDCKC0_PDNB |
			      MRDCKC1_PDNB |
			      MRDCKD0_PDNB |
			      MRDCKD1_PDNB);

	/* ...and bypass them so the memory clock still propagates. */
	dll_cntl |= (MRDCKA0_BYPASS |
		     MRDCKA1_BYPASS |
		     MRDCKB0_BYPASS |
		     MRDCKB1_BYPASS |
		     MRDCKC0_BYPASS |
		     MRDCKC1_BYPASS |
		     MRDCKD0_BYPASS |
		     MRDCKD1_BYPASS);

	/* Route sclk from mux input 4 (a slow reference) instead of the SPLL. */
	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	/* SMC expects all register images big-endian. */
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	/* NOTE(review): ACIndex 1 presumably selects the ULV/ACPI arb set -
	 * confirm against the arb index constants used elsewhere. */
	if (eg_pi->dynamic_ac_timing)
		table->ACPIState.levels[0].ACIndex = 1;

	/* Neutral DPM2 parameters for this state. */
	table->ACPIState.levels[0].dpm2.MaxPS = 0;
	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1938*4882a593Smuzhiyun 
ni_init_smc_table(struct radeon_device * rdev)1939*4882a593Smuzhiyun static int ni_init_smc_table(struct radeon_device *rdev)
1940*4882a593Smuzhiyun {
1941*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1942*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1943*4882a593Smuzhiyun 	int ret;
1944*4882a593Smuzhiyun 	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1945*4882a593Smuzhiyun 	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1946*4882a593Smuzhiyun 
1947*4882a593Smuzhiyun 	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1948*4882a593Smuzhiyun 
1949*4882a593Smuzhiyun 	ni_populate_smc_voltage_tables(rdev, table);
1950*4882a593Smuzhiyun 
1951*4882a593Smuzhiyun 	switch (rdev->pm.int_thermal_type) {
1952*4882a593Smuzhiyun 	case THERMAL_TYPE_NI:
1953*4882a593Smuzhiyun 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1954*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1955*4882a593Smuzhiyun 		break;
1956*4882a593Smuzhiyun 	case THERMAL_TYPE_NONE:
1957*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1958*4882a593Smuzhiyun 		break;
1959*4882a593Smuzhiyun 	default:
1960*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1961*4882a593Smuzhiyun 		break;
1962*4882a593Smuzhiyun 	}
1963*4882a593Smuzhiyun 
1964*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1965*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1966*4882a593Smuzhiyun 
1967*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1968*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1969*4882a593Smuzhiyun 
1970*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1971*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1972*4882a593Smuzhiyun 
1973*4882a593Smuzhiyun 	if (pi->mem_gddr5)
1974*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1975*4882a593Smuzhiyun 
1976*4882a593Smuzhiyun 	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1977*4882a593Smuzhiyun 	if (ret)
1978*4882a593Smuzhiyun 		return ret;
1979*4882a593Smuzhiyun 
1980*4882a593Smuzhiyun 	ret = ni_populate_smc_acpi_state(rdev, table);
1981*4882a593Smuzhiyun 	if (ret)
1982*4882a593Smuzhiyun 		return ret;
1983*4882a593Smuzhiyun 
1984*4882a593Smuzhiyun 	table->driverState = table->initialState;
1985*4882a593Smuzhiyun 
1986*4882a593Smuzhiyun 	table->ULVState = table->initialState;
1987*4882a593Smuzhiyun 
1988*4882a593Smuzhiyun 	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1989*4882a593Smuzhiyun 						     NISLANDS_INITIAL_STATE_ARB_INDEX);
1990*4882a593Smuzhiyun 	if (ret)
1991*4882a593Smuzhiyun 		return ret;
1992*4882a593Smuzhiyun 
1993*4882a593Smuzhiyun 	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
1994*4882a593Smuzhiyun 				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
1995*4882a593Smuzhiyun }
1996*4882a593Smuzhiyun 
/*
 * Compute the SPLL register values needed to produce a given engine
 * (shader) clock, optionally applying engine spread spectrum.
 *
 * The feedback divider is calculated in units of 1/16384 - the SPLL
 * fractional feedback divider scale used with dithering (SPLL_DITHEN):
 *
 *   fbdiv = engine_clock * ref_divider * post_div * 16384 / ref_clock
 *
 * Returns 0 on success or the error from the atom divider lookup.
 * Results are written to @sclk in CPU byte order; callers byte-swap
 * for the SMC separately (see ni_populate_sclk_value()).
 */
static int ni_calculate_sclk_params(struct radeon_device *rdev,
				    u32 engine_clock,
				    NISLANDS_SMC_SCLK_VALUE *sclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct atom_clock_dividers dividers;
	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
	u64 tmp;
	u32 reference_clock = rdev->clock.spll.reference_freq;
	u32 reference_divider;
	u32 fbdiv;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     engine_clock, false, &dividers);
	if (ret)
		return ret;

	reference_divider = 1 + dividers.ref_div;

	/*
	 * Scale by 16384 (2^14) for the fractional feedback divider.
	 * The previous constant, 16834, was a digit transposition (the
	 * equivalent si/rv770 sclk code uses 16384) and biased every
	 * programmed engine clock ~2.7% high.
	 */
	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
	do_div(tmp, reference_clock);
	fbdiv = (u32) tmp;

	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);

	/* Mux input 2 selects the SPLL output. */
	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(2);

	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
	spll_func_cntl_3 |= SPLL_DITHEN;

	if (pi->sclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = engine_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
			/* Spread-spectrum step and delta from the vbios rate/percentage. */
			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);

			cg_spll_spread_spectrum &= ~CLK_S_MASK;
			cg_spll_spread_spectrum |= CLK_S(clk_s);
			cg_spll_spread_spectrum |= SSEN;

			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
		}
	}

	sclk->sclk_value = engine_clock;
	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;

	return 0;
}
2067*4882a593Smuzhiyun 
/*
 * Compute SPLL parameters for @engine_clock and store them in @sclk in
 * the big-endian layout the SMC expects.  Returns 0 on success or the
 * error from ni_calculate_sclk_params(); @sclk is untouched on failure.
 */
static int ni_populate_sclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  NISLANDS_SMC_SCLK_VALUE *sclk)
{
	NISLANDS_SMC_SCLK_VALUE tmp;
	int r;

	r = ni_calculate_sclk_params(rdev, engine_clock, &tmp);
	if (r)
		return r;

	/* Byte-swap every field for the big-endian SMC. */
	sclk->sclk_value = cpu_to_be32(tmp.sclk_value);
	sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(tmp.vCG_SPLL_FUNC_CNTL);
	sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(tmp.vCG_SPLL_FUNC_CNTL_2);
	sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(tmp.vCG_SPLL_FUNC_CNTL_3);
	sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(tmp.vCG_SPLL_FUNC_CNTL_4);
	sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(tmp.vCG_SPLL_SPREAD_SPECTRUM);
	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(tmp.vCG_SPLL_SPREAD_SPECTRUM_2);

	return 0;
}
2088*4882a593Smuzhiyun 
/*
 * Precompute a 256-entry SPLL divider lookup table and upload it to SMC
 * SRAM at ni_pi->spll_table_start.
 *
 * Entry i covers sclk = i * 512 (same clock units as the rest of the
 * dpm code).  Each entry packs the post divider and feedback divider
 * into freq[], and the spread-spectrum CLK_S/CLK_V values into ss[],
 * using the SMC_NISLANDS_SPLL_DIV_TABLE_* field layout.
 *
 * Returns 0 on success, -EINVAL if the table address is unset or a
 * computed field overflows its bitfield, -ENOMEM on allocation failure,
 * or the error from the SMC upload.
 */
static int ni_init_smc_spll_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
	NISLANDS_SMC_SCLK_VALUE sclk_params;
	u32 fb_div;
	u32 p_div;
	u32 clk_s;
	u32 clk_v;
	u32 sclk = 0;
	int i, ret;
	u32 tmp;

	if (ni_pi->spll_table_start == 0)
		return -EINVAL;

	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
	if (spll_table == NULL)
		return -ENOMEM;

	for (i = 0; i < 256; i++) {
		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
		if (ret)
			break;

		/* Extract the raw divider fields from the register images. */
		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;

		/* Rescale to the narrower SMC table fields: drop the low 13
		 * (fractional) feedback bits, then halve; clk_v loses its low
		 * 6 bits.  Mask-before-shift order is deliberate. */
		fb_div &= ~0x00001FFF;
		fb_div >>= 1;
		clk_v >>= 6;

		/* Reject any value that would not fit its packed bitfield. */
		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
			ret = -EINVAL;

		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
			ret = -EINVAL;

		if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
			ret = -EINVAL;

		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
			ret = -EINVAL;

		if (ret)
			break;

		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
		spll_table->freq[i] = cpu_to_be32(tmp);

		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
		spll_table->ss[i] = cpu_to_be32(tmp);

		sclk += 512;
	}

	if (!ret)
		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);

	kfree(spll_table);

	return ret;
}
2158*4882a593Smuzhiyun 
/*
 * Compute the full set of MPLL / memory-DLL register values for a given
 * memory clock and store them big-endian in @mclk for the SMC.
 *
 * @strobe_mode:  program the DQ PLL for strobe mode (affects the atom
 *                divider query and DQ PDNB handling)
 * @dll_state_on: power the per-channel memory read DLLs up (true) or
 *                down (false)
 *
 * Returns 0 on success or the error from the atom divider lookup.
 */
static int ni_populate_mclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  u32 memory_clock,
				  NISLANDS_SMC_MCLK_VALUE *mclk,
				  bool strobe_mode,
				  bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		/* NOTE(review): bit 27 of MC_SEQ_MISC7 forces post_div to 1
		 * outside strobe mode - meaning taken from the sequencer
		 * straps; confirm against the MC register documentation. */
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	/* Program the AD-side MPLL dividers and bias current. */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		/* GDDR5 has a separate DQ-side PLL; mirror the AD settings. */
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			/* Derive CLK_S/CLK_V from the vbios spread-spectrum
			 * rate and percentage for this VCO frequency. */
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	/* Set the DLL speed and power the per-channel read DLLs up or down. */
	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);


	/* SMC expects all register images big-endian. */
	mclk->mclk_value = cpu_to_be32(memory_clock);
	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}
2292*4882a593Smuzhiyun 
/*
 * Fill in the per-level switch-period (bSP) values for a SW state:
 * every level but the highest uses the normal period (pi->dsp), the
 * highest level uses pi->psp.
 */
static void ni_populate_smc_sp(struct radeon_device *rdev,
			       struct radeon_ps *radeon_state,
			       NISLANDS_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(radeon_state);
	int last = ps->performance_level_count - 1;
	int i;

	for (i = 0; i < last; i++)
		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);

	/* Highest performance level gets its own switch period. */
	smc_state->levels[last].bSP = cpu_to_be32(pi->psp);
}
2307*4882a593Smuzhiyun 
/*
 * Convert a driver-side performance level (@pl) into the SMC hardware
 * performance level layout (@level): PCIE gen, sclk/mclk PLL register
 * images, memory controller flags (stutter/EDC/RTT), and voltages.
 *
 * Returns 0 on success or the first error from a populate helper.
 */
static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
					 struct rv7xx_pl *pl,
					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	bool dll_state_on;
	u16 std_vddc;
	u32 tmp = RREG32(DC_STUTTER_CNTL);

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;

	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	level->mcFlags =  0;
	/* Memory self-refresh stutter: only below the threshold, with UVD
	 * idle, and with both display-controller stutter enables set. */
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled &&
	    (tmp & DC_STUTTER_ENABLE_A) &&
	    (tmp & DC_STUTTER_ENABLE_B))
		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;

	if (pi->mem_gddr5) {
		/* EDC (link error detection) above the per-direction thresholds. */
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		/* NOTE(review): DLL-on decision comes from MC_SEQ_MISC5/6/7
		 * sequencer straps - field meanings taken on trust from the
		 * cypress code; confirm against MC documentation. */
		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else {
			dll_state_on = false;
			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
		}

		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
					     &level->mclk,
					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
					     dll_state_on);
	} else
		/* Non-GDDR5: always strobe mode with the DLLs powered. */
		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);

	if (ret)
		return ret;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					pl->vddc, &level->vddc);
	if (ret)
		return ret;

	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
	if (ret)
		return ret;

	ni_populate_std_voltage_value(rdev, std_vddc,
				      level->vddc.index, &level->std_vddc);

	if (eg_pi->vddci_control) {
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
						pl->vddci, &level->vddci);
		if (ret)
			return ret;
	}

	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
2388*4882a593Smuzhiyun 
/*
 * ni_populate_smc_t - fill in the aT (activity threshold) field for each
 * performance level of an SMC software state.
 *
 * aT packs two thresholds via the CG_R()/CG_L() field macros, derived from
 * the sclk ratio of adjacent levels (r600_calculate_at()) and scaled by the
 * bsp/pbsp time base.  Presumably R is the down-transition and L the
 * up-transition threshold -- TODO confirm against SMC firmware docs.
 * Values are stored big-endian for the SMC.
 *
 * Returns 0 on success, -EINVAL if the state has 9 or more levels.
 */
static int ni_populate_smc_t(struct radeon_device *rdev,
			     struct radeon_ps *radeon_state,
			     NISLANDS_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 a_t;
	u32 t_l, t_h;
	u32 high_bsp;
	int i, ret;

	if (state->performance_level_count >= 9)
		return -EINVAL;

	if (state->performance_level_count < 2) {
		/* Single level: R = max, L = 0, so no transitions happen. */
		a_t = CG_R(0xffff) | CG_L(0);
		smc_state->levels[0].aT = cpu_to_be32(a_t);
		return 0;
	}

	smc_state->levels[0].aT = cpu_to_be32(0);

	for (i = 0; i <= state->performance_level_count - 2; i++) {
		/* UVD states use a different activity window per level. */
		if (eg_pi->uvd_enabled)
			ret = r600_calculate_at(
				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);
		else
			ret = r600_calculate_at(
				1000 * (i + 1),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);

		if (ret) {
			/* Fall back to a default hysteresis band around the
			 * level index when the calculation fails. */
			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
		}

		/* Merge the R threshold into this level's existing aT word
		 * (stored big-endian, hence the swab round-trip). */
		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
		a_t |= CG_R(t_l * pi->bsp / 20000);
		smc_state->levels[i].aT = cpu_to_be32(a_t);

		/* The transition into the top level uses the performance
		 * time base (pbsp) instead of the base one (bsp). */
		high_bsp = (i == state->performance_level_count - 2) ?
			pi->pbsp : pi->bsp;

		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
	}

	return 0;
}
2448*4882a593Smuzhiyun 
/*
 * ni_populate_power_containment_values - fill in the dpm2 power containment
 * parameters (MaxPS pulse skipping, near-TDP step sizes, power boost flag)
 * for each performance level of an SMC software state.
 *
 * Also writes the scaled power boost limit into the SMC DPM2 parameter
 * block; if that SRAM write fails, power boost is disabled (limit 0)
 * rather than failing the whole conversion.
 *
 * Returns 0 on success (or when power containment is disabled),
 * -EINVAL on inconsistent state/level data.
 */
static int ni_populate_power_containment_values(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						NISLANDS_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 prev_sclk;
	u32 max_sclk;
	u32 min_sclk;
	int i, ret;
	u32 tdp_limit;
	u32 near_tdp_limit;
	u32 power_boost_limit;
	u8 max_ps_percent;

	/* Nothing to do when power containment is not in use. */
	if (ni_pi->enable_power_containment == false)
		return 0;

	if (state->performance_level_count == 0)
		return -EINVAL;

	/* The SMC state must already have one entry per driver level. */
	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	ret = ni_calculate_adjusted_tdp_limits(rdev,
					       false, /* ??? */
					       rdev->pm.dpm.tdp_adjustment,
					       &tdp_limit,
					       &near_tdp_limit);
	if (ret)
		return ret;

	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);

	/* Push the SMC-scaled boost limit into the DPM2 parameter block. */
	ret = rv770_write_smc_sram_dword(rdev,
					 pi->state_table_start +
					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
					 pi->sram_end);
	if (ret)
		power_boost_limit = 0;

	/* Lowest level: no pulse skipping and no near-TDP stepping. */
	smc_state->levels[0].dpm2.MaxPS = 0;
	smc_state->levels[0].dpm2.NearTDPDec = 0;
	smc_state->levels[0].dpm2.AboveSafeInc = 0;
	smc_state->levels[0].dpm2.BelowSafeInc = 0;
	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;

	for (i = 1; i < state->performance_level_count; i++) {
		prev_sclk = state->performance_levels[i-1].sclk;
		max_sclk  = state->performance_levels[i].sclk;
		/* The top level uses a different (_H) throttle percentage
		 * than the intermediate (_M) levels. */
		max_ps_percent = (i != (state->performance_level_count - 1)) ?
			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;

		/* Levels must be sorted by ascending sclk. */
		if (max_sclk < prev_sclk)
			return -EINVAL;

		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
			min_sclk = max_sclk;
		else if (1 == i)
			min_sclk = prev_sclk;
		else
			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;

		/* Never throttle below the lowest defined level. */
		if (min_sclk < state->performance_levels[0].sclk)
			min_sclk = state->performance_levels[0].sclk;

		if (min_sclk == 0)
			return -EINVAL;

		/* MaxPS: fraction of pulses that may be skipped at this level,
		 * proportional to the headroom between min and max sclk. */
		smc_state->levels[i].dpm2.MaxPS =
			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
		/* Power boost is allowed on every level except the top one. */
		smc_state->levels[i].stateFlags |=
			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
			PPSMC_STATEFLAG_POWERBOOST : 0;
	}

	return 0;
}
2534*4882a593Smuzhiyun 
/*
 * ni_populate_sq_ramping_values - program per-level SQ (shader engine)
 * power ramping throttle registers in an SMC software state.
 *
 * Levels whose sclk is at or above the global sq_ramping_threshold get the
 * configured ramp parameters; other levels -- and all levels when any
 * compile-time parameter would not fit its register field -- get the
 * all-ones mask values instead.
 *
 * Returns 0 on success, -EINVAL on inconsistent state data or an unset
 * ramping threshold.
 */
static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 sq_power_throttle;
	u32 sq_power_throttle2;
	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
	int i;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	if (rdev->pm.dpm.sq_ramping_threshold == 0)
		return -EINVAL;

	/* If any configured parameter overflows its register field, disable
	 * SQ ramping entirely rather than program a truncated value. */
	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
		enable_sq_ramping = false;

	for (i = 0; i < state->performance_level_count; i++) {
		sq_power_throttle  = 0;
		sq_power_throttle2 = 0;

		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
		    enable_sq_ramping) {
			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
		} else {
			/* Ramping off for this level: saturate every field. */
			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
		}

		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
	}

	return 0;
}
2592*4882a593Smuzhiyun 
/*
 * ni_enable_power_containment - tell the SMC to activate or deactivate
 * TDP clamping and track the result in ni_pi->pc_enabled.
 *
 * Clamping is never activated for UVD states.  Returns 0 on success (or
 * when power containment is not supported), -EINVAL if the SMC rejects
 * the message.
 */
static int ni_enable_power_containment(struct radeon_device *rdev,
				       struct radeon_ps *radeon_new_state,
				       bool enable)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	PPSMC_Result result;

	if (!ni_pi->enable_power_containment)
		return 0;

	if (!enable) {
		result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
		ni_pi->pc_enabled = false;
		return (result == PPSMC_Result_OK) ? 0 : -EINVAL;
	}

	/* TDP clamping is not applied to UVD states. */
	if (r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2))
		return 0;

	result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
	ni_pi->pc_enabled = (result == PPSMC_Result_OK);

	return ni_pi->pc_enabled ? 0 : -EINVAL;
}
2622*4882a593Smuzhiyun 
/*
 * ni_convert_power_state_to_smc - translate a driver power state into the
 * SMC software-state representation.
 *
 * Converts every performance level (clocks, voltages, watermarks, arb and
 * AC-timing indices), programs the SMC watermark-threshold soft register
 * and then fills in the dpm2 power containment, SQ ramping and transition
 * threshold (aT) parameters.  The dpm2 tables are best-effort: on failure
 * the corresponding feature is disabled instead of failing the conversion.
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i, ret;
	u32 threshold;

	/* Validate the level count before indexing performance_levels[count - 1]:
	 * the previous code computed the threshold first, reading out of
	 * bounds for an empty state. */
	if (state->performance_level_count == 0 ||
	    state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
		return -EINVAL;

	/* Watermark threshold: the sclk of the highest performance level
	 * (the historical "* 100 / 100" scaling was a no-op). */
	threshold = state->performance_levels[state->performance_level_count - 1].sclk;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	smc_state->levelCount = 0;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
						    &smc_state->levels[i]);
		smc_state->levels[i].arbRefreshState =
			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);

		if (ret)
			return ret;

		if (ni_pi->enable_power_containment)
			smc_state->levels[i].displayWatermark =
				(state->performance_levels[i].sclk < threshold) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
		else
			smc_state->levels[i].displayWatermark = (i < 2) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;

		/* With dynamic AC timing each driver level has its own slot
		 * in the MC register table; otherwise all levels share 0. */
		if (eg_pi->dynamic_ac_timing)
			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
		else
			smc_state->levels[i].ACIndex = 0;

		smc_state->levelCount++;
	}

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
				      cpu_to_be32(threshold / 512));

	ni_populate_smc_sp(rdev, radeon_state, smc_state);

	/* dpm2 features are optional: disable them on failure rather than
	 * failing the whole state upload. */
	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_power_containment = false;

	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_sq_ramping = false;

	return ni_populate_smc_t(rdev, radeon_state, smc_state);
}
2681*4882a593Smuzhiyun 
ni_upload_sw_state(struct radeon_device * rdev,struct radeon_ps * radeon_new_state)2682*4882a593Smuzhiyun static int ni_upload_sw_state(struct radeon_device *rdev,
2683*4882a593Smuzhiyun 			      struct radeon_ps *radeon_new_state)
2684*4882a593Smuzhiyun {
2685*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2686*4882a593Smuzhiyun 	u16 address = pi->state_table_start +
2687*4882a593Smuzhiyun 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2688*4882a593Smuzhiyun 	NISLANDS_SMC_SWSTATE *smc_state;
2689*4882a593Smuzhiyun 	size_t state_size = struct_size(smc_state, levels,
2690*4882a593Smuzhiyun 			NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE);
2691*4882a593Smuzhiyun 	int ret;
2692*4882a593Smuzhiyun 
2693*4882a593Smuzhiyun 	smc_state = kzalloc(state_size, GFP_KERNEL);
2694*4882a593Smuzhiyun 	if (smc_state == NULL)
2695*4882a593Smuzhiyun 		return -ENOMEM;
2696*4882a593Smuzhiyun 
2697*4882a593Smuzhiyun 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2698*4882a593Smuzhiyun 	if (ret)
2699*4882a593Smuzhiyun 		goto done;
2700*4882a593Smuzhiyun 
2701*4882a593Smuzhiyun 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2702*4882a593Smuzhiyun 
2703*4882a593Smuzhiyun done:
2704*4882a593Smuzhiyun 	kfree(smc_state);
2705*4882a593Smuzhiyun 
2706*4882a593Smuzhiyun 	return ret;
2707*4882a593Smuzhiyun }
2708*4882a593Smuzhiyun 
/*
 * ni_set_mc_special_registers - append derived MC register entries to the
 * driver MC register table.
 *
 * For each source register that needs companions (MC_SEQ_MISC1 spawns the
 * EMRS and MRS command registers, MC_SEQ_RESERVE_M spawns MRS1), new
 * address slots are appended at index j (starting at table->last) and the
 * per-mclk data values are synthesized from the current command register
 * contents combined with the source register's data.
 *
 * Returns 0 on success, -EINVAL if the table would overflow
 * SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE.
 */
static int ni_set_mc_special_registers(struct radeon_device *rdev,
				       struct ni_mc_reg_table *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 i, j, k;
	u32 temp_reg;

	/* i walks the existing entries, j appends new ones after them. */
	for (i = 0, j = table->last; i < table->last; i++) {
		switch (table->mc_reg_address[i].s1) {
		case MC_SEQ_MISC1 >> 2:
			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
				return -EINVAL;
			/* EMRS: keep the current high half, take the low half
			 * from MISC1's high half. */
			temp_reg = RREG32(MC_PMG_CMD_EMRS);
			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
			for (k = 0; k < table->num_entries; k++)
				table->mc_reg_table_entry[k].mc_data[j] =
					((temp_reg & 0xffff0000)) |
					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
			j++;
			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
				return -EINVAL;

			/* MRS: keep the current high half, take the low half
			 * from MISC1's low half; DDR3 additionally needs bit 8. */
			temp_reg = RREG32(MC_PMG_CMD_MRS);
			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
			for(k = 0; k < table->num_entries; k++) {
				table->mc_reg_table_entry[k].mc_data[j] =
					(temp_reg & 0xffff0000) |
					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
				if (!pi->mem_gddr5)
					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
			}
			j++;
			break;
		case MC_SEQ_RESERVE_M >> 2:
			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
				return -EINVAL;
			/* MRS1: same high/low combination from RESERVE_M. */
			temp_reg = RREG32(MC_PMG_CMD_MRS1);
			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
			for (k = 0; k < table->num_entries; k++)
				table->mc_reg_table_entry[k].mc_data[j] =
					(temp_reg & 0xffff0000) |
					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
			j++;
			break;
		default:
			break;
		}
	}

	/* j now counts original plus appended entries. */
	table->last = j;

	return 0;
}
2765*4882a593Smuzhiyun 
ni_check_s0_mc_reg_index(u16 in_reg,u16 * out_reg)2766*4882a593Smuzhiyun static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2767*4882a593Smuzhiyun {
2768*4882a593Smuzhiyun 	bool result = true;
2769*4882a593Smuzhiyun 
2770*4882a593Smuzhiyun 	switch (in_reg) {
2771*4882a593Smuzhiyun 	case  MC_SEQ_RAS_TIMING >> 2:
2772*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2773*4882a593Smuzhiyun 		break;
2774*4882a593Smuzhiyun 	case MC_SEQ_CAS_TIMING >> 2:
2775*4882a593Smuzhiyun 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2776*4882a593Smuzhiyun 		break;
2777*4882a593Smuzhiyun 	case MC_SEQ_MISC_TIMING >> 2:
2778*4882a593Smuzhiyun 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2779*4882a593Smuzhiyun 		break;
2780*4882a593Smuzhiyun 	case MC_SEQ_MISC_TIMING2 >> 2:
2781*4882a593Smuzhiyun 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2782*4882a593Smuzhiyun 		break;
2783*4882a593Smuzhiyun 	case MC_SEQ_RD_CTL_D0 >> 2:
2784*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2785*4882a593Smuzhiyun 		break;
2786*4882a593Smuzhiyun 	case MC_SEQ_RD_CTL_D1 >> 2:
2787*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2788*4882a593Smuzhiyun 		break;
2789*4882a593Smuzhiyun 	case MC_SEQ_WR_CTL_D0 >> 2:
2790*4882a593Smuzhiyun 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2791*4882a593Smuzhiyun 		break;
2792*4882a593Smuzhiyun 	case MC_SEQ_WR_CTL_D1 >> 2:
2793*4882a593Smuzhiyun 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2794*4882a593Smuzhiyun 		break;
2795*4882a593Smuzhiyun 	case MC_PMG_CMD_EMRS >> 2:
2796*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2797*4882a593Smuzhiyun 		break;
2798*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS >> 2:
2799*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2800*4882a593Smuzhiyun 		break;
2801*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS1 >> 2:
2802*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2803*4882a593Smuzhiyun 		break;
2804*4882a593Smuzhiyun 	case MC_SEQ_PMG_TIMING >> 2:
2805*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2806*4882a593Smuzhiyun 		break;
2807*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS2 >> 2:
2808*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2809*4882a593Smuzhiyun 		break;
2810*4882a593Smuzhiyun 	default:
2811*4882a593Smuzhiyun 		result = false;
2812*4882a593Smuzhiyun 		break;
2813*4882a593Smuzhiyun 	}
2814*4882a593Smuzhiyun 
2815*4882a593Smuzhiyun 	return result;
2816*4882a593Smuzhiyun }
2817*4882a593Smuzhiyun 
ni_set_valid_flag(struct ni_mc_reg_table * table)2818*4882a593Smuzhiyun static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2819*4882a593Smuzhiyun {
2820*4882a593Smuzhiyun 	u8 i, j;
2821*4882a593Smuzhiyun 
2822*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++) {
2823*4882a593Smuzhiyun 		for (j = 1; j < table->num_entries; j++) {
2824*4882a593Smuzhiyun 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2825*4882a593Smuzhiyun 				table->valid_flag |= 1 << i;
2826*4882a593Smuzhiyun 				break;
2827*4882a593Smuzhiyun 			}
2828*4882a593Smuzhiyun 		}
2829*4882a593Smuzhiyun 	}
2830*4882a593Smuzhiyun }
2831*4882a593Smuzhiyun 
ni_set_s0_mc_reg_index(struct ni_mc_reg_table * table)2832*4882a593Smuzhiyun static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2833*4882a593Smuzhiyun {
2834*4882a593Smuzhiyun 	u32 i;
2835*4882a593Smuzhiyun 	u16 address;
2836*4882a593Smuzhiyun 
2837*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++)
2838*4882a593Smuzhiyun 		table->mc_reg_address[i].s0 =
2839*4882a593Smuzhiyun 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2840*4882a593Smuzhiyun 			address : table->mc_reg_address[i].s1;
2841*4882a593Smuzhiyun }
2842*4882a593Smuzhiyun 
ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table * table,struct ni_mc_reg_table * ni_table)2843*4882a593Smuzhiyun static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2844*4882a593Smuzhiyun 				      struct ni_mc_reg_table *ni_table)
2845*4882a593Smuzhiyun {
2846*4882a593Smuzhiyun 	u8 i, j;
2847*4882a593Smuzhiyun 
2848*4882a593Smuzhiyun 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2849*4882a593Smuzhiyun 		return -EINVAL;
2850*4882a593Smuzhiyun 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2851*4882a593Smuzhiyun 		return -EINVAL;
2852*4882a593Smuzhiyun 
2853*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++)
2854*4882a593Smuzhiyun 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2855*4882a593Smuzhiyun 	ni_table->last = table->last;
2856*4882a593Smuzhiyun 
2857*4882a593Smuzhiyun 	for (i = 0; i < table->num_entries; i++) {
2858*4882a593Smuzhiyun 		ni_table->mc_reg_table_entry[i].mclk_max =
2859*4882a593Smuzhiyun 			table->mc_reg_table_entry[i].mclk_max;
2860*4882a593Smuzhiyun 		for (j = 0; j < table->last; j++)
2861*4882a593Smuzhiyun 			ni_table->mc_reg_table_entry[i].mc_data[j] =
2862*4882a593Smuzhiyun 				table->mc_reg_table_entry[i].mc_data[j];
2863*4882a593Smuzhiyun 	}
2864*4882a593Smuzhiyun 	ni_table->num_entries = table->num_entries;
2865*4882a593Smuzhiyun 
2866*4882a593Smuzhiyun 	return 0;
2867*4882a593Smuzhiyun }
2868*4882a593Smuzhiyun 
/*
 * ni_initialize_mc_reg_table - build the driver MC register table used for
 * dynamic AC timing.
 *
 * Seeds the _LP shadow registers from their primary counterparts, reads
 * the VBIOS MC register table for the installed memory module, copies it
 * into the driver format, appends the derived special registers and marks
 * which registers actually vary with mclk.
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	struct atom_mc_reg_table *table;
	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
	u8 module_index = rv770_get_memory_module_index(rdev);

	table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
	if (!table)
		return -ENOMEM;

	/* Initialize each _LP shadow register with the live value of its
	 * primary register so the shadows start out consistent. */
	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));

	/* Pull the raw MC register table for this memory module from atom. */
	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);

	if (ret)
		goto init_mc_done;

	ret = ni_copy_vbios_mc_reg_table(table, ni_table);

	if (ret)
		goto init_mc_done;

	ni_set_s0_mc_reg_index(ni_table);

	ret = ni_set_mc_special_registers(rdev, ni_table);

	if (ret)
		goto init_mc_done;

	ni_set_valid_flag(ni_table);

init_mc_done:
	kfree(table);

	return ret;
}
2919*4882a593Smuzhiyun 
/*
 * ni_populate_mc_reg_addresses - copy the valid MC register addresses into
 * the SMC register table, compacted and byte-swapped for the SMC.
 *
 * Only registers flagged in valid_flag are copied; the copy stops early if
 * the SMC array would overflow.  mc_reg_table->last is set to the number
 * of addresses written.
 */
static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
					 SMC_NIslands_MCRegisters *mc_reg_table)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_mc_reg_table *src = &ni_pi->mc_reg_table;
	u32 dst = 0;
	u32 reg;

	for (reg = 0; reg < src->last; reg++) {
		if (!(src->valid_flag & (1 << reg)))
			continue;
		if (dst >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
			break;
		mc_reg_table->address[dst].s0 =
			cpu_to_be16(src->mc_reg_address[reg].s0);
		mc_reg_table->address[dst].s1 =
			cpu_to_be16(src->mc_reg_address[reg].s1);
		dst++;
	}

	mc_reg_table->last = (u8)dst;
}
2939*4882a593Smuzhiyun 
2940*4882a593Smuzhiyun 
/*
 * ni_convert_mc_registers - pack one MC register entry's data into an SMC
 * register set.
 *
 * Only values whose bit is set in valid_flag are copied; they are packed
 * contiguously and converted to big-endian for the SMC.
 */
static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
				    SMC_NIslands_MCRegisterSet *data,
				    u32 num_entries, u32 valid_flag)
{
	u32 src, dst = 0;

	for (src = 0; src < num_entries; src++) {
		if (!(valid_flag & (1 << src)))
			continue;
		data->value[dst++] = cpu_to_be32(entry->mc_data[src]);
	}
}
2954*4882a593Smuzhiyun 
/*
 * Select the first MC register table entry whose mclk_max covers the
 * requested power level's memory clock (clamping to the last entry if
 * the mclk exceeds every range) and convert it into SMC format.
 */
static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
						 struct rv7xx_pl *pl,
						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 idx;

	for (idx = 0; idx < ni_pi->mc_reg_table.num_entries; idx++)
		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[idx].mclk_max)
			break;

	/* mclk above every entry's range: fall back to the highest entry */
	if ((idx == ni_pi->mc_reg_table.num_entries) && (idx > 0))
		idx--;

	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[idx],
				mc_reg_table_data,
				ni_pi->mc_reg_table.last,
				ni_pi->mc_reg_table.valid_flag);
}
2975*4882a593Smuzhiyun 
/*
 * Fill one SMC MC-register set per performance level of the given
 * state, starting at the first driver-state slot in the SMC table.
 */
static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
					   struct radeon_ps *radeon_state,
					   SMC_NIslands_MCRegisters *mc_reg_table)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	int level;

	for (level = 0; level < state->performance_level_count; level++)
		ni_convert_mc_reg_table_entry_to_smc(rdev,
						     &state->performance_levels[level],
						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + level]);
}
2989*4882a593Smuzhiyun 
/*
 * Build the complete SMC MC register table for the boot state and
 * upload it to SMC SRAM.
 *
 * Slot 0 holds the register set matched to the boot state's first
 * performance level, slot 1 holds the raw first driver table entry
 * (presumably the initial/safe register set -- TODO confirm against
 * the SMC firmware layout), and the driver-state slots are filled
 * from the boot state's performance levels.
 *
 * Returns the result of the SMC SRAM copy (0 on success).
 */
static int ni_populate_mc_reg_table(struct radeon_device *rdev,
				    struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;

	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));

	/* tell the SMC which soft-register sequence index to use */
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);

	ni_populate_mc_reg_addresses(rdev, mc_reg_table);

	/* slot 0: register values for the boot state's first level */
	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
					     &mc_reg_table->data[0]);

	/* slot 1: raw first driver table entry */
	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
				&mc_reg_table->data[1],
				ni_pi->mc_reg_table.last,
				ni_pi->mc_reg_table.valid_flag);

	/* driver-state slots: one register set per performance level */
	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);

	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
				       (u8 *)mc_reg_table,
				       sizeof(SMC_NIslands_MCRegisters),
				       pi->sram_end);
}
3020*4882a593Smuzhiyun 
ni_upload_mc_reg_table(struct radeon_device * rdev,struct radeon_ps * radeon_new_state)3021*4882a593Smuzhiyun static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3022*4882a593Smuzhiyun 				  struct radeon_ps *radeon_new_state)
3023*4882a593Smuzhiyun {
3024*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3025*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3026*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3027*4882a593Smuzhiyun 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3028*4882a593Smuzhiyun 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3029*4882a593Smuzhiyun 	u16 address;
3030*4882a593Smuzhiyun 
3031*4882a593Smuzhiyun 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3032*4882a593Smuzhiyun 
3033*4882a593Smuzhiyun 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3034*4882a593Smuzhiyun 
3035*4882a593Smuzhiyun 	address = eg_pi->mc_reg_table_start +
3036*4882a593Smuzhiyun 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3037*4882a593Smuzhiyun 
3038*4882a593Smuzhiyun 	return rv770_copy_bytes_to_smc(rdev, address,
3039*4882a593Smuzhiyun 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3040*4882a593Smuzhiyun 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3041*4882a593Smuzhiyun 				       pi->sram_end);
3042*4882a593Smuzhiyun }
3043*4882a593Smuzhiyun 
/*
 * Compute the SMC leakage LUT from the driver's leakage coefficients:
 * one entry per (temperature step, VDDC voltage) pair.  Voltage
 * columns beyond the voltage table are padded with the largest
 * leakage value seen.  Always returns 0.
 */
static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
						   PP_NIslands_CACTABLES *cac_tables)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 leakage = 0;
	unsigned int ti, vi, vddc_count;
	s32 temp;
	u32 smc_leakage, max_leakage = 0;
	u32 scaling_factor;

	vddc_count = eg_pi->vddc_voltage_table.count;
	if (vddc_count > SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES)
		vddc_count = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;

	scaling_factor = ni_get_smc_power_scaling_factor(rdev);

	for (ti = 0; ti < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; ti++) {
		/* row temperature: 8 degrees per step, in millidegrees,
		 * clamped to the board's minimum leakage temperature */
		temp = 1000 * ((ti + 1) * 8);
		if (temp < ni_pi->cac_data.leakage_minimum_temperature)
			temp = ni_pi->cac_data.leakage_minimum_temperature;

		for (vi = 0; vi < vddc_count; vi++) {
			ni_calculate_leakage_for_v_and_t(rdev,
							 &ni_pi->cac_data.leakage_coefficients,
							 eg_pi->vddc_voltage_table.entries[vi].value,
							 temp,
							 ni_pi->cac_data.i_leakage,
							 &leakage);

			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
			if (smc_leakage > max_leakage)
				max_leakage = smc_leakage;

			cac_tables->cac_lkge_lut[ti][vi] = cpu_to_be32(smc_leakage);
		}
	}

	/* pad the unused voltage columns with the worst-case leakage */
	for (vi = vddc_count; vi < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; vi++)
		for (ti = 0; ti < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; ti++)
			cac_tables->cac_lkge_lut[ti][vi] = cpu_to_be32(max_leakage);

	return 0;
}
3090*4882a593Smuzhiyun 
/*
 * Program the SMC leakage LUT from the static per-voltage leakage
 * table in dyn_state; the same scaled value is replicated across all
 * temperature rows.  Unused voltage columns are filled with the
 * maximum leakage value.  Returns -EINVAL if no usable entries exist.
 */
static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
					    PP_NIslands_CACTABLES *cac_tables)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_cac_leakage_table *leakage_table =
		&rdev->pm.dpm.dyn_state.cac_leakage_table;
	u32 ti, vi, count;
	u32 smc_leakage, max_leakage = 0;
	u32 scaling_factor;

	if (!leakage_table)
		return -EINVAL;

	/* use no more entries than both tables provide ... */
	count = leakage_table->count;
	if (eg_pi->vddc_voltage_table.count < count)
		count = eg_pi->vddc_voltage_table.count;

	/* ... and no more than the SMC LUT can hold */
	if (count > SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES)
		count = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;

	if (count == 0)
		return -EINVAL;

	scaling_factor = ni_get_smc_power_scaling_factor(rdev);

	for (vi = 0; vi < count; vi++) {
		smc_leakage = leakage_table->entries[vi].leakage;

		if (smc_leakage > max_leakage)
			max_leakage = smc_leakage;

		for (ti = 0; ti < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; ti++)
			cac_tables->cac_lkge_lut[ti][vi] =
				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
	}

	/* fill the remaining columns with the worst-case leakage */
	for (vi = count; vi < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; vi++)
		for (ti = 0; ti < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; ti++)
			cac_tables->cac_lkge_lut[ti][vi] =
				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));

	return 0;
}
3136*4882a593Smuzhiyun 
/*
 * Allocate, fill, and upload the SMC CAC (power/capacitance) tables.
 *
 * Programs the TID fields of CG_CAC_CTRL, copies the per-level DC CAC
 * values and the PCIe (BIF) LUT from the chip-specific weight table,
 * seeds the runtime cac_data, builds the leakage LUT (either
 * driver-calculated from coefficients or the simplified VBIOS table),
 * and writes the whole structure to SMC SRAM.
 *
 * CAC is best-effort: on any failure this disables CAC and power
 * containment instead of failing dpm setup, so the function always
 * returns 0 except for the initial allocation failure.
 */
static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	PP_NIslands_CACTABLES *cac_tables = NULL;
	int i, ret;
	u32 reg;

	if (ni_pi->enable_cac == false)
		return 0;

	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
	if (!cac_tables)
		return -ENOMEM;

	/* program the TID count/unit used by the CAC controller */
	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
		TID_UNIT(ni_pi->cac_weights->tid_unit));
	WREG32(CG_CAC_CTRL, reg);

	/* cache the per-level DC CAC values for later state programming */
	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];

	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];

	/* seed the runtime CAC data from board parameters and weights */
	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
	ni_pi->cac_data.pwr_const = 0;
	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
	ni_pi->cac_data.bif_cac_value = 0;
	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
	ni_pi->cac_data.allow_ovrflw = 0;
	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
	ni_pi->cac_data.num_win_tdp = 0;
	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;

	if (ni_pi->driver_calculate_cac_leakage)
		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
	else
		ret = ni_init_simplified_leakage_table(rdev, cac_tables);

	if (ret)
		goto done_free;

	/* convert the runtime CAC data into SMC (big-endian) layout */
	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;

	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);

done_free:
	/* CAC is optional: on failure fall back to running without it */
	if (ret) {
		ni_pi->enable_cac = false;
		ni_pi->enable_power_containment = false;
	}

	kfree(cac_tables);

	return 0;
}
3205*4882a593Smuzhiyun 
/*
 * Program the per-block CAC weight registers from the chip-specific
 * weight table.
 *
 * Each CG_CAC_REGION_*_WEIGHT_* register packs weights for several
 * functional blocks (TCP/TA, TCC, CB, DB, SXM/SXS, XBR, SPI, LDS/SC,
 * BIF/CP/PA/VGT, DC, UVD, SQ); each is read-modify-written so only
 * the weight fields change.  The MC read/write weights are written
 * through the MC_CG_CONFIG/MC_CG_DATAPORT indirect port.
 *
 * Only runs when CAC is enabled and the platform flags that explicit
 * configuration is required; returns -EINVAL if no weight table is
 * attached.
 */
static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* region 1: texture cache/pipe and texture addresser weights */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* region 2: color buffer, depth buffer, and shader export weights */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* region 3: crossbar and shader interpolator weights */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* region 4: LDS, scan converter, BIF, CP, PA, VGT, DC, UVD weights */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* region 5: shader sequencer VSP/GPR weights */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* spare override fields in region 4 */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* select the MC index (0x09D4) and broadcast to all MCD channels,
	 * then write the MC read/write weights through the dataport */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}
3374*4882a593Smuzhiyun 
/*
 * Ask the SMC firmware to start or stop CAC (power) monitoring.
 * Enabling is skipped for UVD states; support for long-term averaging
 * is dropped permanently if the firmware rejects the corresponding
 * message.  Returns -EINVAL if the enable message is rejected.
 */
static int ni_enable_smc_cac(struct radeon_device *rdev,
			     struct radeon_ps *radeon_new_state,
			     bool enable)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret = 0;
	PPSMC_Result smc_result;

	if (!ni_pi->enable_cac)
		return 0;

	if (enable) {
		if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);

			if (ni_pi->support_cac_long_term_average) {
				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
				if (smc_result != PPSMC_Result_OK)
					ni_pi->support_cac_long_term_average = false;
			}

			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
			if (smc_result != PPSMC_Result_OK)
				ret = -EINVAL;

			ni_pi->cac_enabled = (smc_result == PPSMC_Result_OK);
		}
	} else if (ni_pi->cac_enabled) {
		smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);

		ni_pi->cac_enabled = false;

		if (ni_pi->support_cac_long_term_average) {
			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
			if (smc_result != PPSMC_Result_OK)
				ni_pi->support_cac_long_term_average = false;
		}
	}

	return ret;
}
3415*4882a593Smuzhiyun 
/*
 * Forward a PCIe performance request to the platform via ACPI,
 * registering with the platform before the first gen-speed request and
 * deregistering on REMOVE_REGISTRY.  No-op without CONFIG_ACPI.
 */
static int ni_pcie_performance_request(struct radeon_device *rdev,
				       u8 perf_req, bool advertise)
{
#if defined(CONFIG_ACPI)
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	bool gen_request = (perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
			   (perf_req == PCIE_PERF_REQ_PECI_GEN2);

	if (gen_request) {
		/* notify the platform once, before the first request */
		if (!eg_pi->pcie_performance_request_registered)
			radeon_acpi_pcie_notify_device_ready(rdev);
		eg_pi->pcie_performance_request_registered = true;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	}

	if (perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY &&
	    eg_pi->pcie_performance_request_registered) {
		eg_pi->pcie_performance_request_registered = false;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	}
#endif
	return 0;
}
3436*4882a593Smuzhiyun 
ni_advertise_gen2_capability(struct radeon_device * rdev)3437*4882a593Smuzhiyun static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3438*4882a593Smuzhiyun {
3439*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3440*4882a593Smuzhiyun 	u32 tmp;
3441*4882a593Smuzhiyun 
3442*4882a593Smuzhiyun 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3443*4882a593Smuzhiyun 
3444*4882a593Smuzhiyun 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3445*4882a593Smuzhiyun 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3446*4882a593Smuzhiyun 		pi->pcie_gen2 = true;
3447*4882a593Smuzhiyun 	else
3448*4882a593Smuzhiyun 		pi->pcie_gen2 = false;
3449*4882a593Smuzhiyun 
3450*4882a593Smuzhiyun 	if (!pi->pcie_gen2)
3451*4882a593Smuzhiyun 		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3452*4882a593Smuzhiyun 
3453*4882a593Smuzhiyun 	return 0;
3454*4882a593Smuzhiyun }
3455*4882a593Smuzhiyun 
/*
 * Enable or disable hardware-driven dynamic PCIe gen2 speed switching
 * in the BIF, but only if the link partner has advertised gen2
 * support.  The enable path also pulses LC_CLR_FAILED_SPD_CHANGE_CNT
 * to reset the failed-speed-change counter.
 */
static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					    bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
		if (enable) {
			/* if we booted in gen1, route speed requests
			 * through the CG client first */
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);
			}
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
			tmp |= LC_GEN2_EN_STRAP;

			/* pulse the failed-speed-change counter clear bit */
			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			udelay(10);
			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		} else {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp &= ~LC_GEN2_EN_STRAP;
			}
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		}
	}
}
3494*4882a593Smuzhiyun 
/**
 * ni_enable_dynamic_pcie_gen2 - toggle dynamic pcie gen2 support
 * @rdev: radeon_device pointer
 * @enable: enable or disable dynamic gen2 switching
 *
 * Programs the BIF side first, then flips the matching enable bit
 * in GENERAL_PWRMGT.
 */
static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					bool enable)
{
	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);

	WREG32_P(GENERAL_PWRMGT, enable ? ENABLE_GEN2PCIE : 0, ~ENABLE_GEN2PCIE);
}
3505*4882a593Smuzhiyun 
ni_set_uvd_clock_before_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)3506*4882a593Smuzhiyun void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3507*4882a593Smuzhiyun 					   struct radeon_ps *new_ps,
3508*4882a593Smuzhiyun 					   struct radeon_ps *old_ps)
3509*4882a593Smuzhiyun {
3510*4882a593Smuzhiyun 	struct ni_ps *new_state = ni_get_ps(new_ps);
3511*4882a593Smuzhiyun 	struct ni_ps *current_state = ni_get_ps(old_ps);
3512*4882a593Smuzhiyun 
3513*4882a593Smuzhiyun 	if ((new_ps->vclk == old_ps->vclk) &&
3514*4882a593Smuzhiyun 	    (new_ps->dclk == old_ps->dclk))
3515*4882a593Smuzhiyun 		return;
3516*4882a593Smuzhiyun 
3517*4882a593Smuzhiyun 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3518*4882a593Smuzhiyun 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3519*4882a593Smuzhiyun 		return;
3520*4882a593Smuzhiyun 
3521*4882a593Smuzhiyun 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3522*4882a593Smuzhiyun }
3523*4882a593Smuzhiyun 
ni_set_uvd_clock_after_set_eng_clock(struct radeon_device * rdev,struct radeon_ps * new_ps,struct radeon_ps * old_ps)3524*4882a593Smuzhiyun void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3525*4882a593Smuzhiyun 					  struct radeon_ps *new_ps,
3526*4882a593Smuzhiyun 					  struct radeon_ps *old_ps)
3527*4882a593Smuzhiyun {
3528*4882a593Smuzhiyun 	struct ni_ps *new_state = ni_get_ps(new_ps);
3529*4882a593Smuzhiyun 	struct ni_ps *current_state = ni_get_ps(old_ps);
3530*4882a593Smuzhiyun 
3531*4882a593Smuzhiyun 	if ((new_ps->vclk == old_ps->vclk) &&
3532*4882a593Smuzhiyun 	    (new_ps->dclk == old_ps->dclk))
3533*4882a593Smuzhiyun 		return;
3534*4882a593Smuzhiyun 
3535*4882a593Smuzhiyun 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3536*4882a593Smuzhiyun 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3537*4882a593Smuzhiyun 		return;
3538*4882a593Smuzhiyun 
3539*4882a593Smuzhiyun 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3540*4882a593Smuzhiyun }
3541*4882a593Smuzhiyun 
/**
 * ni_dpm_setup_asic - one-time dpm hardware setup
 * @rdev: radeon_device pointer
 *
 * Loads the MC firmware, snapshots clock and arbitration registers,
 * determines the memory type, optionally advertises pcie gen2
 * capability, and enables ACPI power management.  An MC microcode
 * load failure is logged but deliberately not fatal here.
 */
void ni_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int r;

	r = ni_mc_load_microcode(rdev);
	if (r)
		DRM_ERROR("Failed to load MC firmware!\n");
	ni_read_clock_registers(rdev);
	btc_read_arb_registers(rdev);
	rv770_get_memory_type(rdev);
	/* Only advertise gen2 when the platform supports performance requests. */
	if (eg_pi->pcie_performance_request)
		ni_advertise_gen2_capability(rdev);
	rv770_get_pcie_gen2_status(rdev);
	rv770_enable_acpi_pm(rdev);
}
3558*4882a593Smuzhiyun 
ni_update_current_ps(struct radeon_device * rdev,struct radeon_ps * rps)3559*4882a593Smuzhiyun void ni_update_current_ps(struct radeon_device *rdev,
3560*4882a593Smuzhiyun 			  struct radeon_ps *rps)
3561*4882a593Smuzhiyun {
3562*4882a593Smuzhiyun 	struct ni_ps *new_ps = ni_get_ps(rps);
3563*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3564*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3565*4882a593Smuzhiyun 
3566*4882a593Smuzhiyun 	eg_pi->current_rps = *rps;
3567*4882a593Smuzhiyun 	ni_pi->current_ps = *new_ps;
3568*4882a593Smuzhiyun 	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3569*4882a593Smuzhiyun }
3570*4882a593Smuzhiyun 
ni_update_requested_ps(struct radeon_device * rdev,struct radeon_ps * rps)3571*4882a593Smuzhiyun void ni_update_requested_ps(struct radeon_device *rdev,
3572*4882a593Smuzhiyun 			    struct radeon_ps *rps)
3573*4882a593Smuzhiyun {
3574*4882a593Smuzhiyun 	struct ni_ps *new_ps = ni_get_ps(rps);
3575*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3576*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3577*4882a593Smuzhiyun 
3578*4882a593Smuzhiyun 	eg_pi->requested_rps = *rps;
3579*4882a593Smuzhiyun 	ni_pi->requested_ps = *new_ps;
3580*4882a593Smuzhiyun 	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3581*4882a593Smuzhiyun }
3582*4882a593Smuzhiyun 
/**
 * ni_dpm_enable - bring up dynamic power management
 * @rdev: radeon_device pointer
 *
 * Full SMC/dpm bring-up sequence: program clock-gating defaults,
 * construct voltage and MC register tables, upload and configure the
 * SMC firmware and its tables, start the SMC, and finally enable the
 * individual clock-gating domains and thermal throttling.  The call
 * order follows the hardware programming sequence and must not be
 * rearranged.  Returns 0 on success or a negative error code; fails
 * with -EINVAL if dpm is already enabled.
 */
int ni_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		ni_cg_clockgating_default(rdev);
	/* Refuse a double enable. */
	if (btc_dpm_enabled(rdev))
		return -EINVAL;
	if (pi->mg_clock_gating)
		ni_mg_clockgating_default(rdev);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_default(rdev);
	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}
	/* MC register table failure is non-fatal: fall back to static ac timing. */
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
	}
	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, true);
	/* From here on, everything talks to the SMC firmware. */
	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = ni_process_firmware_header(rdev);
	if (ret) {
		DRM_ERROR("ni_process_firmware_header failed\n");
		return ret;
	}
	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
	if (ret) {
		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
		return ret;
	}
	ret = ni_init_smc_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_table failed\n");
		return ret;
	}
	ret = ni_init_smc_spll_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_spll_table failed\n");
		return ret;
	}
	ret = ni_init_arb_table_index(rdev);
	if (ret) {
		DRM_ERROR("ni_init_arb_table_index failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("ni_populate_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_initialize_smc_cac_tables(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
		return ret;
	}
	ret = ni_initialize_hardware_cac_manager(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
		return ret;
	}
	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
		return ret;
	}
	ni_program_response_times(rdev);
	r7xx_start_smc(rdev);
	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);
	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);
	cypress_start_dpm(rdev);
	/* dpm is running; now enable the clock-gating domains. */
	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, true);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, true);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	/* Record the boot state as the current state. */
	ni_update_current_ps(rdev, boot_ps);

	return 0;
}
3700*4882a593Smuzhiyun 
/**
 * ni_dpm_disable - tear down dynamic power management
 * @rdev: radeon_device pointer
 *
 * Reverse of ni_dpm_enable(): disables throttling, power containment,
 * CAC, spread spectrum and clock gating, stops dpm and the SMC, resets
 * to default state and switches the arbiter back to f0.  No-op when
 * dpm is not currently enabled.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* Stop the thermal interrupt if we were using the internal sensor. */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	/* The boot state is current again. */
	ni_update_current_ps(rdev, boot_ps);
}
3738*4882a593Smuzhiyun 
ni_power_control_set_level(struct radeon_device * rdev)3739*4882a593Smuzhiyun static int ni_power_control_set_level(struct radeon_device *rdev)
3740*4882a593Smuzhiyun {
3741*4882a593Smuzhiyun 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3742*4882a593Smuzhiyun 	int ret;
3743*4882a593Smuzhiyun 
3744*4882a593Smuzhiyun 	ret = ni_restrict_performance_levels_before_switch(rdev);
3745*4882a593Smuzhiyun 	if (ret)
3746*4882a593Smuzhiyun 		return ret;
3747*4882a593Smuzhiyun 	ret = rv770_halt_smc(rdev);
3748*4882a593Smuzhiyun 	if (ret)
3749*4882a593Smuzhiyun 		return ret;
3750*4882a593Smuzhiyun 	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3751*4882a593Smuzhiyun 	if (ret)
3752*4882a593Smuzhiyun 		return ret;
3753*4882a593Smuzhiyun 	ret = rv770_resume_smc(rdev);
3754*4882a593Smuzhiyun 	if (ret)
3755*4882a593Smuzhiyun 		return ret;
3756*4882a593Smuzhiyun 	ret = rv770_set_sw_state(rdev);
3757*4882a593Smuzhiyun 	if (ret)
3758*4882a593Smuzhiyun 		return ret;
3759*4882a593Smuzhiyun 
3760*4882a593Smuzhiyun 	return 0;
3761*4882a593Smuzhiyun }
3762*4882a593Smuzhiyun 
ni_dpm_pre_set_power_state(struct radeon_device * rdev)3763*4882a593Smuzhiyun int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3764*4882a593Smuzhiyun {
3765*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3766*4882a593Smuzhiyun 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3767*4882a593Smuzhiyun 	struct radeon_ps *new_ps = &requested_ps;
3768*4882a593Smuzhiyun 
3769*4882a593Smuzhiyun 	ni_update_requested_ps(rdev, new_ps);
3770*4882a593Smuzhiyun 
3771*4882a593Smuzhiyun 	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3772*4882a593Smuzhiyun 
3773*4882a593Smuzhiyun 	return 0;
3774*4882a593Smuzhiyun }
3775*4882a593Smuzhiyun 
/**
 * ni_dpm_set_power_state - switch to the requested power state
 * @rdev: radeon_device pointer
 *
 * Performs the full state-switch sequence: restrict performance
 * levels, pre-adjust UVD clocks, drop CAC/power containment, halt the
 * SMC, upload the new software state (and MC register table when
 * dynamic ac timing is active), program memory timings, resume the
 * SMC, commit the state, post-adjust UVD clocks, re-enable CAC and
 * power containment, and finally refresh the TDP limits.  The call
 * order follows the hardware programming sequence.  Returns 0 on
 * success or the first failing step's error code.
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	struct radeon_ps *old_ps = &eg_pi->current_rps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* Disable power containment and CAC across the switch. */
	ret = ni_enable_power_containment(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}
	ret = ni_enable_smc_cac(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ret = ni_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("ni_upload_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
		return ret;
	}
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
	/* Re-enable CAC and power containment for the new state. */
	ret = ni_enable_smc_cac(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = ni_enable_power_containment(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}

	/* update tdp */
	ret = ni_power_control_set_level(rdev);
	if (ret) {
		DRM_ERROR("ni_power_control_set_level failed\n");
		return ret;
	}

	return 0;
}
3854*4882a593Smuzhiyun 
ni_dpm_post_set_power_state(struct radeon_device * rdev)3855*4882a593Smuzhiyun void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3856*4882a593Smuzhiyun {
3857*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3858*4882a593Smuzhiyun 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
3859*4882a593Smuzhiyun 
3860*4882a593Smuzhiyun 	ni_update_current_ps(rdev, new_ps);
3861*4882a593Smuzhiyun }
3862*4882a593Smuzhiyun 
3863*4882a593Smuzhiyun #if 0
3864*4882a593Smuzhiyun void ni_dpm_reset_asic(struct radeon_device *rdev)
3865*4882a593Smuzhiyun {
3866*4882a593Smuzhiyun 	ni_restrict_performance_levels_before_switch(rdev);
3867*4882a593Smuzhiyun 	rv770_set_boot_state(rdev);
3868*4882a593Smuzhiyun }
3869*4882a593Smuzhiyun #endif
3870*4882a593Smuzhiyun 
3871*4882a593Smuzhiyun union power_info {
3872*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO info;
3873*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO_V2 info_2;
3874*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO_V3 info_3;
3875*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3876*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3877*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3878*4882a593Smuzhiyun };
3879*4882a593Smuzhiyun 
3880*4882a593Smuzhiyun union pplib_clock_info {
3881*4882a593Smuzhiyun 	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3882*4882a593Smuzhiyun 	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3883*4882a593Smuzhiyun 	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3884*4882a593Smuzhiyun 	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3885*4882a593Smuzhiyun };
3886*4882a593Smuzhiyun 
3887*4882a593Smuzhiyun union pplib_power_state {
3888*4882a593Smuzhiyun 	struct _ATOM_PPLIB_STATE v1;
3889*4882a593Smuzhiyun 	struct _ATOM_PPLIB_STATE_V2 v2;
3890*4882a593Smuzhiyun };
3891*4882a593Smuzhiyun 
/**
 * ni_parse_pplib_non_clock_info - fill a radeon_ps from the non-clock table
 * @rdev: radeon_device pointer
 * @rps: power state to fill in
 * @non_clock_info: non-clock info entry from the vbios table
 * @table_rev: revision of the non-clock info table
 *
 * Copies caps/classification, derives the UVD clocks (explicit in
 * newer table revisions, defaults for UVD states in older ones,
 * otherwise zero) and records pointers to the boot and UVD states
 * on the device.
 */
static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
					  struct radeon_ps *rps,
					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					  u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
		/* Newer tables carry the UVD clocks explicitly. */
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
	} else {
		rps->vclk = 0;
		rps->dclk = 0;
	}

	/* Remember special states for later lookup. */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}
3917*4882a593Smuzhiyun 
/**
 * ni_parse_pplib_clock_info - fill one performance level from the vbios
 * @rdev: radeon_device pointer
 * @rps: power state the level belongs to
 * @index: performance level index within the state
 * @clock_info: clock info entry from the vbios table
 *
 * Decodes the evergreen-layout clock info into the performance level,
 * patches up leakage/boot values, and records ACPI, ULV and default
 * AC-limit information derived from the state's classification.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	ps->performance_level_count = index + 1;

	/* Clocks are split low/high in the table; reassemble them. */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary */
	if (pl->vddc == 0xff01) {
		/* 0xff01 is a leakage marker; substitute the real max vddc. */
		if (pi->max_vddc)
			pl->vddc = pi->max_vddc;
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	/* Track the vddc range seen across the whole table. */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		/* The performance state defines the max clocks/voltages on AC. */
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
3982*4882a593Smuzhiyun 
ni_parse_power_table(struct radeon_device * rdev)3983*4882a593Smuzhiyun static int ni_parse_power_table(struct radeon_device *rdev)
3984*4882a593Smuzhiyun {
3985*4882a593Smuzhiyun 	struct radeon_mode_info *mode_info = &rdev->mode_info;
3986*4882a593Smuzhiyun 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3987*4882a593Smuzhiyun 	union pplib_power_state *power_state;
3988*4882a593Smuzhiyun 	int i, j;
3989*4882a593Smuzhiyun 	union pplib_clock_info *clock_info;
3990*4882a593Smuzhiyun 	union power_info *power_info;
3991*4882a593Smuzhiyun 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3992*4882a593Smuzhiyun 	u16 data_offset;
3993*4882a593Smuzhiyun 	u8 frev, crev;
3994*4882a593Smuzhiyun 	struct ni_ps *ps;
3995*4882a593Smuzhiyun 
3996*4882a593Smuzhiyun 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3997*4882a593Smuzhiyun 				   &frev, &crev, &data_offset))
3998*4882a593Smuzhiyun 		return -EINVAL;
3999*4882a593Smuzhiyun 	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
4000*4882a593Smuzhiyun 
4001*4882a593Smuzhiyun 	rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates,
4002*4882a593Smuzhiyun 				  sizeof(struct radeon_ps),
4003*4882a593Smuzhiyun 				  GFP_KERNEL);
4004*4882a593Smuzhiyun 	if (!rdev->pm.dpm.ps)
4005*4882a593Smuzhiyun 		return -ENOMEM;
4006*4882a593Smuzhiyun 
4007*4882a593Smuzhiyun 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4008*4882a593Smuzhiyun 		power_state = (union pplib_power_state *)
4009*4882a593Smuzhiyun 			(mode_info->atom_context->bios + data_offset +
4010*4882a593Smuzhiyun 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4011*4882a593Smuzhiyun 			 i * power_info->pplib.ucStateEntrySize);
4012*4882a593Smuzhiyun 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4013*4882a593Smuzhiyun 			(mode_info->atom_context->bios + data_offset +
4014*4882a593Smuzhiyun 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4015*4882a593Smuzhiyun 			 (power_state->v1.ucNonClockStateIndex *
4016*4882a593Smuzhiyun 			  power_info->pplib.ucNonClockSize));
4017*4882a593Smuzhiyun 		if (power_info->pplib.ucStateEntrySize - 1) {
4018*4882a593Smuzhiyun 			u8 *idx;
4019*4882a593Smuzhiyun 			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4020*4882a593Smuzhiyun 			if (ps == NULL) {
4021*4882a593Smuzhiyun 				kfree(rdev->pm.dpm.ps);
4022*4882a593Smuzhiyun 				return -ENOMEM;
4023*4882a593Smuzhiyun 			}
4024*4882a593Smuzhiyun 			rdev->pm.dpm.ps[i].ps_priv = ps;
4025*4882a593Smuzhiyun 			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4026*4882a593Smuzhiyun 							 non_clock_info,
4027*4882a593Smuzhiyun 							 power_info->pplib.ucNonClockSize);
4028*4882a593Smuzhiyun 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4029*4882a593Smuzhiyun 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4030*4882a593Smuzhiyun 				clock_info = (union pplib_clock_info *)
4031*4882a593Smuzhiyun 					(mode_info->atom_context->bios + data_offset +
4032*4882a593Smuzhiyun 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4033*4882a593Smuzhiyun 					 (idx[j] * power_info->pplib.ucClockInfoSize));
4034*4882a593Smuzhiyun 				ni_parse_pplib_clock_info(rdev,
4035*4882a593Smuzhiyun 							  &rdev->pm.dpm.ps[i], j,
4036*4882a593Smuzhiyun 							  clock_info);
4037*4882a593Smuzhiyun 			}
4038*4882a593Smuzhiyun 		}
4039*4882a593Smuzhiyun 	}
4040*4882a593Smuzhiyun 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4041*4882a593Smuzhiyun 	return 0;
4042*4882a593Smuzhiyun }
4043*4882a593Smuzhiyun 
ni_dpm_init(struct radeon_device * rdev)4044*4882a593Smuzhiyun int ni_dpm_init(struct radeon_device *rdev)
4045*4882a593Smuzhiyun {
4046*4882a593Smuzhiyun 	struct rv7xx_power_info *pi;
4047*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi;
4048*4882a593Smuzhiyun 	struct ni_power_info *ni_pi;
4049*4882a593Smuzhiyun 	struct atom_clock_dividers dividers;
4050*4882a593Smuzhiyun 	int ret;
4051*4882a593Smuzhiyun 
4052*4882a593Smuzhiyun 	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4053*4882a593Smuzhiyun 	if (ni_pi == NULL)
4054*4882a593Smuzhiyun 		return -ENOMEM;
4055*4882a593Smuzhiyun 	rdev->pm.dpm.priv = ni_pi;
4056*4882a593Smuzhiyun 	eg_pi = &ni_pi->eg;
4057*4882a593Smuzhiyun 	pi = &eg_pi->rv7xx;
4058*4882a593Smuzhiyun 
4059*4882a593Smuzhiyun 	rv770_get_max_vddc(rdev);
4060*4882a593Smuzhiyun 
4061*4882a593Smuzhiyun 	eg_pi->ulv.supported = false;
4062*4882a593Smuzhiyun 	pi->acpi_vddc = 0;
4063*4882a593Smuzhiyun 	eg_pi->acpi_vddci = 0;
4064*4882a593Smuzhiyun 	pi->min_vddc_in_table = 0;
4065*4882a593Smuzhiyun 	pi->max_vddc_in_table = 0;
4066*4882a593Smuzhiyun 
4067*4882a593Smuzhiyun 	ret = r600_get_platform_caps(rdev);
4068*4882a593Smuzhiyun 	if (ret)
4069*4882a593Smuzhiyun 		return ret;
4070*4882a593Smuzhiyun 
4071*4882a593Smuzhiyun 	ret = ni_parse_power_table(rdev);
4072*4882a593Smuzhiyun 	if (ret)
4073*4882a593Smuzhiyun 		return ret;
4074*4882a593Smuzhiyun 	ret = r600_parse_extended_power_table(rdev);
4075*4882a593Smuzhiyun 	if (ret)
4076*4882a593Smuzhiyun 		return ret;
4077*4882a593Smuzhiyun 
4078*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4079*4882a593Smuzhiyun 		kcalloc(4,
4080*4882a593Smuzhiyun 			sizeof(struct radeon_clock_voltage_dependency_entry),
4081*4882a593Smuzhiyun 			GFP_KERNEL);
4082*4882a593Smuzhiyun 	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4083*4882a593Smuzhiyun 		r600_free_extended_power_table(rdev);
4084*4882a593Smuzhiyun 		return -ENOMEM;
4085*4882a593Smuzhiyun 	}
4086*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4087*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4088*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4089*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4090*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4091*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4092*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4093*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4094*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4095*4882a593Smuzhiyun 
4096*4882a593Smuzhiyun 	ni_patch_dependency_tables_based_on_leakage(rdev);
4097*4882a593Smuzhiyun 
4098*4882a593Smuzhiyun 	if (rdev->pm.dpm.voltage_response_time == 0)
4099*4882a593Smuzhiyun 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4100*4882a593Smuzhiyun 	if (rdev->pm.dpm.backbias_response_time == 0)
4101*4882a593Smuzhiyun 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4102*4882a593Smuzhiyun 
4103*4882a593Smuzhiyun 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4104*4882a593Smuzhiyun 					     0, false, &dividers);
4105*4882a593Smuzhiyun 	if (ret)
4106*4882a593Smuzhiyun 		pi->ref_div = dividers.ref_div + 1;
4107*4882a593Smuzhiyun 	else
4108*4882a593Smuzhiyun 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4109*4882a593Smuzhiyun 
4110*4882a593Smuzhiyun 	pi->rlp = RV770_RLP_DFLT;
4111*4882a593Smuzhiyun 	pi->rmp = RV770_RMP_DFLT;
4112*4882a593Smuzhiyun 	pi->lhp = RV770_LHP_DFLT;
4113*4882a593Smuzhiyun 	pi->lmp = RV770_LMP_DFLT;
4114*4882a593Smuzhiyun 
4115*4882a593Smuzhiyun 	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4116*4882a593Smuzhiyun 	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4117*4882a593Smuzhiyun 	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4118*4882a593Smuzhiyun 	eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4119*4882a593Smuzhiyun 
4120*4882a593Smuzhiyun 	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4121*4882a593Smuzhiyun 	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4122*4882a593Smuzhiyun 	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4123*4882a593Smuzhiyun 	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4124*4882a593Smuzhiyun 
4125*4882a593Smuzhiyun 	eg_pi->smu_uvd_hs = true;
4126*4882a593Smuzhiyun 
4127*4882a593Smuzhiyun 	if (rdev->pdev->device == 0x6707) {
4128*4882a593Smuzhiyun 		pi->mclk_strobe_mode_threshold = 55000;
4129*4882a593Smuzhiyun 		pi->mclk_edc_enable_threshold = 55000;
4130*4882a593Smuzhiyun 		eg_pi->mclk_edc_wr_enable_threshold = 55000;
4131*4882a593Smuzhiyun 	} else {
4132*4882a593Smuzhiyun 		pi->mclk_strobe_mode_threshold = 40000;
4133*4882a593Smuzhiyun 		pi->mclk_edc_enable_threshold = 40000;
4134*4882a593Smuzhiyun 		eg_pi->mclk_edc_wr_enable_threshold = 40000;
4135*4882a593Smuzhiyun 	}
4136*4882a593Smuzhiyun 	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4137*4882a593Smuzhiyun 
4138*4882a593Smuzhiyun 	pi->voltage_control =
4139*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4140*4882a593Smuzhiyun 
4141*4882a593Smuzhiyun 	pi->mvdd_control =
4142*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4143*4882a593Smuzhiyun 
4144*4882a593Smuzhiyun 	eg_pi->vddci_control =
4145*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4146*4882a593Smuzhiyun 
4147*4882a593Smuzhiyun 	rv770_get_engine_memory_ss(rdev);
4148*4882a593Smuzhiyun 
4149*4882a593Smuzhiyun 	pi->asi = RV770_ASI_DFLT;
4150*4882a593Smuzhiyun 	pi->pasi = CYPRESS_HASI_DFLT;
4151*4882a593Smuzhiyun 	pi->vrc = CYPRESS_VRC_DFLT;
4152*4882a593Smuzhiyun 
4153*4882a593Smuzhiyun 	pi->power_gating = false;
4154*4882a593Smuzhiyun 
4155*4882a593Smuzhiyun 	pi->gfx_clock_gating = true;
4156*4882a593Smuzhiyun 
4157*4882a593Smuzhiyun 	pi->mg_clock_gating = true;
4158*4882a593Smuzhiyun 	pi->mgcgtssm = true;
4159*4882a593Smuzhiyun 	eg_pi->ls_clock_gating = false;
4160*4882a593Smuzhiyun 	eg_pi->sclk_deep_sleep = false;
4161*4882a593Smuzhiyun 
4162*4882a593Smuzhiyun 	pi->dynamic_pcie_gen2 = true;
4163*4882a593Smuzhiyun 
4164*4882a593Smuzhiyun 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4165*4882a593Smuzhiyun 		pi->thermal_protection = true;
4166*4882a593Smuzhiyun 	else
4167*4882a593Smuzhiyun 		pi->thermal_protection = false;
4168*4882a593Smuzhiyun 
4169*4882a593Smuzhiyun 	pi->display_gap = true;
4170*4882a593Smuzhiyun 
4171*4882a593Smuzhiyun 	pi->dcodt = true;
4172*4882a593Smuzhiyun 
4173*4882a593Smuzhiyun 	pi->ulps = true;
4174*4882a593Smuzhiyun 
4175*4882a593Smuzhiyun 	eg_pi->dynamic_ac_timing = true;
4176*4882a593Smuzhiyun 	eg_pi->abm = true;
4177*4882a593Smuzhiyun 	eg_pi->mcls = true;
4178*4882a593Smuzhiyun 	eg_pi->light_sleep = true;
4179*4882a593Smuzhiyun 	eg_pi->memory_transition = true;
4180*4882a593Smuzhiyun #if defined(CONFIG_ACPI)
4181*4882a593Smuzhiyun 	eg_pi->pcie_performance_request =
4182*4882a593Smuzhiyun 		radeon_acpi_is_pcie_performance_request_supported(rdev);
4183*4882a593Smuzhiyun #else
4184*4882a593Smuzhiyun 	eg_pi->pcie_performance_request = false;
4185*4882a593Smuzhiyun #endif
4186*4882a593Smuzhiyun 
4187*4882a593Smuzhiyun 	eg_pi->dll_default_on = false;
4188*4882a593Smuzhiyun 
4189*4882a593Smuzhiyun 	eg_pi->sclk_deep_sleep = false;
4190*4882a593Smuzhiyun 
4191*4882a593Smuzhiyun 	pi->mclk_stutter_mode_threshold = 0;
4192*4882a593Smuzhiyun 
4193*4882a593Smuzhiyun 	pi->sram_end = SMC_RAM_END;
4194*4882a593Smuzhiyun 
4195*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4196*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4197*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4198*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4199*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4200*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4201*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4202*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4203*4882a593Smuzhiyun 
4204*4882a593Smuzhiyun 	ni_pi->cac_data.leakage_coefficients.at = 516;
4205*4882a593Smuzhiyun 	ni_pi->cac_data.leakage_coefficients.bt = 18;
4206*4882a593Smuzhiyun 	ni_pi->cac_data.leakage_coefficients.av = 51;
4207*4882a593Smuzhiyun 	ni_pi->cac_data.leakage_coefficients.bv = 2957;
4208*4882a593Smuzhiyun 
4209*4882a593Smuzhiyun 	switch (rdev->pdev->device) {
4210*4882a593Smuzhiyun 	case 0x6700:
4211*4882a593Smuzhiyun 	case 0x6701:
4212*4882a593Smuzhiyun 	case 0x6702:
4213*4882a593Smuzhiyun 	case 0x6703:
4214*4882a593Smuzhiyun 	case 0x6718:
4215*4882a593Smuzhiyun 		ni_pi->cac_weights = &cac_weights_cayman_xt;
4216*4882a593Smuzhiyun 		break;
4217*4882a593Smuzhiyun 	case 0x6705:
4218*4882a593Smuzhiyun 	case 0x6719:
4219*4882a593Smuzhiyun 	case 0x671D:
4220*4882a593Smuzhiyun 	case 0x671C:
4221*4882a593Smuzhiyun 	default:
4222*4882a593Smuzhiyun 		ni_pi->cac_weights = &cac_weights_cayman_pro;
4223*4882a593Smuzhiyun 		break;
4224*4882a593Smuzhiyun 	case 0x6704:
4225*4882a593Smuzhiyun 	case 0x6706:
4226*4882a593Smuzhiyun 	case 0x6707:
4227*4882a593Smuzhiyun 	case 0x6708:
4228*4882a593Smuzhiyun 	case 0x6709:
4229*4882a593Smuzhiyun 		ni_pi->cac_weights = &cac_weights_cayman_le;
4230*4882a593Smuzhiyun 		break;
4231*4882a593Smuzhiyun 	}
4232*4882a593Smuzhiyun 
4233*4882a593Smuzhiyun 	if (ni_pi->cac_weights->enable_power_containment_by_default) {
4234*4882a593Smuzhiyun 		ni_pi->enable_power_containment = true;
4235*4882a593Smuzhiyun 		ni_pi->enable_cac = true;
4236*4882a593Smuzhiyun 		ni_pi->enable_sq_ramping = true;
4237*4882a593Smuzhiyun 	} else {
4238*4882a593Smuzhiyun 		ni_pi->enable_power_containment = false;
4239*4882a593Smuzhiyun 		ni_pi->enable_cac = false;
4240*4882a593Smuzhiyun 		ni_pi->enable_sq_ramping = false;
4241*4882a593Smuzhiyun 	}
4242*4882a593Smuzhiyun 
4243*4882a593Smuzhiyun 	ni_pi->driver_calculate_cac_leakage = false;
4244*4882a593Smuzhiyun 	ni_pi->cac_configuration_required = true;
4245*4882a593Smuzhiyun 
4246*4882a593Smuzhiyun 	if (ni_pi->cac_configuration_required) {
4247*4882a593Smuzhiyun 		ni_pi->support_cac_long_term_average = true;
4248*4882a593Smuzhiyun 		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4249*4882a593Smuzhiyun 		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4250*4882a593Smuzhiyun 	} else {
4251*4882a593Smuzhiyun 		ni_pi->support_cac_long_term_average = false;
4252*4882a593Smuzhiyun 		ni_pi->lta_window_size = 0;
4253*4882a593Smuzhiyun 		ni_pi->lts_truncate = 0;
4254*4882a593Smuzhiyun 	}
4255*4882a593Smuzhiyun 
4256*4882a593Smuzhiyun 	ni_pi->use_power_boost_limit = true;
4257*4882a593Smuzhiyun 
4258*4882a593Smuzhiyun 	/* make sure dc limits are valid */
4259*4882a593Smuzhiyun 	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4260*4882a593Smuzhiyun 	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4261*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4262*4882a593Smuzhiyun 			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4263*4882a593Smuzhiyun 
4264*4882a593Smuzhiyun 	return 0;
4265*4882a593Smuzhiyun }
4266*4882a593Smuzhiyun 
ni_dpm_fini(struct radeon_device * rdev)4267*4882a593Smuzhiyun void ni_dpm_fini(struct radeon_device *rdev)
4268*4882a593Smuzhiyun {
4269*4882a593Smuzhiyun 	int i;
4270*4882a593Smuzhiyun 
4271*4882a593Smuzhiyun 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4272*4882a593Smuzhiyun 		kfree(rdev->pm.dpm.ps[i].ps_priv);
4273*4882a593Smuzhiyun 	}
4274*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.ps);
4275*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.priv);
4276*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4277*4882a593Smuzhiyun 	r600_free_extended_power_table(rdev);
4278*4882a593Smuzhiyun }
4279*4882a593Smuzhiyun 
ni_dpm_print_power_state(struct radeon_device * rdev,struct radeon_ps * rps)4280*4882a593Smuzhiyun void ni_dpm_print_power_state(struct radeon_device *rdev,
4281*4882a593Smuzhiyun 			      struct radeon_ps *rps)
4282*4882a593Smuzhiyun {
4283*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
4284*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
4285*4882a593Smuzhiyun 	int i;
4286*4882a593Smuzhiyun 
4287*4882a593Smuzhiyun 	r600_dpm_print_class_info(rps->class, rps->class2);
4288*4882a593Smuzhiyun 	r600_dpm_print_cap_info(rps->caps);
4289*4882a593Smuzhiyun 	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4290*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++) {
4291*4882a593Smuzhiyun 		pl = &ps->performance_levels[i];
4292*4882a593Smuzhiyun 		if (rdev->family >= CHIP_TAHITI)
4293*4882a593Smuzhiyun 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4294*4882a593Smuzhiyun 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4295*4882a593Smuzhiyun 		else
4296*4882a593Smuzhiyun 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4297*4882a593Smuzhiyun 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4298*4882a593Smuzhiyun 	}
4299*4882a593Smuzhiyun 	r600_dpm_print_ps_status(rdev, rps);
4300*4882a593Smuzhiyun }
4301*4882a593Smuzhiyun 
ni_dpm_debugfs_print_current_performance_level(struct radeon_device * rdev,struct seq_file * m)4302*4882a593Smuzhiyun void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4303*4882a593Smuzhiyun 						    struct seq_file *m)
4304*4882a593Smuzhiyun {
4305*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4306*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
4307*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
4308*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
4309*4882a593Smuzhiyun 	u32 current_index =
4310*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4311*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
4312*4882a593Smuzhiyun 
4313*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
4314*4882a593Smuzhiyun 		seq_printf(m, "invalid dpm profile %d\n", current_index);
4315*4882a593Smuzhiyun 	} else {
4316*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
4317*4882a593Smuzhiyun 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4318*4882a593Smuzhiyun 		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4319*4882a593Smuzhiyun 			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4320*4882a593Smuzhiyun 	}
4321*4882a593Smuzhiyun }
4322*4882a593Smuzhiyun 
ni_dpm_get_current_sclk(struct radeon_device * rdev)4323*4882a593Smuzhiyun u32 ni_dpm_get_current_sclk(struct radeon_device *rdev)
4324*4882a593Smuzhiyun {
4325*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4326*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
4327*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
4328*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
4329*4882a593Smuzhiyun 	u32 current_index =
4330*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4331*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
4332*4882a593Smuzhiyun 
4333*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
4334*4882a593Smuzhiyun 		return 0;
4335*4882a593Smuzhiyun 	} else {
4336*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
4337*4882a593Smuzhiyun 		return pl->sclk;
4338*4882a593Smuzhiyun 	}
4339*4882a593Smuzhiyun }
4340*4882a593Smuzhiyun 
ni_dpm_get_current_mclk(struct radeon_device * rdev)4341*4882a593Smuzhiyun u32 ni_dpm_get_current_mclk(struct radeon_device *rdev)
4342*4882a593Smuzhiyun {
4343*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4344*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
4345*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
4346*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
4347*4882a593Smuzhiyun 	u32 current_index =
4348*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4349*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
4350*4882a593Smuzhiyun 
4351*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
4352*4882a593Smuzhiyun 		return 0;
4353*4882a593Smuzhiyun 	} else {
4354*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
4355*4882a593Smuzhiyun 		return pl->mclk;
4356*4882a593Smuzhiyun 	}
4357*4882a593Smuzhiyun }
4358*4882a593Smuzhiyun 
ni_dpm_get_sclk(struct radeon_device * rdev,bool low)4359*4882a593Smuzhiyun u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4360*4882a593Smuzhiyun {
4361*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4362*4882a593Smuzhiyun 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4363*4882a593Smuzhiyun 
4364*4882a593Smuzhiyun 	if (low)
4365*4882a593Smuzhiyun 		return requested_state->performance_levels[0].sclk;
4366*4882a593Smuzhiyun 	else
4367*4882a593Smuzhiyun 		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4368*4882a593Smuzhiyun }
4369*4882a593Smuzhiyun 
ni_dpm_get_mclk(struct radeon_device * rdev,bool low)4370*4882a593Smuzhiyun u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4371*4882a593Smuzhiyun {
4372*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4373*4882a593Smuzhiyun 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4374*4882a593Smuzhiyun 
4375*4882a593Smuzhiyun 	if (low)
4376*4882a593Smuzhiyun 		return requested_state->performance_levels[0].mclk;
4377*4882a593Smuzhiyun 	else
4378*4882a593Smuzhiyun 		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4379*4882a593Smuzhiyun }
4380*4882a593Smuzhiyun 
4381