xref: /OK3568_Linux_fs/kernel/drivers/gpu/drm/radeon/si_dpm.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun  * Copyright 2013 Advanced Micro Devices, Inc.
3*4882a593Smuzhiyun  *
4*4882a593Smuzhiyun  * Permission is hereby granted, free of charge, to any person obtaining a
5*4882a593Smuzhiyun  * copy of this software and associated documentation files (the "Software"),
6*4882a593Smuzhiyun  * to deal in the Software without restriction, including without limitation
7*4882a593Smuzhiyun  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8*4882a593Smuzhiyun  * and/or sell copies of the Software, and to permit persons to whom the
9*4882a593Smuzhiyun  * Software is furnished to do so, subject to the following conditions:
10*4882a593Smuzhiyun  *
11*4882a593Smuzhiyun  * The above copyright notice and this permission notice shall be included in
12*4882a593Smuzhiyun  * all copies or substantial portions of the Software.
13*4882a593Smuzhiyun  *
14*4882a593Smuzhiyun  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15*4882a593Smuzhiyun  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16*4882a593Smuzhiyun  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17*4882a593Smuzhiyun  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18*4882a593Smuzhiyun  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19*4882a593Smuzhiyun  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20*4882a593Smuzhiyun  * OTHER DEALINGS IN THE SOFTWARE.
21*4882a593Smuzhiyun  *
22*4882a593Smuzhiyun  */
23*4882a593Smuzhiyun 
24*4882a593Smuzhiyun #include <linux/math64.h>
25*4882a593Smuzhiyun #include <linux/pci.h>
26*4882a593Smuzhiyun #include <linux/seq_file.h>
27*4882a593Smuzhiyun 
28*4882a593Smuzhiyun #include "atom.h"
29*4882a593Smuzhiyun #include "r600_dpm.h"
30*4882a593Smuzhiyun #include "radeon.h"
31*4882a593Smuzhiyun #include "radeon_asic.h"
32*4882a593Smuzhiyun #include "si_dpm.h"
33*4882a593Smuzhiyun #include "sid.h"
34*4882a593Smuzhiyun 
35*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F0           0x0a
36*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F1           0x0b
37*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F2           0x0c
38*4882a593Smuzhiyun #define MC_CG_ARB_FREQ_F3           0x0d
39*4882a593Smuzhiyun 
40*4882a593Smuzhiyun #define SMC_RAM_END                 0x20000
41*4882a593Smuzhiyun 
42*4882a593Smuzhiyun #define SCLK_MIN_DEEPSLEEP_FREQ     1350
43*4882a593Smuzhiyun 
44*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_tahiti[] =
45*4882a593Smuzhiyun {
46*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0xc, SISLANDS_CACCONFIG_CGIND },
47*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
48*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x101, SISLANDS_CACCONFIG_CGIND },
49*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0xc, SISLANDS_CACCONFIG_CGIND },
50*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
51*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
52*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
53*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
54*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
55*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x8fc, SISLANDS_CACCONFIG_CGIND },
56*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
57*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x95, SISLANDS_CACCONFIG_CGIND },
58*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x34e, SISLANDS_CACCONFIG_CGIND },
59*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x1a1, SISLANDS_CACCONFIG_CGIND },
60*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0xda, SISLANDS_CACCONFIG_CGIND },
61*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
62*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
63*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x46, SISLANDS_CACCONFIG_CGIND },
64*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
65*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x208, SISLANDS_CACCONFIG_CGIND },
66*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0xe7, SISLANDS_CACCONFIG_CGIND },
67*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x948, SISLANDS_CACCONFIG_CGIND },
68*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
69*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
70*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
71*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
72*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
73*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
74*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
75*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
76*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x167, SISLANDS_CACCONFIG_CGIND },
77*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
78*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
79*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
80*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
81*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
82*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
83*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x2, SISLANDS_CACCONFIG_CGIND },
84*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
85*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x31, SISLANDS_CACCONFIG_CGIND },
86*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
87*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
88*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
89*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
90*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
91*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
92*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
93*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
94*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
95*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
96*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
97*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
98*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
99*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
100*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
101*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
102*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
103*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
104*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
105*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x18e, SISLANDS_CACCONFIG_CGIND },
106*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
107*4882a593Smuzhiyun };
108*4882a593Smuzhiyun 
109*4882a593Smuzhiyun static const struct si_cac_config_reg lcac_tahiti[] =
110*4882a593Smuzhiyun {
111*4882a593Smuzhiyun 	{ 0x143, 0x0001fffe, 1, 0x3, SISLANDS_CACCONFIG_CGIND },
112*4882a593Smuzhiyun 	{ 0x143, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
113*4882a593Smuzhiyun 	{ 0x146, 0x0001fffe, 1, 0x3, SISLANDS_CACCONFIG_CGIND },
114*4882a593Smuzhiyun 	{ 0x146, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
115*4882a593Smuzhiyun 	{ 0x149, 0x0001fffe, 1, 0x3, SISLANDS_CACCONFIG_CGIND },
116*4882a593Smuzhiyun 	{ 0x149, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
117*4882a593Smuzhiyun 	{ 0x14c, 0x0001fffe, 1, 0x3, SISLANDS_CACCONFIG_CGIND },
118*4882a593Smuzhiyun 	{ 0x14c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
119*4882a593Smuzhiyun 	{ 0x98, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
120*4882a593Smuzhiyun 	{ 0x98, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
121*4882a593Smuzhiyun 	{ 0x9b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
122*4882a593Smuzhiyun 	{ 0x9b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
123*4882a593Smuzhiyun 	{ 0x9e, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
124*4882a593Smuzhiyun 	{ 0x9e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
125*4882a593Smuzhiyun 	{ 0x101, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
126*4882a593Smuzhiyun 	{ 0x101, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
127*4882a593Smuzhiyun 	{ 0x104, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
128*4882a593Smuzhiyun 	{ 0x104, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
129*4882a593Smuzhiyun 	{ 0x107, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
130*4882a593Smuzhiyun 	{ 0x107, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
131*4882a593Smuzhiyun 	{ 0x10a, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
132*4882a593Smuzhiyun 	{ 0x10a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
133*4882a593Smuzhiyun 	{ 0x10d, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
134*4882a593Smuzhiyun 	{ 0x10d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
135*4882a593Smuzhiyun 	{ 0x8c, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
136*4882a593Smuzhiyun 	{ 0x8c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
137*4882a593Smuzhiyun 	{ 0x8f, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
138*4882a593Smuzhiyun 	{ 0x8f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
139*4882a593Smuzhiyun 	{ 0x92, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
140*4882a593Smuzhiyun 	{ 0x92, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
141*4882a593Smuzhiyun 	{ 0x95, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
142*4882a593Smuzhiyun 	{ 0x95, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
143*4882a593Smuzhiyun 	{ 0x14f, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
144*4882a593Smuzhiyun 	{ 0x14f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
145*4882a593Smuzhiyun 	{ 0x152, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
146*4882a593Smuzhiyun 	{ 0x152, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
147*4882a593Smuzhiyun 	{ 0x155, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
148*4882a593Smuzhiyun 	{ 0x155, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
149*4882a593Smuzhiyun 	{ 0x158, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
150*4882a593Smuzhiyun 	{ 0x158, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
151*4882a593Smuzhiyun 	{ 0x110, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
152*4882a593Smuzhiyun 	{ 0x110, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
153*4882a593Smuzhiyun 	{ 0x113, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
154*4882a593Smuzhiyun 	{ 0x113, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
155*4882a593Smuzhiyun 	{ 0x116, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
156*4882a593Smuzhiyun 	{ 0x116, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
157*4882a593Smuzhiyun 	{ 0x119, 0x0001fffe, 1, 0x8, SISLANDS_CACCONFIG_CGIND },
158*4882a593Smuzhiyun 	{ 0x119, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
159*4882a593Smuzhiyun 	{ 0x11c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
160*4882a593Smuzhiyun 	{ 0x11c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
161*4882a593Smuzhiyun 	{ 0x11f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
162*4882a593Smuzhiyun 	{ 0x11f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
163*4882a593Smuzhiyun 	{ 0x122, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
164*4882a593Smuzhiyun 	{ 0x122, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
165*4882a593Smuzhiyun 	{ 0x125, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
166*4882a593Smuzhiyun 	{ 0x125, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
167*4882a593Smuzhiyun 	{ 0x128, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
168*4882a593Smuzhiyun 	{ 0x128, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
169*4882a593Smuzhiyun 	{ 0x12b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
170*4882a593Smuzhiyun 	{ 0x12b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
171*4882a593Smuzhiyun 	{ 0x15b, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
172*4882a593Smuzhiyun 	{ 0x15b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
173*4882a593Smuzhiyun 	{ 0x15e, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
174*4882a593Smuzhiyun 	{ 0x15e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
175*4882a593Smuzhiyun 	{ 0x161, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
176*4882a593Smuzhiyun 	{ 0x161, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
177*4882a593Smuzhiyun 	{ 0x164, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
178*4882a593Smuzhiyun 	{ 0x164, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
179*4882a593Smuzhiyun 	{ 0x167, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
180*4882a593Smuzhiyun 	{ 0x167, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
181*4882a593Smuzhiyun 	{ 0x16a, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
182*4882a593Smuzhiyun 	{ 0x16a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
183*4882a593Smuzhiyun 	{ 0x16d, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
184*4882a593Smuzhiyun 	{ 0x16d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
185*4882a593Smuzhiyun 	{ 0x170, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
186*4882a593Smuzhiyun 	{ 0x170, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
187*4882a593Smuzhiyun 	{ 0x173, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
188*4882a593Smuzhiyun 	{ 0x173, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
189*4882a593Smuzhiyun 	{ 0x176, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
190*4882a593Smuzhiyun 	{ 0x176, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
191*4882a593Smuzhiyun 	{ 0x179, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
192*4882a593Smuzhiyun 	{ 0x179, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
193*4882a593Smuzhiyun 	{ 0x17c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
194*4882a593Smuzhiyun 	{ 0x17c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
195*4882a593Smuzhiyun 	{ 0x17f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
196*4882a593Smuzhiyun 	{ 0x17f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
197*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
198*4882a593Smuzhiyun 
199*4882a593Smuzhiyun };
200*4882a593Smuzhiyun 
201*4882a593Smuzhiyun static const struct si_cac_config_reg cac_override_tahiti[] =
202*4882a593Smuzhiyun {
203*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
204*4882a593Smuzhiyun };
205*4882a593Smuzhiyun 
206*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_tahiti =
207*4882a593Smuzhiyun {
208*4882a593Smuzhiyun 	((1 << 16) | 27027),
209*4882a593Smuzhiyun 	6,
210*4882a593Smuzhiyun 	0,
211*4882a593Smuzhiyun 	4,
212*4882a593Smuzhiyun 	95,
213*4882a593Smuzhiyun 	{
214*4882a593Smuzhiyun 		0UL,
215*4882a593Smuzhiyun 		0UL,
216*4882a593Smuzhiyun 		4521550UL,
217*4882a593Smuzhiyun 		309631529UL,
218*4882a593Smuzhiyun 		-1270850L,
219*4882a593Smuzhiyun 		4513710L,
220*4882a593Smuzhiyun 		40
221*4882a593Smuzhiyun 	},
222*4882a593Smuzhiyun 	595000000UL,
223*4882a593Smuzhiyun 	12,
224*4882a593Smuzhiyun 	{
225*4882a593Smuzhiyun 		0,
226*4882a593Smuzhiyun 		0,
227*4882a593Smuzhiyun 		0,
228*4882a593Smuzhiyun 		0,
229*4882a593Smuzhiyun 		0,
230*4882a593Smuzhiyun 		0,
231*4882a593Smuzhiyun 		0,
232*4882a593Smuzhiyun 		0
233*4882a593Smuzhiyun 	},
234*4882a593Smuzhiyun 	true
235*4882a593Smuzhiyun };
236*4882a593Smuzhiyun 
237*4882a593Smuzhiyun static const struct si_dte_data dte_data_tahiti =
238*4882a593Smuzhiyun {
239*4882a593Smuzhiyun 	{ 1159409, 0, 0, 0, 0 },
240*4882a593Smuzhiyun 	{ 777, 0, 0, 0, 0 },
241*4882a593Smuzhiyun 	2,
242*4882a593Smuzhiyun 	54000,
243*4882a593Smuzhiyun 	127000,
244*4882a593Smuzhiyun 	25,
245*4882a593Smuzhiyun 	2,
246*4882a593Smuzhiyun 	10,
247*4882a593Smuzhiyun 	13,
248*4882a593Smuzhiyun 	{ 27, 31, 35, 39, 43, 47, 54, 61, 67, 74, 81, 88, 95, 0, 0, 0 },
249*4882a593Smuzhiyun 	{ 240888759, 221057860, 235370597, 162287531, 158510299, 131423027, 116673180, 103067515, 87941937, 76209048, 68209175, 64090048, 58301890, 0, 0, 0 },
250*4882a593Smuzhiyun 	{ 12024, 11189, 11451, 8411, 7939, 6666, 5681, 4905, 4241, 3720, 3354, 3122, 2890, 0, 0, 0 },
251*4882a593Smuzhiyun 	85,
252*4882a593Smuzhiyun 	false
253*4882a593Smuzhiyun };
254*4882a593Smuzhiyun 
255*4882a593Smuzhiyun static const struct si_dte_data dte_data_tahiti_pro =
256*4882a593Smuzhiyun {
257*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
258*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
259*4882a593Smuzhiyun 	5,
260*4882a593Smuzhiyun 	45000,
261*4882a593Smuzhiyun 	100,
262*4882a593Smuzhiyun 	0xA,
263*4882a593Smuzhiyun 	1,
264*4882a593Smuzhiyun 	0,
265*4882a593Smuzhiyun 	0x10,
266*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
267*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
268*4882a593Smuzhiyun 	{ 0x7D0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
269*4882a593Smuzhiyun 	90,
270*4882a593Smuzhiyun 	true
271*4882a593Smuzhiyun };
272*4882a593Smuzhiyun 
273*4882a593Smuzhiyun static const struct si_dte_data dte_data_new_zealand =
274*4882a593Smuzhiyun {
275*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0 },
276*4882a593Smuzhiyun 	{ 0x29B, 0x3E9, 0x537, 0x7D2, 0 },
277*4882a593Smuzhiyun 	0x5,
278*4882a593Smuzhiyun 	0xAFC8,
279*4882a593Smuzhiyun 	0x69,
280*4882a593Smuzhiyun 	0x32,
281*4882a593Smuzhiyun 	1,
282*4882a593Smuzhiyun 	0,
283*4882a593Smuzhiyun 	0x10,
284*4882a593Smuzhiyun 	{ 0x82, 0xA0, 0xB4, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE },
285*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
286*4882a593Smuzhiyun 	{ 0xDAC, 0x1388, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685, 0x685 },
287*4882a593Smuzhiyun 	85,
288*4882a593Smuzhiyun 	true
289*4882a593Smuzhiyun };
290*4882a593Smuzhiyun 
291*4882a593Smuzhiyun static const struct si_dte_data dte_data_aruba_pro =
292*4882a593Smuzhiyun {
293*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
294*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
295*4882a593Smuzhiyun 	5,
296*4882a593Smuzhiyun 	45000,
297*4882a593Smuzhiyun 	100,
298*4882a593Smuzhiyun 	0xA,
299*4882a593Smuzhiyun 	1,
300*4882a593Smuzhiyun 	0,
301*4882a593Smuzhiyun 	0x10,
302*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
303*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
304*4882a593Smuzhiyun 	{ 0x1000, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
305*4882a593Smuzhiyun 	90,
306*4882a593Smuzhiyun 	true
307*4882a593Smuzhiyun };
308*4882a593Smuzhiyun 
309*4882a593Smuzhiyun static const struct si_dte_data dte_data_malta =
310*4882a593Smuzhiyun {
311*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
312*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
313*4882a593Smuzhiyun 	5,
314*4882a593Smuzhiyun 	45000,
315*4882a593Smuzhiyun 	100,
316*4882a593Smuzhiyun 	0xA,
317*4882a593Smuzhiyun 	1,
318*4882a593Smuzhiyun 	0,
319*4882a593Smuzhiyun 	0x10,
320*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
321*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
322*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
323*4882a593Smuzhiyun 	90,
324*4882a593Smuzhiyun 	true
325*4882a593Smuzhiyun };
326*4882a593Smuzhiyun 
327*4882a593Smuzhiyun struct si_cac_config_reg cac_weights_pitcairn[] =
328*4882a593Smuzhiyun {
329*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x8a, SISLANDS_CACCONFIG_CGIND },
330*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
331*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
332*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x24d, SISLANDS_CACCONFIG_CGIND },
333*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x19, SISLANDS_CACCONFIG_CGIND },
334*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
335*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
336*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x76, SISLANDS_CACCONFIG_CGIND },
337*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
338*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0xc11, SISLANDS_CACCONFIG_CGIND },
339*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x7f3, SISLANDS_CACCONFIG_CGIND },
340*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x403, SISLANDS_CACCONFIG_CGIND },
341*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x367, SISLANDS_CACCONFIG_CGIND },
342*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x4c9, SISLANDS_CACCONFIG_CGIND },
343*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
344*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
345*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
346*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x45d, SISLANDS_CACCONFIG_CGIND },
347*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x36d, SISLANDS_CACCONFIG_CGIND },
348*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x534, SISLANDS_CACCONFIG_CGIND },
349*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x5da, SISLANDS_CACCONFIG_CGIND },
350*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x880, SISLANDS_CACCONFIG_CGIND },
351*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0x201, SISLANDS_CACCONFIG_CGIND },
352*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
353*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
354*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x9f, SISLANDS_CACCONFIG_CGIND },
355*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x1f, SISLANDS_CACCONFIG_CGIND },
356*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
357*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
358*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
359*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5de, SISLANDS_CACCONFIG_CGIND },
360*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
361*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x7b, SISLANDS_CACCONFIG_CGIND },
362*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
363*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x13, SISLANDS_CACCONFIG_CGIND },
364*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0xf9, SISLANDS_CACCONFIG_CGIND },
365*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x66, SISLANDS_CACCONFIG_CGIND },
366*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
367*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x13, SISLANDS_CACCONFIG_CGIND },
368*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
369*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
370*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
371*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
372*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
373*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
374*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
375*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
376*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
377*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
378*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
379*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
380*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
381*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
382*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
383*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
384*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
385*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
386*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
387*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
388*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x186, SISLANDS_CACCONFIG_CGIND },
389*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
390*4882a593Smuzhiyun };
391*4882a593Smuzhiyun 
392*4882a593Smuzhiyun static const struct si_cac_config_reg lcac_pitcairn[] =
393*4882a593Smuzhiyun {
394*4882a593Smuzhiyun 	{ 0x98, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
395*4882a593Smuzhiyun 	{ 0x98, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
396*4882a593Smuzhiyun 	{ 0x104, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
397*4882a593Smuzhiyun 	{ 0x104, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
398*4882a593Smuzhiyun 	{ 0x110, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
399*4882a593Smuzhiyun 	{ 0x110, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
400*4882a593Smuzhiyun 	{ 0x14f, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
401*4882a593Smuzhiyun 	{ 0x14f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
402*4882a593Smuzhiyun 	{ 0x8c, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
403*4882a593Smuzhiyun 	{ 0x8c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
404*4882a593Smuzhiyun 	{ 0x143, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
405*4882a593Smuzhiyun 	{ 0x143, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
406*4882a593Smuzhiyun 	{ 0x9b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
407*4882a593Smuzhiyun 	{ 0x9b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
408*4882a593Smuzhiyun 	{ 0x107, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
409*4882a593Smuzhiyun 	{ 0x107, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
410*4882a593Smuzhiyun 	{ 0x113, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
411*4882a593Smuzhiyun 	{ 0x113, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
412*4882a593Smuzhiyun 	{ 0x152, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
413*4882a593Smuzhiyun 	{ 0x152, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
414*4882a593Smuzhiyun 	{ 0x8f, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
415*4882a593Smuzhiyun 	{ 0x8f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
416*4882a593Smuzhiyun 	{ 0x146, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
417*4882a593Smuzhiyun 	{ 0x146, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
418*4882a593Smuzhiyun 	{ 0x9e, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
419*4882a593Smuzhiyun 	{ 0x9e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
420*4882a593Smuzhiyun 	{ 0x10a, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
421*4882a593Smuzhiyun 	{ 0x10a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
422*4882a593Smuzhiyun 	{ 0x116, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
423*4882a593Smuzhiyun 	{ 0x116, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
424*4882a593Smuzhiyun 	{ 0x155, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
425*4882a593Smuzhiyun 	{ 0x155, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
426*4882a593Smuzhiyun 	{ 0x92, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
427*4882a593Smuzhiyun 	{ 0x92, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
428*4882a593Smuzhiyun 	{ 0x149, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
429*4882a593Smuzhiyun 	{ 0x149, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
430*4882a593Smuzhiyun 	{ 0x101, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
431*4882a593Smuzhiyun 	{ 0x101, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
432*4882a593Smuzhiyun 	{ 0x10d, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
433*4882a593Smuzhiyun 	{ 0x10d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
434*4882a593Smuzhiyun 	{ 0x119, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
435*4882a593Smuzhiyun 	{ 0x119, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
436*4882a593Smuzhiyun 	{ 0x158, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
437*4882a593Smuzhiyun 	{ 0x158, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
438*4882a593Smuzhiyun 	{ 0x95, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
439*4882a593Smuzhiyun 	{ 0x95, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
440*4882a593Smuzhiyun 	{ 0x14c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
441*4882a593Smuzhiyun 	{ 0x14c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
442*4882a593Smuzhiyun 	{ 0x11c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
443*4882a593Smuzhiyun 	{ 0x11c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
444*4882a593Smuzhiyun 	{ 0x11f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
445*4882a593Smuzhiyun 	{ 0x11f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
446*4882a593Smuzhiyun 	{ 0x122, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
447*4882a593Smuzhiyun 	{ 0x122, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
448*4882a593Smuzhiyun 	{ 0x125, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
449*4882a593Smuzhiyun 	{ 0x125, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
450*4882a593Smuzhiyun 	{ 0x128, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
451*4882a593Smuzhiyun 	{ 0x128, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
452*4882a593Smuzhiyun 	{ 0x12b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
453*4882a593Smuzhiyun 	{ 0x12b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
454*4882a593Smuzhiyun 	{ 0x164, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
455*4882a593Smuzhiyun 	{ 0x164, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
456*4882a593Smuzhiyun 	{ 0x167, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
457*4882a593Smuzhiyun 	{ 0x167, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
458*4882a593Smuzhiyun 	{ 0x16a, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
459*4882a593Smuzhiyun 	{ 0x16a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
460*4882a593Smuzhiyun 	{ 0x15e, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
461*4882a593Smuzhiyun 	{ 0x15e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
462*4882a593Smuzhiyun 	{ 0x161, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
463*4882a593Smuzhiyun 	{ 0x161, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
464*4882a593Smuzhiyun 	{ 0x15b, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
465*4882a593Smuzhiyun 	{ 0x15b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
466*4882a593Smuzhiyun 	{ 0x16d, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
467*4882a593Smuzhiyun 	{ 0x16d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
468*4882a593Smuzhiyun 	{ 0x170, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
469*4882a593Smuzhiyun 	{ 0x170, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
470*4882a593Smuzhiyun 	{ 0x173, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
471*4882a593Smuzhiyun 	{ 0x173, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
472*4882a593Smuzhiyun 	{ 0x176, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
473*4882a593Smuzhiyun 	{ 0x176, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
474*4882a593Smuzhiyun 	{ 0x179, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
475*4882a593Smuzhiyun 	{ 0x179, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
476*4882a593Smuzhiyun 	{ 0x17c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
477*4882a593Smuzhiyun 	{ 0x17c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
478*4882a593Smuzhiyun 	{ 0x17f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
479*4882a593Smuzhiyun 	{ 0x17f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
480*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
481*4882a593Smuzhiyun };
482*4882a593Smuzhiyun 
483*4882a593Smuzhiyun static const struct si_cac_config_reg cac_override_pitcairn[] =
484*4882a593Smuzhiyun {
485*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
486*4882a593Smuzhiyun };
487*4882a593Smuzhiyun 
488*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_pitcairn =
489*4882a593Smuzhiyun {
490*4882a593Smuzhiyun 	((1 << 16) | 27027),
491*4882a593Smuzhiyun 	5,
492*4882a593Smuzhiyun 	0,
493*4882a593Smuzhiyun 	6,
494*4882a593Smuzhiyun 	100,
495*4882a593Smuzhiyun 	{
496*4882a593Smuzhiyun 		51600000UL,
497*4882a593Smuzhiyun 		1800000UL,
498*4882a593Smuzhiyun 		7194395UL,
499*4882a593Smuzhiyun 		309631529UL,
500*4882a593Smuzhiyun 		-1270850L,
501*4882a593Smuzhiyun 		4513710L,
502*4882a593Smuzhiyun 		100
503*4882a593Smuzhiyun 	},
504*4882a593Smuzhiyun 	117830498UL,
505*4882a593Smuzhiyun 	12,
506*4882a593Smuzhiyun 	{
507*4882a593Smuzhiyun 		0,
508*4882a593Smuzhiyun 		0,
509*4882a593Smuzhiyun 		0,
510*4882a593Smuzhiyun 		0,
511*4882a593Smuzhiyun 		0,
512*4882a593Smuzhiyun 		0,
513*4882a593Smuzhiyun 		0,
514*4882a593Smuzhiyun 		0
515*4882a593Smuzhiyun 	},
516*4882a593Smuzhiyun 	true
517*4882a593Smuzhiyun };
518*4882a593Smuzhiyun 
519*4882a593Smuzhiyun static const struct si_dte_data dte_data_pitcairn =
520*4882a593Smuzhiyun {
521*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
522*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
523*4882a593Smuzhiyun 	0,
524*4882a593Smuzhiyun 	0,
525*4882a593Smuzhiyun 	0,
526*4882a593Smuzhiyun 	0,
527*4882a593Smuzhiyun 	0,
528*4882a593Smuzhiyun 	0,
529*4882a593Smuzhiyun 	0,
530*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
531*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
532*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
533*4882a593Smuzhiyun 	0,
534*4882a593Smuzhiyun 	false
535*4882a593Smuzhiyun };
536*4882a593Smuzhiyun 
537*4882a593Smuzhiyun static const struct si_dte_data dte_data_curacao_xt =
538*4882a593Smuzhiyun {
539*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
540*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
541*4882a593Smuzhiyun 	5,
542*4882a593Smuzhiyun 	45000,
543*4882a593Smuzhiyun 	100,
544*4882a593Smuzhiyun 	0xA,
545*4882a593Smuzhiyun 	1,
546*4882a593Smuzhiyun 	0,
547*4882a593Smuzhiyun 	0x10,
548*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
549*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
550*4882a593Smuzhiyun 	{ 0x1D17, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
551*4882a593Smuzhiyun 	90,
552*4882a593Smuzhiyun 	true
553*4882a593Smuzhiyun };
554*4882a593Smuzhiyun 
555*4882a593Smuzhiyun static const struct si_dte_data dte_data_curacao_pro =
556*4882a593Smuzhiyun {
557*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
558*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
559*4882a593Smuzhiyun 	5,
560*4882a593Smuzhiyun 	45000,
561*4882a593Smuzhiyun 	100,
562*4882a593Smuzhiyun 	0xA,
563*4882a593Smuzhiyun 	1,
564*4882a593Smuzhiyun 	0,
565*4882a593Smuzhiyun 	0x10,
566*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
567*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
568*4882a593Smuzhiyun 	{ 0x1D17, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
569*4882a593Smuzhiyun 	90,
570*4882a593Smuzhiyun 	true
571*4882a593Smuzhiyun };
572*4882a593Smuzhiyun 
573*4882a593Smuzhiyun static const struct si_dte_data dte_data_neptune_xt =
574*4882a593Smuzhiyun {
575*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
576*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
577*4882a593Smuzhiyun 	5,
578*4882a593Smuzhiyun 	45000,
579*4882a593Smuzhiyun 	100,
580*4882a593Smuzhiyun 	0xA,
581*4882a593Smuzhiyun 	1,
582*4882a593Smuzhiyun 	0,
583*4882a593Smuzhiyun 	0x10,
584*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
585*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
586*4882a593Smuzhiyun 	{ 0x3A2F, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
587*4882a593Smuzhiyun 	90,
588*4882a593Smuzhiyun 	true
589*4882a593Smuzhiyun };
590*4882a593Smuzhiyun 
591*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_chelsea_pro[] =
592*4882a593Smuzhiyun {
593*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
594*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
595*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
596*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
597*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
598*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
599*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
600*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
601*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
602*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
603*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
604*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
605*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
606*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
607*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
608*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
609*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
610*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
611*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
612*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
613*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
614*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
615*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
616*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
617*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
618*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
619*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
620*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
621*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
622*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
623*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
624*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
625*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
626*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
627*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
628*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x2BD, SISLANDS_CACCONFIG_CGIND },
629*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
630*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
631*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
632*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
633*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
634*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
635*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
636*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
637*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
638*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
639*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
640*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
641*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
642*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
643*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
644*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
645*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
646*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
647*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
648*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
649*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
650*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
651*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
652*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
653*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
654*4882a593Smuzhiyun };
655*4882a593Smuzhiyun 
656*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_chelsea_xt[] =
657*4882a593Smuzhiyun {
658*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
659*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
660*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
661*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
662*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
663*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
664*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
665*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
666*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
667*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
668*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
669*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
670*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
671*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
672*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
673*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
674*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
675*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
676*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
677*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
678*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
679*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
680*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
681*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
682*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
683*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
684*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
685*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
686*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
687*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
688*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
689*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
690*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
691*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
692*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
693*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x30A, SISLANDS_CACCONFIG_CGIND },
694*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
695*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
696*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
697*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
698*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
699*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
700*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
701*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
702*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
703*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
704*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
705*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
706*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
707*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
708*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
709*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
710*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
711*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
712*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
713*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
714*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
715*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
716*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
717*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
718*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
719*4882a593Smuzhiyun };
720*4882a593Smuzhiyun 
721*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_heathrow[] =
722*4882a593Smuzhiyun {
723*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
724*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
725*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
726*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
727*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
728*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
729*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
730*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
731*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
732*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
733*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
734*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
735*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
736*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
737*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
738*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
739*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
740*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
741*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
742*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
743*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
744*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
745*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
746*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
747*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
748*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
749*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
750*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
751*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
752*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
753*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
754*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
755*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
756*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
757*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
758*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x362, SISLANDS_CACCONFIG_CGIND },
759*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
760*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
761*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
762*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
763*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
764*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
765*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
766*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
767*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
768*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
769*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
770*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
771*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
772*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
773*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
774*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
775*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
776*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
777*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
778*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
779*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
780*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
781*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
782*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
783*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
784*4882a593Smuzhiyun };
785*4882a593Smuzhiyun 
786*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_cape_verde_pro[] =
787*4882a593Smuzhiyun {
788*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
789*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
790*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
791*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
792*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
793*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
794*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
795*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
796*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
797*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
798*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
799*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
800*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
801*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
802*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
803*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
804*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
805*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
806*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
807*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
808*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
809*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
810*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
811*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
812*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
813*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
814*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
815*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
816*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
817*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
818*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
819*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
820*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
821*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
822*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
823*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x315, SISLANDS_CACCONFIG_CGIND },
824*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
825*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
826*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
827*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
828*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
829*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
830*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
831*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
832*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
833*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
834*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
835*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
836*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
837*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
838*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
839*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
840*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
841*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
842*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
843*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
844*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
845*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
846*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
847*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
848*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
849*4882a593Smuzhiyun };
850*4882a593Smuzhiyun 
851*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_cape_verde[] =
852*4882a593Smuzhiyun {
853*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
854*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
855*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
856*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
857*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
858*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
859*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
860*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
861*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
862*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
863*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
864*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
865*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
866*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
867*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
868*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
869*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
870*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
871*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
872*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
873*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
874*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
875*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
876*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
877*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
878*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
879*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
880*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
881*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
882*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
883*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
884*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
885*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
886*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
887*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
888*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x3BA, SISLANDS_CACCONFIG_CGIND },
889*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
890*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
891*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
892*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
893*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
894*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
895*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
896*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
897*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
898*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
899*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
900*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
901*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
902*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
903*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
904*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
905*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
906*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
907*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
908*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
909*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
910*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
911*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
912*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
913*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
914*4882a593Smuzhiyun };
915*4882a593Smuzhiyun 
916*4882a593Smuzhiyun static const struct si_cac_config_reg lcac_cape_verde[] =
917*4882a593Smuzhiyun {
918*4882a593Smuzhiyun 	{ 0x98, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
919*4882a593Smuzhiyun 	{ 0x98, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
920*4882a593Smuzhiyun 	{ 0x104, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
921*4882a593Smuzhiyun 	{ 0x104, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
922*4882a593Smuzhiyun 	{ 0x110, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
923*4882a593Smuzhiyun 	{ 0x110, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
924*4882a593Smuzhiyun 	{ 0x14f, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
925*4882a593Smuzhiyun 	{ 0x14f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
926*4882a593Smuzhiyun 	{ 0x8c, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
927*4882a593Smuzhiyun 	{ 0x8c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
928*4882a593Smuzhiyun 	{ 0x143, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
929*4882a593Smuzhiyun 	{ 0x143, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
930*4882a593Smuzhiyun 	{ 0x9b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
931*4882a593Smuzhiyun 	{ 0x9b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
932*4882a593Smuzhiyun 	{ 0x107, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
933*4882a593Smuzhiyun 	{ 0x107, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
934*4882a593Smuzhiyun 	{ 0x113, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
935*4882a593Smuzhiyun 	{ 0x113, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
936*4882a593Smuzhiyun 	{ 0x152, 0x0001fffe, 1, 0x5, SISLANDS_CACCONFIG_CGIND },
937*4882a593Smuzhiyun 	{ 0x152, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
938*4882a593Smuzhiyun 	{ 0x8f, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
939*4882a593Smuzhiyun 	{ 0x8f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
940*4882a593Smuzhiyun 	{ 0x146, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
941*4882a593Smuzhiyun 	{ 0x146, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
942*4882a593Smuzhiyun 	{ 0x11c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
943*4882a593Smuzhiyun 	{ 0x11c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
944*4882a593Smuzhiyun 	{ 0x11f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
945*4882a593Smuzhiyun 	{ 0x11f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
946*4882a593Smuzhiyun 	{ 0x164, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
947*4882a593Smuzhiyun 	{ 0x164, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
948*4882a593Smuzhiyun 	{ 0x167, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
949*4882a593Smuzhiyun 	{ 0x167, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
950*4882a593Smuzhiyun 	{ 0x16a, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
951*4882a593Smuzhiyun 	{ 0x16a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
952*4882a593Smuzhiyun 	{ 0x15e, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
953*4882a593Smuzhiyun 	{ 0x15e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
954*4882a593Smuzhiyun 	{ 0x161, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
955*4882a593Smuzhiyun 	{ 0x161, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
956*4882a593Smuzhiyun 	{ 0x15b, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
957*4882a593Smuzhiyun 	{ 0x15b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
958*4882a593Smuzhiyun 	{ 0x16d, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
959*4882a593Smuzhiyun 	{ 0x16d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
960*4882a593Smuzhiyun 	{ 0x170, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
961*4882a593Smuzhiyun 	{ 0x170, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
962*4882a593Smuzhiyun 	{ 0x173, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
963*4882a593Smuzhiyun 	{ 0x173, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
964*4882a593Smuzhiyun 	{ 0x176, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
965*4882a593Smuzhiyun 	{ 0x176, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
966*4882a593Smuzhiyun 	{ 0x179, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
967*4882a593Smuzhiyun 	{ 0x179, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
968*4882a593Smuzhiyun 	{ 0x17c, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
969*4882a593Smuzhiyun 	{ 0x17c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
970*4882a593Smuzhiyun 	{ 0x17f, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
971*4882a593Smuzhiyun 	{ 0x17f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
972*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
973*4882a593Smuzhiyun };
974*4882a593Smuzhiyun 
975*4882a593Smuzhiyun static const struct si_cac_config_reg cac_override_cape_verde[] =
976*4882a593Smuzhiyun {
977*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
978*4882a593Smuzhiyun };
979*4882a593Smuzhiyun 
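/*
 * PowerTune defaults for Cape Verde (added note): the nested seven-value
 * block is presumably the ni_leakage_coeffients set (at, bt, av, bv, t_slope,
 * t_intercept, t_ref) consumed by the leakage helpers later in this file, and
 * the trailing bool enables PowerTune by default.
 */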
980*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_cape_verde =
981*4882a593Smuzhiyun {
982*4882a593Smuzhiyun 	((1 << 16) | 0x6993),
983*4882a593Smuzhiyun 	5,
984*4882a593Smuzhiyun 	0,
985*4882a593Smuzhiyun 	7,
986*4882a593Smuzhiyun 	105,
987*4882a593Smuzhiyun 	{
988*4882a593Smuzhiyun 		0UL,
989*4882a593Smuzhiyun 		0UL,
990*4882a593Smuzhiyun 		7194395UL,
991*4882a593Smuzhiyun 		309631529UL,
992*4882a593Smuzhiyun 		-1270850L,
993*4882a593Smuzhiyun 		4513710L,
994*4882a593Smuzhiyun 		100
995*4882a593Smuzhiyun 	},
996*4882a593Smuzhiyun 	117830498UL,
997*4882a593Smuzhiyun 	12,
998*4882a593Smuzhiyun 	{
999*4882a593Smuzhiyun 		0,
1000*4882a593Smuzhiyun 		0,
1001*4882a593Smuzhiyun 		0,
1002*4882a593Smuzhiyun 		0,
1003*4882a593Smuzhiyun 		0,
1004*4882a593Smuzhiyun 		0,
1005*4882a593Smuzhiyun 		0,
1006*4882a593Smuzhiyun 		0
1007*4882a593Smuzhiyun 	},
1008*4882a593Smuzhiyun 	true
1009*4882a593Smuzhiyun };
1010*4882a593Smuzhiyun 
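/*
 * DTE (digital temperature estimation) parameters (added note): Cape Verde
 * ships an all-zero table with the trailing flag false, so DTE presumably
 * stays disabled by default, while the Venus variants below supply real
 * filter constants and enable it.
 */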
1011*4882a593Smuzhiyun static const struct si_dte_data dte_data_cape_verde =
1012*4882a593Smuzhiyun {
1013*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
1014*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
1015*4882a593Smuzhiyun 	0,
1016*4882a593Smuzhiyun 	0,
1017*4882a593Smuzhiyun 	0,
1018*4882a593Smuzhiyun 	0,
1019*4882a593Smuzhiyun 	0,
1020*4882a593Smuzhiyun 	0,
1021*4882a593Smuzhiyun 	0,
1022*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1023*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1024*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1025*4882a593Smuzhiyun 	0,
1026*4882a593Smuzhiyun 	false
1027*4882a593Smuzhiyun };
1028*4882a593Smuzhiyun 
1029*4882a593Smuzhiyun static const struct si_dte_data dte_data_venus_xtx =
1030*4882a593Smuzhiyun {
1031*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
1032*4882a593Smuzhiyun 	{ 0x71C, 0xAAB, 0xE39, 0x11C7, 0x0 },
1033*4882a593Smuzhiyun 	5,
1034*4882a593Smuzhiyun 	55000,
1035*4882a593Smuzhiyun 	0x69,
1036*4882a593Smuzhiyun 	0xA,
1037*4882a593Smuzhiyun 	1,
1038*4882a593Smuzhiyun 	0,
1039*4882a593Smuzhiyun 	0x3,
1040*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1041*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1042*4882a593Smuzhiyun 	{ 0xD6D8, 0x88B8, 0x1555, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1043*4882a593Smuzhiyun 	90,
1044*4882a593Smuzhiyun 	true
1045*4882a593Smuzhiyun };
1046*4882a593Smuzhiyun 
1047*4882a593Smuzhiyun static const struct si_dte_data dte_data_venus_xt =
1048*4882a593Smuzhiyun {
1049*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
1050*4882a593Smuzhiyun 	{ 0xBDA, 0x11C7, 0x17B4, 0x1DA1, 0x0 },
1051*4882a593Smuzhiyun 	5,
1052*4882a593Smuzhiyun 	55000,
1053*4882a593Smuzhiyun 	0x69,
1054*4882a593Smuzhiyun 	0xA,
1055*4882a593Smuzhiyun 	1,
1056*4882a593Smuzhiyun 	0,
1057*4882a593Smuzhiyun 	0x3,
1058*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1059*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1060*4882a593Smuzhiyun 	{ 0xAFC8, 0x88B8, 0x238E, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1061*4882a593Smuzhiyun 	90,
1062*4882a593Smuzhiyun 	true
1063*4882a593Smuzhiyun };
1064*4882a593Smuzhiyun 
1065*4882a593Smuzhiyun static const struct si_dte_data dte_data_venus_pro =
1066*4882a593Smuzhiyun {
1067*4882a593Smuzhiyun 	{  0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
1068*4882a593Smuzhiyun 	{ 0x11C7, 0x1AAB, 0x238E, 0x2C72, 0x0 },
1069*4882a593Smuzhiyun 	5,
1070*4882a593Smuzhiyun 	55000,
1071*4882a593Smuzhiyun 	0x69,
1072*4882a593Smuzhiyun 	0xA,
1073*4882a593Smuzhiyun 	1,
1074*4882a593Smuzhiyun 	0,
1075*4882a593Smuzhiyun 	0x3,
1076*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1077*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1078*4882a593Smuzhiyun 	{ 0x88B8, 0x88B8, 0x3555, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1079*4882a593Smuzhiyun 	90,
1080*4882a593Smuzhiyun 	true
1081*4882a593Smuzhiyun };
1082*4882a593Smuzhiyun 
1083*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_oland[] =
1084*4882a593Smuzhiyun {
1085*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x82, SISLANDS_CACCONFIG_CGIND },
1086*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
1087*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x153, SISLANDS_CACCONFIG_CGIND },
1088*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x52, SISLANDS_CACCONFIG_CGIND },
1089*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1090*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
1091*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x4F, SISLANDS_CACCONFIG_CGIND },
1092*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x135, SISLANDS_CACCONFIG_CGIND },
1093*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0xAC, SISLANDS_CACCONFIG_CGIND },
1094*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x118, SISLANDS_CACCONFIG_CGIND },
1095*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0xBE, SISLANDS_CACCONFIG_CGIND },
1096*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x110, SISLANDS_CACCONFIG_CGIND },
1097*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x4CD, SISLANDS_CACCONFIG_CGIND },
1098*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
1099*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x37, SISLANDS_CACCONFIG_CGIND },
1100*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x27, SISLANDS_CACCONFIG_CGIND },
1101*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0xC3, SISLANDS_CACCONFIG_CGIND },
1102*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x35, SISLANDS_CACCONFIG_CGIND },
1103*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0x28, SISLANDS_CACCONFIG_CGIND },
1104*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x26C, SISLANDS_CACCONFIG_CGIND },
1105*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3B2, SISLANDS_CACCONFIG_CGIND },
1106*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x99D, SISLANDS_CACCONFIG_CGIND },
1107*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA3F, SISLANDS_CACCONFIG_CGIND },
1108*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0xA, SISLANDS_CACCONFIG_CGIND },
1109*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0xA, SISLANDS_CACCONFIG_CGIND },
1110*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1111*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
1112*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1113*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1114*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x1, SISLANDS_CACCONFIG_CGIND },
1115*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1116*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x15, SISLANDS_CACCONFIG_CGIND },
1117*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x34, SISLANDS_CACCONFIG_CGIND },
1118*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x4, SISLANDS_CACCONFIG_CGIND },
1119*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
1120*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x3BA, SISLANDS_CACCONFIG_CGIND },
1121*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1122*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
1123*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1124*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x30, SISLANDS_CACCONFIG_CGIND },
1125*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7A, SISLANDS_CACCONFIG_CGIND },
1126*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1127*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1128*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1129*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1130*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1131*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1132*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1133*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1134*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1135*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1136*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1137*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1138*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1139*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1140*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1141*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1142*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1143*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1144*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x100, SISLANDS_CACCONFIG_CGIND },
1145*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1146*4882a593Smuzhiyun };
1147*4882a593Smuzhiyun 
1148*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_mars_pro[] =
1149*4882a593Smuzhiyun {
1150*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x43, SISLANDS_CACCONFIG_CGIND },
1151*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1152*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0xAF, SISLANDS_CACCONFIG_CGIND },
1153*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x2A, SISLANDS_CACCONFIG_CGIND },
1154*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1155*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1156*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1157*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1158*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x59, SISLANDS_CACCONFIG_CGIND },
1159*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x1A5, SISLANDS_CACCONFIG_CGIND },
1160*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x1D6, SISLANDS_CACCONFIG_CGIND },
1161*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x2A3, SISLANDS_CACCONFIG_CGIND },
1162*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x8FD, SISLANDS_CACCONFIG_CGIND },
1163*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x76, SISLANDS_CACCONFIG_CGIND },
1164*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x8A, SISLANDS_CACCONFIG_CGIND },
1165*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0xA3, SISLANDS_CACCONFIG_CGIND },
1166*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x71, SISLANDS_CACCONFIG_CGIND },
1167*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x36, SISLANDS_CACCONFIG_CGIND },
1168*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0xA6, SISLANDS_CACCONFIG_CGIND },
1169*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x81, SISLANDS_CACCONFIG_CGIND },
1170*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3D2, SISLANDS_CACCONFIG_CGIND },
1171*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x27C, SISLANDS_CACCONFIG_CGIND },
1172*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA96, SISLANDS_CACCONFIG_CGIND },
1173*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1174*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x5, SISLANDS_CACCONFIG_CGIND },
1175*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0xB, SISLANDS_CACCONFIG_CGIND },
1176*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
1177*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x2, SISLANDS_CACCONFIG_CGIND },
1178*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1179*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
1180*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x15, SISLANDS_CACCONFIG_CGIND },
1181*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1182*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x36, SISLANDS_CACCONFIG_CGIND },
1183*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x10, SISLANDS_CACCONFIG_CGIND },
1184*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x10, SISLANDS_CACCONFIG_CGIND },
1185*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x2, SISLANDS_CACCONFIG_CGIND },
1186*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1187*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
1188*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1189*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x32, SISLANDS_CACCONFIG_CGIND },
1190*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7E, SISLANDS_CACCONFIG_CGIND },
1191*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1192*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1193*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1194*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1195*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1196*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x280, SISLANDS_CACCONFIG_CGIND },
1197*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1198*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1199*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1200*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x3C, SISLANDS_CACCONFIG_CGIND },
1201*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x203, SISLANDS_CACCONFIG_CGIND },
1202*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1203*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1204*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1205*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1206*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1207*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1208*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1209*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0xB4, SISLANDS_CACCONFIG_CGIND },
1210*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1211*4882a593Smuzhiyun };
1212*4882a593Smuzhiyun 
1213*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_mars_xt[] =
1214*4882a593Smuzhiyun {
1215*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x43, SISLANDS_CACCONFIG_CGIND },
1216*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1217*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0xAF, SISLANDS_CACCONFIG_CGIND },
1218*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x2A, SISLANDS_CACCONFIG_CGIND },
1219*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1220*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1221*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1222*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1223*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x59, SISLANDS_CACCONFIG_CGIND },
1224*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x1A5, SISLANDS_CACCONFIG_CGIND },
1225*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x1D6, SISLANDS_CACCONFIG_CGIND },
1226*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x2A3, SISLANDS_CACCONFIG_CGIND },
1227*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x8FD, SISLANDS_CACCONFIG_CGIND },
1228*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x76, SISLANDS_CACCONFIG_CGIND },
1229*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x8A, SISLANDS_CACCONFIG_CGIND },
1230*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0xA3, SISLANDS_CACCONFIG_CGIND },
1231*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x71, SISLANDS_CACCONFIG_CGIND },
1232*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x36, SISLANDS_CACCONFIG_CGIND },
1233*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0xA6, SISLANDS_CACCONFIG_CGIND },
1234*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x81, SISLANDS_CACCONFIG_CGIND },
1235*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3D2, SISLANDS_CACCONFIG_CGIND },
1236*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x27C, SISLANDS_CACCONFIG_CGIND },
1237*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA96, SISLANDS_CACCONFIG_CGIND },
1238*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1239*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x5, SISLANDS_CACCONFIG_CGIND },
1240*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0xB, SISLANDS_CACCONFIG_CGIND },
1241*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
1242*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x2, SISLANDS_CACCONFIG_CGIND },
1243*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1244*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
1245*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x15, SISLANDS_CACCONFIG_CGIND },
1246*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1247*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x36, SISLANDS_CACCONFIG_CGIND },
1248*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x10, SISLANDS_CACCONFIG_CGIND },
1249*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x10, SISLANDS_CACCONFIG_CGIND },
1250*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x60, SISLANDS_CACCONFIG_CGIND },
1251*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1252*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
1253*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1254*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x32, SISLANDS_CACCONFIG_CGIND },
1255*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7E, SISLANDS_CACCONFIG_CGIND },
1256*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1257*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1258*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1259*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1260*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1261*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x280, SISLANDS_CACCONFIG_CGIND },
1262*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1263*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1264*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1265*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x3C, SISLANDS_CACCONFIG_CGIND },
1266*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x203, SISLANDS_CACCONFIG_CGIND },
1267*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1268*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1269*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1270*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1271*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1272*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1273*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1274*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0xB4, SISLANDS_CACCONFIG_CGIND },
1275*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1276*4882a593Smuzhiyun };
1277*4882a593Smuzhiyun 
1278*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_oland_pro[] =
1279*4882a593Smuzhiyun {
1280*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x43, SISLANDS_CACCONFIG_CGIND },
1281*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1282*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0xAF, SISLANDS_CACCONFIG_CGIND },
1283*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x2A, SISLANDS_CACCONFIG_CGIND },
1284*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1285*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1286*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1287*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1288*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x59, SISLANDS_CACCONFIG_CGIND },
1289*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x1A5, SISLANDS_CACCONFIG_CGIND },
1290*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x1D6, SISLANDS_CACCONFIG_CGIND },
1291*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x2A3, SISLANDS_CACCONFIG_CGIND },
1292*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x8FD, SISLANDS_CACCONFIG_CGIND },
1293*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x76, SISLANDS_CACCONFIG_CGIND },
1294*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x8A, SISLANDS_CACCONFIG_CGIND },
1295*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0xA3, SISLANDS_CACCONFIG_CGIND },
1296*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x71, SISLANDS_CACCONFIG_CGIND },
1297*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x36, SISLANDS_CACCONFIG_CGIND },
1298*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0xA6, SISLANDS_CACCONFIG_CGIND },
1299*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x81, SISLANDS_CACCONFIG_CGIND },
1300*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3D2, SISLANDS_CACCONFIG_CGIND },
1301*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x27C, SISLANDS_CACCONFIG_CGIND },
1302*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA96, SISLANDS_CACCONFIG_CGIND },
1303*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1304*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x5, SISLANDS_CACCONFIG_CGIND },
1305*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0xB, SISLANDS_CACCONFIG_CGIND },
1306*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
1307*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x2, SISLANDS_CACCONFIG_CGIND },
1308*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1309*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
1310*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x15, SISLANDS_CACCONFIG_CGIND },
1311*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1312*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x36, SISLANDS_CACCONFIG_CGIND },
1313*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x10, SISLANDS_CACCONFIG_CGIND },
1314*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x10, SISLANDS_CACCONFIG_CGIND },
1315*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x90, SISLANDS_CACCONFIG_CGIND },
1316*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1317*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
1318*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1319*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x32, SISLANDS_CACCONFIG_CGIND },
1320*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7E, SISLANDS_CACCONFIG_CGIND },
1321*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1322*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1323*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1324*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1325*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1326*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x280, SISLANDS_CACCONFIG_CGIND },
1327*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1328*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1329*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1330*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x3C, SISLANDS_CACCONFIG_CGIND },
1331*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x203, SISLANDS_CACCONFIG_CGIND },
1332*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1333*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1334*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1335*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1336*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1337*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1338*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1339*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0xB4, SISLANDS_CACCONFIG_CGIND },
1340*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1341*4882a593Smuzhiyun };
1342*4882a593Smuzhiyun 
1343*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_oland_xt[] =
1344*4882a593Smuzhiyun {
1345*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x43, SISLANDS_CACCONFIG_CGIND },
1346*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1347*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0xAF, SISLANDS_CACCONFIG_CGIND },
1348*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x2A, SISLANDS_CACCONFIG_CGIND },
1349*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1350*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1351*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x29, SISLANDS_CACCONFIG_CGIND },
1352*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0xA0, SISLANDS_CACCONFIG_CGIND },
1353*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x59, SISLANDS_CACCONFIG_CGIND },
1354*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x1A5, SISLANDS_CACCONFIG_CGIND },
1355*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x1D6, SISLANDS_CACCONFIG_CGIND },
1356*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0x2A3, SISLANDS_CACCONFIG_CGIND },
1357*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x8FD, SISLANDS_CACCONFIG_CGIND },
1358*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x76, SISLANDS_CACCONFIG_CGIND },
1359*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x8A, SISLANDS_CACCONFIG_CGIND },
1360*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0xA3, SISLANDS_CACCONFIG_CGIND },
1361*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x71, SISLANDS_CACCONFIG_CGIND },
1362*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0x36, SISLANDS_CACCONFIG_CGIND },
1363*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0xA6, SISLANDS_CACCONFIG_CGIND },
1364*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x81, SISLANDS_CACCONFIG_CGIND },
1365*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0x3D2, SISLANDS_CACCONFIG_CGIND },
1366*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0x27C, SISLANDS_CACCONFIG_CGIND },
1367*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xA96, SISLANDS_CACCONFIG_CGIND },
1368*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x5, SISLANDS_CACCONFIG_CGIND },
1369*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x5, SISLANDS_CACCONFIG_CGIND },
1370*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0xB, SISLANDS_CACCONFIG_CGIND },
1371*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x3, SISLANDS_CACCONFIG_CGIND },
1372*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x2, SISLANDS_CACCONFIG_CGIND },
1373*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1374*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x4, SISLANDS_CACCONFIG_CGIND },
1375*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x15, SISLANDS_CACCONFIG_CGIND },
1376*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1377*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x36, SISLANDS_CACCONFIG_CGIND },
1378*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x10, SISLANDS_CACCONFIG_CGIND },
1379*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x10, SISLANDS_CACCONFIG_CGIND },
1380*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x120, SISLANDS_CACCONFIG_CGIND },
1381*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1382*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x6, SISLANDS_CACCONFIG_CGIND },
1383*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1384*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x32, SISLANDS_CACCONFIG_CGIND },
1385*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x7E, SISLANDS_CACCONFIG_CGIND },
1386*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1387*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1388*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1389*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1390*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1391*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0x280, SISLANDS_CACCONFIG_CGIND },
1392*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0x7, SISLANDS_CACCONFIG_CGIND },
1393*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1394*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1395*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0x3C, SISLANDS_CACCONFIG_CGIND },
1396*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0x203, SISLANDS_CACCONFIG_CGIND },
1397*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1398*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1399*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1400*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1401*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1402*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1403*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1404*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0xB4, SISLANDS_CACCONFIG_CGIND },
1405*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1406*4882a593Smuzhiyun };
1407*4882a593Smuzhiyun 
1408*4882a593Smuzhiyun static const struct si_cac_config_reg lcac_oland[] =
1409*4882a593Smuzhiyun {
1410*4882a593Smuzhiyun 	{ 0x98, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1411*4882a593Smuzhiyun 	{ 0x98, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1412*4882a593Smuzhiyun 	{ 0x104, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1413*4882a593Smuzhiyun 	{ 0x104, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1414*4882a593Smuzhiyun 	{ 0x110, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1415*4882a593Smuzhiyun 	{ 0x110, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1416*4882a593Smuzhiyun 	{ 0x14f, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1417*4882a593Smuzhiyun 	{ 0x14f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1418*4882a593Smuzhiyun 	{ 0x8c, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1419*4882a593Smuzhiyun 	{ 0x8c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1420*4882a593Smuzhiyun 	{ 0x143, 0x0001fffe, 1, 0x4, SISLANDS_CACCONFIG_CGIND },
1421*4882a593Smuzhiyun 	{ 0x143, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1422*4882a593Smuzhiyun 	{ 0x11c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1423*4882a593Smuzhiyun 	{ 0x11c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1424*4882a593Smuzhiyun 	{ 0x11f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1425*4882a593Smuzhiyun 	{ 0x11f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1426*4882a593Smuzhiyun 	{ 0x164, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1427*4882a593Smuzhiyun 	{ 0x164, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1428*4882a593Smuzhiyun 	{ 0x167, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1429*4882a593Smuzhiyun 	{ 0x167, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1430*4882a593Smuzhiyun 	{ 0x16a, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1431*4882a593Smuzhiyun 	{ 0x16a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1432*4882a593Smuzhiyun 	{ 0x15e, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1433*4882a593Smuzhiyun 	{ 0x15e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1434*4882a593Smuzhiyun 	{ 0x161, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1435*4882a593Smuzhiyun 	{ 0x161, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1436*4882a593Smuzhiyun 	{ 0x15b, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1437*4882a593Smuzhiyun 	{ 0x15b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1438*4882a593Smuzhiyun 	{ 0x16d, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1439*4882a593Smuzhiyun 	{ 0x16d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1440*4882a593Smuzhiyun 	{ 0x170, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1441*4882a593Smuzhiyun 	{ 0x170, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1442*4882a593Smuzhiyun 	{ 0x173, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1443*4882a593Smuzhiyun 	{ 0x173, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1444*4882a593Smuzhiyun 	{ 0x176, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1445*4882a593Smuzhiyun 	{ 0x176, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1446*4882a593Smuzhiyun 	{ 0x179, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1447*4882a593Smuzhiyun 	{ 0x179, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1448*4882a593Smuzhiyun 	{ 0x17c, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1449*4882a593Smuzhiyun 	{ 0x17c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1450*4882a593Smuzhiyun 	{ 0x17f, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1451*4882a593Smuzhiyun 	{ 0x17f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1452*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1453*4882a593Smuzhiyun };
1454*4882a593Smuzhiyun 
1455*4882a593Smuzhiyun static const struct si_cac_config_reg lcac_mars_pro[] =
1456*4882a593Smuzhiyun {
1457*4882a593Smuzhiyun 	{ 0x98, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1458*4882a593Smuzhiyun 	{ 0x98, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1459*4882a593Smuzhiyun 	{ 0x104, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1460*4882a593Smuzhiyun 	{ 0x104, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1461*4882a593Smuzhiyun 	{ 0x110, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1462*4882a593Smuzhiyun 	{ 0x110, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1463*4882a593Smuzhiyun 	{ 0x14f, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1464*4882a593Smuzhiyun 	{ 0x14f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1465*4882a593Smuzhiyun 	{ 0x8c, 0x0001fffe, 1, 0x6, SISLANDS_CACCONFIG_CGIND },
1466*4882a593Smuzhiyun 	{ 0x8c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1467*4882a593Smuzhiyun 	{ 0x143, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1468*4882a593Smuzhiyun 	{ 0x143, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1469*4882a593Smuzhiyun 	{ 0x11c, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1470*4882a593Smuzhiyun 	{ 0x11c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1471*4882a593Smuzhiyun 	{ 0x11f, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1472*4882a593Smuzhiyun 	{ 0x11f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1473*4882a593Smuzhiyun 	{ 0x164, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1474*4882a593Smuzhiyun 	{ 0x164, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1475*4882a593Smuzhiyun 	{ 0x167, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1476*4882a593Smuzhiyun 	{ 0x167, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1477*4882a593Smuzhiyun 	{ 0x16a, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1478*4882a593Smuzhiyun 	{ 0x16a, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1479*4882a593Smuzhiyun 	{ 0x15e, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1480*4882a593Smuzhiyun 	{ 0x15e, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1481*4882a593Smuzhiyun 	{ 0x161, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1482*4882a593Smuzhiyun 	{ 0x161, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1483*4882a593Smuzhiyun 	{ 0x15b, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1484*4882a593Smuzhiyun 	{ 0x15b, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1485*4882a593Smuzhiyun 	{ 0x16d, 0x0001fffe, 1, 0x2, SISLANDS_CACCONFIG_CGIND },
1486*4882a593Smuzhiyun 	{ 0x16d, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1487*4882a593Smuzhiyun 	{ 0x170, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1488*4882a593Smuzhiyun 	{ 0x170, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1489*4882a593Smuzhiyun 	{ 0x173, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1490*4882a593Smuzhiyun 	{ 0x173, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1491*4882a593Smuzhiyun 	{ 0x176, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1492*4882a593Smuzhiyun 	{ 0x176, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1493*4882a593Smuzhiyun 	{ 0x179, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1494*4882a593Smuzhiyun 	{ 0x179, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1495*4882a593Smuzhiyun 	{ 0x17c, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1496*4882a593Smuzhiyun 	{ 0x17c, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1497*4882a593Smuzhiyun 	{ 0x17f, 0x0001fffe, 1, 0x1, SISLANDS_CACCONFIG_CGIND },
1498*4882a593Smuzhiyun 	{ 0x17f, 0x00000001, 0, 0x1, SISLANDS_CACCONFIG_CGIND },
1499*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1500*4882a593Smuzhiyun };
1501*4882a593Smuzhiyun 
1502*4882a593Smuzhiyun static const struct si_cac_config_reg cac_override_oland[] =
1503*4882a593Smuzhiyun {
1504*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1505*4882a593Smuzhiyun };
1506*4882a593Smuzhiyun 
1507*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_oland =
1508*4882a593Smuzhiyun {
1509*4882a593Smuzhiyun 	((1 << 16) | 0x6993),
1510*4882a593Smuzhiyun 	5,
1511*4882a593Smuzhiyun 	0,
1512*4882a593Smuzhiyun 	7,
1513*4882a593Smuzhiyun 	105,
1514*4882a593Smuzhiyun 	{
1515*4882a593Smuzhiyun 		0UL,
1516*4882a593Smuzhiyun 		0UL,
1517*4882a593Smuzhiyun 		7194395UL,
1518*4882a593Smuzhiyun 		309631529UL,
1519*4882a593Smuzhiyun 		-1270850L,
1520*4882a593Smuzhiyun 		4513710L,
1521*4882a593Smuzhiyun 		100
1522*4882a593Smuzhiyun 	},
1523*4882a593Smuzhiyun 	117830498UL,
1524*4882a593Smuzhiyun 	12,
1525*4882a593Smuzhiyun 	{
1526*4882a593Smuzhiyun 		0,
1527*4882a593Smuzhiyun 		0,
1528*4882a593Smuzhiyun 		0,
1529*4882a593Smuzhiyun 		0,
1530*4882a593Smuzhiyun 		0,
1531*4882a593Smuzhiyun 		0,
1532*4882a593Smuzhiyun 		0,
1533*4882a593Smuzhiyun 		0
1534*4882a593Smuzhiyun 	},
1535*4882a593Smuzhiyun 	true
1536*4882a593Smuzhiyun };
1537*4882a593Smuzhiyun 
1538*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_mars_pro =
1539*4882a593Smuzhiyun {
1540*4882a593Smuzhiyun 	((1 << 16) | 0x6993),
1541*4882a593Smuzhiyun 	5,
1542*4882a593Smuzhiyun 	0,
1543*4882a593Smuzhiyun 	7,
1544*4882a593Smuzhiyun 	105,
1545*4882a593Smuzhiyun 	{
1546*4882a593Smuzhiyun 		0UL,
1547*4882a593Smuzhiyun 		0UL,
1548*4882a593Smuzhiyun 		7194395UL,
1549*4882a593Smuzhiyun 		309631529UL,
1550*4882a593Smuzhiyun 		-1270850L,
1551*4882a593Smuzhiyun 		4513710L,
1552*4882a593Smuzhiyun 		100
1553*4882a593Smuzhiyun 	},
1554*4882a593Smuzhiyun 	117830498UL,
1555*4882a593Smuzhiyun 	12,
1556*4882a593Smuzhiyun 	{
1557*4882a593Smuzhiyun 		0,
1558*4882a593Smuzhiyun 		0,
1559*4882a593Smuzhiyun 		0,
1560*4882a593Smuzhiyun 		0,
1561*4882a593Smuzhiyun 		0,
1562*4882a593Smuzhiyun 		0,
1563*4882a593Smuzhiyun 		0,
1564*4882a593Smuzhiyun 		0
1565*4882a593Smuzhiyun 	},
1566*4882a593Smuzhiyun 	true
1567*4882a593Smuzhiyun };
1568*4882a593Smuzhiyun 
1569*4882a593Smuzhiyun static const struct si_dte_data dte_data_oland =
1570*4882a593Smuzhiyun {
1571*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
1572*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0 },
1573*4882a593Smuzhiyun 	0,
1574*4882a593Smuzhiyun 	0,
1575*4882a593Smuzhiyun 	0,
1576*4882a593Smuzhiyun 	0,
1577*4882a593Smuzhiyun 	0,
1578*4882a593Smuzhiyun 	0,
1579*4882a593Smuzhiyun 	0,
1580*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1581*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1582*4882a593Smuzhiyun 	{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
1583*4882a593Smuzhiyun 	0,
1584*4882a593Smuzhiyun 	false
1585*4882a593Smuzhiyun };
1586*4882a593Smuzhiyun 
1587*4882a593Smuzhiyun static const struct si_dte_data dte_data_mars_pro =
1588*4882a593Smuzhiyun {
1589*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
1590*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
1591*4882a593Smuzhiyun 	5,
1592*4882a593Smuzhiyun 	55000,
1593*4882a593Smuzhiyun 	105,
1594*4882a593Smuzhiyun 	0xA,
1595*4882a593Smuzhiyun 	1,
1596*4882a593Smuzhiyun 	0,
1597*4882a593Smuzhiyun 	0x10,
1598*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
1599*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
1600*4882a593Smuzhiyun 	{ 0xF627, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1601*4882a593Smuzhiyun 	90,
1602*4882a593Smuzhiyun 	true
1603*4882a593Smuzhiyun };
1604*4882a593Smuzhiyun 
1605*4882a593Smuzhiyun static const struct si_dte_data dte_data_sun_xt =
1606*4882a593Smuzhiyun {
1607*4882a593Smuzhiyun 	{ 0x1E8480, 0x3D0900, 0x989680, 0x2625A00, 0x0 },
1608*4882a593Smuzhiyun 	{ 0x0, 0x0, 0x0, 0x0, 0x0 },
1609*4882a593Smuzhiyun 	5,
1610*4882a593Smuzhiyun 	55000,
1611*4882a593Smuzhiyun 	105,
1612*4882a593Smuzhiyun 	0xA,
1613*4882a593Smuzhiyun 	1,
1614*4882a593Smuzhiyun 	0,
1615*4882a593Smuzhiyun 	0x10,
1616*4882a593Smuzhiyun 	{ 0x96, 0xB4, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF },
1617*4882a593Smuzhiyun 	{ 0x895440, 0x3D0900, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680, 0x989680 },
1618*4882a593Smuzhiyun 	{ 0xD555, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 },
1619*4882a593Smuzhiyun 	90,
1620*4882a593Smuzhiyun 	true
1621*4882a593Smuzhiyun };
1622*4882a593Smuzhiyun 
1623*4882a593Smuzhiyun 
1624*4882a593Smuzhiyun static const struct si_cac_config_reg cac_weights_hainan[] =
1625*4882a593Smuzhiyun {
1626*4882a593Smuzhiyun 	{ 0x0, 0x0000ffff, 0, 0x2d9, SISLANDS_CACCONFIG_CGIND },
1627*4882a593Smuzhiyun 	{ 0x0, 0xffff0000, 16, 0x22b, SISLANDS_CACCONFIG_CGIND },
1628*4882a593Smuzhiyun 	{ 0x1, 0x0000ffff, 0, 0x21c, SISLANDS_CACCONFIG_CGIND },
1629*4882a593Smuzhiyun 	{ 0x1, 0xffff0000, 16, 0x1dc, SISLANDS_CACCONFIG_CGIND },
1630*4882a593Smuzhiyun 	{ 0x2, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1631*4882a593Smuzhiyun 	{ 0x3, 0x0000ffff, 0, 0x24e, SISLANDS_CACCONFIG_CGIND },
1632*4882a593Smuzhiyun 	{ 0x3, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1633*4882a593Smuzhiyun 	{ 0x4, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1634*4882a593Smuzhiyun 	{ 0x4, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1635*4882a593Smuzhiyun 	{ 0x5, 0x0000ffff, 0, 0x35e, SISLANDS_CACCONFIG_CGIND },
1636*4882a593Smuzhiyun 	{ 0x5, 0xffff0000, 16, 0x1143, SISLANDS_CACCONFIG_CGIND },
1637*4882a593Smuzhiyun 	{ 0x6, 0x0000ffff, 0, 0xe17, SISLANDS_CACCONFIG_CGIND },
1638*4882a593Smuzhiyun 	{ 0x6, 0xffff0000, 16, 0x441, SISLANDS_CACCONFIG_CGIND },
1639*4882a593Smuzhiyun 	{ 0x18f, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1640*4882a593Smuzhiyun 	{ 0x7, 0x0000ffff, 0, 0x28b, SISLANDS_CACCONFIG_CGIND },
1641*4882a593Smuzhiyun 	{ 0x7, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1642*4882a593Smuzhiyun 	{ 0x8, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1643*4882a593Smuzhiyun 	{ 0x8, 0xffff0000, 16, 0xabe, SISLANDS_CACCONFIG_CGIND },
1644*4882a593Smuzhiyun 	{ 0x9, 0x0000ffff, 0, 0xf11, SISLANDS_CACCONFIG_CGIND },
1645*4882a593Smuzhiyun 	{ 0xa, 0x0000ffff, 0, 0x907, SISLANDS_CACCONFIG_CGIND },
1646*4882a593Smuzhiyun 	{ 0xb, 0x0000ffff, 0, 0xb45, SISLANDS_CACCONFIG_CGIND },
1647*4882a593Smuzhiyun 	{ 0xb, 0xffff0000, 16, 0xd1e, SISLANDS_CACCONFIG_CGIND },
1648*4882a593Smuzhiyun 	{ 0xc, 0x0000ffff, 0, 0xa2c, SISLANDS_CACCONFIG_CGIND },
1649*4882a593Smuzhiyun 	{ 0xd, 0x0000ffff, 0, 0x62, SISLANDS_CACCONFIG_CGIND },
1650*4882a593Smuzhiyun 	{ 0xd, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1651*4882a593Smuzhiyun 	{ 0xe, 0x0000ffff, 0, 0x1f3, SISLANDS_CACCONFIG_CGIND },
1652*4882a593Smuzhiyun 	{ 0xf, 0x0000ffff, 0, 0x42, SISLANDS_CACCONFIG_CGIND },
1653*4882a593Smuzhiyun 	{ 0xf, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1654*4882a593Smuzhiyun 	{ 0x10, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1655*4882a593Smuzhiyun 	{ 0x10, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1656*4882a593Smuzhiyun 	{ 0x11, 0x0000ffff, 0, 0x709, SISLANDS_CACCONFIG_CGIND },
1657*4882a593Smuzhiyun 	{ 0x11, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1658*4882a593Smuzhiyun 	{ 0x12, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1659*4882a593Smuzhiyun 	{ 0x13, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1660*4882a593Smuzhiyun 	{ 0x13, 0xffff0000, 16, 0x3a, SISLANDS_CACCONFIG_CGIND },
1661*4882a593Smuzhiyun 	{ 0x14, 0x0000ffff, 0, 0x357, SISLANDS_CACCONFIG_CGIND },
1662*4882a593Smuzhiyun 	{ 0x15, 0x0000ffff, 0, 0x9f, SISLANDS_CACCONFIG_CGIND },
1663*4882a593Smuzhiyun 	{ 0x15, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1664*4882a593Smuzhiyun 	{ 0x4e, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1665*4882a593Smuzhiyun 	{ 0x16, 0x0000ffff, 0, 0x314, SISLANDS_CACCONFIG_CGIND },
1666*4882a593Smuzhiyun 	{ 0x16, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1667*4882a593Smuzhiyun 	{ 0x17, 0x0000ffff, 0, 0x6d, SISLANDS_CACCONFIG_CGIND },
1668*4882a593Smuzhiyun 	{ 0x18, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1669*4882a593Smuzhiyun 	{ 0x18, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1670*4882a593Smuzhiyun 	{ 0x19, 0x0000ffff, 0, 0x0, SISLANDS_CACCONFIG_CGIND },
1671*4882a593Smuzhiyun 	{ 0x19, 0xffff0000, 16, 0x0, SISLANDS_CACCONFIG_CGIND },
1672*4882a593Smuzhiyun 	{ 0x1a, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1673*4882a593Smuzhiyun 	{ 0x1a, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1674*4882a593Smuzhiyun 	{ 0x1b, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1675*4882a593Smuzhiyun 	{ 0x1b, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1676*4882a593Smuzhiyun 	{ 0x1c, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1677*4882a593Smuzhiyun 	{ 0x1c, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1678*4882a593Smuzhiyun 	{ 0x1d, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1679*4882a593Smuzhiyun 	{ 0x1d, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1680*4882a593Smuzhiyun 	{ 0x1e, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1681*4882a593Smuzhiyun 	{ 0x1e, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1682*4882a593Smuzhiyun 	{ 0x1f, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1683*4882a593Smuzhiyun 	{ 0x1f, 0xffff0000, 16, 0, SISLANDS_CACCONFIG_CGIND },
1684*4882a593Smuzhiyun 	{ 0x20, 0x0000ffff, 0, 0, SISLANDS_CACCONFIG_CGIND },
1685*4882a593Smuzhiyun 	{ 0x6d, 0x0000ffff, 0, 0x1b9, SISLANDS_CACCONFIG_CGIND },
1686*4882a593Smuzhiyun 	{ 0xFFFFFFFF }
1687*4882a593Smuzhiyun };
1688*4882a593Smuzhiyun 
1689*4882a593Smuzhiyun static const struct si_powertune_data powertune_data_hainan =
1690*4882a593Smuzhiyun {
1691*4882a593Smuzhiyun 	((1 << 16) | 0x6993),
1692*4882a593Smuzhiyun 	5,
1693*4882a593Smuzhiyun 	0,
1694*4882a593Smuzhiyun 	9,
1695*4882a593Smuzhiyun 	105,
1696*4882a593Smuzhiyun 	{
1697*4882a593Smuzhiyun 		0UL,
1698*4882a593Smuzhiyun 		0UL,
1699*4882a593Smuzhiyun 		7194395UL,
1700*4882a593Smuzhiyun 		309631529UL,
1701*4882a593Smuzhiyun 		-1270850L,
1702*4882a593Smuzhiyun 		4513710L,
1703*4882a593Smuzhiyun 		100
1704*4882a593Smuzhiyun 	},
1705*4882a593Smuzhiyun 	117830498UL,
1706*4882a593Smuzhiyun 	12,
1707*4882a593Smuzhiyun 	{
1708*4882a593Smuzhiyun 		0,
1709*4882a593Smuzhiyun 		0,
1710*4882a593Smuzhiyun 		0,
1711*4882a593Smuzhiyun 		0,
1712*4882a593Smuzhiyun 		0,
1713*4882a593Smuzhiyun 		0,
1714*4882a593Smuzhiyun 		0,
1715*4882a593Smuzhiyun 		0
1716*4882a593Smuzhiyun 	},
1717*4882a593Smuzhiyun 	true
1718*4882a593Smuzhiyun };
1719*4882a593Smuzhiyun 
1720*4882a593Smuzhiyun struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
1721*4882a593Smuzhiyun struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
1722*4882a593Smuzhiyun struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
1723*4882a593Smuzhiyun struct ni_ps *ni_get_ps(struct radeon_ps *rps);
1724*4882a593Smuzhiyun 
1725*4882a593Smuzhiyun extern int si_mc_load_microcode(struct radeon_device *rdev);
1726*4882a593Smuzhiyun extern void vce_v1_0_enable_mgcg(struct radeon_device *rdev, bool enable);
1727*4882a593Smuzhiyun 
1728*4882a593Smuzhiyun static int si_populate_voltage_value(struct radeon_device *rdev,
1729*4882a593Smuzhiyun 				     const struct atom_voltage_table *table,
1730*4882a593Smuzhiyun 				     u16 value, SISLANDS_SMC_VOLTAGE_VALUE *voltage);
1731*4882a593Smuzhiyun static int si_get_std_voltage_value(struct radeon_device *rdev,
1732*4882a593Smuzhiyun 				    SISLANDS_SMC_VOLTAGE_VALUE *voltage,
1733*4882a593Smuzhiyun 				    u16 *std_voltage);
1734*4882a593Smuzhiyun static int si_write_smc_soft_register(struct radeon_device *rdev,
1735*4882a593Smuzhiyun 				      u16 reg_offset, u32 value);
1736*4882a593Smuzhiyun static int si_convert_power_level_to_smc(struct radeon_device *rdev,
1737*4882a593Smuzhiyun 					 struct rv7xx_pl *pl,
1738*4882a593Smuzhiyun 					 SISLANDS_SMC_HW_PERFORMANCE_LEVEL *level);
1739*4882a593Smuzhiyun static int si_calculate_sclk_params(struct radeon_device *rdev,
1740*4882a593Smuzhiyun 				    u32 engine_clock,
1741*4882a593Smuzhiyun 				    SISLANDS_SMC_SCLK_VALUE *sclk);
1742*4882a593Smuzhiyun 
1743*4882a593Smuzhiyun static void si_thermal_start_smc_fan_control(struct radeon_device *rdev);
1744*4882a593Smuzhiyun static void si_fan_ctrl_set_default_mode(struct radeon_device *rdev);
1745*4882a593Smuzhiyun 
1746*4882a593Smuzhiyun static struct si_power_info *si_get_pi(struct radeon_device *rdev)
1747*4882a593Smuzhiyun {
1748*4882a593Smuzhiyun 	struct si_power_info *pi = rdev->pm.dpm.priv;
1749*4882a593Smuzhiyun 
1750*4882a593Smuzhiyun 	return pi;
1751*4882a593Smuzhiyun }
1752*4882a593Smuzhiyun 
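/*
 * Leakage model, evaluated in drm 32.32 fixed point:
 *   kt = e^((t_slope * vddc + t_intercept) * (T - t_ref))
 *   kv = av * e^(bv * vddc)
 *   leakage = i_leakage * kt * kv * vddc
 * The divisors suggest vddc in mV, T in millidegrees C and coefficients
 * pre-scaled by 1e8; the result is returned scaled by 1000.
 */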
1753*4882a593Smuzhiyun static void si_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
1754*4882a593Smuzhiyun 						     u16 v, s32 t, u32 ileakage, u32 *leakage)
1755*4882a593Smuzhiyun {
1756*4882a593Smuzhiyun 	s64 kt, kv, leakage_w, i_leakage, vddc;
1757*4882a593Smuzhiyun 	s64 temperature, t_slope, t_intercept, av, bv, t_ref;
1758*4882a593Smuzhiyun 	s64 tmp;
1759*4882a593Smuzhiyun 
1760*4882a593Smuzhiyun 	i_leakage = div64_s64(drm_int2fixp(ileakage), 100);
1761*4882a593Smuzhiyun 	vddc = div64_s64(drm_int2fixp(v), 1000);
1762*4882a593Smuzhiyun 	temperature = div64_s64(drm_int2fixp(t), 1000);
1763*4882a593Smuzhiyun 
1764*4882a593Smuzhiyun 	t_slope = div64_s64(drm_int2fixp(coeff->t_slope), 100000000);
1765*4882a593Smuzhiyun 	t_intercept = div64_s64(drm_int2fixp(coeff->t_intercept), 100000000);
1766*4882a593Smuzhiyun 	av = div64_s64(drm_int2fixp(coeff->av), 100000000);
1767*4882a593Smuzhiyun 	bv = div64_s64(drm_int2fixp(coeff->bv), 100000000);
1768*4882a593Smuzhiyun 	t_ref = drm_int2fixp(coeff->t_ref);
1769*4882a593Smuzhiyun 
1770*4882a593Smuzhiyun 	tmp = drm_fixp_mul(t_slope, vddc) + t_intercept;
1771*4882a593Smuzhiyun 	kt = drm_fixp_exp(drm_fixp_mul(tmp, temperature));
1772*4882a593Smuzhiyun 	kt = drm_fixp_div(kt, drm_fixp_exp(drm_fixp_mul(tmp, t_ref)));
1773*4882a593Smuzhiyun 	kv = drm_fixp_mul(av, drm_fixp_exp(drm_fixp_mul(bv, vddc)));
1774*4882a593Smuzhiyun 
1775*4882a593Smuzhiyun 	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
1776*4882a593Smuzhiyun 
1777*4882a593Smuzhiyun 	*leakage = drm_fixp2int(leakage_w * 1000);
1778*4882a593Smuzhiyun }
1779*4882a593Smuzhiyun 
1780*4882a593Smuzhiyun static void si_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
1781*4882a593Smuzhiyun 					     const struct ni_leakage_coeffients *coeff,
1782*4882a593Smuzhiyun 					     u16 v,
1783*4882a593Smuzhiyun 					     s32 t,
1784*4882a593Smuzhiyun 					     u32 i_leakage,
1785*4882a593Smuzhiyun 					     u32 *leakage)
1786*4882a593Smuzhiyun {
1787*4882a593Smuzhiyun 	si_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
1788*4882a593Smuzhiyun }
1789*4882a593Smuzhiyun 
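/*
 * Same leakage model as above, but with the temperature-dependent term
 * replaced by a fixed, pre-scaled kt taken from the powertune data.
 */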
1790*4882a593Smuzhiyun static void si_calculate_leakage_for_v_formula(const struct ni_leakage_coeffients *coeff,
1791*4882a593Smuzhiyun 					       const u32 fixed_kt, u16 v,
1792*4882a593Smuzhiyun 					       u32 ileakage, u32 *leakage)
1793*4882a593Smuzhiyun {
1794*4882a593Smuzhiyun 	s64 kt, kv, leakage_w, i_leakage, vddc;
1795*4882a593Smuzhiyun 
1796*4882a593Smuzhiyun 	i_leakage = div64_s64(drm_int2fixp(ileakage), 100);
1797*4882a593Smuzhiyun 	vddc = div64_s64(drm_int2fixp(v), 1000);
1798*4882a593Smuzhiyun 
1799*4882a593Smuzhiyun 	kt = div64_s64(drm_int2fixp(fixed_kt), 100000000);
1800*4882a593Smuzhiyun 	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 100000000),
1801*4882a593Smuzhiyun 			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 100000000), vddc)));
1802*4882a593Smuzhiyun 
1803*4882a593Smuzhiyun 	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
1804*4882a593Smuzhiyun 
1805*4882a593Smuzhiyun 	*leakage = drm_fixp2int(leakage_w * 1000);
1806*4882a593Smuzhiyun }
1807*4882a593Smuzhiyun 
1808*4882a593Smuzhiyun static void si_calculate_leakage_for_v(struct radeon_device *rdev,
1809*4882a593Smuzhiyun 				       const struct ni_leakage_coeffients *coeff,
1810*4882a593Smuzhiyun 				       const u32 fixed_kt,
1811*4882a593Smuzhiyun 				       u16 v,
1812*4882a593Smuzhiyun 				       u32 i_leakage,
1813*4882a593Smuzhiyun 				       u32 *leakage)
1814*4882a593Smuzhiyun {
1815*4882a593Smuzhiyun 	si_calculate_leakage_for_v_formula(coeff, fixed_kt, v, i_leakage, leakage);
1816*4882a593Smuzhiyun }
1817*4882a593Smuzhiyun 
1818*4882a593Smuzhiyun 
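/*
 * Rescale the DTE (digital temperature estimator) R coefficients and the
 * temperature-dependent R table from the board TDP and near-TDP limits.
 * If the near-TDP limit is zero or exceeds the TDP limit, the table is
 * left untouched and an error is logged.
 */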
1819*4882a593Smuzhiyun static void si_update_dte_from_pl2(struct radeon_device *rdev,
1820*4882a593Smuzhiyun 				   struct si_dte_data *dte_data)
1821*4882a593Smuzhiyun {
1822*4882a593Smuzhiyun 	u32 p_limit1 = rdev->pm.dpm.tdp_limit;
1823*4882a593Smuzhiyun 	u32 p_limit2 = rdev->pm.dpm.near_tdp_limit;
1824*4882a593Smuzhiyun 	u32 k = dte_data->k;
1825*4882a593Smuzhiyun 	u32 t_max = dte_data->max_t;
1826*4882a593Smuzhiyun 	u32 t_split[5] = { 10, 15, 20, 25, 30 };
1827*4882a593Smuzhiyun 	u32 t_0 = dte_data->t0;
1828*4882a593Smuzhiyun 	u32 i;
1829*4882a593Smuzhiyun 
1830*4882a593Smuzhiyun 	if (p_limit2 != 0 && p_limit2 <= p_limit1) {
1831*4882a593Smuzhiyun 		dte_data->tdep_count = 3;
1832*4882a593Smuzhiyun 
1833*4882a593Smuzhiyun 		for (i = 0; i < k; i++) {
1834*4882a593Smuzhiyun 			dte_data->r[i] =
1835*4882a593Smuzhiyun 				(t_split[i] * (t_max - t_0/(u32)1000) * (1 << 14)) /
1836*4882a593Smuzhiyun 				(p_limit2  * (u32)100);
1837*4882a593Smuzhiyun 		}
1838*4882a593Smuzhiyun 
1839*4882a593Smuzhiyun 		dte_data->tdep_r[1] = dte_data->r[4] * 2;
1840*4882a593Smuzhiyun 
1841*4882a593Smuzhiyun 		for (i = 2; i < SMC_SISLANDS_DTE_MAX_TEMPERATURE_DEPENDENT_ARRAY_SIZE; i++) {
1842*4882a593Smuzhiyun 			dte_data->tdep_r[i] = dte_data->r[4];
1843*4882a593Smuzhiyun 		}
1844*4882a593Smuzhiyun 	} else {
1845*4882a593Smuzhiyun 		DRM_ERROR("Invalid PL2! DTE will not be updated.\n");
1846*4882a593Smuzhiyun 	}
1847*4882a593Smuzhiyun }
1848*4882a593Smuzhiyun 
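/*
 * Select the per-ASIC CAC weights, local CAC config, PowerTune data and DTE
 * data based on chip family and PCI device ID, then derive the power
 * containment, CAC and SQ-ramping enable flags from those defaults.
 */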
1849*4882a593Smuzhiyun static void si_initialize_powertune_defaults(struct radeon_device *rdev)
1850*4882a593Smuzhiyun {
1851*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1852*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
1853*4882a593Smuzhiyun 	bool update_dte_from_pl2 = false;
1854*4882a593Smuzhiyun 
1855*4882a593Smuzhiyun 	if (rdev->family == CHIP_TAHITI) {
1856*4882a593Smuzhiyun 		si_pi->cac_weights = cac_weights_tahiti;
1857*4882a593Smuzhiyun 		si_pi->lcac_config = lcac_tahiti;
1858*4882a593Smuzhiyun 		si_pi->cac_override = cac_override_tahiti;
1859*4882a593Smuzhiyun 		si_pi->powertune_data = &powertune_data_tahiti;
1860*4882a593Smuzhiyun 		si_pi->dte_data = dte_data_tahiti;
1861*4882a593Smuzhiyun 
1862*4882a593Smuzhiyun 		switch (rdev->pdev->device) {
1863*4882a593Smuzhiyun 		case 0x6798:
1864*4882a593Smuzhiyun 			si_pi->dte_data.enable_dte_by_default = true;
1865*4882a593Smuzhiyun 			break;
1866*4882a593Smuzhiyun 		case 0x6799:
1867*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_new_zealand;
1868*4882a593Smuzhiyun 			break;
1869*4882a593Smuzhiyun 		case 0x6790:
1870*4882a593Smuzhiyun 		case 0x6791:
1871*4882a593Smuzhiyun 		case 0x6792:
1872*4882a593Smuzhiyun 		case 0x679E:
1873*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_aruba_pro;
1874*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1875*4882a593Smuzhiyun 			break;
1876*4882a593Smuzhiyun 		case 0x679B:
1877*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_malta;
1878*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1879*4882a593Smuzhiyun 			break;
1880*4882a593Smuzhiyun 		case 0x679A:
1881*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_tahiti_pro;
1882*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1883*4882a593Smuzhiyun 			break;
1884*4882a593Smuzhiyun 		default:
1885*4882a593Smuzhiyun 			if (si_pi->dte_data.enable_dte_by_default == true)
1886*4882a593Smuzhiyun 				DRM_ERROR("DTE is not enabled!\n");
1887*4882a593Smuzhiyun 			break;
1888*4882a593Smuzhiyun 		}
1889*4882a593Smuzhiyun 	} else if (rdev->family == CHIP_PITCAIRN) {
1890*4882a593Smuzhiyun 		switch (rdev->pdev->device) {
1891*4882a593Smuzhiyun 		case 0x6810:
1892*4882a593Smuzhiyun 		case 0x6818:
1893*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_pitcairn;
1894*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_pitcairn;
1895*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_pitcairn;
1896*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_pitcairn;
1897*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_curacao_xt;
1898*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1899*4882a593Smuzhiyun 			break;
1900*4882a593Smuzhiyun 		case 0x6819:
1901*4882a593Smuzhiyun 		case 0x6811:
1902*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_pitcairn;
1903*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_pitcairn;
1904*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_pitcairn;
1905*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_pitcairn;
1906*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_curacao_pro;
1907*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1908*4882a593Smuzhiyun 			break;
1909*4882a593Smuzhiyun 		case 0x6800:
1910*4882a593Smuzhiyun 		case 0x6806:
1911*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_pitcairn;
1912*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_pitcairn;
1913*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_pitcairn;
1914*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_pitcairn;
1915*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_neptune_xt;
1916*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1917*4882a593Smuzhiyun 			break;
1918*4882a593Smuzhiyun 		default:
1919*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_pitcairn;
1920*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_pitcairn;
1921*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_pitcairn;
1922*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_pitcairn;
1923*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_pitcairn;
1924*4882a593Smuzhiyun 			break;
1925*4882a593Smuzhiyun 		}
1926*4882a593Smuzhiyun 	} else if (rdev->family == CHIP_VERDE) {
1927*4882a593Smuzhiyun 		si_pi->lcac_config = lcac_cape_verde;
1928*4882a593Smuzhiyun 		si_pi->cac_override = cac_override_cape_verde;
1929*4882a593Smuzhiyun 		si_pi->powertune_data = &powertune_data_cape_verde;
1930*4882a593Smuzhiyun 
1931*4882a593Smuzhiyun 		switch (rdev->pdev->device) {
1932*4882a593Smuzhiyun 		case 0x683B:
1933*4882a593Smuzhiyun 		case 0x683F:
1934*4882a593Smuzhiyun 		case 0x6829:
1935*4882a593Smuzhiyun 		case 0x6835:
1936*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_cape_verde_pro;
1937*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_cape_verde;
1938*4882a593Smuzhiyun 			break;
1939*4882a593Smuzhiyun 		case 0x682C:
1940*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_cape_verde_pro;
1941*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_sun_xt;
1942*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1943*4882a593Smuzhiyun 			break;
1944*4882a593Smuzhiyun 		case 0x6825:
1945*4882a593Smuzhiyun 		case 0x6827:
1946*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_heathrow;
1947*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_cape_verde;
1948*4882a593Smuzhiyun 			break;
1949*4882a593Smuzhiyun 		case 0x6824:
1950*4882a593Smuzhiyun 		case 0x682D:
1951*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_chelsea_xt;
1952*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_cape_verde;
1953*4882a593Smuzhiyun 			break;
1954*4882a593Smuzhiyun 		case 0x682F:
1955*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_chelsea_pro;
1956*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_cape_verde;
1957*4882a593Smuzhiyun 			break;
1958*4882a593Smuzhiyun 		case 0x6820:
1959*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_heathrow;
1960*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_venus_xtx;
1961*4882a593Smuzhiyun 			break;
1962*4882a593Smuzhiyun 		case 0x6821:
1963*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_heathrow;
1964*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_venus_xt;
1965*4882a593Smuzhiyun 			break;
1966*4882a593Smuzhiyun 		case 0x6823:
1967*4882a593Smuzhiyun 		case 0x682B:
1968*4882a593Smuzhiyun 		case 0x6822:
1969*4882a593Smuzhiyun 		case 0x682A:
1970*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_chelsea_pro;
1971*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_venus_pro;
1972*4882a593Smuzhiyun 			break;
1973*4882a593Smuzhiyun 		default:
1974*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_cape_verde;
1975*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_cape_verde;
1976*4882a593Smuzhiyun 			break;
1977*4882a593Smuzhiyun 		}
1978*4882a593Smuzhiyun 	} else if (rdev->family == CHIP_OLAND) {
1979*4882a593Smuzhiyun 		switch (rdev->pdev->device) {
1980*4882a593Smuzhiyun 		case 0x6601:
1981*4882a593Smuzhiyun 		case 0x6621:
1982*4882a593Smuzhiyun 		case 0x6603:
1983*4882a593Smuzhiyun 		case 0x6605:
1984*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_mars_pro;
1985*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_mars_pro;
1986*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_oland;
1987*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_mars_pro;
1988*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_mars_pro;
1989*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
1990*4882a593Smuzhiyun 			break;
1991*4882a593Smuzhiyun 		case 0x6600:
1992*4882a593Smuzhiyun 		case 0x6606:
1993*4882a593Smuzhiyun 		case 0x6620:
1994*4882a593Smuzhiyun 		case 0x6604:
1995*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_mars_xt;
1996*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_mars_pro;
1997*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_oland;
1998*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_mars_pro;
1999*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_mars_pro;
2000*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
2001*4882a593Smuzhiyun 			break;
2002*4882a593Smuzhiyun 		case 0x6611:
2003*4882a593Smuzhiyun 		case 0x6613:
2004*4882a593Smuzhiyun 		case 0x6608:
2005*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_oland_pro;
2006*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_mars_pro;
2007*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_oland;
2008*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_mars_pro;
2009*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_mars_pro;
2010*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
2011*4882a593Smuzhiyun 			break;
2012*4882a593Smuzhiyun 		case 0x6610:
2013*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_oland_xt;
2014*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_mars_pro;
2015*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_oland;
2016*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_mars_pro;
2017*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_mars_pro;
2018*4882a593Smuzhiyun 			update_dte_from_pl2 = true;
2019*4882a593Smuzhiyun 			break;
2020*4882a593Smuzhiyun 		default:
2021*4882a593Smuzhiyun 			si_pi->cac_weights = cac_weights_oland;
2022*4882a593Smuzhiyun 			si_pi->lcac_config = lcac_oland;
2023*4882a593Smuzhiyun 			si_pi->cac_override = cac_override_oland;
2024*4882a593Smuzhiyun 			si_pi->powertune_data = &powertune_data_oland;
2025*4882a593Smuzhiyun 			si_pi->dte_data = dte_data_oland;
2026*4882a593Smuzhiyun 			break;
2027*4882a593Smuzhiyun 		}
2028*4882a593Smuzhiyun 	} else if (rdev->family == CHIP_HAINAN) {
2029*4882a593Smuzhiyun 		si_pi->cac_weights = cac_weights_hainan;
2030*4882a593Smuzhiyun 		si_pi->lcac_config = lcac_oland;
2031*4882a593Smuzhiyun 		si_pi->cac_override = cac_override_oland;
2032*4882a593Smuzhiyun 		si_pi->powertune_data = &powertune_data_hainan;
2033*4882a593Smuzhiyun 		si_pi->dte_data = dte_data_sun_xt;
2034*4882a593Smuzhiyun 		update_dte_from_pl2 = true;
2035*4882a593Smuzhiyun 	} else {
2036*4882a593Smuzhiyun 		DRM_ERROR("Unknown SI asic revision, failed to initialize PowerTune!\n");
2037*4882a593Smuzhiyun 		return;
2038*4882a593Smuzhiyun 	}
2039*4882a593Smuzhiyun 
2040*4882a593Smuzhiyun 	ni_pi->enable_power_containment = false;
2041*4882a593Smuzhiyun 	ni_pi->enable_cac = false;
2042*4882a593Smuzhiyun 	ni_pi->enable_sq_ramping = false;
2043*4882a593Smuzhiyun 	si_pi->enable_dte = false;
2044*4882a593Smuzhiyun 
2045*4882a593Smuzhiyun 	if (si_pi->powertune_data->enable_powertune_by_default) {
2046*4882a593Smuzhiyun 		ni_pi->enable_power_containment = true;
2047*4882a593Smuzhiyun 		ni_pi->enable_cac = true;
2048*4882a593Smuzhiyun 		if (si_pi->dte_data.enable_dte_by_default) {
2049*4882a593Smuzhiyun 			si_pi->enable_dte = true;
2050*4882a593Smuzhiyun 			if (update_dte_from_pl2)
2051*4882a593Smuzhiyun 				si_update_dte_from_pl2(rdev, &si_pi->dte_data);
2052*4882a593Smuzhiyun 
2053*4882a593Smuzhiyun 		}
2054*4882a593Smuzhiyun 		ni_pi->enable_sq_ramping = true;
2055*4882a593Smuzhiyun 	}
2056*4882a593Smuzhiyun 
2057*4882a593Smuzhiyun 	ni_pi->driver_calculate_cac_leakage = true;
2058*4882a593Smuzhiyun 	ni_pi->cac_configuration_required = true;
2059*4882a593Smuzhiyun 
2060*4882a593Smuzhiyun 	if (ni_pi->cac_configuration_required) {
2061*4882a593Smuzhiyun 		ni_pi->support_cac_long_term_average = true;
2062*4882a593Smuzhiyun 		si_pi->dyn_powertune_data.l2_lta_window_size =
2063*4882a593Smuzhiyun 			si_pi->powertune_data->l2_lta_window_size_default;
2064*4882a593Smuzhiyun 		si_pi->dyn_powertune_data.lts_truncate =
2065*4882a593Smuzhiyun 			si_pi->powertune_data->lts_truncate_default;
2066*4882a593Smuzhiyun 	} else {
2067*4882a593Smuzhiyun 		ni_pi->support_cac_long_term_average = false;
2068*4882a593Smuzhiyun 		si_pi->dyn_powertune_data.l2_lta_window_size = 0;
2069*4882a593Smuzhiyun 		si_pi->dyn_powertune_data.lts_truncate = 0;
2070*4882a593Smuzhiyun 	}
2071*4882a593Smuzhiyun 
2072*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.disable_uvd_powertune = false;
2073*4882a593Smuzhiyun }
2074*4882a593Smuzhiyun 
2075*4882a593Smuzhiyun static u32 si_get_smc_power_scaling_factor(struct radeon_device *rdev)
2076*4882a593Smuzhiyun {
2077*4882a593Smuzhiyun 	return 1;
2078*4882a593Smuzhiyun }
2079*4882a593Smuzhiyun 
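/*
 * Derive the CAC accumulation window time from CG_CAC_CTRL: the window field
 * packs two 16-bit factors whose product is scaled against the reference
 * clock reported by radeon_get_xclk().  Returns 0 if xclk is unavailable.
 */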
2080*4882a593Smuzhiyun static u32 si_calculate_cac_wintime(struct radeon_device *rdev)
2081*4882a593Smuzhiyun {
2082*4882a593Smuzhiyun 	u32 xclk;
2083*4882a593Smuzhiyun 	u32 wintime;
2084*4882a593Smuzhiyun 	u32 cac_window;
2085*4882a593Smuzhiyun 	u32 cac_window_size;
2086*4882a593Smuzhiyun 
2087*4882a593Smuzhiyun 	xclk = radeon_get_xclk(rdev);
2088*4882a593Smuzhiyun 
2089*4882a593Smuzhiyun 	if (xclk == 0)
2090*4882a593Smuzhiyun 		return 0;
2091*4882a593Smuzhiyun 
2092*4882a593Smuzhiyun 	cac_window = RREG32(CG_CAC_CTRL) & CAC_WINDOW_MASK;
2093*4882a593Smuzhiyun 	cac_window_size = ((cac_window & 0xFFFF0000) >> 16) * (cac_window & 0x0000FFFF);
2094*4882a593Smuzhiyun 
2095*4882a593Smuzhiyun 	wintime = (cac_window_size * 100) / xclk;
2096*4882a593Smuzhiyun 
2097*4882a593Smuzhiyun 	return wintime;
2098*4882a593Smuzhiyun }
2099*4882a593Smuzhiyun 
2100*4882a593Smuzhiyun static u32 si_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
2101*4882a593Smuzhiyun {
2102*4882a593Smuzhiyun 	return power_in_watts;
2103*4882a593Smuzhiyun }
2104*4882a593Smuzhiyun 
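/*
 * Apply the user TDP adjustment (a percentage) to the board TDP limit and
 * derive a matching near-TDP limit.  adjust_polarity selects whether the
 * adjustment raises or lowers the limits; the results are sanity-checked
 * against twice the stock TDP limit.
 */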
2105*4882a593Smuzhiyun static int si_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
2106*4882a593Smuzhiyun 					    bool adjust_polarity,
2107*4882a593Smuzhiyun 					    u32 tdp_adjustment,
2108*4882a593Smuzhiyun 					    u32 *tdp_limit,
2109*4882a593Smuzhiyun 					    u32 *near_tdp_limit)
2110*4882a593Smuzhiyun {
2111*4882a593Smuzhiyun 	u32 adjustment_delta, max_tdp_limit;
2112*4882a593Smuzhiyun 
2113*4882a593Smuzhiyun 	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
2114*4882a593Smuzhiyun 		return -EINVAL;
2115*4882a593Smuzhiyun 
2116*4882a593Smuzhiyun 	max_tdp_limit = ((100 + 100) * rdev->pm.dpm.tdp_limit) / 100;
2117*4882a593Smuzhiyun 
2118*4882a593Smuzhiyun 	if (adjust_polarity) {
2119*4882a593Smuzhiyun 		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
2120*4882a593Smuzhiyun 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit_adjusted + (*tdp_limit - rdev->pm.dpm.tdp_limit);
2121*4882a593Smuzhiyun 	} else {
2122*4882a593Smuzhiyun 		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
2123*4882a593Smuzhiyun 		adjustment_delta  = rdev->pm.dpm.tdp_limit - *tdp_limit;
2124*4882a593Smuzhiyun 		if (adjustment_delta < rdev->pm.dpm.near_tdp_limit_adjusted)
2125*4882a593Smuzhiyun 			*near_tdp_limit = rdev->pm.dpm.near_tdp_limit_adjusted - adjustment_delta;
2126*4882a593Smuzhiyun 		else
2127*4882a593Smuzhiyun 			*near_tdp_limit = 0;
2128*4882a593Smuzhiyun 	}
2129*4882a593Smuzhiyun 
2130*4882a593Smuzhiyun 	if ((*tdp_limit <= 0) || (*tdp_limit > max_tdp_limit))
2131*4882a593Smuzhiyun 		return -EINVAL;
2132*4882a593Smuzhiyun 	if ((*near_tdp_limit <= 0) || (*near_tdp_limit > *tdp_limit))
2133*4882a593Smuzhiyun 		return -EINVAL;
2134*4882a593Smuzhiyun 
2135*4882a593Smuzhiyun 	return 0;
2136*4882a593Smuzhiyun }
2137*4882a593Smuzhiyun 
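/*
 * Write the adjusted TDP, near-TDP and safe power limits (big endian, scaled
 * by 1000, presumably watts to milliwatts) into the SMC state table and, when
 * PPM is enabled, program the PAPM parameters from the platform ppm table.
 */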
2138*4882a593Smuzhiyun static int si_populate_smc_tdp_limits(struct radeon_device *rdev,
2139*4882a593Smuzhiyun 				      struct radeon_ps *radeon_state)
2140*4882a593Smuzhiyun {
2141*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2142*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2143*4882a593Smuzhiyun 
2144*4882a593Smuzhiyun 	if (ni_pi->enable_power_containment) {
2145*4882a593Smuzhiyun 		SISLANDS_SMC_STATETABLE *smc_table = &si_pi->smc_statetable;
2146*4882a593Smuzhiyun 		PP_SIslands_PAPMParameters *papm_parm;
2147*4882a593Smuzhiyun 		struct radeon_ppm_table *ppm = rdev->pm.dpm.dyn_state.ppm_table;
2148*4882a593Smuzhiyun 		u32 scaling_factor = si_get_smc_power_scaling_factor(rdev);
2149*4882a593Smuzhiyun 		u32 tdp_limit;
2150*4882a593Smuzhiyun 		u32 near_tdp_limit;
2151*4882a593Smuzhiyun 		int ret;
2152*4882a593Smuzhiyun 
2153*4882a593Smuzhiyun 		if (scaling_factor == 0)
2154*4882a593Smuzhiyun 			return -EINVAL;
2155*4882a593Smuzhiyun 
2156*4882a593Smuzhiyun 		memset(smc_table, 0, sizeof(SISLANDS_SMC_STATETABLE));
2157*4882a593Smuzhiyun 
2158*4882a593Smuzhiyun 		ret = si_calculate_adjusted_tdp_limits(rdev,
2159*4882a593Smuzhiyun 						       false, /* ??? */
2160*4882a593Smuzhiyun 						       rdev->pm.dpm.tdp_adjustment,
2161*4882a593Smuzhiyun 						       &tdp_limit,
2162*4882a593Smuzhiyun 						       &near_tdp_limit);
2163*4882a593Smuzhiyun 		if (ret)
2164*4882a593Smuzhiyun 			return ret;
2165*4882a593Smuzhiyun 
2166*4882a593Smuzhiyun 		smc_table->dpm2Params.TDPLimit =
2167*4882a593Smuzhiyun 			cpu_to_be32(si_scale_power_for_smc(tdp_limit, scaling_factor) * 1000);
2168*4882a593Smuzhiyun 		smc_table->dpm2Params.NearTDPLimit =
2169*4882a593Smuzhiyun 			cpu_to_be32(si_scale_power_for_smc(near_tdp_limit, scaling_factor) * 1000);
2170*4882a593Smuzhiyun 		smc_table->dpm2Params.SafePowerLimit =
2171*4882a593Smuzhiyun 			cpu_to_be32(si_scale_power_for_smc((near_tdp_limit * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, scaling_factor) * 1000);
2172*4882a593Smuzhiyun 
2173*4882a593Smuzhiyun 		ret = si_copy_bytes_to_smc(rdev,
2174*4882a593Smuzhiyun 					   (si_pi->state_table_start + offsetof(SISLANDS_SMC_STATETABLE, dpm2Params) +
2175*4882a593Smuzhiyun 						 offsetof(PP_SIslands_DPM2Parameters, TDPLimit)),
2176*4882a593Smuzhiyun 					   (u8 *)(&(smc_table->dpm2Params.TDPLimit)),
2177*4882a593Smuzhiyun 					   sizeof(u32) * 3,
2178*4882a593Smuzhiyun 					   si_pi->sram_end);
2179*4882a593Smuzhiyun 		if (ret)
2180*4882a593Smuzhiyun 			return ret;
2181*4882a593Smuzhiyun 
2182*4882a593Smuzhiyun 		if (si_pi->enable_ppm) {
2183*4882a593Smuzhiyun 			papm_parm = &si_pi->papm_parm;
2184*4882a593Smuzhiyun 			memset(papm_parm, 0, sizeof(PP_SIslands_PAPMParameters));
2185*4882a593Smuzhiyun 			papm_parm->NearTDPLimitTherm = cpu_to_be32(ppm->dgpu_tdp);
2186*4882a593Smuzhiyun 			papm_parm->dGPU_T_Limit = cpu_to_be32(ppm->tj_max);
2187*4882a593Smuzhiyun 			papm_parm->dGPU_T_Warning = cpu_to_be32(95);
2188*4882a593Smuzhiyun 			papm_parm->dGPU_T_Hysteresis = cpu_to_be32(5);
2189*4882a593Smuzhiyun 			papm_parm->PlatformPowerLimit = 0xffffffff;
2190*4882a593Smuzhiyun 			papm_parm->NearTDPLimitPAPM = 0xffffffff;
2191*4882a593Smuzhiyun 
2192*4882a593Smuzhiyun 			ret = si_copy_bytes_to_smc(rdev, si_pi->papm_cfg_table_start,
2193*4882a593Smuzhiyun 						   (u8 *)papm_parm,
2194*4882a593Smuzhiyun 						   sizeof(PP_SIslands_PAPMParameters),
2195*4882a593Smuzhiyun 						   si_pi->sram_end);
2196*4882a593Smuzhiyun 			if (ret)
2197*4882a593Smuzhiyun 				return ret;
2198*4882a593Smuzhiyun 		}
2199*4882a593Smuzhiyun 	}
2200*4882a593Smuzhiyun 	return 0;
2201*4882a593Smuzhiyun }
2202*4882a593Smuzhiyun 
2203*4882a593Smuzhiyun static int si_populate_smc_tdp_limits_2(struct radeon_device *rdev,
2204*4882a593Smuzhiyun 					struct radeon_ps *radeon_state)
2205*4882a593Smuzhiyun {
2206*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2207*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2208*4882a593Smuzhiyun 
2209*4882a593Smuzhiyun 	if (ni_pi->enable_power_containment) {
2210*4882a593Smuzhiyun 		SISLANDS_SMC_STATETABLE *smc_table = &si_pi->smc_statetable;
2211*4882a593Smuzhiyun 		u32 scaling_factor = si_get_smc_power_scaling_factor(rdev);
2212*4882a593Smuzhiyun 		int ret;
2213*4882a593Smuzhiyun 
2214*4882a593Smuzhiyun 		memset(smc_table, 0, sizeof(SISLANDS_SMC_STATETABLE));
2215*4882a593Smuzhiyun 
2216*4882a593Smuzhiyun 		smc_table->dpm2Params.NearTDPLimit =
2217*4882a593Smuzhiyun 			cpu_to_be32(si_scale_power_for_smc(rdev->pm.dpm.near_tdp_limit_adjusted, scaling_factor) * 1000);
2218*4882a593Smuzhiyun 		smc_table->dpm2Params.SafePowerLimit =
2219*4882a593Smuzhiyun 			cpu_to_be32(si_scale_power_for_smc((rdev->pm.dpm.near_tdp_limit_adjusted * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, scaling_factor) * 1000);
2220*4882a593Smuzhiyun 
2221*4882a593Smuzhiyun 		ret = si_copy_bytes_to_smc(rdev,
2222*4882a593Smuzhiyun 					   (si_pi->state_table_start +
2223*4882a593Smuzhiyun 					    offsetof(SISLANDS_SMC_STATETABLE, dpm2Params) +
2224*4882a593Smuzhiyun 					    offsetof(PP_SIslands_DPM2Parameters, NearTDPLimit)),
2225*4882a593Smuzhiyun 					   (u8 *)(&(smc_table->dpm2Params.NearTDPLimit)),
2226*4882a593Smuzhiyun 					   sizeof(u32) * 2,
2227*4882a593Smuzhiyun 					   si_pi->sram_end);
2228*4882a593Smuzhiyun 		if (ret)
2229*4882a593Smuzhiyun 			return ret;
2230*4882a593Smuzhiyun 	}
2231*4882a593Smuzhiyun 
2232*4882a593Smuzhiyun 	return 0;
2233*4882a593Smuzhiyun }
2234*4882a593Smuzhiyun 
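/*
 * Ratio of the power drawn at the current VDDC to that at the previous VDDC,
 * in 1/1024 units with a fixed margin:
 *   ratio = 1024 * curr^2 * (1000 + margin) / (1000 * prev^2)
 * Returns 0 if either voltage is zero or the ratio does not fit in 16 bits.
 */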
2235*4882a593Smuzhiyun static u16 si_calculate_power_efficiency_ratio(struct radeon_device *rdev,
2236*4882a593Smuzhiyun 					       const u16 prev_std_vddc,
2237*4882a593Smuzhiyun 					       const u16 curr_std_vddc)
2238*4882a593Smuzhiyun {
2239*4882a593Smuzhiyun 	u64 margin = (u64)SISLANDS_DPM2_PWREFFICIENCYRATIO_MARGIN;
2240*4882a593Smuzhiyun 	u64 prev_vddc = (u64)prev_std_vddc;
2241*4882a593Smuzhiyun 	u64 curr_vddc = (u64)curr_std_vddc;
2242*4882a593Smuzhiyun 	u64 pwr_efficiency_ratio, n, d;
2243*4882a593Smuzhiyun 
2244*4882a593Smuzhiyun 	if ((prev_vddc == 0) || (curr_vddc == 0))
2245*4882a593Smuzhiyun 		return 0;
2246*4882a593Smuzhiyun 
2247*4882a593Smuzhiyun 	n = div64_u64((u64)1024 * curr_vddc * curr_vddc * ((u64)1000 + margin), (u64)1000);
2248*4882a593Smuzhiyun 	d = prev_vddc * prev_vddc;
2249*4882a593Smuzhiyun 	pwr_efficiency_ratio = div64_u64(n, d);
2250*4882a593Smuzhiyun 
2251*4882a593Smuzhiyun 	if (pwr_efficiency_ratio > (u64)0xFFFF)
2252*4882a593Smuzhiyun 		return 0;
2253*4882a593Smuzhiyun 
2254*4882a593Smuzhiyun 	return (u16)pwr_efficiency_ratio;
2255*4882a593Smuzhiyun }
2256*4882a593Smuzhiyun 
2257*4882a593Smuzhiyun static bool si_should_disable_uvd_powertune(struct radeon_device *rdev,
2258*4882a593Smuzhiyun 					    struct radeon_ps *radeon_state)
2259*4882a593Smuzhiyun {
2260*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2261*4882a593Smuzhiyun 
2262*4882a593Smuzhiyun 	if (si_pi->dyn_powertune_data.disable_uvd_powertune &&
2263*4882a593Smuzhiyun 	    radeon_state->vclk && radeon_state->dclk)
2264*4882a593Smuzhiyun 		return true;
2265*4882a593Smuzhiyun 
2266*4882a593Smuzhiyun 	return false;
2267*4882a593Smuzhiyun }
2268*4882a593Smuzhiyun 
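/*
 * Fill the DPM2 power-containment fields (MaxPS pulse skipping, near-TDP
 * decrement, above/below-safe increments and the power efficiency ratio)
 * for every performance level above the first; level 0 is left at zero.
 */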
2269*4882a593Smuzhiyun static int si_populate_power_containment_values(struct radeon_device *rdev,
2270*4882a593Smuzhiyun 						struct radeon_ps *radeon_state,
2271*4882a593Smuzhiyun 						SISLANDS_SMC_SWSTATE *smc_state)
2272*4882a593Smuzhiyun {
2273*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2274*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2275*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
2276*4882a593Smuzhiyun 	SISLANDS_SMC_VOLTAGE_VALUE vddc;
2277*4882a593Smuzhiyun 	u32 prev_sclk;
2278*4882a593Smuzhiyun 	u32 max_sclk;
2279*4882a593Smuzhiyun 	u32 min_sclk;
2280*4882a593Smuzhiyun 	u16 prev_std_vddc;
2281*4882a593Smuzhiyun 	u16 curr_std_vddc;
2282*4882a593Smuzhiyun 	int i;
2283*4882a593Smuzhiyun 	u16 pwr_efficiency_ratio;
2284*4882a593Smuzhiyun 	u8 max_ps_percent;
2285*4882a593Smuzhiyun 	bool disable_uvd_power_tune;
2286*4882a593Smuzhiyun 	int ret;
2287*4882a593Smuzhiyun 
2288*4882a593Smuzhiyun 	if (ni_pi->enable_power_containment == false)
2289*4882a593Smuzhiyun 		return 0;
2290*4882a593Smuzhiyun 
2291*4882a593Smuzhiyun 	if (state->performance_level_count == 0)
2292*4882a593Smuzhiyun 		return -EINVAL;
2293*4882a593Smuzhiyun 
2294*4882a593Smuzhiyun 	if (smc_state->levelCount != state->performance_level_count)
2295*4882a593Smuzhiyun 		return -EINVAL;
2296*4882a593Smuzhiyun 
2297*4882a593Smuzhiyun 	disable_uvd_power_tune = si_should_disable_uvd_powertune(rdev, radeon_state);
2298*4882a593Smuzhiyun 
2299*4882a593Smuzhiyun 	smc_state->levels[0].dpm2.MaxPS = 0;
2300*4882a593Smuzhiyun 	smc_state->levels[0].dpm2.NearTDPDec = 0;
2301*4882a593Smuzhiyun 	smc_state->levels[0].dpm2.AboveSafeInc = 0;
2302*4882a593Smuzhiyun 	smc_state->levels[0].dpm2.BelowSafeInc = 0;
2303*4882a593Smuzhiyun 	smc_state->levels[0].dpm2.PwrEfficiencyRatio = 0;
2304*4882a593Smuzhiyun 
2305*4882a593Smuzhiyun 	for (i = 1; i < state->performance_level_count; i++) {
2306*4882a593Smuzhiyun 		prev_sclk = state->performance_levels[i-1].sclk;
2307*4882a593Smuzhiyun 		max_sclk  = state->performance_levels[i].sclk;
2308*4882a593Smuzhiyun 		if (i == 1)
2309*4882a593Smuzhiyun 			max_ps_percent = SISLANDS_DPM2_MAXPS_PERCENT_M;
2310*4882a593Smuzhiyun 		else
2311*4882a593Smuzhiyun 			max_ps_percent = SISLANDS_DPM2_MAXPS_PERCENT_H;
2312*4882a593Smuzhiyun 
2313*4882a593Smuzhiyun 		if (prev_sclk > max_sclk)
2314*4882a593Smuzhiyun 			return -EINVAL;
2315*4882a593Smuzhiyun 
2316*4882a593Smuzhiyun 		if ((max_ps_percent == 0) ||
2317*4882a593Smuzhiyun 		    (prev_sclk == max_sclk) ||
2318*4882a593Smuzhiyun 		    disable_uvd_power_tune) {
2319*4882a593Smuzhiyun 			min_sclk = max_sclk;
2320*4882a593Smuzhiyun 		} else if (i == 1) {
2321*4882a593Smuzhiyun 			min_sclk = prev_sclk;
2322*4882a593Smuzhiyun 		} else {
2323*4882a593Smuzhiyun 			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2324*4882a593Smuzhiyun 		}
2325*4882a593Smuzhiyun 
2326*4882a593Smuzhiyun 		if (min_sclk < state->performance_levels[0].sclk)
2327*4882a593Smuzhiyun 			min_sclk = state->performance_levels[0].sclk;
2328*4882a593Smuzhiyun 
2329*4882a593Smuzhiyun 		if (min_sclk == 0)
2330*4882a593Smuzhiyun 			return -EINVAL;
2331*4882a593Smuzhiyun 
2332*4882a593Smuzhiyun 		ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2333*4882a593Smuzhiyun 						state->performance_levels[i-1].vddc, &vddc);
2334*4882a593Smuzhiyun 		if (ret)
2335*4882a593Smuzhiyun 			return ret;
2336*4882a593Smuzhiyun 
2337*4882a593Smuzhiyun 		ret = si_get_std_voltage_value(rdev, &vddc, &prev_std_vddc);
2338*4882a593Smuzhiyun 		if (ret)
2339*4882a593Smuzhiyun 			return ret;
2340*4882a593Smuzhiyun 
2341*4882a593Smuzhiyun 		ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2342*4882a593Smuzhiyun 						state->performance_levels[i].vddc, &vddc);
2343*4882a593Smuzhiyun 		if (ret)
2344*4882a593Smuzhiyun 			return ret;
2345*4882a593Smuzhiyun 
2346*4882a593Smuzhiyun 		ret = si_get_std_voltage_value(rdev, &vddc, &curr_std_vddc);
2347*4882a593Smuzhiyun 		if (ret)
2348*4882a593Smuzhiyun 			return ret;
2349*4882a593Smuzhiyun 
2350*4882a593Smuzhiyun 		pwr_efficiency_ratio = si_calculate_power_efficiency_ratio(rdev,
2351*4882a593Smuzhiyun 									   prev_std_vddc, curr_std_vddc);
2352*4882a593Smuzhiyun 
2353*4882a593Smuzhiyun 		smc_state->levels[i].dpm2.MaxPS = (u8)((SISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2354*4882a593Smuzhiyun 		smc_state->levels[i].dpm2.NearTDPDec = SISLANDS_DPM2_NEAR_TDP_DEC;
2355*4882a593Smuzhiyun 		smc_state->levels[i].dpm2.AboveSafeInc = SISLANDS_DPM2_ABOVE_SAFE_INC;
2356*4882a593Smuzhiyun 		smc_state->levels[i].dpm2.BelowSafeInc = SISLANDS_DPM2_BELOW_SAFE_INC;
2357*4882a593Smuzhiyun 		smc_state->levels[i].dpm2.PwrEfficiencyRatio = cpu_to_be16(pwr_efficiency_ratio);
2358*4882a593Smuzhiyun 	}
2359*4882a593Smuzhiyun 
2360*4882a593Smuzhiyun 	return 0;
2361*4882a593Smuzhiyun }
2362*4882a593Smuzhiyun 
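/*
 * Program per-level SQ power throttling.  Ramping is applied only to levels
 * at or above sq_ramping_threshold and only if all of the fixed ramp
 * parameters fit their register fields; otherwise the throttle fields are
 * filled with their full masks.
 */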
2363*4882a593Smuzhiyun static int si_populate_sq_ramping_values(struct radeon_device *rdev,
2364*4882a593Smuzhiyun 					 struct radeon_ps *radeon_state,
2365*4882a593Smuzhiyun 					 SISLANDS_SMC_SWSTATE *smc_state)
2366*4882a593Smuzhiyun {
2367*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2368*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
2369*4882a593Smuzhiyun 	u32 sq_power_throttle, sq_power_throttle2;
2370*4882a593Smuzhiyun 	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2371*4882a593Smuzhiyun 	int i;
2372*4882a593Smuzhiyun 
2373*4882a593Smuzhiyun 	if (state->performance_level_count == 0)
2374*4882a593Smuzhiyun 		return -EINVAL;
2375*4882a593Smuzhiyun 
2376*4882a593Smuzhiyun 	if (smc_state->levelCount != state->performance_level_count)
2377*4882a593Smuzhiyun 		return -EINVAL;
2378*4882a593Smuzhiyun 
2379*4882a593Smuzhiyun 	if (rdev->pm.dpm.sq_ramping_threshold == 0)
2380*4882a593Smuzhiyun 		return -EINVAL;
2381*4882a593Smuzhiyun 
2382*4882a593Smuzhiyun 	if (SISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2383*4882a593Smuzhiyun 		enable_sq_ramping = false;
2384*4882a593Smuzhiyun 
2385*4882a593Smuzhiyun 	if (SISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2386*4882a593Smuzhiyun 		enable_sq_ramping = false;
2387*4882a593Smuzhiyun 
2388*4882a593Smuzhiyun 	if (SISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2389*4882a593Smuzhiyun 		enable_sq_ramping = false;
2390*4882a593Smuzhiyun 
2391*4882a593Smuzhiyun 	if (SISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2392*4882a593Smuzhiyun 		enable_sq_ramping = false;
2393*4882a593Smuzhiyun 
2394*4882a593Smuzhiyun 	if (SISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2395*4882a593Smuzhiyun 		enable_sq_ramping = false;
2396*4882a593Smuzhiyun 
2397*4882a593Smuzhiyun 	for (i = 0; i < state->performance_level_count; i++) {
2398*4882a593Smuzhiyun 		sq_power_throttle = 0;
2399*4882a593Smuzhiyun 		sq_power_throttle2 = 0;
2400*4882a593Smuzhiyun 
2401*4882a593Smuzhiyun 		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2402*4882a593Smuzhiyun 		    enable_sq_ramping) {
2403*4882a593Smuzhiyun 			sq_power_throttle |= MAX_POWER(SISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2404*4882a593Smuzhiyun 			sq_power_throttle |= MIN_POWER(SISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2405*4882a593Smuzhiyun 			sq_power_throttle2 |= MAX_POWER_DELTA(SISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2406*4882a593Smuzhiyun 			sq_power_throttle2 |= STI_SIZE(SISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2407*4882a593Smuzhiyun 			sq_power_throttle2 |= LTI_RATIO(SISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2408*4882a593Smuzhiyun 		} else {
2409*4882a593Smuzhiyun 			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2410*4882a593Smuzhiyun 			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2411*4882a593Smuzhiyun 		}
2412*4882a593Smuzhiyun 
2413*4882a593Smuzhiyun 		smc_state->levels[i].SQPowerThrottle = cpu_to_be32(sq_power_throttle);
2414*4882a593Smuzhiyun 		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2415*4882a593Smuzhiyun 	}
2416*4882a593Smuzhiyun 
2417*4882a593Smuzhiyun 	return 0;
2418*4882a593Smuzhiyun }
2419*4882a593Smuzhiyun 
2420*4882a593Smuzhiyun static int si_enable_power_containment(struct radeon_device *rdev,
2421*4882a593Smuzhiyun 				       struct radeon_ps *radeon_new_state,
2422*4882a593Smuzhiyun 				       bool enable)
2423*4882a593Smuzhiyun {
2424*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2425*4882a593Smuzhiyun 	PPSMC_Result smc_result;
2426*4882a593Smuzhiyun 	int ret = 0;
2427*4882a593Smuzhiyun 
2428*4882a593Smuzhiyun 	if (ni_pi->enable_power_containment) {
2429*4882a593Smuzhiyun 		if (enable) {
2430*4882a593Smuzhiyun 			if (!si_should_disable_uvd_powertune(rdev, radeon_new_state)) {
2431*4882a593Smuzhiyun 				smc_result = si_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2432*4882a593Smuzhiyun 				if (smc_result != PPSMC_Result_OK) {
2433*4882a593Smuzhiyun 					ret = -EINVAL;
2434*4882a593Smuzhiyun 					ni_pi->pc_enabled = false;
2435*4882a593Smuzhiyun 				} else {
2436*4882a593Smuzhiyun 					ni_pi->pc_enabled = true;
2437*4882a593Smuzhiyun 				}
2438*4882a593Smuzhiyun 			}
2439*4882a593Smuzhiyun 		} else {
2440*4882a593Smuzhiyun 			smc_result = si_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2441*4882a593Smuzhiyun 			if (smc_result != PPSMC_Result_OK)
2442*4882a593Smuzhiyun 				ret = -EINVAL;
2443*4882a593Smuzhiyun 			ni_pi->pc_enabled = false;
2444*4882a593Smuzhiyun 		}
2445*4882a593Smuzhiyun 	}
2446*4882a593Smuzhiyun 
2447*4882a593Smuzhiyun 	return ret;
2448*4882a593Smuzhiyun }
2449*4882a593Smuzhiyun 
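/*
 * Build the big-endian Smc_SIslands_DTE_Configuration block from the selected
 * si_dte_data (filter stages plus the temperature-dependent tau/R tables) and
 * copy it to SMC RAM at dte_table_start.  If DTE is not enabled this is a
 * no-op that returns success.
 */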
2450*4882a593Smuzhiyun static int si_initialize_smc_dte_tables(struct radeon_device *rdev)
2451*4882a593Smuzhiyun {
2452*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2453*4882a593Smuzhiyun 	int ret = 0;
2454*4882a593Smuzhiyun 	struct si_dte_data *dte_data = &si_pi->dte_data;
2455*4882a593Smuzhiyun 	Smc_SIslands_DTE_Configuration *dte_tables = NULL;
2456*4882a593Smuzhiyun 	u32 table_size;
2457*4882a593Smuzhiyun 	u8 tdep_count;
2458*4882a593Smuzhiyun 	u32 i;
2459*4882a593Smuzhiyun 
2460*4882a593Smuzhiyun 	if (dte_data == NULL)
2461*4882a593Smuzhiyun 		si_pi->enable_dte = false;
2462*4882a593Smuzhiyun 
2463*4882a593Smuzhiyun 	if (si_pi->enable_dte == false)
2464*4882a593Smuzhiyun 		return 0;
2465*4882a593Smuzhiyun 
2466*4882a593Smuzhiyun 	if (dte_data->k <= 0)
2467*4882a593Smuzhiyun 		return -EINVAL;
2468*4882a593Smuzhiyun 
2469*4882a593Smuzhiyun 	dte_tables = kzalloc(sizeof(Smc_SIslands_DTE_Configuration), GFP_KERNEL);
2470*4882a593Smuzhiyun 	if (dte_tables == NULL) {
2471*4882a593Smuzhiyun 		si_pi->enable_dte = false;
2472*4882a593Smuzhiyun 		return -ENOMEM;
2473*4882a593Smuzhiyun 	}
2474*4882a593Smuzhiyun 
2475*4882a593Smuzhiyun 	table_size = dte_data->k;
2476*4882a593Smuzhiyun 
2477*4882a593Smuzhiyun 	if (table_size > SMC_SISLANDS_DTE_MAX_FILTER_STAGES)
2478*4882a593Smuzhiyun 		table_size = SMC_SISLANDS_DTE_MAX_FILTER_STAGES;
2479*4882a593Smuzhiyun 
2480*4882a593Smuzhiyun 	tdep_count = dte_data->tdep_count;
2481*4882a593Smuzhiyun 	if (tdep_count > SMC_SISLANDS_DTE_MAX_TEMPERATURE_DEPENDENT_ARRAY_SIZE)
2482*4882a593Smuzhiyun 		tdep_count = SMC_SISLANDS_DTE_MAX_TEMPERATURE_DEPENDENT_ARRAY_SIZE;
2483*4882a593Smuzhiyun 
2484*4882a593Smuzhiyun 	dte_tables->K = cpu_to_be32(table_size);
2485*4882a593Smuzhiyun 	dte_tables->T0 = cpu_to_be32(dte_data->t0);
2486*4882a593Smuzhiyun 	dte_tables->MaxT = cpu_to_be32(dte_data->max_t);
2487*4882a593Smuzhiyun 	dte_tables->WindowSize = dte_data->window_size;
2488*4882a593Smuzhiyun 	dte_tables->temp_select = dte_data->temp_select;
2489*4882a593Smuzhiyun 	dte_tables->DTE_mode = dte_data->dte_mode;
2490*4882a593Smuzhiyun 	dte_tables->Tthreshold = cpu_to_be32(dte_data->t_threshold);
2491*4882a593Smuzhiyun 
2492*4882a593Smuzhiyun 	if (tdep_count > 0)
2493*4882a593Smuzhiyun 		table_size--;
2494*4882a593Smuzhiyun 
2495*4882a593Smuzhiyun 	for (i = 0; i < table_size; i++) {
2496*4882a593Smuzhiyun 		dte_tables->tau[i] = cpu_to_be32(dte_data->tau[i]);
2497*4882a593Smuzhiyun 		dte_tables->R[i]   = cpu_to_be32(dte_data->r[i]);
2498*4882a593Smuzhiyun 	}
2499*4882a593Smuzhiyun 
2500*4882a593Smuzhiyun 	dte_tables->Tdep_count = tdep_count;
2501*4882a593Smuzhiyun 
2502*4882a593Smuzhiyun 	for (i = 0; i < (u32)tdep_count; i++) {
2503*4882a593Smuzhiyun 		dte_tables->T_limits[i] = dte_data->t_limits[i];
2504*4882a593Smuzhiyun 		dte_tables->Tdep_tau[i] = cpu_to_be32(dte_data->tdep_tau[i]);
2505*4882a593Smuzhiyun 		dte_tables->Tdep_R[i] = cpu_to_be32(dte_data->tdep_r[i]);
2506*4882a593Smuzhiyun 	}
2507*4882a593Smuzhiyun 
2508*4882a593Smuzhiyun 	ret = si_copy_bytes_to_smc(rdev, si_pi->dte_table_start, (u8 *)dte_tables,
2509*4882a593Smuzhiyun 				   sizeof(Smc_SIslands_DTE_Configuration), si_pi->sram_end);
2510*4882a593Smuzhiyun 	kfree(dte_tables);
2511*4882a593Smuzhiyun 
2512*4882a593Smuzhiyun 	return ret;
2513*4882a593Smuzhiyun }
2514*4882a593Smuzhiyun 
2515*4882a593Smuzhiyun static int si_get_cac_std_voltage_max_min(struct radeon_device *rdev,
2516*4882a593Smuzhiyun 					  u16 *max, u16 *min)
2517*4882a593Smuzhiyun {
2518*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2519*4882a593Smuzhiyun 	struct radeon_cac_leakage_table *table =
2520*4882a593Smuzhiyun 		&rdev->pm.dpm.dyn_state.cac_leakage_table;
2521*4882a593Smuzhiyun 	u32 i;
2522*4882a593Smuzhiyun 	u32 v0_loadline;
2523*4882a593Smuzhiyun 
2524*4882a593Smuzhiyun 
2525*4882a593Smuzhiyun 	if (table == NULL)
2526*4882a593Smuzhiyun 		return -EINVAL;
2527*4882a593Smuzhiyun 
2528*4882a593Smuzhiyun 	*max = 0;
2529*4882a593Smuzhiyun 	*min = 0xFFFF;
2530*4882a593Smuzhiyun 
2531*4882a593Smuzhiyun 	for (i = 0; i < table->count; i++) {
2532*4882a593Smuzhiyun 		if (table->entries[i].vddc > *max)
2533*4882a593Smuzhiyun 			*max = table->entries[i].vddc;
2534*4882a593Smuzhiyun 		if (table->entries[i].vddc < *min)
2535*4882a593Smuzhiyun 			*min = table->entries[i].vddc;
2536*4882a593Smuzhiyun 	}
2537*4882a593Smuzhiyun 
2538*4882a593Smuzhiyun 	if (si_pi->powertune_data->lkge_lut_v0_percent > 100)
2539*4882a593Smuzhiyun 		return -EINVAL;
2540*4882a593Smuzhiyun 
2541*4882a593Smuzhiyun 	v0_loadline = (*min) * (100 - si_pi->powertune_data->lkge_lut_v0_percent) / 100;
2542*4882a593Smuzhiyun 
2543*4882a593Smuzhiyun 	if (v0_loadline > 0xFFFFUL)
2544*4882a593Smuzhiyun 		return -EINVAL;
2545*4882a593Smuzhiyun 
2546*4882a593Smuzhiyun 	*min = (u16)v0_loadline;
2547*4882a593Smuzhiyun 
2548*4882a593Smuzhiyun 	if ((*min > *max) || (*max == 0) || (*min == 0))
2549*4882a593Smuzhiyun 		return -EINVAL;
2550*4882a593Smuzhiyun 
2551*4882a593Smuzhiyun 	return 0;
2552*4882a593Smuzhiyun }
2553*4882a593Smuzhiyun 
2554*4882a593Smuzhiyun static u16 si_get_cac_std_voltage_step(u16 max, u16 min)
2555*4882a593Smuzhiyun {
2556*4882a593Smuzhiyun 	return ((max - min) + (SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES - 1)) /
2557*4882a593Smuzhiyun 		SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
2558*4882a593Smuzhiyun }
2559*4882a593Smuzhiyun 
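/*
 * Populate the SMC leakage look-up table over a temperature x voltage grid
 * using the full voltage-and-temperature leakage model.  Voltage columns are
 * stored in ascending order (highest VDDC last) and entries are clamped to
 * 16 bits.
 */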
2560*4882a593Smuzhiyun static int si_init_dte_leakage_table(struct radeon_device *rdev,
2561*4882a593Smuzhiyun 				     PP_SIslands_CacConfig *cac_tables,
2562*4882a593Smuzhiyun 				     u16 vddc_max, u16 vddc_min, u16 vddc_step,
2563*4882a593Smuzhiyun 				     u16 t0, u16 t_step)
2564*4882a593Smuzhiyun {
2565*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2566*4882a593Smuzhiyun 	u32 leakage;
2567*4882a593Smuzhiyun 	unsigned int i, j;
2568*4882a593Smuzhiyun 	s32 t;
2569*4882a593Smuzhiyun 	u32 smc_leakage;
2570*4882a593Smuzhiyun 	u32 scaling_factor;
2571*4882a593Smuzhiyun 	u16 voltage;
2572*4882a593Smuzhiyun 
2573*4882a593Smuzhiyun 	scaling_factor = si_get_smc_power_scaling_factor(rdev);
2574*4882a593Smuzhiyun 
2575*4882a593Smuzhiyun 	for (i = 0; i < SMC_SISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES ; i++) {
2576*4882a593Smuzhiyun 		t = (1000 * (i * t_step + t0));
2577*4882a593Smuzhiyun 
2578*4882a593Smuzhiyun 		for (j = 0; j < SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
2579*4882a593Smuzhiyun 			voltage = vddc_max - (vddc_step * j);
2580*4882a593Smuzhiyun 
2581*4882a593Smuzhiyun 			si_calculate_leakage_for_v_and_t(rdev,
2582*4882a593Smuzhiyun 							 &si_pi->powertune_data->leakage_coefficients,
2583*4882a593Smuzhiyun 							 voltage,
2584*4882a593Smuzhiyun 							 t,
2585*4882a593Smuzhiyun 							 si_pi->dyn_powertune_data.cac_leakage,
2586*4882a593Smuzhiyun 							 &leakage);
2587*4882a593Smuzhiyun 
2588*4882a593Smuzhiyun 			smc_leakage = si_scale_power_for_smc(leakage, scaling_factor) / 4;
2589*4882a593Smuzhiyun 
2590*4882a593Smuzhiyun 			if (smc_leakage > 0xFFFF)
2591*4882a593Smuzhiyun 				smc_leakage = 0xFFFF;
2592*4882a593Smuzhiyun 
2593*4882a593Smuzhiyun 			cac_tables->cac_lkge_lut[i][SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES-1-j] =
2594*4882a593Smuzhiyun 				cpu_to_be16((u16)smc_leakage);
2595*4882a593Smuzhiyun 		}
2596*4882a593Smuzhiyun 	}
2597*4882a593Smuzhiyun 	return 0;
2598*4882a593Smuzhiyun }
2599*4882a593Smuzhiyun 
2600*4882a593Smuzhiyun static int si_init_simplified_leakage_table(struct radeon_device *rdev,
2601*4882a593Smuzhiyun 					    PP_SIslands_CacConfig *cac_tables,
2602*4882a593Smuzhiyun 					    u16 vddc_max, u16 vddc_min, u16 vddc_step)
2603*4882a593Smuzhiyun {
2604*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2605*4882a593Smuzhiyun 	u32 leakage;
2606*4882a593Smuzhiyun 	unsigned int i, j;
2607*4882a593Smuzhiyun 	u32 smc_leakage;
2608*4882a593Smuzhiyun 	u32 scaling_factor;
2609*4882a593Smuzhiyun 	u16 voltage;
2610*4882a593Smuzhiyun 
2611*4882a593Smuzhiyun 	scaling_factor = si_get_smc_power_scaling_factor(rdev);
2612*4882a593Smuzhiyun 
2613*4882a593Smuzhiyun 	for (j = 0; j < SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
2614*4882a593Smuzhiyun 		voltage = vddc_max - (vddc_step * j);
2615*4882a593Smuzhiyun 
2616*4882a593Smuzhiyun 		si_calculate_leakage_for_v(rdev,
2617*4882a593Smuzhiyun 					   &si_pi->powertune_data->leakage_coefficients,
2618*4882a593Smuzhiyun 					   si_pi->powertune_data->fixed_kt,
2619*4882a593Smuzhiyun 					   voltage,
2620*4882a593Smuzhiyun 					   si_pi->dyn_powertune_data.cac_leakage,
2621*4882a593Smuzhiyun 					   &leakage);
2622*4882a593Smuzhiyun 
2623*4882a593Smuzhiyun 		smc_leakage = si_scale_power_for_smc(leakage, scaling_factor) / 4;
2624*4882a593Smuzhiyun 
2625*4882a593Smuzhiyun 		if (smc_leakage > 0xFFFF)
2626*4882a593Smuzhiyun 			smc_leakage = 0xFFFF;
2627*4882a593Smuzhiyun 
2628*4882a593Smuzhiyun 		for (i = 0; i < SMC_SISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES ; i++)
2629*4882a593Smuzhiyun 			cac_tables->cac_lkge_lut[i][SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES-1-j] =
2630*4882a593Smuzhiyun 				cpu_to_be16((u16)smc_leakage);
2631*4882a593Smuzhiyun 	}
2632*4882a593Smuzhiyun 	return 0;
2633*4882a593Smuzhiyun }
2634*4882a593Smuzhiyun 
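/*
 * Build and upload the PP_SIslands_CacConfig block: program the CAC window,
 * derive the leakage LUT voltage/temperature ranges, fill the LUT via either
 * the temperature-dependent or the simplified model, and copy the result to
 * SMC RAM.  On failure CAC and power containment are disabled instead of
 * returning an error.
 */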
2635*4882a593Smuzhiyun static int si_initialize_smc_cac_tables(struct radeon_device *rdev)
2636*4882a593Smuzhiyun {
2637*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2638*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2639*4882a593Smuzhiyun 	PP_SIslands_CacConfig *cac_tables = NULL;
2640*4882a593Smuzhiyun 	u16 vddc_max, vddc_min, vddc_step;
2641*4882a593Smuzhiyun 	u16 t0, t_step;
2642*4882a593Smuzhiyun 	u32 load_line_slope, reg;
2643*4882a593Smuzhiyun 	int ret = 0;
2644*4882a593Smuzhiyun 	u32 ticks_per_us = radeon_get_xclk(rdev) / 100;
2645*4882a593Smuzhiyun 
2646*4882a593Smuzhiyun 	if (ni_pi->enable_cac == false)
2647*4882a593Smuzhiyun 		return 0;
2648*4882a593Smuzhiyun 
2649*4882a593Smuzhiyun 	cac_tables = kzalloc(sizeof(PP_SIslands_CacConfig), GFP_KERNEL);
2650*4882a593Smuzhiyun 	if (!cac_tables)
2651*4882a593Smuzhiyun 		return -ENOMEM;
2652*4882a593Smuzhiyun 
2653*4882a593Smuzhiyun 	reg = RREG32(CG_CAC_CTRL) & ~CAC_WINDOW_MASK;
2654*4882a593Smuzhiyun 	reg |= CAC_WINDOW(si_pi->powertune_data->cac_window);
2655*4882a593Smuzhiyun 	WREG32(CG_CAC_CTRL, reg);
2656*4882a593Smuzhiyun 
2657*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.cac_leakage = rdev->pm.dpm.cac_leakage;
2658*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.dc_pwr_value =
2659*4882a593Smuzhiyun 		si_pi->powertune_data->dc_cac[NISLANDS_DCCAC_LEVEL_0];
2660*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.wintime = si_calculate_cac_wintime(rdev);
2661*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.shift_n = si_pi->powertune_data->shift_n_default;
2662*4882a593Smuzhiyun 
2663*4882a593Smuzhiyun 	si_pi->dyn_powertune_data.leakage_minimum_temperature = 80 * 1000;
2664*4882a593Smuzhiyun 
2665*4882a593Smuzhiyun 	ret = si_get_cac_std_voltage_max_min(rdev, &vddc_max, &vddc_min);
2666*4882a593Smuzhiyun 	if (ret)
2667*4882a593Smuzhiyun 		goto done_free;
2668*4882a593Smuzhiyun 
2669*4882a593Smuzhiyun 	vddc_step = si_get_cac_std_voltage_step(vddc_max, vddc_min);
2670*4882a593Smuzhiyun 	vddc_min = vddc_max - (vddc_step * (SMC_SISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES - 1));
2671*4882a593Smuzhiyun 	t_step = 4;
2672*4882a593Smuzhiyun 	t0 = 60;
2673*4882a593Smuzhiyun 
2674*4882a593Smuzhiyun 	if (si_pi->enable_dte || ni_pi->driver_calculate_cac_leakage)
2675*4882a593Smuzhiyun 		ret = si_init_dte_leakage_table(rdev, cac_tables,
2676*4882a593Smuzhiyun 						vddc_max, vddc_min, vddc_step,
2677*4882a593Smuzhiyun 						t0, t_step);
2678*4882a593Smuzhiyun 	else
2679*4882a593Smuzhiyun 		ret = si_init_simplified_leakage_table(rdev, cac_tables,
2680*4882a593Smuzhiyun 						       vddc_max, vddc_min, vddc_step);
2681*4882a593Smuzhiyun 	if (ret)
2682*4882a593Smuzhiyun 		goto done_free;
2683*4882a593Smuzhiyun 
2684*4882a593Smuzhiyun 	load_line_slope = ((u32)rdev->pm.dpm.load_line_slope << SMC_SISLANDS_SCALE_R) / 100;
2685*4882a593Smuzhiyun 
2686*4882a593Smuzhiyun 	cac_tables->l2numWin_TDP = cpu_to_be32(si_pi->dyn_powertune_data.l2_lta_window_size);
2687*4882a593Smuzhiyun 	cac_tables->lts_truncate_n = si_pi->dyn_powertune_data.lts_truncate;
2688*4882a593Smuzhiyun 	cac_tables->SHIFT_N = si_pi->dyn_powertune_data.shift_n;
2689*4882a593Smuzhiyun 	cac_tables->lkge_lut_V0 = cpu_to_be32((u32)vddc_min);
2690*4882a593Smuzhiyun 	cac_tables->lkge_lut_Vstep = cpu_to_be32((u32)vddc_step);
2691*4882a593Smuzhiyun 	cac_tables->R_LL = cpu_to_be32(load_line_slope);
2692*4882a593Smuzhiyun 	cac_tables->WinTime = cpu_to_be32(si_pi->dyn_powertune_data.wintime);
2693*4882a593Smuzhiyun 	cac_tables->calculation_repeats = cpu_to_be32(2);
2694*4882a593Smuzhiyun 	cac_tables->dc_cac = cpu_to_be32(0);
2695*4882a593Smuzhiyun 	cac_tables->log2_PG_LKG_SCALE = 12;
2696*4882a593Smuzhiyun 	cac_tables->cac_temp = si_pi->powertune_data->operating_temp;
2697*4882a593Smuzhiyun 	cac_tables->lkge_lut_T0 = cpu_to_be32((u32)t0);
2698*4882a593Smuzhiyun 	cac_tables->lkge_lut_Tstep = cpu_to_be32((u32)t_step);
2699*4882a593Smuzhiyun 
2700*4882a593Smuzhiyun 	ret = si_copy_bytes_to_smc(rdev, si_pi->cac_table_start, (u8 *)cac_tables,
2701*4882a593Smuzhiyun 				   sizeof(PP_SIslands_CacConfig), si_pi->sram_end);
2702*4882a593Smuzhiyun 
2703*4882a593Smuzhiyun 	if (ret)
2704*4882a593Smuzhiyun 		goto done_free;
2705*4882a593Smuzhiyun 
2706*4882a593Smuzhiyun 	ret = si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_ticks_per_us, ticks_per_us);
2707*4882a593Smuzhiyun 
2708*4882a593Smuzhiyun done_free:
2709*4882a593Smuzhiyun 	if (ret) {
2710*4882a593Smuzhiyun 		ni_pi->enable_cac = false;
2711*4882a593Smuzhiyun 		ni_pi->enable_power_containment = false;
2712*4882a593Smuzhiyun 	}
2713*4882a593Smuzhiyun 
2714*4882a593Smuzhiyun 	kfree(cac_tables);
2715*4882a593Smuzhiyun 
2716*4882a593Smuzhiyun 	return 0;
2717*4882a593Smuzhiyun }
2718*4882a593Smuzhiyun 
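/*
 * Walk a 0xFFFFFFFF-terminated list of read-modify-write register updates,
 * using the SMC indirect space for SISLANDS_CACCONFIG_CGIND entries and
 * regular MMIO (offset in dwords) otherwise.
 */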
2719*4882a593Smuzhiyun static int si_program_cac_config_registers(struct radeon_device *rdev,
2720*4882a593Smuzhiyun 					   const struct si_cac_config_reg *cac_config_regs)
2721*4882a593Smuzhiyun {
2722*4882a593Smuzhiyun 	const struct si_cac_config_reg *config_regs = cac_config_regs;
2723*4882a593Smuzhiyun 	u32 data = 0, offset;
2724*4882a593Smuzhiyun 
2725*4882a593Smuzhiyun 	if (!config_regs)
2726*4882a593Smuzhiyun 		return -EINVAL;
2727*4882a593Smuzhiyun 
2728*4882a593Smuzhiyun 	while (config_regs->offset != 0xFFFFFFFF) {
2729*4882a593Smuzhiyun 		switch (config_regs->type) {
2730*4882a593Smuzhiyun 		case SISLANDS_CACCONFIG_CGIND:
2731*4882a593Smuzhiyun 			offset = SMC_CG_IND_START + config_regs->offset;
2732*4882a593Smuzhiyun 			if (offset < SMC_CG_IND_END)
2733*4882a593Smuzhiyun 				data = RREG32_SMC(offset);
2734*4882a593Smuzhiyun 			break;
2735*4882a593Smuzhiyun 		default:
2736*4882a593Smuzhiyun 			data = RREG32(config_regs->offset << 2);
2737*4882a593Smuzhiyun 			break;
2738*4882a593Smuzhiyun 		}
2739*4882a593Smuzhiyun 
2740*4882a593Smuzhiyun 		data &= ~config_regs->mask;
2741*4882a593Smuzhiyun 		data |= ((config_regs->value << config_regs->shift) & config_regs->mask);
2742*4882a593Smuzhiyun 
2743*4882a593Smuzhiyun 		switch (config_regs->type) {
2744*4882a593Smuzhiyun 		case SISLANDS_CACCONFIG_CGIND:
2745*4882a593Smuzhiyun 			offset = SMC_CG_IND_START + config_regs->offset;
2746*4882a593Smuzhiyun 			if (offset < SMC_CG_IND_END)
2747*4882a593Smuzhiyun 				WREG32_SMC(offset, data);
2748*4882a593Smuzhiyun 			break;
2749*4882a593Smuzhiyun 		default:
2750*4882a593Smuzhiyun 			WREG32(config_regs->offset << 2, data);
2751*4882a593Smuzhiyun 			break;
2752*4882a593Smuzhiyun 		}
2753*4882a593Smuzhiyun 		config_regs++;
2754*4882a593Smuzhiyun 	}
2755*4882a593Smuzhiyun 	return 0;
2756*4882a593Smuzhiyun }
2757*4882a593Smuzhiyun 
2758*4882a593Smuzhiyun static int si_initialize_hardware_cac_manager(struct radeon_device *rdev)
2759*4882a593Smuzhiyun {
2760*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2761*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2762*4882a593Smuzhiyun 	int ret;
2763*4882a593Smuzhiyun 
2764*4882a593Smuzhiyun 	if ((ni_pi->enable_cac == false) ||
2765*4882a593Smuzhiyun 	    (ni_pi->cac_configuration_required == false))
2766*4882a593Smuzhiyun 		return 0;
2767*4882a593Smuzhiyun 
2768*4882a593Smuzhiyun 	ret = si_program_cac_config_registers(rdev, si_pi->lcac_config);
2769*4882a593Smuzhiyun 	if (ret)
2770*4882a593Smuzhiyun 		return ret;
2771*4882a593Smuzhiyun 	ret = si_program_cac_config_registers(rdev, si_pi->cac_override);
2772*4882a593Smuzhiyun 	if (ret)
2773*4882a593Smuzhiyun 		return ret;
2774*4882a593Smuzhiyun 	ret = si_program_cac_config_registers(rdev, si_pi->cac_weights);
2775*4882a593Smuzhiyun 	if (ret)
2776*4882a593Smuzhiyun 		return ret;
2777*4882a593Smuzhiyun 
2778*4882a593Smuzhiyun 	return 0;
2779*4882a593Smuzhiyun }
2780*4882a593Smuzhiyun 
2781*4882a593Smuzhiyun static int si_enable_smc_cac(struct radeon_device *rdev,
2782*4882a593Smuzhiyun 			     struct radeon_ps *radeon_new_state,
2783*4882a593Smuzhiyun 			     bool enable)
2784*4882a593Smuzhiyun {
2785*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2786*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2787*4882a593Smuzhiyun 	PPSMC_Result smc_result;
2788*4882a593Smuzhiyun 	int ret = 0;
2789*4882a593Smuzhiyun 
2790*4882a593Smuzhiyun 	if (ni_pi->enable_cac) {
2791*4882a593Smuzhiyun 		if (enable) {
2792*4882a593Smuzhiyun 			if (!si_should_disable_uvd_powertune(rdev, radeon_new_state)) {
2793*4882a593Smuzhiyun 				if (ni_pi->support_cac_long_term_average) {
2794*4882a593Smuzhiyun 					smc_result = si_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
2795*4882a593Smuzhiyun 					if (smc_result != PPSMC_Result_OK)
2796*4882a593Smuzhiyun 						ni_pi->support_cac_long_term_average = false;
2797*4882a593Smuzhiyun 				}
2798*4882a593Smuzhiyun 
2799*4882a593Smuzhiyun 				smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
2800*4882a593Smuzhiyun 				if (smc_result != PPSMC_Result_OK) {
2801*4882a593Smuzhiyun 					ret = -EINVAL;
2802*4882a593Smuzhiyun 					ni_pi->cac_enabled = false;
2803*4882a593Smuzhiyun 				} else {
2804*4882a593Smuzhiyun 					ni_pi->cac_enabled = true;
2805*4882a593Smuzhiyun 				}
2806*4882a593Smuzhiyun 
2807*4882a593Smuzhiyun 				if (si_pi->enable_dte) {
2808*4882a593Smuzhiyun 					smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableDTE);
2809*4882a593Smuzhiyun 					if (smc_result != PPSMC_Result_OK)
2810*4882a593Smuzhiyun 						ret = -EINVAL;
2811*4882a593Smuzhiyun 				}
2812*4882a593Smuzhiyun 			}
2813*4882a593Smuzhiyun 		} else if (ni_pi->cac_enabled) {
2814*4882a593Smuzhiyun 			if (si_pi->enable_dte)
2815*4882a593Smuzhiyun 				smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_DisableDTE);
2816*4882a593Smuzhiyun 
2817*4882a593Smuzhiyun 			smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
2818*4882a593Smuzhiyun 
2819*4882a593Smuzhiyun 			ni_pi->cac_enabled = false;
2820*4882a593Smuzhiyun 
2821*4882a593Smuzhiyun 			if (ni_pi->support_cac_long_term_average)
2822*4882a593Smuzhiyun 				smc_result = si_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
2823*4882a593Smuzhiyun 		}
2824*4882a593Smuzhiyun 	}
2825*4882a593Smuzhiyun 	return ret;
2826*4882a593Smuzhiyun }
2827*4882a593Smuzhiyun 
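/*
 * Descriptive note (added): build the 256-entry SPLL divider/spread-spectrum
 * table and upload it to SMC SRAM. The engine clock is stepped by 512 per
 * entry (presumably in the driver's usual 10 kHz units); any entry whose
 * dividers do not fit the packed table fields aborts the whole upload.
 */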
2828*4882a593Smuzhiyun static int si_init_smc_spll_table(struct radeon_device *rdev)
2829*4882a593Smuzhiyun {
2830*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2831*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2832*4882a593Smuzhiyun 	SMC_SISLANDS_SPLL_DIV_TABLE *spll_table;
2833*4882a593Smuzhiyun 	SISLANDS_SMC_SCLK_VALUE sclk_params;
2834*4882a593Smuzhiyun 	u32 fb_div, p_div;
2835*4882a593Smuzhiyun 	u32 clk_s, clk_v;
2836*4882a593Smuzhiyun 	u32 sclk = 0;
2837*4882a593Smuzhiyun 	int ret = 0;
2838*4882a593Smuzhiyun 	u32 tmp;
2839*4882a593Smuzhiyun 	int i;
2840*4882a593Smuzhiyun 
2841*4882a593Smuzhiyun 	if (si_pi->spll_table_start == 0)
2842*4882a593Smuzhiyun 		return -EINVAL;
2843*4882a593Smuzhiyun 
2844*4882a593Smuzhiyun 	spll_table = kzalloc(sizeof(SMC_SISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2845*4882a593Smuzhiyun 	if (spll_table == NULL)
2846*4882a593Smuzhiyun 		return -ENOMEM;
2847*4882a593Smuzhiyun 
2848*4882a593Smuzhiyun 	for (i = 0; i < 256; i++) {
2849*4882a593Smuzhiyun 		ret = si_calculate_sclk_params(rdev, sclk, &sclk_params);
2850*4882a593Smuzhiyun 		if (ret)
2851*4882a593Smuzhiyun 			break;
2852*4882a593Smuzhiyun 
2853*4882a593Smuzhiyun 		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2854*4882a593Smuzhiyun 		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2855*4882a593Smuzhiyun 		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2856*4882a593Smuzhiyun 		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2857*4882a593Smuzhiyun 
2858*4882a593Smuzhiyun 		fb_div &= ~0x00001FFF;
2859*4882a593Smuzhiyun 		fb_div >>= 1;
2860*4882a593Smuzhiyun 		clk_v >>= 6;
2861*4882a593Smuzhiyun 
2862*4882a593Smuzhiyun 		if (p_div & ~(SMC_SISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_SISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2863*4882a593Smuzhiyun 			ret = -EINVAL;
2864*4882a593Smuzhiyun 		if (fb_div & ~(SMC_SISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_SISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
2865*4882a593Smuzhiyun 			ret = -EINVAL;
2866*4882a593Smuzhiyun 		if (clk_s & ~(SMC_SISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_SISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2867*4882a593Smuzhiyun 			ret = -EINVAL;
2868*4882a593Smuzhiyun 		if (clk_v & ~(SMC_SISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_SISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2869*4882a593Smuzhiyun 			ret = -EINVAL;
2870*4882a593Smuzhiyun 
2871*4882a593Smuzhiyun 		if (ret)
2872*4882a593Smuzhiyun 			break;
2873*4882a593Smuzhiyun 
2874*4882a593Smuzhiyun 		tmp = ((fb_div << SMC_SISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_SISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2875*4882a593Smuzhiyun 			((p_div << SMC_SISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_SISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2876*4882a593Smuzhiyun 		spll_table->freq[i] = cpu_to_be32(tmp);
2877*4882a593Smuzhiyun 
2878*4882a593Smuzhiyun 		tmp = ((clk_v << SMC_SISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_SISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2879*4882a593Smuzhiyun 			((clk_s << SMC_SISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_SISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2880*4882a593Smuzhiyun 		spll_table->ss[i] = cpu_to_be32(tmp);
2881*4882a593Smuzhiyun 
2882*4882a593Smuzhiyun 		sclk += 512;
2883*4882a593Smuzhiyun 	}
2884*4882a593Smuzhiyun 
2885*4882a593Smuzhiyun 
2886*4882a593Smuzhiyun 	if (!ret)
2887*4882a593Smuzhiyun 		ret = si_copy_bytes_to_smc(rdev, si_pi->spll_table_start,
2888*4882a593Smuzhiyun 					   (u8 *)spll_table, sizeof(SMC_SISLANDS_SPLL_DIV_TABLE),
2889*4882a593Smuzhiyun 					   si_pi->sram_end);
2890*4882a593Smuzhiyun 
2891*4882a593Smuzhiyun 	if (ret)
2892*4882a593Smuzhiyun 		ni_pi->enable_power_containment = false;
2893*4882a593Smuzhiyun 
2894*4882a593Smuzhiyun 	kfree(spll_table);
2895*4882a593Smuzhiyun 
2896*4882a593Smuzhiyun 	return ret;
2897*4882a593Smuzhiyun }
2898*4882a593Smuzhiyun 
2899*4882a593Smuzhiyun static u16 si_get_lower_of_leakage_and_vce_voltage(struct radeon_device *rdev,
2900*4882a593Smuzhiyun 						   u16 vce_voltage)
2901*4882a593Smuzhiyun {
2902*4882a593Smuzhiyun 	u16 highest_leakage = 0;
2903*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
2904*4882a593Smuzhiyun 	int i;
2905*4882a593Smuzhiyun 
2906*4882a593Smuzhiyun 	for (i = 0; i < si_pi->leakage_voltage.count; i++){
2907*4882a593Smuzhiyun 		if (highest_leakage < si_pi->leakage_voltage.entries[i].voltage)
2908*4882a593Smuzhiyun 			highest_leakage = si_pi->leakage_voltage.entries[i].voltage;
2909*4882a593Smuzhiyun 	}
2910*4882a593Smuzhiyun 
2911*4882a593Smuzhiyun 	if (si_pi->leakage_voltage.count && (highest_leakage < vce_voltage))
2912*4882a593Smuzhiyun 		return highest_leakage;
2913*4882a593Smuzhiyun 
2914*4882a593Smuzhiyun 	return vce_voltage;
2915*4882a593Smuzhiyun }
2916*4882a593Smuzhiyun 
2917*4882a593Smuzhiyun static int si_get_vce_clock_voltage(struct radeon_device *rdev,
2918*4882a593Smuzhiyun 				    u32 evclk, u32 ecclk, u16 *voltage)
2919*4882a593Smuzhiyun {
2920*4882a593Smuzhiyun 	u32 i;
2921*4882a593Smuzhiyun 	int ret = -EINVAL;
2922*4882a593Smuzhiyun 	struct radeon_vce_clock_voltage_dependency_table *table =
2923*4882a593Smuzhiyun 		&rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table;
2924*4882a593Smuzhiyun 
2925*4882a593Smuzhiyun 	if (((evclk == 0) && (ecclk == 0)) ||
2926*4882a593Smuzhiyun 	    (table && (table->count == 0))) {
2927*4882a593Smuzhiyun 		*voltage = 0;
2928*4882a593Smuzhiyun 		return 0;
2929*4882a593Smuzhiyun 	}
2930*4882a593Smuzhiyun 
2931*4882a593Smuzhiyun 	for (i = 0; i < table->count; i++) {
2932*4882a593Smuzhiyun 		if ((evclk <= table->entries[i].evclk) &&
2933*4882a593Smuzhiyun 		    (ecclk <= table->entries[i].ecclk)) {
2934*4882a593Smuzhiyun 			*voltage = table->entries[i].v;
2935*4882a593Smuzhiyun 			ret = 0;
2936*4882a593Smuzhiyun 			break;
2937*4882a593Smuzhiyun 		}
2938*4882a593Smuzhiyun 	}
2939*4882a593Smuzhiyun 
2940*4882a593Smuzhiyun 	/* if no match return the highest voltage */
2941*4882a593Smuzhiyun 	if (ret)
2942*4882a593Smuzhiyun 		*voltage = table->entries[table->count - 1].v;
2943*4882a593Smuzhiyun 
2944*4882a593Smuzhiyun 	*voltage = si_get_lower_of_leakage_and_vce_voltage(rdev, *voltage);
2945*4882a593Smuzhiyun 
2946*4882a593Smuzhiyun 	return ret;
2947*4882a593Smuzhiyun }
2948*4882a593Smuzhiyun 
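/*
 * Descriptive note (added): clamp the requested power state before use:
 * apply per-SKU sclk/mclk caps, the AC/DC board limits and the voltage
 * dependency tables, and flatten the performance levels when mclk/sclk
 * switching has to be disabled (multi-head, UVD/VCE activity, short vblank).
 */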
2949*4882a593Smuzhiyun static void si_apply_state_adjust_rules(struct radeon_device *rdev,
2950*4882a593Smuzhiyun 					struct radeon_ps *rps)
2951*4882a593Smuzhiyun {
2952*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
2953*4882a593Smuzhiyun 	struct radeon_clock_and_voltage_limits *max_limits;
2954*4882a593Smuzhiyun 	bool disable_mclk_switching = false;
2955*4882a593Smuzhiyun 	bool disable_sclk_switching = false;
2956*4882a593Smuzhiyun 	u32 mclk, sclk;
2957*4882a593Smuzhiyun 	u16 vddc, vddci, min_vce_voltage = 0;
2958*4882a593Smuzhiyun 	u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc;
2959*4882a593Smuzhiyun 	u32 max_sclk = 0, max_mclk = 0;
2960*4882a593Smuzhiyun 	int i;
2961*4882a593Smuzhiyun 
2962*4882a593Smuzhiyun 	if (rdev->family == CHIP_HAINAN) {
2963*4882a593Smuzhiyun 		if ((rdev->pdev->revision == 0x81) ||
2964*4882a593Smuzhiyun 		    (rdev->pdev->revision == 0xC3) ||
2965*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6664) ||
2966*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6665) ||
2967*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6667)) {
2968*4882a593Smuzhiyun 			max_sclk = 75000;
2969*4882a593Smuzhiyun 		}
2970*4882a593Smuzhiyun 		if ((rdev->pdev->revision == 0xC3) ||
2971*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6665)) {
2972*4882a593Smuzhiyun 			max_sclk = 60000;
2973*4882a593Smuzhiyun 			max_mclk = 80000;
2974*4882a593Smuzhiyun 		}
2975*4882a593Smuzhiyun 	} else if (rdev->family == CHIP_OLAND) {
2976*4882a593Smuzhiyun 		if ((rdev->pdev->revision == 0xC7) ||
2977*4882a593Smuzhiyun 		    (rdev->pdev->revision == 0x80) ||
2978*4882a593Smuzhiyun 		    (rdev->pdev->revision == 0x81) ||
2979*4882a593Smuzhiyun 		    (rdev->pdev->revision == 0x83) ||
2980*4882a593Smuzhiyun 		    (rdev->pdev->revision == 0x87) ||
2981*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6604) ||
2982*4882a593Smuzhiyun 		    (rdev->pdev->device == 0x6605)) {
2983*4882a593Smuzhiyun 			max_sclk = 75000;
2984*4882a593Smuzhiyun 		}
2985*4882a593Smuzhiyun 
2986*4882a593Smuzhiyun 		if (rdev->pm.dpm.high_pixelclock_count > 1)
2987*4882a593Smuzhiyun 			disable_sclk_switching = true;
2988*4882a593Smuzhiyun 	}
2989*4882a593Smuzhiyun 
2990*4882a593Smuzhiyun 	if (rps->vce_active) {
2991*4882a593Smuzhiyun 		rps->evclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].evclk;
2992*4882a593Smuzhiyun 		rps->ecclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].ecclk;
2993*4882a593Smuzhiyun 		si_get_vce_clock_voltage(rdev, rps->evclk, rps->ecclk,
2994*4882a593Smuzhiyun 					 &min_vce_voltage);
2995*4882a593Smuzhiyun 	} else {
2996*4882a593Smuzhiyun 		rps->evclk = 0;
2997*4882a593Smuzhiyun 		rps->ecclk = 0;
2998*4882a593Smuzhiyun 	}
2999*4882a593Smuzhiyun 
3000*4882a593Smuzhiyun 	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
3001*4882a593Smuzhiyun 	    ni_dpm_vblank_too_short(rdev))
3002*4882a593Smuzhiyun 		disable_mclk_switching = true;
3003*4882a593Smuzhiyun 
3004*4882a593Smuzhiyun 	if (rps->vclk || rps->dclk) {
3005*4882a593Smuzhiyun 		disable_mclk_switching = true;
3006*4882a593Smuzhiyun 		disable_sclk_switching = true;
3007*4882a593Smuzhiyun 	}
3008*4882a593Smuzhiyun 
3009*4882a593Smuzhiyun 	if (rdev->pm.dpm.ac_power)
3010*4882a593Smuzhiyun 		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
3011*4882a593Smuzhiyun 	else
3012*4882a593Smuzhiyun 		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
3013*4882a593Smuzhiyun 
3014*4882a593Smuzhiyun 	for (i = ps->performance_level_count - 2; i >= 0; i--) {
3015*4882a593Smuzhiyun 		if (ps->performance_levels[i].vddc > ps->performance_levels[i+1].vddc)
3016*4882a593Smuzhiyun 			ps->performance_levels[i].vddc = ps->performance_levels[i+1].vddc;
3017*4882a593Smuzhiyun 	}
3018*4882a593Smuzhiyun 	if (rdev->pm.dpm.ac_power == false) {
3019*4882a593Smuzhiyun 		for (i = 0; i < ps->performance_level_count; i++) {
3020*4882a593Smuzhiyun 			if (ps->performance_levels[i].mclk > max_limits->mclk)
3021*4882a593Smuzhiyun 				ps->performance_levels[i].mclk = max_limits->mclk;
3022*4882a593Smuzhiyun 			if (ps->performance_levels[i].sclk > max_limits->sclk)
3023*4882a593Smuzhiyun 				ps->performance_levels[i].sclk = max_limits->sclk;
3024*4882a593Smuzhiyun 			if (ps->performance_levels[i].vddc > max_limits->vddc)
3025*4882a593Smuzhiyun 				ps->performance_levels[i].vddc = max_limits->vddc;
3026*4882a593Smuzhiyun 			if (ps->performance_levels[i].vddci > max_limits->vddci)
3027*4882a593Smuzhiyun 				ps->performance_levels[i].vddci = max_limits->vddci;
3028*4882a593Smuzhiyun 		}
3029*4882a593Smuzhiyun 	}
3030*4882a593Smuzhiyun 
3031*4882a593Smuzhiyun 	/* limit clocks to max supported clocks based on voltage dependency tables */
3032*4882a593Smuzhiyun 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
3033*4882a593Smuzhiyun 							&max_sclk_vddc);
3034*4882a593Smuzhiyun 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
3035*4882a593Smuzhiyun 							&max_mclk_vddci);
3036*4882a593Smuzhiyun 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
3037*4882a593Smuzhiyun 							&max_mclk_vddc);
3038*4882a593Smuzhiyun 
3039*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++) {
3040*4882a593Smuzhiyun 		if (max_sclk_vddc) {
3041*4882a593Smuzhiyun 			if (ps->performance_levels[i].sclk > max_sclk_vddc)
3042*4882a593Smuzhiyun 				ps->performance_levels[i].sclk = max_sclk_vddc;
3043*4882a593Smuzhiyun 		}
3044*4882a593Smuzhiyun 		if (max_mclk_vddci) {
3045*4882a593Smuzhiyun 			if (ps->performance_levels[i].mclk > max_mclk_vddci)
3046*4882a593Smuzhiyun 				ps->performance_levels[i].mclk = max_mclk_vddci;
3047*4882a593Smuzhiyun 		}
3048*4882a593Smuzhiyun 		if (max_mclk_vddc) {
3049*4882a593Smuzhiyun 			if (ps->performance_levels[i].mclk > max_mclk_vddc)
3050*4882a593Smuzhiyun 				ps->performance_levels[i].mclk = max_mclk_vddc;
3051*4882a593Smuzhiyun 		}
3052*4882a593Smuzhiyun 		if (max_mclk) {
3053*4882a593Smuzhiyun 			if (ps->performance_levels[i].mclk > max_mclk)
3054*4882a593Smuzhiyun 				ps->performance_levels[i].mclk = max_mclk;
3055*4882a593Smuzhiyun 		}
3056*4882a593Smuzhiyun 		if (max_sclk) {
3057*4882a593Smuzhiyun 			if (ps->performance_levels[i].sclk > max_sclk)
3058*4882a593Smuzhiyun 				ps->performance_levels[i].sclk = max_sclk;
3059*4882a593Smuzhiyun 		}
3060*4882a593Smuzhiyun 	}
3061*4882a593Smuzhiyun 
3062*4882a593Smuzhiyun 	/* XXX validate the min clocks required for display */
3063*4882a593Smuzhiyun 
3064*4882a593Smuzhiyun 	if (disable_mclk_switching) {
3065*4882a593Smuzhiyun 		mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
3066*4882a593Smuzhiyun 		vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
3067*4882a593Smuzhiyun 	} else {
3068*4882a593Smuzhiyun 		mclk = ps->performance_levels[0].mclk;
3069*4882a593Smuzhiyun 		vddci = ps->performance_levels[0].vddci;
3070*4882a593Smuzhiyun 	}
3071*4882a593Smuzhiyun 
3072*4882a593Smuzhiyun 	if (disable_sclk_switching) {
3073*4882a593Smuzhiyun 		sclk = ps->performance_levels[ps->performance_level_count - 1].sclk;
3074*4882a593Smuzhiyun 		vddc = ps->performance_levels[ps->performance_level_count - 1].vddc;
3075*4882a593Smuzhiyun 	} else {
3076*4882a593Smuzhiyun 		sclk = ps->performance_levels[0].sclk;
3077*4882a593Smuzhiyun 		vddc = ps->performance_levels[0].vddc;
3078*4882a593Smuzhiyun 	}
3079*4882a593Smuzhiyun 
3080*4882a593Smuzhiyun 	if (rps->vce_active) {
3081*4882a593Smuzhiyun 		if (sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk)
3082*4882a593Smuzhiyun 			sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk;
3083*4882a593Smuzhiyun 		if (mclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk)
3084*4882a593Smuzhiyun 			mclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk;
3085*4882a593Smuzhiyun 	}
3086*4882a593Smuzhiyun 
3087*4882a593Smuzhiyun 	/* adjusted low state */
3088*4882a593Smuzhiyun 	ps->performance_levels[0].sclk = sclk;
3089*4882a593Smuzhiyun 	ps->performance_levels[0].mclk = mclk;
3090*4882a593Smuzhiyun 	ps->performance_levels[0].vddc = vddc;
3091*4882a593Smuzhiyun 	ps->performance_levels[0].vddci = vddci;
3092*4882a593Smuzhiyun 
3093*4882a593Smuzhiyun 	if (disable_sclk_switching) {
3094*4882a593Smuzhiyun 		sclk = ps->performance_levels[0].sclk;
3095*4882a593Smuzhiyun 		for (i = 1; i < ps->performance_level_count; i++) {
3096*4882a593Smuzhiyun 			if (sclk < ps->performance_levels[i].sclk)
3097*4882a593Smuzhiyun 				sclk = ps->performance_levels[i].sclk;
3098*4882a593Smuzhiyun 		}
3099*4882a593Smuzhiyun 		for (i = 0; i < ps->performance_level_count; i++) {
3100*4882a593Smuzhiyun 			ps->performance_levels[i].sclk = sclk;
3101*4882a593Smuzhiyun 			ps->performance_levels[i].vddc = vddc;
3102*4882a593Smuzhiyun 		}
3103*4882a593Smuzhiyun 	} else {
3104*4882a593Smuzhiyun 		for (i = 1; i < ps->performance_level_count; i++) {
3105*4882a593Smuzhiyun 			if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
3106*4882a593Smuzhiyun 				ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
3107*4882a593Smuzhiyun 			if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
3108*4882a593Smuzhiyun 				ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
3109*4882a593Smuzhiyun 		}
3110*4882a593Smuzhiyun 	}
3111*4882a593Smuzhiyun 
3112*4882a593Smuzhiyun 	if (disable_mclk_switching) {
3113*4882a593Smuzhiyun 		mclk = ps->performance_levels[0].mclk;
3114*4882a593Smuzhiyun 		for (i = 1; i < ps->performance_level_count; i++) {
3115*4882a593Smuzhiyun 			if (mclk < ps->performance_levels[i].mclk)
3116*4882a593Smuzhiyun 				mclk = ps->performance_levels[i].mclk;
3117*4882a593Smuzhiyun 		}
3118*4882a593Smuzhiyun 		for (i = 0; i < ps->performance_level_count; i++) {
3119*4882a593Smuzhiyun 			ps->performance_levels[i].mclk = mclk;
3120*4882a593Smuzhiyun 			ps->performance_levels[i].vddci = vddci;
3121*4882a593Smuzhiyun 		}
3122*4882a593Smuzhiyun 	} else {
3123*4882a593Smuzhiyun 		for (i = 1; i < ps->performance_level_count; i++) {
3124*4882a593Smuzhiyun 			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
3125*4882a593Smuzhiyun 				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
3126*4882a593Smuzhiyun 			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
3127*4882a593Smuzhiyun 				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
3128*4882a593Smuzhiyun 		}
3129*4882a593Smuzhiyun 	}
3130*4882a593Smuzhiyun 
3131*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++)
3132*4882a593Smuzhiyun 		btc_adjust_clock_combinations(rdev, max_limits,
3133*4882a593Smuzhiyun 					      &ps->performance_levels[i]);
3134*4882a593Smuzhiyun 
3135*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++) {
3136*4882a593Smuzhiyun 		if (ps->performance_levels[i].vddc < min_vce_voltage)
3137*4882a593Smuzhiyun 			ps->performance_levels[i].vddc = min_vce_voltage;
3138*4882a593Smuzhiyun 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
3139*4882a593Smuzhiyun 						   ps->performance_levels[i].sclk,
3140*4882a593Smuzhiyun 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
3141*4882a593Smuzhiyun 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
3142*4882a593Smuzhiyun 						   ps->performance_levels[i].mclk,
3143*4882a593Smuzhiyun 						   max_limits->vddci, &ps->performance_levels[i].vddci);
3144*4882a593Smuzhiyun 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
3145*4882a593Smuzhiyun 						   ps->performance_levels[i].mclk,
3146*4882a593Smuzhiyun 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
3147*4882a593Smuzhiyun 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
3148*4882a593Smuzhiyun 						   rdev->clock.current_dispclk,
3149*4882a593Smuzhiyun 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
3150*4882a593Smuzhiyun 	}
3151*4882a593Smuzhiyun 
3152*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++) {
3153*4882a593Smuzhiyun 		btc_apply_voltage_delta_rules(rdev,
3154*4882a593Smuzhiyun 					      max_limits->vddc, max_limits->vddci,
3155*4882a593Smuzhiyun 					      &ps->performance_levels[i].vddc,
3156*4882a593Smuzhiyun 					      &ps->performance_levels[i].vddci);
3157*4882a593Smuzhiyun 	}
3158*4882a593Smuzhiyun 
3159*4882a593Smuzhiyun 	ps->dc_compatible = true;
3160*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count; i++) {
3161*4882a593Smuzhiyun 		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
3162*4882a593Smuzhiyun 			ps->dc_compatible = false;
3163*4882a593Smuzhiyun 	}
3164*4882a593Smuzhiyun }
3165*4882a593Smuzhiyun 
3166*4882a593Smuzhiyun #if 0
3167*4882a593Smuzhiyun static int si_read_smc_soft_register(struct radeon_device *rdev,
3168*4882a593Smuzhiyun 				     u16 reg_offset, u32 *value)
3169*4882a593Smuzhiyun {
3170*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3171*4882a593Smuzhiyun 
3172*4882a593Smuzhiyun 	return si_read_smc_sram_dword(rdev,
3173*4882a593Smuzhiyun 				      si_pi->soft_regs_start + reg_offset, value,
3174*4882a593Smuzhiyun 				      si_pi->sram_end);
3175*4882a593Smuzhiyun }
3176*4882a593Smuzhiyun #endif
3177*4882a593Smuzhiyun 
3178*4882a593Smuzhiyun static int si_write_smc_soft_register(struct radeon_device *rdev,
3179*4882a593Smuzhiyun 				      u16 reg_offset, u32 value)
3180*4882a593Smuzhiyun {
3181*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3182*4882a593Smuzhiyun 
3183*4882a593Smuzhiyun 	return si_write_smc_sram_dword(rdev,
3184*4882a593Smuzhiyun 				       si_pi->soft_regs_start + reg_offset,
3185*4882a593Smuzhiyun 				       value, si_pi->sram_end);
3186*4882a593Smuzhiyun }
3187*4882a593Smuzhiyun 
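/*
 * Descriptive note (added): detect one specific 1GB GDDR5 board
 * (PCI device 0x6819) by checking the memory type, vendor/revision IDs and
 * the row/column/bank geometry decoded from MC_SEQ_MISC0 and MC_ARB_RAMCFG.
 */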
3188*4882a593Smuzhiyun static bool si_is_special_1gb_platform(struct radeon_device *rdev)
3189*4882a593Smuzhiyun {
3190*4882a593Smuzhiyun 	bool ret = false;
3191*4882a593Smuzhiyun 	u32 tmp, width, row, column, bank, density;
3192*4882a593Smuzhiyun 	bool is_memory_gddr5, is_special;
3193*4882a593Smuzhiyun 
3194*4882a593Smuzhiyun 	tmp = RREG32(MC_SEQ_MISC0);
3195*4882a593Smuzhiyun 	is_memory_gddr5 = (MC_SEQ_MISC0_GDDR5_VALUE == ((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT));
3196*4882a593Smuzhiyun 	is_special = (MC_SEQ_MISC0_REV_ID_VALUE == ((tmp & MC_SEQ_MISC0_REV_ID_MASK) >> MC_SEQ_MISC0_REV_ID_SHIFT))
3197*4882a593Smuzhiyun 		& (MC_SEQ_MISC0_VEN_ID_VALUE == ((tmp & MC_SEQ_MISC0_VEN_ID_MASK) >> MC_SEQ_MISC0_VEN_ID_SHIFT));
3198*4882a593Smuzhiyun 
3199*4882a593Smuzhiyun 	WREG32(MC_SEQ_IO_DEBUG_INDEX, 0xb);
3200*4882a593Smuzhiyun 	width = ((RREG32(MC_SEQ_IO_DEBUG_DATA) >> 1) & 1) ? 16 : 32;
3201*4882a593Smuzhiyun 
3202*4882a593Smuzhiyun 	tmp = RREG32(MC_ARB_RAMCFG);
3203*4882a593Smuzhiyun 	row = ((tmp & NOOFROWS_MASK) >> NOOFROWS_SHIFT) + 10;
3204*4882a593Smuzhiyun 	column = ((tmp & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) + 8;
3205*4882a593Smuzhiyun 	bank = ((tmp & NOOFBANK_MASK) >> NOOFBANK_SHIFT) + 2;
3206*4882a593Smuzhiyun 
3207*4882a593Smuzhiyun 	density = (1 << (row + column - 20 + bank)) * width;
3208*4882a593Smuzhiyun 
3209*4882a593Smuzhiyun 	if ((rdev->pdev->device == 0x6819) &&
3210*4882a593Smuzhiyun 	    is_memory_gddr5 && is_special && (density == 0x400))
3211*4882a593Smuzhiyun 		ret = true;
3212*4882a593Smuzhiyun 
3213*4882a593Smuzhiyun 	return ret;
3214*4882a593Smuzhiyun }
3215*4882a593Smuzhiyun 
3216*4882a593Smuzhiyun static void si_get_leakage_vddc(struct radeon_device *rdev)
3217*4882a593Smuzhiyun {
3218*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3219*4882a593Smuzhiyun 	u16 vddc, count = 0;
3220*4882a593Smuzhiyun 	int i, ret;
3221*4882a593Smuzhiyun 
3222*4882a593Smuzhiyun 	for (i = 0; i < SISLANDS_MAX_LEAKAGE_COUNT; i++) {
3223*4882a593Smuzhiyun 		ret = radeon_atom_get_leakage_vddc_based_on_leakage_idx(rdev, &vddc, SISLANDS_LEAKAGE_INDEX0 + i);
3224*4882a593Smuzhiyun 
3225*4882a593Smuzhiyun 		if (!ret && (vddc > 0) && (vddc != (SISLANDS_LEAKAGE_INDEX0 + i))) {
3226*4882a593Smuzhiyun 			si_pi->leakage_voltage.entries[count].voltage = vddc;
3227*4882a593Smuzhiyun 			si_pi->leakage_voltage.entries[count].leakage_index =
3228*4882a593Smuzhiyun 				SISLANDS_LEAKAGE_INDEX0 + i;
3229*4882a593Smuzhiyun 			count++;
3230*4882a593Smuzhiyun 		}
3231*4882a593Smuzhiyun 	}
3232*4882a593Smuzhiyun 	si_pi->leakage_voltage.count = count;
3233*4882a593Smuzhiyun }
3234*4882a593Smuzhiyun 
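/*
 * Descriptive note (added): leakage voltages are referenced through virtual
 * indices with a 0xff high byte; translate such an index back to the real
 * voltage cached by si_get_leakage_vddc() above.
 */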
3235*4882a593Smuzhiyun static int si_get_leakage_voltage_from_leakage_index(struct radeon_device *rdev,
3236*4882a593Smuzhiyun 						     u32 index, u16 *leakage_voltage)
3237*4882a593Smuzhiyun {
3238*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3239*4882a593Smuzhiyun 	int i;
3240*4882a593Smuzhiyun 
3241*4882a593Smuzhiyun 	if (leakage_voltage == NULL)
3242*4882a593Smuzhiyun 		return -EINVAL;
3243*4882a593Smuzhiyun 
3244*4882a593Smuzhiyun 	if ((index & 0xff00) != 0xff00)
3245*4882a593Smuzhiyun 		return -EINVAL;
3246*4882a593Smuzhiyun 
3247*4882a593Smuzhiyun 	if ((index & 0xff) > SISLANDS_MAX_LEAKAGE_COUNT + 1)
3248*4882a593Smuzhiyun 		return -EINVAL;
3249*4882a593Smuzhiyun 
3250*4882a593Smuzhiyun 	if (index < SISLANDS_LEAKAGE_INDEX0)
3251*4882a593Smuzhiyun 		return -EINVAL;
3252*4882a593Smuzhiyun 
3253*4882a593Smuzhiyun 	for (i = 0; i < si_pi->leakage_voltage.count; i++) {
3254*4882a593Smuzhiyun 		if (si_pi->leakage_voltage.entries[i].leakage_index == index) {
3255*4882a593Smuzhiyun 			*leakage_voltage = si_pi->leakage_voltage.entries[i].voltage;
3256*4882a593Smuzhiyun 			return 0;
3257*4882a593Smuzhiyun 		}
3258*4882a593Smuzhiyun 	}
3259*4882a593Smuzhiyun 	return -EAGAIN;
3260*4882a593Smuzhiyun }
3261*4882a593Smuzhiyun 
3262*4882a593Smuzhiyun static void si_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
3263*4882a593Smuzhiyun {
3264*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3265*4882a593Smuzhiyun 	bool want_thermal_protection;
3266*4882a593Smuzhiyun 	enum radeon_dpm_event_src dpm_event_src;
3267*4882a593Smuzhiyun 
3268*4882a593Smuzhiyun 	switch (sources) {
3269*4882a593Smuzhiyun 	case 0:
3270*4882a593Smuzhiyun 	default:
3271*4882a593Smuzhiyun 		want_thermal_protection = false;
3272*4882a593Smuzhiyun 		break;
3273*4882a593Smuzhiyun 	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
3274*4882a593Smuzhiyun 		want_thermal_protection = true;
3275*4882a593Smuzhiyun 		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
3276*4882a593Smuzhiyun 		break;
3277*4882a593Smuzhiyun 	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
3278*4882a593Smuzhiyun 		want_thermal_protection = true;
3279*4882a593Smuzhiyun 		dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
3280*4882a593Smuzhiyun 		break;
3281*4882a593Smuzhiyun 	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
3282*4882a593Smuzhiyun 	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
3283*4882a593Smuzhiyun 		want_thermal_protection = true;
3284*4882a593Smuzhiyun 		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
3285*4882a593Smuzhiyun 		break;
3286*4882a593Smuzhiyun 	}
3287*4882a593Smuzhiyun 
3288*4882a593Smuzhiyun 	if (want_thermal_protection) {
3289*4882a593Smuzhiyun 		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
3290*4882a593Smuzhiyun 		if (pi->thermal_protection)
3291*4882a593Smuzhiyun 			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
3292*4882a593Smuzhiyun 	} else {
3293*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
3294*4882a593Smuzhiyun 	}
3295*4882a593Smuzhiyun }
3296*4882a593Smuzhiyun 
3297*4882a593Smuzhiyun static void si_enable_auto_throttle_source(struct radeon_device *rdev,
3298*4882a593Smuzhiyun 					   enum radeon_dpm_auto_throttle_src source,
3299*4882a593Smuzhiyun 					   bool enable)
3300*4882a593Smuzhiyun {
3301*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3302*4882a593Smuzhiyun 
3303*4882a593Smuzhiyun 	if (enable) {
3304*4882a593Smuzhiyun 		if (!(pi->active_auto_throttle_sources & (1 << source))) {
3305*4882a593Smuzhiyun 			pi->active_auto_throttle_sources |= 1 << source;
3306*4882a593Smuzhiyun 			si_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
3307*4882a593Smuzhiyun 		}
3308*4882a593Smuzhiyun 	} else {
3309*4882a593Smuzhiyun 		if (pi->active_auto_throttle_sources & (1 << source)) {
3310*4882a593Smuzhiyun 			pi->active_auto_throttle_sources &= ~(1 << source);
3311*4882a593Smuzhiyun 			si_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
3312*4882a593Smuzhiyun 		}
3313*4882a593Smuzhiyun 	}
3314*4882a593Smuzhiyun }
3315*4882a593Smuzhiyun 
3316*4882a593Smuzhiyun static void si_start_dpm(struct radeon_device *rdev)
3317*4882a593Smuzhiyun {
3318*4882a593Smuzhiyun 	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
3319*4882a593Smuzhiyun }
3320*4882a593Smuzhiyun 
3321*4882a593Smuzhiyun static void si_stop_dpm(struct radeon_device *rdev)
3322*4882a593Smuzhiyun {
3323*4882a593Smuzhiyun 	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
3324*4882a593Smuzhiyun }
3325*4882a593Smuzhiyun 
3326*4882a593Smuzhiyun static void si_enable_sclk_control(struct radeon_device *rdev, bool enable)
3327*4882a593Smuzhiyun {
3328*4882a593Smuzhiyun 	if (enable)
3329*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
3330*4882a593Smuzhiyun 	else
3331*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
3332*4882a593Smuzhiyun 
3333*4882a593Smuzhiyun }
3334*4882a593Smuzhiyun 
3335*4882a593Smuzhiyun #if 0
3336*4882a593Smuzhiyun static int si_notify_hardware_of_thermal_state(struct radeon_device *rdev,
3337*4882a593Smuzhiyun 					       u32 thermal_level)
3338*4882a593Smuzhiyun {
3339*4882a593Smuzhiyun 	PPSMC_Result ret;
3340*4882a593Smuzhiyun 
3341*4882a593Smuzhiyun 	if (thermal_level == 0) {
3342*4882a593Smuzhiyun 		ret = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3343*4882a593Smuzhiyun 		if (ret == PPSMC_Result_OK)
3344*4882a593Smuzhiyun 			return 0;
3345*4882a593Smuzhiyun 		else
3346*4882a593Smuzhiyun 			return -EINVAL;
3347*4882a593Smuzhiyun 	}
3348*4882a593Smuzhiyun 	return 0;
3349*4882a593Smuzhiyun }
3350*4882a593Smuzhiyun 
3351*4882a593Smuzhiyun static void si_notify_hardware_vpu_recovery_event(struct radeon_device *rdev)
3352*4882a593Smuzhiyun {
3353*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_tdr_is_about_to_happen, true);
3354*4882a593Smuzhiyun }
3355*4882a593Smuzhiyun #endif
3356*4882a593Smuzhiyun 
3357*4882a593Smuzhiyun #if 0
3358*4882a593Smuzhiyun static int si_notify_hw_of_powersource(struct radeon_device *rdev, bool ac_power)
3359*4882a593Smuzhiyun {
3360*4882a593Smuzhiyun 	if (ac_power)
3361*4882a593Smuzhiyun 		return (si_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
3362*4882a593Smuzhiyun 			0 : -EINVAL;
3363*4882a593Smuzhiyun 
3364*4882a593Smuzhiyun 	return 0;
3365*4882a593Smuzhiyun }
3366*4882a593Smuzhiyun #endif
3367*4882a593Smuzhiyun 
3368*4882a593Smuzhiyun static PPSMC_Result si_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
3369*4882a593Smuzhiyun 						      PPSMC_Msg msg, u32 parameter)
3370*4882a593Smuzhiyun {
3371*4882a593Smuzhiyun 	WREG32(SMC_SCRATCH0, parameter);
3372*4882a593Smuzhiyun 	return si_send_msg_to_smc(rdev, msg);
3373*4882a593Smuzhiyun }
3374*4882a593Smuzhiyun 
3375*4882a593Smuzhiyun static int si_restrict_performance_levels_before_switch(struct radeon_device *rdev)
3376*4882a593Smuzhiyun {
3377*4882a593Smuzhiyun 	if (si_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
3378*4882a593Smuzhiyun 		return -EINVAL;
3379*4882a593Smuzhiyun 
3380*4882a593Smuzhiyun 	return (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
3381*4882a593Smuzhiyun 		0 : -EINVAL;
3382*4882a593Smuzhiyun }
3383*4882a593Smuzhiyun 
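/*
 * Descriptive note (added): force a fixed performance level via the SMC:
 * "high" enables every level and forces one of them, "low" limits the
 * enabled set to a single level, and "auto" clears the forcing and
 * re-enables all levels.
 */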
3384*4882a593Smuzhiyun int si_dpm_force_performance_level(struct radeon_device *rdev,
3385*4882a593Smuzhiyun 				   enum radeon_dpm_forced_level level)
3386*4882a593Smuzhiyun {
3387*4882a593Smuzhiyun 	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
3388*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
3389*4882a593Smuzhiyun 	u32 levels = ps->performance_level_count;
3390*4882a593Smuzhiyun 
3391*4882a593Smuzhiyun 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
3392*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, levels) != PPSMC_Result_OK)
3393*4882a593Smuzhiyun 			return -EINVAL;
3394*4882a593Smuzhiyun 
3395*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
3396*4882a593Smuzhiyun 			return -EINVAL;
3397*4882a593Smuzhiyun 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
3398*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
3399*4882a593Smuzhiyun 			return -EINVAL;
3400*4882a593Smuzhiyun 
3401*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
3402*4882a593Smuzhiyun 			return -EINVAL;
3403*4882a593Smuzhiyun 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
3404*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
3405*4882a593Smuzhiyun 			return -EINVAL;
3406*4882a593Smuzhiyun 
3407*4882a593Smuzhiyun 		if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, levels) != PPSMC_Result_OK)
3408*4882a593Smuzhiyun 			return -EINVAL;
3409*4882a593Smuzhiyun 	}
3410*4882a593Smuzhiyun 
3411*4882a593Smuzhiyun 	rdev->pm.dpm.forced_level = level;
3412*4882a593Smuzhiyun 
3413*4882a593Smuzhiyun 	return 0;
3414*4882a593Smuzhiyun }
3415*4882a593Smuzhiyun 
3416*4882a593Smuzhiyun #if 0
3417*4882a593Smuzhiyun static int si_set_boot_state(struct radeon_device *rdev)
3418*4882a593Smuzhiyun {
3419*4882a593Smuzhiyun 	return (si_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) == PPSMC_Result_OK) ?
3420*4882a593Smuzhiyun 		0 : -EINVAL;
3421*4882a593Smuzhiyun }
3422*4882a593Smuzhiyun #endif
3423*4882a593Smuzhiyun 
3424*4882a593Smuzhiyun static int si_set_sw_state(struct radeon_device *rdev)
3425*4882a593Smuzhiyun {
3426*4882a593Smuzhiyun 	return (si_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) == PPSMC_Result_OK) ?
3427*4882a593Smuzhiyun 		0 : -EINVAL;
3428*4882a593Smuzhiyun }
3429*4882a593Smuzhiyun 
3430*4882a593Smuzhiyun static int si_halt_smc(struct radeon_device *rdev)
3431*4882a593Smuzhiyun {
3432*4882a593Smuzhiyun 	if (si_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
3433*4882a593Smuzhiyun 		return -EINVAL;
3434*4882a593Smuzhiyun 
3435*4882a593Smuzhiyun 	return (si_wait_for_smc_inactive(rdev) == PPSMC_Result_OK) ?
3436*4882a593Smuzhiyun 		0 : -EINVAL;
3437*4882a593Smuzhiyun }
3438*4882a593Smuzhiyun 
3439*4882a593Smuzhiyun static int si_resume_smc(struct radeon_device *rdev)
3440*4882a593Smuzhiyun {
3441*4882a593Smuzhiyun 	if (si_send_msg_to_smc(rdev, PPSMC_FlushDataCache) != PPSMC_Result_OK)
3442*4882a593Smuzhiyun 		return -EINVAL;
3443*4882a593Smuzhiyun 
3444*4882a593Smuzhiyun 	return (si_send_msg_to_smc(rdev, PPSMC_MSG_Resume) == PPSMC_Result_OK) ?
3445*4882a593Smuzhiyun 		0 : -EINVAL;
3446*4882a593Smuzhiyun }
3447*4882a593Smuzhiyun 
3448*4882a593Smuzhiyun static void si_dpm_start_smc(struct radeon_device *rdev)
3449*4882a593Smuzhiyun {
3450*4882a593Smuzhiyun 	si_program_jump_on_start(rdev);
3451*4882a593Smuzhiyun 	si_start_smc(rdev);
3452*4882a593Smuzhiyun 	si_start_smc_clock(rdev);
3453*4882a593Smuzhiyun }
3454*4882a593Smuzhiyun 
3455*4882a593Smuzhiyun static void si_dpm_stop_smc(struct radeon_device *rdev)
3456*4882a593Smuzhiyun {
3457*4882a593Smuzhiyun 	si_reset_smc(rdev);
3458*4882a593Smuzhiyun 	si_stop_smc_clock(rdev);
3459*4882a593Smuzhiyun }
3460*4882a593Smuzhiyun 
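/*
 * Descriptive note (added): read the SMC firmware header to learn where the
 * state, soft-register, MC register, fan, ARB, CAC, DTE, SPLL and PAPM
 * tables live in SMC SRAM; later table uploads are written at these offsets.
 */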
3461*4882a593Smuzhiyun static int si_process_firmware_header(struct radeon_device *rdev)
3462*4882a593Smuzhiyun {
3463*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3464*4882a593Smuzhiyun 	u32 tmp;
3465*4882a593Smuzhiyun 	int ret;
3466*4882a593Smuzhiyun 
3467*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3468*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3469*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_stateTable,
3470*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3471*4882a593Smuzhiyun 	if (ret)
3472*4882a593Smuzhiyun 		return ret;
3473*4882a593Smuzhiyun 
3474*4882a593Smuzhiyun 	si_pi->state_table_start = tmp;
3475*4882a593Smuzhiyun 
3476*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3477*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3478*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
3479*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3480*4882a593Smuzhiyun 	if (ret)
3481*4882a593Smuzhiyun 		return ret;
3482*4882a593Smuzhiyun 
3483*4882a593Smuzhiyun 	si_pi->soft_regs_start = tmp;
3484*4882a593Smuzhiyun 
3485*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3486*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3487*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
3488*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3489*4882a593Smuzhiyun 	if (ret)
3490*4882a593Smuzhiyun 		return ret;
3491*4882a593Smuzhiyun 
3492*4882a593Smuzhiyun 	si_pi->mc_reg_table_start = tmp;
3493*4882a593Smuzhiyun 
3494*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3495*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3496*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_fanTable,
3497*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3498*4882a593Smuzhiyun 	if (ret)
3499*4882a593Smuzhiyun 		return ret;
3500*4882a593Smuzhiyun 
3501*4882a593Smuzhiyun 	si_pi->fan_table_start = tmp;
3502*4882a593Smuzhiyun 
3503*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3504*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3505*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
3506*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3507*4882a593Smuzhiyun 	if (ret)
3508*4882a593Smuzhiyun 		return ret;
3509*4882a593Smuzhiyun 
3510*4882a593Smuzhiyun 	si_pi->arb_table_start = tmp;
3511*4882a593Smuzhiyun 
3512*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3513*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3514*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_CacConfigTable,
3515*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3516*4882a593Smuzhiyun 	if (ret)
3517*4882a593Smuzhiyun 		return ret;
3518*4882a593Smuzhiyun 
3519*4882a593Smuzhiyun 	si_pi->cac_table_start = tmp;
3520*4882a593Smuzhiyun 
3521*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3522*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3523*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_DteConfiguration,
3524*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3525*4882a593Smuzhiyun 	if (ret)
3526*4882a593Smuzhiyun 		return ret;
3527*4882a593Smuzhiyun 
3528*4882a593Smuzhiyun 	si_pi->dte_table_start = tmp;
3529*4882a593Smuzhiyun 
3530*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3531*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3532*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_spllTable,
3533*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3534*4882a593Smuzhiyun 	if (ret)
3535*4882a593Smuzhiyun 		return ret;
3536*4882a593Smuzhiyun 
3537*4882a593Smuzhiyun 	si_pi->spll_table_start = tmp;
3538*4882a593Smuzhiyun 
3539*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev,
3540*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
3541*4882a593Smuzhiyun 				     SISLANDS_SMC_FIRMWARE_HEADER_PAPMParameters,
3542*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
3543*4882a593Smuzhiyun 	if (ret)
3544*4882a593Smuzhiyun 		return ret;
3545*4882a593Smuzhiyun 
3546*4882a593Smuzhiyun 	si_pi->papm_cfg_table_start = tmp;
3547*4882a593Smuzhiyun 
3548*4882a593Smuzhiyun 	return ret;
3549*4882a593Smuzhiyun }
3550*4882a593Smuzhiyun 
3551*4882a593Smuzhiyun static void si_read_clock_registers(struct radeon_device *rdev)
3552*4882a593Smuzhiyun {
3553*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3554*4882a593Smuzhiyun 
3555*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
3556*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
3557*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
3558*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
3559*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
3560*4882a593Smuzhiyun 	si_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
3561*4882a593Smuzhiyun 	si_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
3562*4882a593Smuzhiyun 	si_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
3563*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
3564*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
3565*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_func_cntl = RREG32(MPLL_FUNC_CNTL);
3566*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_func_cntl_1 = RREG32(MPLL_FUNC_CNTL_1);
3567*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_func_cntl_2 = RREG32(MPLL_FUNC_CNTL_2);
3568*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
3569*4882a593Smuzhiyun 	si_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
3570*4882a593Smuzhiyun }
3571*4882a593Smuzhiyun 
3572*4882a593Smuzhiyun static void si_enable_thermal_protection(struct radeon_device *rdev,
3573*4882a593Smuzhiyun 					  bool enable)
3574*4882a593Smuzhiyun {
3575*4882a593Smuzhiyun 	if (enable)
3576*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
3577*4882a593Smuzhiyun 	else
3578*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
3579*4882a593Smuzhiyun }
3580*4882a593Smuzhiyun 
3581*4882a593Smuzhiyun static void si_enable_acpi_power_management(struct radeon_device *rdev)
3582*4882a593Smuzhiyun {
3583*4882a593Smuzhiyun 	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
3584*4882a593Smuzhiyun }
3585*4882a593Smuzhiyun 
3586*4882a593Smuzhiyun #if 0
3587*4882a593Smuzhiyun static int si_enter_ulp_state(struct radeon_device *rdev)
3588*4882a593Smuzhiyun {
3589*4882a593Smuzhiyun 	WREG32(SMC_MESSAGE_0, PPSMC_MSG_SwitchToMinimumPower);
3590*4882a593Smuzhiyun 
3591*4882a593Smuzhiyun 	udelay(25000);
3592*4882a593Smuzhiyun 
3593*4882a593Smuzhiyun 	return 0;
3594*4882a593Smuzhiyun }
3595*4882a593Smuzhiyun 
3596*4882a593Smuzhiyun static int si_exit_ulp_state(struct radeon_device *rdev)
3597*4882a593Smuzhiyun {
3598*4882a593Smuzhiyun 	int i;
3599*4882a593Smuzhiyun 
3600*4882a593Smuzhiyun 	WREG32(SMC_MESSAGE_0, PPSMC_MSG_ResumeFromMinimumPower);
3601*4882a593Smuzhiyun 
3602*4882a593Smuzhiyun 	udelay(7000);
3603*4882a593Smuzhiyun 
3604*4882a593Smuzhiyun 	for (i = 0; i < rdev->usec_timeout; i++) {
3605*4882a593Smuzhiyun 		if (RREG32(SMC_RESP_0) == 1)
3606*4882a593Smuzhiyun 			break;
3607*4882a593Smuzhiyun 		udelay(1000);
3608*4882a593Smuzhiyun 	}
3609*4882a593Smuzhiyun 
3610*4882a593Smuzhiyun 	return 0;
3611*4882a593Smuzhiyun }
3612*4882a593Smuzhiyun #endif
3613*4882a593Smuzhiyun 
3614*4882a593Smuzhiyun static int si_notify_smc_display_change(struct radeon_device *rdev,
3615*4882a593Smuzhiyun 				     bool has_display)
3616*4882a593Smuzhiyun {
3617*4882a593Smuzhiyun 	PPSMC_Msg msg = has_display ?
3618*4882a593Smuzhiyun 		PPSMC_MSG_HasDisplay : PPSMC_MSG_NoDisplay;
3619*4882a593Smuzhiyun 
3620*4882a593Smuzhiyun 	return (si_send_msg_to_smc(rdev, msg) == PPSMC_Result_OK) ?
3621*4882a593Smuzhiyun 		0 : -EINVAL;
3622*4882a593Smuzhiyun }
3623*4882a593Smuzhiyun 
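/*
 * Descriptive note (added): convert the voltage regulator, ACPI and VBI
 * timeout values into reference-clock based delays and program them into
 * the SMC soft registers.
 */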
3624*4882a593Smuzhiyun static void si_program_response_times(struct radeon_device *rdev)
3625*4882a593Smuzhiyun {
3626*4882a593Smuzhiyun 	u32 voltage_response_time, acpi_delay_time, vbi_time_out;
3627*4882a593Smuzhiyun 	u32 vddc_dly, acpi_dly, vbi_dly;
3628*4882a593Smuzhiyun 	u32 reference_clock;
3629*4882a593Smuzhiyun 
3630*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
3631*4882a593Smuzhiyun 
3632*4882a593Smuzhiyun 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
3633*4882a593Smuzhiyun 
3634*4882a593Smuzhiyun 	if (voltage_response_time == 0)
3635*4882a593Smuzhiyun 		voltage_response_time = 1000;
3636*4882a593Smuzhiyun 
3637*4882a593Smuzhiyun 	acpi_delay_time = 15000;
3638*4882a593Smuzhiyun 	vbi_time_out = 100000;
3639*4882a593Smuzhiyun 
3640*4882a593Smuzhiyun 	reference_clock = radeon_get_xclk(rdev);
3641*4882a593Smuzhiyun 
3642*4882a593Smuzhiyun 	vddc_dly = (voltage_response_time  * reference_clock) / 100;
3643*4882a593Smuzhiyun 	acpi_dly = (acpi_delay_time * reference_clock) / 100;
3644*4882a593Smuzhiyun 	vbi_dly  = (vbi_time_out * reference_clock) / 100;
3645*4882a593Smuzhiyun 
3646*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
3647*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
3648*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
3649*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
3650*4882a593Smuzhiyun }
3651*4882a593Smuzhiyun 
3652*4882a593Smuzhiyun static void si_program_ds_registers(struct radeon_device *rdev)
3653*4882a593Smuzhiyun {
3654*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3655*4882a593Smuzhiyun 	u32 tmp = 1; /* XXX: 0x10 on tahiti A0 */
3656*4882a593Smuzhiyun 
3657*4882a593Smuzhiyun 	if (eg_pi->sclk_deep_sleep) {
3658*4882a593Smuzhiyun 		WREG32_P(MISC_CLK_CNTL, DEEP_SLEEP_CLK_SEL(tmp), ~DEEP_SLEEP_CLK_SEL_MASK);
3659*4882a593Smuzhiyun 		WREG32_P(CG_SPLL_AUTOSCALE_CNTL, AUTOSCALE_ON_SS_CLEAR,
3660*4882a593Smuzhiyun 			 ~AUTOSCALE_ON_SS_CLEAR);
3661*4882a593Smuzhiyun 	}
3662*4882a593Smuzhiyun }
3663*4882a593Smuzhiyun 
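/*
 * Descriptive note (added): program the display gap behaviour for the
 * current CRTC configuration, retarget DISP1_SLOW_SELECT at the first
 * active CRTC when the currently selected pipe is no longer active, and
 * finally tell the SMC whether any display is active at all.
 */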
3664*4882a593Smuzhiyun static void si_program_display_gap(struct radeon_device *rdev)
3665*4882a593Smuzhiyun {
3666*4882a593Smuzhiyun 	u32 tmp, pipe;
3667*4882a593Smuzhiyun 	int i;
3668*4882a593Smuzhiyun 
3669*4882a593Smuzhiyun 	tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
3670*4882a593Smuzhiyun 	if (rdev->pm.dpm.new_active_crtc_count > 0)
3671*4882a593Smuzhiyun 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
3672*4882a593Smuzhiyun 	else
3673*4882a593Smuzhiyun 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);
3674*4882a593Smuzhiyun 
3675*4882a593Smuzhiyun 	if (rdev->pm.dpm.new_active_crtc_count > 1)
3676*4882a593Smuzhiyun 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
3677*4882a593Smuzhiyun 	else
3678*4882a593Smuzhiyun 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);
3679*4882a593Smuzhiyun 
3680*4882a593Smuzhiyun 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
3681*4882a593Smuzhiyun 
3682*4882a593Smuzhiyun 	tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
3683*4882a593Smuzhiyun 	pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;
3684*4882a593Smuzhiyun 
3685*4882a593Smuzhiyun 	if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
3686*4882a593Smuzhiyun 	    (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
3687*4882a593Smuzhiyun 		/* find the first active crtc */
3688*4882a593Smuzhiyun 		for (i = 0; i < rdev->num_crtc; i++) {
3689*4882a593Smuzhiyun 			if (rdev->pm.dpm.new_active_crtcs & (1 << i))
3690*4882a593Smuzhiyun 				break;
3691*4882a593Smuzhiyun 		}
3692*4882a593Smuzhiyun 		if (i == rdev->num_crtc)
3693*4882a593Smuzhiyun 			pipe = 0;
3694*4882a593Smuzhiyun 		else
3695*4882a593Smuzhiyun 			pipe = i;
3696*4882a593Smuzhiyun 
3697*4882a593Smuzhiyun 		tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
3698*4882a593Smuzhiyun 		tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
3699*4882a593Smuzhiyun 		WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
3700*4882a593Smuzhiyun 	}
3701*4882a593Smuzhiyun 
3702*4882a593Smuzhiyun 	/* Setting this to false forces the performance state to low if the crtcs are disabled.
3703*4882a593Smuzhiyun 	 * This can be a problem on PowerXpress systems or if you want to use the card
3704*4882a593Smuzhiyun 	 * for offscreen rendering or compute if there are no crtcs enabled.
3705*4882a593Smuzhiyun 	 */
3706*4882a593Smuzhiyun 	si_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
3707*4882a593Smuzhiyun }
3708*4882a593Smuzhiyun 
3709*4882a593Smuzhiyun static void si_enable_spread_spectrum(struct radeon_device *rdev, bool enable)
3710*4882a593Smuzhiyun {
3711*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3712*4882a593Smuzhiyun 
3713*4882a593Smuzhiyun 	if (enable) {
3714*4882a593Smuzhiyun 		if (pi->sclk_ss)
3715*4882a593Smuzhiyun 			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);
3716*4882a593Smuzhiyun 	} else {
3717*4882a593Smuzhiyun 		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
3718*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
3719*4882a593Smuzhiyun 	}
3720*4882a593Smuzhiyun }
3721*4882a593Smuzhiyun 
3722*4882a593Smuzhiyun static void si_setup_bsp(struct radeon_device *rdev)
3723*4882a593Smuzhiyun {
3724*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3725*4882a593Smuzhiyun 	u32 xclk = radeon_get_xclk(rdev);
3726*4882a593Smuzhiyun 
3727*4882a593Smuzhiyun 	r600_calculate_u_and_p(pi->asi,
3728*4882a593Smuzhiyun 			       xclk,
3729*4882a593Smuzhiyun 			       16,
3730*4882a593Smuzhiyun 			       &pi->bsp,
3731*4882a593Smuzhiyun 			       &pi->bsu);
3732*4882a593Smuzhiyun 
3733*4882a593Smuzhiyun 	r600_calculate_u_and_p(pi->pasi,
3734*4882a593Smuzhiyun 			       xclk,
3735*4882a593Smuzhiyun 			       16,
3736*4882a593Smuzhiyun 			       &pi->pbsp,
3737*4882a593Smuzhiyun 			       &pi->pbsu);
3738*4882a593Smuzhiyun 
3739*4882a593Smuzhiyun 
3740*4882a593Smuzhiyun 	pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
3741*4882a593Smuzhiyun 	pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);
3742*4882a593Smuzhiyun 
3743*4882a593Smuzhiyun 	WREG32(CG_BSP, pi->dsp);
3744*4882a593Smuzhiyun }
3745*4882a593Smuzhiyun 
3746*4882a593Smuzhiyun static void si_program_git(struct radeon_device *rdev)
3747*4882a593Smuzhiyun {
3748*4882a593Smuzhiyun 	WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
3749*4882a593Smuzhiyun }
3750*4882a593Smuzhiyun 
3751*4882a593Smuzhiyun static void si_program_tp(struct radeon_device *rdev)
3752*4882a593Smuzhiyun {
3753*4882a593Smuzhiyun 	int i;
3754*4882a593Smuzhiyun 	enum r600_td td = R600_TD_DFLT;
3755*4882a593Smuzhiyun 
3756*4882a593Smuzhiyun 	for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
3757*4882a593Smuzhiyun 		WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));
3758*4882a593Smuzhiyun 
3759*4882a593Smuzhiyun 	if (td == R600_TD_AUTO)
3760*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
3761*4882a593Smuzhiyun 	else
3762*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
3763*4882a593Smuzhiyun 
3764*4882a593Smuzhiyun 	if (td == R600_TD_UP)
3765*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
3766*4882a593Smuzhiyun 
3767*4882a593Smuzhiyun 	if (td == R600_TD_DOWN)
3768*4882a593Smuzhiyun 		WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
3769*4882a593Smuzhiyun }
3770*4882a593Smuzhiyun 
3771*4882a593Smuzhiyun static void si_program_tpp(struct radeon_device *rdev)
3772*4882a593Smuzhiyun {
3773*4882a593Smuzhiyun 	WREG32(CG_TPC, R600_TPC_DFLT);
3774*4882a593Smuzhiyun }
3775*4882a593Smuzhiyun 
3776*4882a593Smuzhiyun static void si_program_sstp(struct radeon_device *rdev)
3777*4882a593Smuzhiyun {
3778*4882a593Smuzhiyun 	WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
3779*4882a593Smuzhiyun }
3780*4882a593Smuzhiyun 
3781*4882a593Smuzhiyun static void si_enable_display_gap(struct radeon_device *rdev)
3782*4882a593Smuzhiyun {
3783*4882a593Smuzhiyun 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
3784*4882a593Smuzhiyun 
3785*4882a593Smuzhiyun 	tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
3786*4882a593Smuzhiyun 	tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
3787*4882a593Smuzhiyun 		DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));
3788*4882a593Smuzhiyun 
3789*4882a593Smuzhiyun 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
3790*4882a593Smuzhiyun 	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
3791*4882a593Smuzhiyun 		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
3792*4882a593Smuzhiyun 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
3793*4882a593Smuzhiyun }
3794*4882a593Smuzhiyun 
3795*4882a593Smuzhiyun static void si_program_vc(struct radeon_device *rdev)
3796*4882a593Smuzhiyun {
3797*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3798*4882a593Smuzhiyun 
3799*4882a593Smuzhiyun 	WREG32(CG_FTV, pi->vrc);
3800*4882a593Smuzhiyun }
3801*4882a593Smuzhiyun 
3802*4882a593Smuzhiyun static void si_clear_vc(struct radeon_device *rdev)
3803*4882a593Smuzhiyun {
3804*4882a593Smuzhiyun 	WREG32(CG_FTV, 0);
3805*4882a593Smuzhiyun }
3806*4882a593Smuzhiyun 
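/*
 * Map a DDR3 memory clock onto a 4-bit frequency-ratio index: 0 below
 * 10000, 0x0f at or above 80000, and one step per 5000 in between
 * (roughly 100-800 MHz if the clock is in the usual 10 kHz units).
 */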
3807*4882a593Smuzhiyun u8 si_get_ddr3_mclk_frequency_ratio(u32 memory_clock)
3808*4882a593Smuzhiyun {
3809*4882a593Smuzhiyun 	u8 mc_para_index;
3810*4882a593Smuzhiyun 
3811*4882a593Smuzhiyun 	if (memory_clock < 10000)
3812*4882a593Smuzhiyun 		mc_para_index = 0;
3813*4882a593Smuzhiyun 	else if (memory_clock >= 80000)
3814*4882a593Smuzhiyun 		mc_para_index = 0x0f;
3815*4882a593Smuzhiyun 	else
3816*4882a593Smuzhiyun 		mc_para_index = (u8)((memory_clock - 10000) / 5000 + 1);
3817*4882a593Smuzhiyun 	return mc_para_index;
3818*4882a593Smuzhiyun }
3819*4882a593Smuzhiyun 
3820*4882a593Smuzhiyun u8 si_get_mclk_frequency_ratio(u32 memory_clock, bool strobe_mode)
3821*4882a593Smuzhiyun {
3822*4882a593Smuzhiyun 	u8 mc_para_index;
3823*4882a593Smuzhiyun 
3824*4882a593Smuzhiyun 	if (strobe_mode) {
3825*4882a593Smuzhiyun 		if (memory_clock < 12500)
3826*4882a593Smuzhiyun 			mc_para_index = 0x00;
3827*4882a593Smuzhiyun 		else if (memory_clock > 47500)
3828*4882a593Smuzhiyun 			mc_para_index = 0x0f;
3829*4882a593Smuzhiyun 		else
3830*4882a593Smuzhiyun 			mc_para_index = (u8)((memory_clock - 10000) / 2500);
3831*4882a593Smuzhiyun 	} else {
3832*4882a593Smuzhiyun 		if (memory_clock < 65000)
3833*4882a593Smuzhiyun 			mc_para_index = 0x00;
3834*4882a593Smuzhiyun 		else if (memory_clock > 135000)
3835*4882a593Smuzhiyun 			mc_para_index = 0x0f;
3836*4882a593Smuzhiyun 		else
3837*4882a593Smuzhiyun 			mc_para_index = (u8)((memory_clock - 60000) / 5000);
3838*4882a593Smuzhiyun 	}
3839*4882a593Smuzhiyun 	return mc_para_index;
3840*4882a593Smuzhiyun }
3841*4882a593Smuzhiyun 
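/*
 * Select the memory strobe-mode byte for a given mclk: enable strobe mode
 * at or below the strobe threshold, look up the GDDR5 or DDR3 frequency
 * ratio, and OR in SISLANDS_SMC_STROBE_ENABLE when strobe mode is active.
 */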
3842*4882a593Smuzhiyun static u8 si_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
3843*4882a593Smuzhiyun {
3844*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3845*4882a593Smuzhiyun 	bool strobe_mode = false;
3846*4882a593Smuzhiyun 	u8 result = 0;
3847*4882a593Smuzhiyun 
3848*4882a593Smuzhiyun 	if (mclk <= pi->mclk_strobe_mode_threshold)
3849*4882a593Smuzhiyun 		strobe_mode = true;
3850*4882a593Smuzhiyun 
3851*4882a593Smuzhiyun 	if (pi->mem_gddr5)
3852*4882a593Smuzhiyun 		result = si_get_mclk_frequency_ratio(mclk, strobe_mode);
3853*4882a593Smuzhiyun 	else
3854*4882a593Smuzhiyun 		result = si_get_ddr3_mclk_frequency_ratio(mclk);
3855*4882a593Smuzhiyun 
3856*4882a593Smuzhiyun 	if (strobe_mode)
3857*4882a593Smuzhiyun 		result |= SISLANDS_SMC_STROBE_ENABLE;
3858*4882a593Smuzhiyun 
3859*4882a593Smuzhiyun 	return result;
3860*4882a593Smuzhiyun }
3861*4882a593Smuzhiyun 
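/* Reset the SMC and stop its clock, then load the SMC ucode into SRAM. */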
3862*4882a593Smuzhiyun static int si_upload_firmware(struct radeon_device *rdev)
3863*4882a593Smuzhiyun {
3864*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3865*4882a593Smuzhiyun 	int ret;
3866*4882a593Smuzhiyun 
3867*4882a593Smuzhiyun 	si_reset_smc(rdev);
3868*4882a593Smuzhiyun 	si_stop_smc_clock(rdev);
3869*4882a593Smuzhiyun 
3870*4882a593Smuzhiyun 	ret = si_load_smc_ucode(rdev, si_pi->sram_end);
3871*4882a593Smuzhiyun 
3872*4882a593Smuzhiyun 	return ret;
3873*4882a593Smuzhiyun }
3874*4882a593Smuzhiyun 
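/*
 * Sanity-check the phase-shedding configuration: the voltage table must
 * have exactly 2^n entries for an n-bit mask_low, and the limits table
 * must have one entry fewer.
 */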
3875*4882a593Smuzhiyun static bool si_validate_phase_shedding_tables(struct radeon_device *rdev,
3876*4882a593Smuzhiyun 					      const struct atom_voltage_table *table,
3877*4882a593Smuzhiyun 					      const struct radeon_phase_shedding_limits_table *limits)
3878*4882a593Smuzhiyun {
3879*4882a593Smuzhiyun 	u32 data, num_bits, num_levels;
3880*4882a593Smuzhiyun 
3881*4882a593Smuzhiyun 	if ((table == NULL) || (limits == NULL))
3882*4882a593Smuzhiyun 		return false;
3883*4882a593Smuzhiyun 
3884*4882a593Smuzhiyun 	data = table->mask_low;
3885*4882a593Smuzhiyun 
3886*4882a593Smuzhiyun 	num_bits = hweight32(data);
3887*4882a593Smuzhiyun 
3888*4882a593Smuzhiyun 	if (num_bits == 0)
3889*4882a593Smuzhiyun 		return false;
3890*4882a593Smuzhiyun 
3891*4882a593Smuzhiyun 	num_levels = (1 << num_bits);
3892*4882a593Smuzhiyun 
3893*4882a593Smuzhiyun 	if (table->count != num_levels)
3894*4882a593Smuzhiyun 		return false;
3895*4882a593Smuzhiyun 
3896*4882a593Smuzhiyun 	if (limits->count != (num_levels - 1))
3897*4882a593Smuzhiyun 		return false;
3898*4882a593Smuzhiyun 
3899*4882a593Smuzhiyun 	return true;
3900*4882a593Smuzhiyun }
3901*4882a593Smuzhiyun 
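/*
 * If the voltage table has more entries than the SMC state table can hold,
 * drop the leading entries (the lowest voltages, assuming the table is
 * sorted ascending) so that max_voltage_steps entries remain.
 */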
3902*4882a593Smuzhiyun void si_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
3903*4882a593Smuzhiyun 					      u32 max_voltage_steps,
3904*4882a593Smuzhiyun 					      struct atom_voltage_table *voltage_table)
3905*4882a593Smuzhiyun {
3906*4882a593Smuzhiyun 	unsigned int i, diff;
3907*4882a593Smuzhiyun 
3908*4882a593Smuzhiyun 	if (voltage_table->count <= max_voltage_steps)
3909*4882a593Smuzhiyun 		return;
3910*4882a593Smuzhiyun 
3911*4882a593Smuzhiyun 	diff = voltage_table->count - max_voltage_steps;
3912*4882a593Smuzhiyun 
3913*4882a593Smuzhiyun 	for (i = 0; i < max_voltage_steps; i++)
3914*4882a593Smuzhiyun 		voltage_table->entries[i] = voltage_table->entries[i + diff];
3915*4882a593Smuzhiyun 
3916*4882a593Smuzhiyun 	voltage_table->count = max_voltage_steps;
3917*4882a593Smuzhiyun }
3918*4882a593Smuzhiyun 
3919*4882a593Smuzhiyun static int si_get_svi2_voltage_table(struct radeon_device *rdev,
3920*4882a593Smuzhiyun 				     struct radeon_clock_voltage_dependency_table *voltage_dependency_table,
3921*4882a593Smuzhiyun 				     struct atom_voltage_table *voltage_table)
3922*4882a593Smuzhiyun {
3923*4882a593Smuzhiyun 	u32 i;
3924*4882a593Smuzhiyun 
3925*4882a593Smuzhiyun 	if (voltage_dependency_table == NULL)
3926*4882a593Smuzhiyun 		return -EINVAL;
3927*4882a593Smuzhiyun 
3928*4882a593Smuzhiyun 	voltage_table->mask_low = 0;
3929*4882a593Smuzhiyun 	voltage_table->phase_delay = 0;
3930*4882a593Smuzhiyun 
3931*4882a593Smuzhiyun 	voltage_table->count = voltage_dependency_table->count;
3932*4882a593Smuzhiyun 	for (i = 0; i < voltage_table->count; i++) {
3933*4882a593Smuzhiyun 		voltage_table->entries[i].value = voltage_dependency_table->entries[i].v;
3934*4882a593Smuzhiyun 		voltage_table->entries[i].smio_low = 0;
3935*4882a593Smuzhiyun 	}
3936*4882a593Smuzhiyun 
3937*4882a593Smuzhiyun 	return 0;
3938*4882a593Smuzhiyun }
3939*4882a593Smuzhiyun 
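/*
 * Build the VDDC, VDDCI, MVDD and phase-shedding voltage tables, either
 * from the ATOM GPIO/phase lookup tables or (for SVI2) from the
 * clock/voltage dependency tables.  Oversized tables are trimmed to
 * SISLANDS_MAX_NO_VREG_STEPS; phase shedding is disabled instead if its
 * table is empty or too large.
 */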
3940*4882a593Smuzhiyun static int si_construct_voltage_tables(struct radeon_device *rdev)
3941*4882a593Smuzhiyun {
3942*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3943*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3944*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
3945*4882a593Smuzhiyun 	int ret;
3946*4882a593Smuzhiyun 
3947*4882a593Smuzhiyun 	if (pi->voltage_control) {
3948*4882a593Smuzhiyun 		ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDC,
3949*4882a593Smuzhiyun 						    VOLTAGE_OBJ_GPIO_LUT, &eg_pi->vddc_voltage_table);
3950*4882a593Smuzhiyun 		if (ret)
3951*4882a593Smuzhiyun 			return ret;
3952*4882a593Smuzhiyun 
3953*4882a593Smuzhiyun 		if (eg_pi->vddc_voltage_table.count > SISLANDS_MAX_NO_VREG_STEPS)
3954*4882a593Smuzhiyun 			si_trim_voltage_table_to_fit_state_table(rdev,
3955*4882a593Smuzhiyun 								 SISLANDS_MAX_NO_VREG_STEPS,
3956*4882a593Smuzhiyun 								 &eg_pi->vddc_voltage_table);
3957*4882a593Smuzhiyun 	} else if (si_pi->voltage_control_svi2) {
3958*4882a593Smuzhiyun 		ret = si_get_svi2_voltage_table(rdev,
3959*4882a593Smuzhiyun 						&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
3960*4882a593Smuzhiyun 						&eg_pi->vddc_voltage_table);
3961*4882a593Smuzhiyun 		if (ret)
3962*4882a593Smuzhiyun 			return ret;
3963*4882a593Smuzhiyun 	} else {
3964*4882a593Smuzhiyun 		return -EINVAL;
3965*4882a593Smuzhiyun 	}
3966*4882a593Smuzhiyun 
3967*4882a593Smuzhiyun 	if (eg_pi->vddci_control) {
3968*4882a593Smuzhiyun 		ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDCI,
3969*4882a593Smuzhiyun 						    VOLTAGE_OBJ_GPIO_LUT, &eg_pi->vddci_voltage_table);
3970*4882a593Smuzhiyun 		if (ret)
3971*4882a593Smuzhiyun 			return ret;
3972*4882a593Smuzhiyun 
3973*4882a593Smuzhiyun 		if (eg_pi->vddci_voltage_table.count > SISLANDS_MAX_NO_VREG_STEPS)
3974*4882a593Smuzhiyun 			si_trim_voltage_table_to_fit_state_table(rdev,
3975*4882a593Smuzhiyun 								 SISLANDS_MAX_NO_VREG_STEPS,
3976*4882a593Smuzhiyun 								 &eg_pi->vddci_voltage_table);
3977*4882a593Smuzhiyun 	}
3978*4882a593Smuzhiyun 	if (si_pi->vddci_control_svi2) {
3979*4882a593Smuzhiyun 		ret = si_get_svi2_voltage_table(rdev,
3980*4882a593Smuzhiyun 						&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
3981*4882a593Smuzhiyun 						&eg_pi->vddci_voltage_table);
3982*4882a593Smuzhiyun 		if (ret)
3983*4882a593Smuzhiyun 			return ret;
3984*4882a593Smuzhiyun 	}
3985*4882a593Smuzhiyun 
3986*4882a593Smuzhiyun 	if (pi->mvdd_control) {
3987*4882a593Smuzhiyun 		ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_MVDDC,
3988*4882a593Smuzhiyun 						    VOLTAGE_OBJ_GPIO_LUT, &si_pi->mvdd_voltage_table);
3989*4882a593Smuzhiyun 
3990*4882a593Smuzhiyun 		if (ret) {
3991*4882a593Smuzhiyun 			pi->mvdd_control = false;
3992*4882a593Smuzhiyun 			return ret;
3993*4882a593Smuzhiyun 		}
3994*4882a593Smuzhiyun 
3995*4882a593Smuzhiyun 		if (si_pi->mvdd_voltage_table.count == 0) {
3996*4882a593Smuzhiyun 			pi->mvdd_control = false;
3997*4882a593Smuzhiyun 			return -EINVAL;
3998*4882a593Smuzhiyun 		}
3999*4882a593Smuzhiyun 
4000*4882a593Smuzhiyun 		if (si_pi->mvdd_voltage_table.count > SISLANDS_MAX_NO_VREG_STEPS)
4001*4882a593Smuzhiyun 			si_trim_voltage_table_to_fit_state_table(rdev,
4002*4882a593Smuzhiyun 								 SISLANDS_MAX_NO_VREG_STEPS,
4003*4882a593Smuzhiyun 								 &si_pi->mvdd_voltage_table);
4004*4882a593Smuzhiyun 	}
4005*4882a593Smuzhiyun 
4006*4882a593Smuzhiyun 	if (si_pi->vddc_phase_shed_control) {
4007*4882a593Smuzhiyun 		ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDC,
4008*4882a593Smuzhiyun 						    VOLTAGE_OBJ_PHASE_LUT, &si_pi->vddc_phase_shed_table);
4009*4882a593Smuzhiyun 		if (ret)
4010*4882a593Smuzhiyun 			si_pi->vddc_phase_shed_control = false;
4011*4882a593Smuzhiyun 
4012*4882a593Smuzhiyun 		if ((si_pi->vddc_phase_shed_table.count == 0) ||
4013*4882a593Smuzhiyun 		    (si_pi->vddc_phase_shed_table.count > SISLANDS_MAX_NO_VREG_STEPS))
4014*4882a593Smuzhiyun 			si_pi->vddc_phase_shed_control = false;
4015*4882a593Smuzhiyun 	}
4016*4882a593Smuzhiyun 
4017*4882a593Smuzhiyun 	return 0;
4018*4882a593Smuzhiyun }
4019*4882a593Smuzhiyun 
4020*4882a593Smuzhiyun static void si_populate_smc_voltage_table(struct radeon_device *rdev,
4021*4882a593Smuzhiyun 					  const struct atom_voltage_table *voltage_table,
4022*4882a593Smuzhiyun 					  SISLANDS_SMC_STATETABLE *table)
4023*4882a593Smuzhiyun {
4024*4882a593Smuzhiyun 	unsigned int i;
4025*4882a593Smuzhiyun 
4026*4882a593Smuzhiyun 	for (i = 0; i < voltage_table->count; i++)
4027*4882a593Smuzhiyun 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
4028*4882a593Smuzhiyun }
4029*4882a593Smuzhiyun 
4030*4882a593Smuzhiyun static int si_populate_smc_voltage_tables(struct radeon_device *rdev,
4031*4882a593Smuzhiyun 					  SISLANDS_SMC_STATETABLE *table)
4032*4882a593Smuzhiyun {
4033*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4034*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4035*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4036*4882a593Smuzhiyun 	u8 i;
4037*4882a593Smuzhiyun 
4038*4882a593Smuzhiyun 	if (si_pi->voltage_control_svi2) {
4039*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_gpio_id_svc,
4040*4882a593Smuzhiyun 			si_pi->svc_gpio_id);
4041*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_gpio_id_svd,
4042*4882a593Smuzhiyun 			si_pi->svd_gpio_id);
4043*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_plat_type,
4044*4882a593Smuzhiyun 					   2);
4045*4882a593Smuzhiyun 	} else {
4046*4882a593Smuzhiyun 		if (eg_pi->vddc_voltage_table.count) {
4047*4882a593Smuzhiyun 			si_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
4048*4882a593Smuzhiyun 			table->voltageMaskTable.lowMask[SISLANDS_SMC_VOLTAGEMASK_VDDC] =
4049*4882a593Smuzhiyun 				cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
4050*4882a593Smuzhiyun 
4051*4882a593Smuzhiyun 			for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
4052*4882a593Smuzhiyun 				if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
4053*4882a593Smuzhiyun 					table->maxVDDCIndexInPPTable = i;
4054*4882a593Smuzhiyun 					break;
4055*4882a593Smuzhiyun 				}
4056*4882a593Smuzhiyun 			}
4057*4882a593Smuzhiyun 		}
4058*4882a593Smuzhiyun 
4059*4882a593Smuzhiyun 		if (eg_pi->vddci_voltage_table.count) {
4060*4882a593Smuzhiyun 			si_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
4061*4882a593Smuzhiyun 
4062*4882a593Smuzhiyun 			table->voltageMaskTable.lowMask[SISLANDS_SMC_VOLTAGEMASK_VDDCI] =
4063*4882a593Smuzhiyun 				cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
4064*4882a593Smuzhiyun 		}
4065*4882a593Smuzhiyun 
4066*4882a593Smuzhiyun 
4067*4882a593Smuzhiyun 		if (si_pi->mvdd_voltage_table.count) {
4068*4882a593Smuzhiyun 			si_populate_smc_voltage_table(rdev, &si_pi->mvdd_voltage_table, table);
4069*4882a593Smuzhiyun 
4070*4882a593Smuzhiyun 			table->voltageMaskTable.lowMask[SISLANDS_SMC_VOLTAGEMASK_MVDD] =
4071*4882a593Smuzhiyun 				cpu_to_be32(si_pi->mvdd_voltage_table.mask_low);
4072*4882a593Smuzhiyun 		}
4073*4882a593Smuzhiyun 
4074*4882a593Smuzhiyun 		if (si_pi->vddc_phase_shed_control) {
4075*4882a593Smuzhiyun 			if (si_validate_phase_shedding_tables(rdev, &si_pi->vddc_phase_shed_table,
4076*4882a593Smuzhiyun 							      &rdev->pm.dpm.dyn_state.phase_shedding_limits_table)) {
4077*4882a593Smuzhiyun 				si_populate_smc_voltage_table(rdev, &si_pi->vddc_phase_shed_table, table);
4078*4882a593Smuzhiyun 
4079*4882a593Smuzhiyun 				table->phaseMaskTable.lowMask[SISLANDS_SMC_VOLTAGEMASK_VDDC_PHASE_SHEDDING] =
4080*4882a593Smuzhiyun 					cpu_to_be32(si_pi->vddc_phase_shed_table.mask_low);
4081*4882a593Smuzhiyun 
4082*4882a593Smuzhiyun 				si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_phase_shedding_delay,
4083*4882a593Smuzhiyun 							   (u32)si_pi->vddc_phase_shed_table.phase_delay);
4084*4882a593Smuzhiyun 			} else {
4085*4882a593Smuzhiyun 				si_pi->vddc_phase_shed_control = false;
4086*4882a593Smuzhiyun 			}
4087*4882a593Smuzhiyun 		}
4088*4882a593Smuzhiyun 	}
4089*4882a593Smuzhiyun 
4090*4882a593Smuzhiyun 	return 0;
4091*4882a593Smuzhiyun }
4092*4882a593Smuzhiyun 
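/*
 * Find the first voltage table entry at or above the requested value and
 * record its index and big-endian voltage in the SMC structure; fail if
 * the table has no such entry.
 */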
4093*4882a593Smuzhiyun static int si_populate_voltage_value(struct radeon_device *rdev,
4094*4882a593Smuzhiyun 				     const struct atom_voltage_table *table,
4095*4882a593Smuzhiyun 				     u16 value, SISLANDS_SMC_VOLTAGE_VALUE *voltage)
4096*4882a593Smuzhiyun {
4097*4882a593Smuzhiyun 	unsigned int i;
4098*4882a593Smuzhiyun 
4099*4882a593Smuzhiyun 	for (i = 0; i < table->count; i++) {
4100*4882a593Smuzhiyun 		if (value <= table->entries[i].value) {
4101*4882a593Smuzhiyun 			voltage->index = (u8)i;
4102*4882a593Smuzhiyun 			voltage->value = cpu_to_be16(table->entries[i].value);
4103*4882a593Smuzhiyun 			break;
4104*4882a593Smuzhiyun 		}
4105*4882a593Smuzhiyun 	}
4106*4882a593Smuzhiyun 
4107*4882a593Smuzhiyun 	if (i >= table->count)
4108*4882a593Smuzhiyun 		return -EINVAL;
4109*4882a593Smuzhiyun 
4110*4882a593Smuzhiyun 	return 0;
4111*4882a593Smuzhiyun }
4112*4882a593Smuzhiyun 
4113*4882a593Smuzhiyun static int si_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
4114*4882a593Smuzhiyun 				  SISLANDS_SMC_VOLTAGE_VALUE *voltage)
4115*4882a593Smuzhiyun {
4116*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4117*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4118*4882a593Smuzhiyun 
4119*4882a593Smuzhiyun 	if (pi->mvdd_control) {
4120*4882a593Smuzhiyun 		if (mclk <= pi->mvdd_split_frequency)
4121*4882a593Smuzhiyun 			voltage->index = 0;
4122*4882a593Smuzhiyun 		else
4123*4882a593Smuzhiyun 			voltage->index = (u8)(si_pi->mvdd_voltage_table.count) - 1;
4124*4882a593Smuzhiyun 
4125*4882a593Smuzhiyun 		voltage->value = cpu_to_be16(si_pi->mvdd_voltage_table.entries[voltage->index].value);
4126*4882a593Smuzhiyun 	}
4127*4882a593Smuzhiyun 	return 0;
4128*4882a593Smuzhiyun }
4129*4882a593Smuzhiyun 
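/*
 * Translate an SMC voltage into a "standard" voltage via the CAC leakage
 * table.  With the NEW_CAC_VOLTAGE cap the value is matched (exactly, then
 * by <=) against the vddc-vs-sclk dependency table to pick the leakage
 * index; otherwise the SMC voltage index is used directly.  Falls back to
 * the raw voltage when no leakage table is present.
 */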
4130*4882a593Smuzhiyun static int si_get_std_voltage_value(struct radeon_device *rdev,
4131*4882a593Smuzhiyun 				    SISLANDS_SMC_VOLTAGE_VALUE *voltage,
4132*4882a593Smuzhiyun 				    u16 *std_voltage)
4133*4882a593Smuzhiyun {
4134*4882a593Smuzhiyun 	u16 v_index;
4135*4882a593Smuzhiyun 	bool voltage_found = false;
4136*4882a593Smuzhiyun 	*std_voltage = be16_to_cpu(voltage->value);
4137*4882a593Smuzhiyun 
4138*4882a593Smuzhiyun 	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries) {
4139*4882a593Smuzhiyun 		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_NEW_CAC_VOLTAGE) {
4140*4882a593Smuzhiyun 			if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL)
4141*4882a593Smuzhiyun 				return -EINVAL;
4142*4882a593Smuzhiyun 
4143*4882a593Smuzhiyun 			for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) {
4144*4882a593Smuzhiyun 				if (be16_to_cpu(voltage->value) ==
4145*4882a593Smuzhiyun 				    (u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
4146*4882a593Smuzhiyun 					voltage_found = true;
4147*4882a593Smuzhiyun 					if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count)
4148*4882a593Smuzhiyun 						*std_voltage =
4149*4882a593Smuzhiyun 							rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
4150*4882a593Smuzhiyun 					else
4151*4882a593Smuzhiyun 						*std_voltage =
4152*4882a593Smuzhiyun 							rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
4153*4882a593Smuzhiyun 					break;
4154*4882a593Smuzhiyun 				}
4155*4882a593Smuzhiyun 			}
4156*4882a593Smuzhiyun 
4157*4882a593Smuzhiyun 			if (!voltage_found) {
4158*4882a593Smuzhiyun 				for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) {
4159*4882a593Smuzhiyun 					if (be16_to_cpu(voltage->value) <=
4160*4882a593Smuzhiyun 					    (u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
4161*4882a593Smuzhiyun 						voltage_found = true;
4162*4882a593Smuzhiyun 						if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count)
4163*4882a593Smuzhiyun 							*std_voltage =
4164*4882a593Smuzhiyun 								rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
4165*4882a593Smuzhiyun 						else
4166*4882a593Smuzhiyun 							*std_voltage =
4167*4882a593Smuzhiyun 								rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
4168*4882a593Smuzhiyun 						break;
4169*4882a593Smuzhiyun 					}
4170*4882a593Smuzhiyun 				}
4171*4882a593Smuzhiyun 			}
4172*4882a593Smuzhiyun 		} else {
4173*4882a593Smuzhiyun 			if ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count)
4174*4882a593Smuzhiyun 				*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
4175*4882a593Smuzhiyun 		}
4176*4882a593Smuzhiyun 	}
4177*4882a593Smuzhiyun 
4178*4882a593Smuzhiyun 	return 0;
4179*4882a593Smuzhiyun }
4180*4882a593Smuzhiyun 
4181*4882a593Smuzhiyun static int si_populate_std_voltage_value(struct radeon_device *rdev,
4182*4882a593Smuzhiyun 					 u16 value, u8 index,
4183*4882a593Smuzhiyun 					 SISLANDS_SMC_VOLTAGE_VALUE *voltage)
4184*4882a593Smuzhiyun {
4185*4882a593Smuzhiyun 	voltage->index = index;
4186*4882a593Smuzhiyun 	voltage->value = cpu_to_be16(value);
4187*4882a593Smuzhiyun 
4188*4882a593Smuzhiyun 	return 0;
4189*4882a593Smuzhiyun }
4190*4882a593Smuzhiyun 
4191*4882a593Smuzhiyun static int si_populate_phase_shedding_value(struct radeon_device *rdev,
4192*4882a593Smuzhiyun 					    const struct radeon_phase_shedding_limits_table *limits,
4193*4882a593Smuzhiyun 					    u16 voltage, u32 sclk, u32 mclk,
4194*4882a593Smuzhiyun 					    SISLANDS_SMC_VOLTAGE_VALUE *smc_voltage)
4195*4882a593Smuzhiyun {
4196*4882a593Smuzhiyun 	unsigned int i;
4197*4882a593Smuzhiyun 
4198*4882a593Smuzhiyun 	for (i = 0; i < limits->count; i++) {
4199*4882a593Smuzhiyun 		if ((voltage <= limits->entries[i].voltage) &&
4200*4882a593Smuzhiyun 		    (sclk <= limits->entries[i].sclk) &&
4201*4882a593Smuzhiyun 		    (mclk <= limits->entries[i].mclk))
4202*4882a593Smuzhiyun 			break;
4203*4882a593Smuzhiyun 	}
4204*4882a593Smuzhiyun 
4205*4882a593Smuzhiyun 	smc_voltage->phase_settings = (u8)i;
4206*4882a593Smuzhiyun 
4207*4882a593Smuzhiyun 	return 0;
4208*4882a593Smuzhiyun }
4209*4882a593Smuzhiyun 
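/*
 * Read the MC arbiter table dword from SMC SRAM and set its high byte to
 * MC_CG_ARB_FREQ_F1 (the byte si_force_switch_to_arb_f0() later reads back
 * as the current arb register set).
 */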
4210*4882a593Smuzhiyun static int si_init_arb_table_index(struct radeon_device *rdev)
4211*4882a593Smuzhiyun {
4212*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4213*4882a593Smuzhiyun 	u32 tmp;
4214*4882a593Smuzhiyun 	int ret;
4215*4882a593Smuzhiyun 
4216*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev, si_pi->arb_table_start, &tmp, si_pi->sram_end);
4217*4882a593Smuzhiyun 	if (ret)
4218*4882a593Smuzhiyun 		return ret;
4219*4882a593Smuzhiyun 
4220*4882a593Smuzhiyun 	tmp &= 0x00FFFFFF;
4221*4882a593Smuzhiyun 	tmp |= MC_CG_ARB_FREQ_F1 << 24;
4222*4882a593Smuzhiyun 
4223*4882a593Smuzhiyun 	return si_write_smc_sram_dword(rdev, si_pi->arb_table_start,  tmp, si_pi->sram_end);
4224*4882a593Smuzhiyun }
4225*4882a593Smuzhiyun 
4226*4882a593Smuzhiyun static int si_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
4227*4882a593Smuzhiyun {
4228*4882a593Smuzhiyun 	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
4229*4882a593Smuzhiyun }
4230*4882a593Smuzhiyun 
4231*4882a593Smuzhiyun static int si_reset_to_default(struct radeon_device *rdev)
4232*4882a593Smuzhiyun {
4233*4882a593Smuzhiyun 	return (si_send_msg_to_smc(rdev, PPSMC_MSG_ResetToDefaults) == PPSMC_Result_OK) ?
4234*4882a593Smuzhiyun 		0 : -EINVAL;
4235*4882a593Smuzhiyun }
4236*4882a593Smuzhiyun 
4237*4882a593Smuzhiyun static int si_force_switch_to_arb_f0(struct radeon_device *rdev)
4238*4882a593Smuzhiyun {
4239*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4240*4882a593Smuzhiyun 	u32 tmp;
4241*4882a593Smuzhiyun 	int ret;
4242*4882a593Smuzhiyun 
4243*4882a593Smuzhiyun 	ret = si_read_smc_sram_dword(rdev, si_pi->arb_table_start,
4244*4882a593Smuzhiyun 				     &tmp, si_pi->sram_end);
4245*4882a593Smuzhiyun 	if (ret)
4246*4882a593Smuzhiyun 		return ret;
4247*4882a593Smuzhiyun 
4248*4882a593Smuzhiyun 	tmp = (tmp >> 24) & 0xff;
4249*4882a593Smuzhiyun 
4250*4882a593Smuzhiyun 	if (tmp == MC_CG_ARB_FREQ_F0)
4251*4882a593Smuzhiyun 		return 0;
4252*4882a593Smuzhiyun 
4253*4882a593Smuzhiyun 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
4254*4882a593Smuzhiyun }
4255*4882a593Smuzhiyun 
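/*
 * Derive the MC arbiter refresh rate for a given engine clock from the
 * DRAM row count in MC_ARB_RAMCFG and the refresh-interval field in
 * MC_SEQ_MISC0.
 */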
4256*4882a593Smuzhiyun static u32 si_calculate_memory_refresh_rate(struct radeon_device *rdev,
4257*4882a593Smuzhiyun 					    u32 engine_clock)
4258*4882a593Smuzhiyun {
4259*4882a593Smuzhiyun 	u32 dram_rows;
4260*4882a593Smuzhiyun 	u32 dram_refresh_rate;
4261*4882a593Smuzhiyun 	u32 mc_arb_rfsh_rate;
4262*4882a593Smuzhiyun 	u32 tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
4263*4882a593Smuzhiyun 
4264*4882a593Smuzhiyun 	if (tmp >= 4)
4265*4882a593Smuzhiyun 		dram_rows = 16384;
4266*4882a593Smuzhiyun 	else
4267*4882a593Smuzhiyun 		dram_rows = 1 << (tmp + 10);
4268*4882a593Smuzhiyun 
4269*4882a593Smuzhiyun 	dram_refresh_rate = 1 << ((RREG32(MC_SEQ_MISC0) & 0x3) + 3);
4270*4882a593Smuzhiyun 	mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;
4271*4882a593Smuzhiyun 
4272*4882a593Smuzhiyun 	return mc_arb_rfsh_rate;
4273*4882a593Smuzhiyun }
4274*4882a593Smuzhiyun 
4275*4882a593Smuzhiyun static int si_populate_memory_timing_parameters(struct radeon_device *rdev,
4276*4882a593Smuzhiyun 						struct rv7xx_pl *pl,
4277*4882a593Smuzhiyun 						SMC_SIslands_MCArbDramTimingRegisterSet *arb_regs)
4278*4882a593Smuzhiyun {
4279*4882a593Smuzhiyun 	u32 dram_timing;
4280*4882a593Smuzhiyun 	u32 dram_timing2;
4281*4882a593Smuzhiyun 	u32 burst_time;
4282*4882a593Smuzhiyun 
4283*4882a593Smuzhiyun 	arb_regs->mc_arb_rfsh_rate =
4284*4882a593Smuzhiyun 		(u8)si_calculate_memory_refresh_rate(rdev, pl->sclk);
4285*4882a593Smuzhiyun 
4286*4882a593Smuzhiyun 	radeon_atom_set_engine_dram_timings(rdev,
4287*4882a593Smuzhiyun 					    pl->sclk,
4288*4882a593Smuzhiyun 					    pl->mclk);
4289*4882a593Smuzhiyun 
4290*4882a593Smuzhiyun 	dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
4291*4882a593Smuzhiyun 	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
4292*4882a593Smuzhiyun 	burst_time = RREG32(MC_ARB_BURST_TIME) & STATE0_MASK;
4293*4882a593Smuzhiyun 
4294*4882a593Smuzhiyun 	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
4295*4882a593Smuzhiyun 	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
4296*4882a593Smuzhiyun 	arb_regs->mc_arb_burst_time = (u8)burst_time;
4297*4882a593Smuzhiyun 
4298*4882a593Smuzhiyun 	return 0;
4299*4882a593Smuzhiyun }
4300*4882a593Smuzhiyun 
4301*4882a593Smuzhiyun static int si_do_program_memory_timing_parameters(struct radeon_device *rdev,
4302*4882a593Smuzhiyun 						  struct radeon_ps *radeon_state,
4303*4882a593Smuzhiyun 						  unsigned int first_arb_set)
4304*4882a593Smuzhiyun {
4305*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4306*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
4307*4882a593Smuzhiyun 	SMC_SIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
4308*4882a593Smuzhiyun 	int i, ret = 0;
4309*4882a593Smuzhiyun 
4310*4882a593Smuzhiyun 	for (i = 0; i < state->performance_level_count; i++) {
4311*4882a593Smuzhiyun 		ret = si_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
4312*4882a593Smuzhiyun 		if (ret)
4313*4882a593Smuzhiyun 			break;
4314*4882a593Smuzhiyun 		ret = si_copy_bytes_to_smc(rdev,
4315*4882a593Smuzhiyun 					   si_pi->arb_table_start +
4316*4882a593Smuzhiyun 					   offsetof(SMC_SIslands_MCArbDramTimingRegisters, data) +
4317*4882a593Smuzhiyun 					   sizeof(SMC_SIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i),
4318*4882a593Smuzhiyun 					   (u8 *)&arb_regs,
4319*4882a593Smuzhiyun 					   sizeof(SMC_SIslands_MCArbDramTimingRegisterSet),
4320*4882a593Smuzhiyun 					   si_pi->sram_end);
4321*4882a593Smuzhiyun 		if (ret)
4322*4882a593Smuzhiyun 			break;
4323*4882a593Smuzhiyun 	}
4324*4882a593Smuzhiyun 
4325*4882a593Smuzhiyun 	return ret;
4326*4882a593Smuzhiyun }
4327*4882a593Smuzhiyun 
4328*4882a593Smuzhiyun static int si_program_memory_timing_parameters(struct radeon_device *rdev,
4329*4882a593Smuzhiyun 					       struct radeon_ps *radeon_new_state)
4330*4882a593Smuzhiyun {
4331*4882a593Smuzhiyun 	return si_do_program_memory_timing_parameters(rdev, radeon_new_state,
4332*4882a593Smuzhiyun 						      SISLANDS_DRIVER_STATE_ARB_INDEX);
4333*4882a593Smuzhiyun }
4334*4882a593Smuzhiyun 
4335*4882a593Smuzhiyun static int si_populate_initial_mvdd_value(struct radeon_device *rdev,
4336*4882a593Smuzhiyun 					  struct SISLANDS_SMC_VOLTAGE_VALUE *voltage)
4337*4882a593Smuzhiyun {
4338*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4339*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4340*4882a593Smuzhiyun 
4341*4882a593Smuzhiyun 	if (pi->mvdd_control)
4342*4882a593Smuzhiyun 		return si_populate_voltage_value(rdev, &si_pi->mvdd_voltage_table,
4343*4882a593Smuzhiyun 						 si_pi->mvdd_bootup_value, voltage);
4344*4882a593Smuzhiyun 
4345*4882a593Smuzhiyun 	return 0;
4346*4882a593Smuzhiyun }
4347*4882a593Smuzhiyun 
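/*
 * Fill the SMC "initial" (boot) state from performance level 0 of the boot
 * power state: saved MPLL/SPLL clock registers, boot mclk/sclk, VDDC plus
 * its standard voltage, optional VDDCI/phase-shedding/MVDD values, the boot
 * PCIe gen, GDDR5 strobe/EDC flags, and zeroed DPM2 parameters.
 */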
4348*4882a593Smuzhiyun static int si_populate_smc_initial_state(struct radeon_device *rdev,
4349*4882a593Smuzhiyun 					 struct radeon_ps *radeon_initial_state,
4350*4882a593Smuzhiyun 					 SISLANDS_SMC_STATETABLE *table)
4351*4882a593Smuzhiyun {
4352*4882a593Smuzhiyun 	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
4353*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4354*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4355*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4356*4882a593Smuzhiyun 	u32 reg;
4357*4882a593Smuzhiyun 	int ret;
4358*4882a593Smuzhiyun 
4359*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vDLL_CNTL =
4360*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.dll_cntl);
4361*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
4362*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mclk_pwrmgt_cntl);
4363*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
4364*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_ad_func_cntl);
4365*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
4366*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_dq_func_cntl);
4367*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_FUNC_CNTL =
4368*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_func_cntl);
4369*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_FUNC_CNTL_1 =
4370*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_func_cntl_1);
4371*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_FUNC_CNTL_2 =
4372*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_func_cntl_2);
4373*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_SS =
4374*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_ss1);
4375*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.vMPLL_SS2 =
4376*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_ss2);
4377*4882a593Smuzhiyun 
4378*4882a593Smuzhiyun 	table->initialState.levels[0].mclk.mclk_value =
4379*4882a593Smuzhiyun 		cpu_to_be32(initial_state->performance_levels[0].mclk);
4380*4882a593Smuzhiyun 
4381*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
4382*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_func_cntl);
4383*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
4384*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_func_cntl_2);
4385*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
4386*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_func_cntl_3);
4387*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
4388*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_func_cntl_4);
4389*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
4390*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_spread_spectrum);
4391*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2  =
4392*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.cg_spll_spread_spectrum_2);
4393*4882a593Smuzhiyun 
4394*4882a593Smuzhiyun 	table->initialState.levels[0].sclk.sclk_value =
4395*4882a593Smuzhiyun 		cpu_to_be32(initial_state->performance_levels[0].sclk);
4396*4882a593Smuzhiyun 
4397*4882a593Smuzhiyun 	table->initialState.levels[0].arbRefreshState =
4398*4882a593Smuzhiyun 		SISLANDS_INITIAL_STATE_ARB_INDEX;
4399*4882a593Smuzhiyun 
4400*4882a593Smuzhiyun 	table->initialState.levels[0].ACIndex = 0;
4401*4882a593Smuzhiyun 
4402*4882a593Smuzhiyun 	ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
4403*4882a593Smuzhiyun 					initial_state->performance_levels[0].vddc,
4404*4882a593Smuzhiyun 					&table->initialState.levels[0].vddc);
4405*4882a593Smuzhiyun 
4406*4882a593Smuzhiyun 	if (!ret) {
4407*4882a593Smuzhiyun 		u16 std_vddc;
4408*4882a593Smuzhiyun 
4409*4882a593Smuzhiyun 		ret = si_get_std_voltage_value(rdev,
4410*4882a593Smuzhiyun 					       &table->initialState.levels[0].vddc,
4411*4882a593Smuzhiyun 					       &std_vddc);
4412*4882a593Smuzhiyun 		if (!ret)
4413*4882a593Smuzhiyun 			si_populate_std_voltage_value(rdev, std_vddc,
4414*4882a593Smuzhiyun 						      table->initialState.levels[0].vddc.index,
4415*4882a593Smuzhiyun 						      &table->initialState.levels[0].std_vddc);
4416*4882a593Smuzhiyun 	}
4417*4882a593Smuzhiyun 
4418*4882a593Smuzhiyun 	if (eg_pi->vddci_control)
4419*4882a593Smuzhiyun 		si_populate_voltage_value(rdev,
4420*4882a593Smuzhiyun 					  &eg_pi->vddci_voltage_table,
4421*4882a593Smuzhiyun 					  initial_state->performance_levels[0].vddci,
4422*4882a593Smuzhiyun 					  &table->initialState.levels[0].vddci);
4423*4882a593Smuzhiyun 
4424*4882a593Smuzhiyun 	if (si_pi->vddc_phase_shed_control)
4425*4882a593Smuzhiyun 		si_populate_phase_shedding_value(rdev,
4426*4882a593Smuzhiyun 						 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table,
4427*4882a593Smuzhiyun 						 initial_state->performance_levels[0].vddc,
4428*4882a593Smuzhiyun 						 initial_state->performance_levels[0].sclk,
4429*4882a593Smuzhiyun 						 initial_state->performance_levels[0].mclk,
4430*4882a593Smuzhiyun 						 &table->initialState.levels[0].vddc);
4431*4882a593Smuzhiyun 
4432*4882a593Smuzhiyun 	si_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
4433*4882a593Smuzhiyun 
4434*4882a593Smuzhiyun 	reg = CG_R(0xffff) | CG_L(0);
4435*4882a593Smuzhiyun 	table->initialState.levels[0].aT = cpu_to_be32(reg);
4436*4882a593Smuzhiyun 
4437*4882a593Smuzhiyun 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
4438*4882a593Smuzhiyun 
4439*4882a593Smuzhiyun 	table->initialState.levels[0].gen2PCIE = (u8)si_pi->boot_pcie_gen;
4440*4882a593Smuzhiyun 
4441*4882a593Smuzhiyun 	if (pi->mem_gddr5) {
4442*4882a593Smuzhiyun 		table->initialState.levels[0].strobeMode =
4443*4882a593Smuzhiyun 			si_get_strobe_mode_settings(rdev,
4444*4882a593Smuzhiyun 						    initial_state->performance_levels[0].mclk);
4445*4882a593Smuzhiyun 
4446*4882a593Smuzhiyun 		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
4447*4882a593Smuzhiyun 			table->initialState.levels[0].mcFlags = SISLANDS_SMC_MC_EDC_RD_FLAG | SISLANDS_SMC_MC_EDC_WR_FLAG;
4448*4882a593Smuzhiyun 		else
4449*4882a593Smuzhiyun 			table->initialState.levels[0].mcFlags = 0;
4450*4882a593Smuzhiyun 	}
4451*4882a593Smuzhiyun 
4452*4882a593Smuzhiyun 	table->initialState.levelCount = 1;
4453*4882a593Smuzhiyun 
4454*4882a593Smuzhiyun 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
4455*4882a593Smuzhiyun 
4456*4882a593Smuzhiyun 	table->initialState.levels[0].dpm2.MaxPS = 0;
4457*4882a593Smuzhiyun 	table->initialState.levels[0].dpm2.NearTDPDec = 0;
4458*4882a593Smuzhiyun 	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
4459*4882a593Smuzhiyun 	table->initialState.levels[0].dpm2.BelowSafeInc = 0;
4460*4882a593Smuzhiyun 	table->initialState.levels[0].dpm2.PwrEfficiencyRatio = 0;
4461*4882a593Smuzhiyun 
4462*4882a593Smuzhiyun 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
4463*4882a593Smuzhiyun 	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
4464*4882a593Smuzhiyun 
4465*4882a593Smuzhiyun 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
4466*4882a593Smuzhiyun 	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
4467*4882a593Smuzhiyun 
4468*4882a593Smuzhiyun 	return 0;
4469*4882a593Smuzhiyun }
4470*4882a593Smuzhiyun 
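/*
 * Derive the ACPI state from the initial state: select the ACPI (or
 * minimum-table) VDDC and the matching PCIe gen, hold the memory DLL
 * clocks in reset, switch the SPLL output mux, and zero the mclk/sclk
 * values.
 */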
4471*4882a593Smuzhiyun static int si_populate_smc_acpi_state(struct radeon_device *rdev,
4472*4882a593Smuzhiyun 				      SISLANDS_SMC_STATETABLE *table)
4473*4882a593Smuzhiyun {
4474*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4475*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4476*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4477*4882a593Smuzhiyun 	u32 spll_func_cntl = si_pi->clock_registers.cg_spll_func_cntl;
4478*4882a593Smuzhiyun 	u32 spll_func_cntl_2 = si_pi->clock_registers.cg_spll_func_cntl_2;
4479*4882a593Smuzhiyun 	u32 spll_func_cntl_3 = si_pi->clock_registers.cg_spll_func_cntl_3;
4480*4882a593Smuzhiyun 	u32 spll_func_cntl_4 = si_pi->clock_registers.cg_spll_func_cntl_4;
4481*4882a593Smuzhiyun 	u32 dll_cntl = si_pi->clock_registers.dll_cntl;
4482*4882a593Smuzhiyun 	u32 mclk_pwrmgt_cntl = si_pi->clock_registers.mclk_pwrmgt_cntl;
4483*4882a593Smuzhiyun 	u32 mpll_ad_func_cntl = si_pi->clock_registers.mpll_ad_func_cntl;
4484*4882a593Smuzhiyun 	u32 mpll_dq_func_cntl = si_pi->clock_registers.mpll_dq_func_cntl;
4485*4882a593Smuzhiyun 	u32 mpll_func_cntl = si_pi->clock_registers.mpll_func_cntl;
4486*4882a593Smuzhiyun 	u32 mpll_func_cntl_1 = si_pi->clock_registers.mpll_func_cntl_1;
4487*4882a593Smuzhiyun 	u32 mpll_func_cntl_2 = si_pi->clock_registers.mpll_func_cntl_2;
4488*4882a593Smuzhiyun 	u32 reg;
4489*4882a593Smuzhiyun 	int ret;
4490*4882a593Smuzhiyun 
4491*4882a593Smuzhiyun 	table->ACPIState = table->initialState;
4492*4882a593Smuzhiyun 
4493*4882a593Smuzhiyun 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
4494*4882a593Smuzhiyun 
4495*4882a593Smuzhiyun 	if (pi->acpi_vddc) {
4496*4882a593Smuzhiyun 		ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
4497*4882a593Smuzhiyun 						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
4498*4882a593Smuzhiyun 		if (!ret) {
4499*4882a593Smuzhiyun 			u16 std_vddc;
4500*4882a593Smuzhiyun 
4501*4882a593Smuzhiyun 			ret = si_get_std_voltage_value(rdev,
4502*4882a593Smuzhiyun 						       &table->ACPIState.levels[0].vddc, &std_vddc);
4503*4882a593Smuzhiyun 			if (!ret)
4504*4882a593Smuzhiyun 				si_populate_std_voltage_value(rdev, std_vddc,
4505*4882a593Smuzhiyun 							      table->ACPIState.levels[0].vddc.index,
4506*4882a593Smuzhiyun 							      &table->ACPIState.levels[0].std_vddc);
4507*4882a593Smuzhiyun 		}
4508*4882a593Smuzhiyun 		table->ACPIState.levels[0].gen2PCIE = si_pi->acpi_pcie_gen;
4509*4882a593Smuzhiyun 
4510*4882a593Smuzhiyun 		if (si_pi->vddc_phase_shed_control) {
4511*4882a593Smuzhiyun 			si_populate_phase_shedding_value(rdev,
4512*4882a593Smuzhiyun 							 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table,
4513*4882a593Smuzhiyun 							 pi->acpi_vddc,
4514*4882a593Smuzhiyun 							 0,
4515*4882a593Smuzhiyun 							 0,
4516*4882a593Smuzhiyun 							 &table->ACPIState.levels[0].vddc);
4517*4882a593Smuzhiyun 		}
4518*4882a593Smuzhiyun 	} else {
4519*4882a593Smuzhiyun 		ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
4520*4882a593Smuzhiyun 						pi->min_vddc_in_table, &table->ACPIState.levels[0].vddc);
4521*4882a593Smuzhiyun 		if (!ret) {
4522*4882a593Smuzhiyun 			u16 std_vddc;
4523*4882a593Smuzhiyun 
4524*4882a593Smuzhiyun 			ret = si_get_std_voltage_value(rdev,
4525*4882a593Smuzhiyun 						       &table->ACPIState.levels[0].vddc, &std_vddc);
4526*4882a593Smuzhiyun 
4527*4882a593Smuzhiyun 			if (!ret)
4528*4882a593Smuzhiyun 				si_populate_std_voltage_value(rdev, std_vddc,
4529*4882a593Smuzhiyun 							      table->ACPIState.levels[0].vddc.index,
4530*4882a593Smuzhiyun 							      &table->ACPIState.levels[0].std_vddc);
4531*4882a593Smuzhiyun 		}
4532*4882a593Smuzhiyun 		table->ACPIState.levels[0].gen2PCIE = (u8)r600_get_pcie_gen_support(rdev,
4533*4882a593Smuzhiyun 										    si_pi->sys_pcie_mask,
4534*4882a593Smuzhiyun 										    si_pi->boot_pcie_gen,
4535*4882a593Smuzhiyun 										    RADEON_PCIE_GEN1);
4536*4882a593Smuzhiyun 
4537*4882a593Smuzhiyun 		if (si_pi->vddc_phase_shed_control)
4538*4882a593Smuzhiyun 			si_populate_phase_shedding_value(rdev,
4539*4882a593Smuzhiyun 							 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table,
4540*4882a593Smuzhiyun 							 pi->min_vddc_in_table,
4541*4882a593Smuzhiyun 							 0,
4542*4882a593Smuzhiyun 							 0,
4543*4882a593Smuzhiyun 							 &table->ACPIState.levels[0].vddc);
4544*4882a593Smuzhiyun 	}
4545*4882a593Smuzhiyun 
4546*4882a593Smuzhiyun 	if (pi->acpi_vddc) {
4547*4882a593Smuzhiyun 		if (eg_pi->acpi_vddci)
4548*4882a593Smuzhiyun 			si_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
4549*4882a593Smuzhiyun 						  eg_pi->acpi_vddci,
4550*4882a593Smuzhiyun 						  &table->ACPIState.levels[0].vddci);
4551*4882a593Smuzhiyun 	}
4552*4882a593Smuzhiyun 
4553*4882a593Smuzhiyun 	mclk_pwrmgt_cntl |= MRDCK0_RESET | MRDCK1_RESET;
4554*4882a593Smuzhiyun 	mclk_pwrmgt_cntl &= ~(MRDCK0_PDNB | MRDCK1_PDNB);
4555*4882a593Smuzhiyun 
4556*4882a593Smuzhiyun 	dll_cntl &= ~(MRDCK0_BYPASS | MRDCK1_BYPASS);
4557*4882a593Smuzhiyun 
4558*4882a593Smuzhiyun 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
4559*4882a593Smuzhiyun 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
4560*4882a593Smuzhiyun 
4561*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vDLL_CNTL =
4562*4882a593Smuzhiyun 		cpu_to_be32(dll_cntl);
4563*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
4564*4882a593Smuzhiyun 		cpu_to_be32(mclk_pwrmgt_cntl);
4565*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
4566*4882a593Smuzhiyun 		cpu_to_be32(mpll_ad_func_cntl);
4567*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
4568*4882a593Smuzhiyun 		cpu_to_be32(mpll_dq_func_cntl);
4569*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_FUNC_CNTL =
4570*4882a593Smuzhiyun 		cpu_to_be32(mpll_func_cntl);
4571*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_FUNC_CNTL_1 =
4572*4882a593Smuzhiyun 		cpu_to_be32(mpll_func_cntl_1);
4573*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_FUNC_CNTL_2 =
4574*4882a593Smuzhiyun 		cpu_to_be32(mpll_func_cntl_2);
4575*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_SS =
4576*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_ss1);
4577*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.vMPLL_SS2 =
4578*4882a593Smuzhiyun 		cpu_to_be32(si_pi->clock_registers.mpll_ss2);
4579*4882a593Smuzhiyun 
4580*4882a593Smuzhiyun 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
4581*4882a593Smuzhiyun 		cpu_to_be32(spll_func_cntl);
4582*4882a593Smuzhiyun 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
4583*4882a593Smuzhiyun 		cpu_to_be32(spll_func_cntl_2);
4584*4882a593Smuzhiyun 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
4585*4882a593Smuzhiyun 		cpu_to_be32(spll_func_cntl_3);
4586*4882a593Smuzhiyun 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
4587*4882a593Smuzhiyun 		cpu_to_be32(spll_func_cntl_4);
4588*4882a593Smuzhiyun 
4589*4882a593Smuzhiyun 	table->ACPIState.levels[0].mclk.mclk_value = 0;
4590*4882a593Smuzhiyun 	table->ACPIState.levels[0].sclk.sclk_value = 0;
4591*4882a593Smuzhiyun 
4592*4882a593Smuzhiyun 	si_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
4593*4882a593Smuzhiyun 
4594*4882a593Smuzhiyun 	if (eg_pi->dynamic_ac_timing)
4595*4882a593Smuzhiyun 		table->ACPIState.levels[0].ACIndex = 0;
4596*4882a593Smuzhiyun 
4597*4882a593Smuzhiyun 	table->ACPIState.levels[0].dpm2.MaxPS = 0;
4598*4882a593Smuzhiyun 	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
4599*4882a593Smuzhiyun 	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
4600*4882a593Smuzhiyun 	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
4601*4882a593Smuzhiyun 	table->ACPIState.levels[0].dpm2.PwrEfficiencyRatio = 0;
4602*4882a593Smuzhiyun 
4603*4882a593Smuzhiyun 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
4604*4882a593Smuzhiyun 	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
4605*4882a593Smuzhiyun 
4606*4882a593Smuzhiyun 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
4607*4882a593Smuzhiyun 	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
4608*4882a593Smuzhiyun 
4609*4882a593Smuzhiyun 	return 0;
4610*4882a593Smuzhiyun }
4611*4882a593Smuzhiyun 
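/*
 * Convert the ULV power level into SMC form and apply the ULV-specific
 * flags: deep-sleep bypass or throttle depending on the sleep clock,
 * optional PCIe x1, the ULV arb index, and the DC flag.
 */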
4612*4882a593Smuzhiyun static int si_populate_ulv_state(struct radeon_device *rdev,
4613*4882a593Smuzhiyun 				 SISLANDS_SMC_SWSTATE *state)
4614*4882a593Smuzhiyun {
4615*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4616*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4617*4882a593Smuzhiyun 	struct si_ulv_param *ulv = &si_pi->ulv;
4618*4882a593Smuzhiyun 	u32 sclk_in_sr = 1350; /* ??? */
4619*4882a593Smuzhiyun 	int ret;
4620*4882a593Smuzhiyun 
4621*4882a593Smuzhiyun 	ret = si_convert_power_level_to_smc(rdev, &ulv->pl,
4622*4882a593Smuzhiyun 					    &state->levels[0]);
4623*4882a593Smuzhiyun 	if (!ret) {
4624*4882a593Smuzhiyun 		if (eg_pi->sclk_deep_sleep) {
4625*4882a593Smuzhiyun 			if (sclk_in_sr <= SCLK_MIN_DEEPSLEEP_FREQ)
4626*4882a593Smuzhiyun 				state->levels[0].stateFlags |= PPSMC_STATEFLAG_DEEPSLEEP_BYPASS;
4627*4882a593Smuzhiyun 			else
4628*4882a593Smuzhiyun 				state->levels[0].stateFlags |= PPSMC_STATEFLAG_DEEPSLEEP_THROTTLE;
4629*4882a593Smuzhiyun 		}
4630*4882a593Smuzhiyun 		if (ulv->one_pcie_lane_in_ulv)
4631*4882a593Smuzhiyun 			state->flags |= PPSMC_SWSTATE_FLAG_PCIE_X1;
4632*4882a593Smuzhiyun 		state->levels[0].arbRefreshState = (u8)(SISLANDS_ULV_STATE_ARB_INDEX);
4633*4882a593Smuzhiyun 		state->levels[0].ACIndex = 1;
4634*4882a593Smuzhiyun 		state->levels[0].std_vddc = state->levels[0].vddc;
4635*4882a593Smuzhiyun 		state->levelCount = 1;
4636*4882a593Smuzhiyun 
4637*4882a593Smuzhiyun 		state->flags |= PPSMC_SWSTATE_FLAG_DC;
4638*4882a593Smuzhiyun 	}
4639*4882a593Smuzhiyun 
4640*4882a593Smuzhiyun 	return ret;
4641*4882a593Smuzhiyun }
4642*4882a593Smuzhiyun 
4643*4882a593Smuzhiyun static int si_program_ulv_memory_timing_parameters(struct radeon_device *rdev)
4644*4882a593Smuzhiyun {
4645*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4646*4882a593Smuzhiyun 	struct si_ulv_param *ulv = &si_pi->ulv;
4647*4882a593Smuzhiyun 	SMC_SIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
4648*4882a593Smuzhiyun 	int ret;
4649*4882a593Smuzhiyun 
4650*4882a593Smuzhiyun 	ret = si_populate_memory_timing_parameters(rdev, &ulv->pl,
4651*4882a593Smuzhiyun 						   &arb_regs);
4652*4882a593Smuzhiyun 	if (ret)
4653*4882a593Smuzhiyun 		return ret;
4654*4882a593Smuzhiyun 
4655*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_ulv_volt_change_delay,
4656*4882a593Smuzhiyun 				   ulv->volt_change_delay);
4657*4882a593Smuzhiyun 
4658*4882a593Smuzhiyun 	ret = si_copy_bytes_to_smc(rdev,
4659*4882a593Smuzhiyun 				   si_pi->arb_table_start +
4660*4882a593Smuzhiyun 				   offsetof(SMC_SIslands_MCArbDramTimingRegisters, data) +
4661*4882a593Smuzhiyun 				   sizeof(SMC_SIslands_MCArbDramTimingRegisterSet) * SISLANDS_ULV_STATE_ARB_INDEX,
4662*4882a593Smuzhiyun 				   (u8 *)&arb_regs,
4663*4882a593Smuzhiyun 				   sizeof(SMC_SIslands_MCArbDramTimingRegisterSet),
4664*4882a593Smuzhiyun 				   si_pi->sram_end);
4665*4882a593Smuzhiyun 
4666*4882a593Smuzhiyun 	return ret;
4667*4882a593Smuzhiyun }
4668*4882a593Smuzhiyun 
4669*4882a593Smuzhiyun static void si_get_mvdd_configuration(struct radeon_device *rdev)
4670*4882a593Smuzhiyun {
4671*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4672*4882a593Smuzhiyun 
4673*4882a593Smuzhiyun 	pi->mvdd_split_frequency = 30000;
4674*4882a593Smuzhiyun }
4675*4882a593Smuzhiyun 
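/*
 * Build the complete SMC state table: voltage tables, thermal-protection
 * type, platform capability flags, the initial/ACPI/ULV states and their
 * memory arbiter timings, then upload the table to SMC SRAM.
 */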
4676*4882a593Smuzhiyun static int si_init_smc_table(struct radeon_device *rdev)
4677*4882a593Smuzhiyun {
4678*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4679*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4680*4882a593Smuzhiyun 	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
4681*4882a593Smuzhiyun 	const struct si_ulv_param *ulv = &si_pi->ulv;
4682*4882a593Smuzhiyun 	SISLANDS_SMC_STATETABLE  *table = &si_pi->smc_statetable;
4683*4882a593Smuzhiyun 	int ret;
4684*4882a593Smuzhiyun 	u32 lane_width;
4685*4882a593Smuzhiyun 	u32 vr_hot_gpio;
4686*4882a593Smuzhiyun 
4687*4882a593Smuzhiyun 	si_populate_smc_voltage_tables(rdev, table);
4688*4882a593Smuzhiyun 
4689*4882a593Smuzhiyun 	switch (rdev->pm.int_thermal_type) {
4690*4882a593Smuzhiyun 	case THERMAL_TYPE_SI:
4691*4882a593Smuzhiyun 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
4692*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
4693*4882a593Smuzhiyun 		break;
4694*4882a593Smuzhiyun 	case THERMAL_TYPE_NONE:
4695*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
4696*4882a593Smuzhiyun 		break;
4697*4882a593Smuzhiyun 	default:
4698*4882a593Smuzhiyun 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
4699*4882a593Smuzhiyun 		break;
4700*4882a593Smuzhiyun 	}
4701*4882a593Smuzhiyun 
4702*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
4703*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
4704*4882a593Smuzhiyun 
4705*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) {
4706*4882a593Smuzhiyun 		if ((rdev->pdev->device != 0x6818) && (rdev->pdev->device != 0x6819))
4707*4882a593Smuzhiyun 			table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
4708*4882a593Smuzhiyun 	}
4709*4882a593Smuzhiyun 
4710*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
4711*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
4712*4882a593Smuzhiyun 
4713*4882a593Smuzhiyun 	if (pi->mem_gddr5)
4714*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
4715*4882a593Smuzhiyun 
4716*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REVERT_GPIO5_POLARITY)
4717*4882a593Smuzhiyun 		table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_GPIO5_POLARITY_HIGH;
4718*4882a593Smuzhiyun 
4719*4882a593Smuzhiyun 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_VRHOT_GPIO_CONFIGURABLE) {
4720*4882a593Smuzhiyun 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT_PROG_GPIO;
4721*4882a593Smuzhiyun 		vr_hot_gpio = rdev->pm.dpm.backbias_response_time;
4722*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_vr_hot_gpio,
4723*4882a593Smuzhiyun 					   vr_hot_gpio);
4724*4882a593Smuzhiyun 	}
4725*4882a593Smuzhiyun 
4726*4882a593Smuzhiyun 	ret = si_populate_smc_initial_state(rdev, radeon_boot_state, table);
4727*4882a593Smuzhiyun 	if (ret)
4728*4882a593Smuzhiyun 		return ret;
4729*4882a593Smuzhiyun 
4730*4882a593Smuzhiyun 	ret = si_populate_smc_acpi_state(rdev, table);
4731*4882a593Smuzhiyun 	if (ret)
4732*4882a593Smuzhiyun 		return ret;
4733*4882a593Smuzhiyun 
4734*4882a593Smuzhiyun 	table->driverState = table->initialState;
4735*4882a593Smuzhiyun 
4736*4882a593Smuzhiyun 	ret = si_do_program_memory_timing_parameters(rdev, radeon_boot_state,
4737*4882a593Smuzhiyun 						     SISLANDS_INITIAL_STATE_ARB_INDEX);
4738*4882a593Smuzhiyun 	if (ret)
4739*4882a593Smuzhiyun 		return ret;
4740*4882a593Smuzhiyun 
4741*4882a593Smuzhiyun 	if (ulv->supported && ulv->pl.vddc) {
4742*4882a593Smuzhiyun 		ret = si_populate_ulv_state(rdev, &table->ULVState);
4743*4882a593Smuzhiyun 		if (ret)
4744*4882a593Smuzhiyun 			return ret;
4745*4882a593Smuzhiyun 
4746*4882a593Smuzhiyun 		ret = si_program_ulv_memory_timing_parameters(rdev);
4747*4882a593Smuzhiyun 		if (ret)
4748*4882a593Smuzhiyun 			return ret;
4749*4882a593Smuzhiyun 
4750*4882a593Smuzhiyun 		WREG32(CG_ULV_CONTROL, ulv->cg_ulv_control);
4751*4882a593Smuzhiyun 		WREG32(CG_ULV_PARAMETER, ulv->cg_ulv_parameter);
4752*4882a593Smuzhiyun 
4753*4882a593Smuzhiyun 		lane_width = radeon_get_pcie_lanes(rdev);
4754*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_non_ulv_pcie_link_width, lane_width);
4755*4882a593Smuzhiyun 	} else {
4756*4882a593Smuzhiyun 		table->ULVState = table->initialState;
4757*4882a593Smuzhiyun 	}
4758*4882a593Smuzhiyun 
4759*4882a593Smuzhiyun 	return si_copy_bytes_to_smc(rdev, si_pi->state_table_start,
4760*4882a593Smuzhiyun 				    (u8 *)table, sizeof(SISLANDS_SMC_STATETABLE),
4761*4882a593Smuzhiyun 				    si_pi->sram_end);
4762*4882a593Smuzhiyun }
4763*4882a593Smuzhiyun 
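/*
 * Compute the SPLL settings for a target engine clock: reference and post
 * dividers from ATOM, a feedback divider scaled by 16384, and optional
 * spread-spectrum CLK_S/CLK_V values when engine spread spectrum is
 * reported by the ATOM SS info.
 */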
4764*4882a593Smuzhiyun static int si_calculate_sclk_params(struct radeon_device *rdev,
4765*4882a593Smuzhiyun 				    u32 engine_clock,
4766*4882a593Smuzhiyun 				    SISLANDS_SMC_SCLK_VALUE *sclk)
4767*4882a593Smuzhiyun {
4768*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4769*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4770*4882a593Smuzhiyun 	struct atom_clock_dividers dividers;
4771*4882a593Smuzhiyun 	u32 spll_func_cntl = si_pi->clock_registers.cg_spll_func_cntl;
4772*4882a593Smuzhiyun 	u32 spll_func_cntl_2 = si_pi->clock_registers.cg_spll_func_cntl_2;
4773*4882a593Smuzhiyun 	u32 spll_func_cntl_3 = si_pi->clock_registers.cg_spll_func_cntl_3;
4774*4882a593Smuzhiyun 	u32 spll_func_cntl_4 = si_pi->clock_registers.cg_spll_func_cntl_4;
4775*4882a593Smuzhiyun 	u32 cg_spll_spread_spectrum = si_pi->clock_registers.cg_spll_spread_spectrum;
4776*4882a593Smuzhiyun 	u32 cg_spll_spread_spectrum_2 = si_pi->clock_registers.cg_spll_spread_spectrum_2;
4777*4882a593Smuzhiyun 	u64 tmp;
4778*4882a593Smuzhiyun 	u32 reference_clock = rdev->clock.spll.reference_freq;
4779*4882a593Smuzhiyun 	u32 reference_divider;
4780*4882a593Smuzhiyun 	u32 fbdiv;
4781*4882a593Smuzhiyun 	int ret;
4782*4882a593Smuzhiyun 
4783*4882a593Smuzhiyun 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4784*4882a593Smuzhiyun 					     engine_clock, false, &dividers);
4785*4882a593Smuzhiyun 	if (ret)
4786*4882a593Smuzhiyun 		return ret;
4787*4882a593Smuzhiyun 
4788*4882a593Smuzhiyun 	reference_divider = 1 + dividers.ref_div;
4789*4882a593Smuzhiyun 
4790*4882a593Smuzhiyun 	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
4791*4882a593Smuzhiyun 	do_div(tmp, reference_clock);
4792*4882a593Smuzhiyun 	fbdiv = (u32) tmp;
4793*4882a593Smuzhiyun 
4794*4882a593Smuzhiyun 	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
4795*4882a593Smuzhiyun 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
4796*4882a593Smuzhiyun 	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
4797*4882a593Smuzhiyun 
4798*4882a593Smuzhiyun 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
4799*4882a593Smuzhiyun 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
4800*4882a593Smuzhiyun 
4801*4882a593Smuzhiyun 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
4802*4882a593Smuzhiyun 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
4803*4882a593Smuzhiyun 	spll_func_cntl_3 |= SPLL_DITHEN;
4804*4882a593Smuzhiyun 
4805*4882a593Smuzhiyun 	if (pi->sclk_ss) {
4806*4882a593Smuzhiyun 		struct radeon_atom_ss ss;
4807*4882a593Smuzhiyun 		u32 vco_freq = engine_clock * dividers.post_div;
4808*4882a593Smuzhiyun 
4809*4882a593Smuzhiyun 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
4810*4882a593Smuzhiyun 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
4811*4882a593Smuzhiyun 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
4812*4882a593Smuzhiyun 			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
4813*4882a593Smuzhiyun 
4814*4882a593Smuzhiyun 			cg_spll_spread_spectrum &= ~CLK_S_MASK;
4815*4882a593Smuzhiyun 			cg_spll_spread_spectrum |= CLK_S(clk_s);
4816*4882a593Smuzhiyun 			cg_spll_spread_spectrum |= SSEN;
4817*4882a593Smuzhiyun 
4818*4882a593Smuzhiyun 			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
4819*4882a593Smuzhiyun 			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
4820*4882a593Smuzhiyun 		}
4821*4882a593Smuzhiyun 	}
4822*4882a593Smuzhiyun 
4823*4882a593Smuzhiyun 	sclk->sclk_value = engine_clock;
4824*4882a593Smuzhiyun 	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
4825*4882a593Smuzhiyun 	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
4826*4882a593Smuzhiyun 	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
4827*4882a593Smuzhiyun 	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
4828*4882a593Smuzhiyun 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
4829*4882a593Smuzhiyun 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
4830*4882a593Smuzhiyun 
4831*4882a593Smuzhiyun 	return 0;
4832*4882a593Smuzhiyun }
4833*4882a593Smuzhiyun 
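/*
 * Same calculation as si_calculate_sclk_params(), with the results
 * byte-swapped to big endian before being handed to the SMC.
 */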
4834*4882a593Smuzhiyun static int si_populate_sclk_value(struct radeon_device *rdev,
4835*4882a593Smuzhiyun 				  u32 engine_clock,
4836*4882a593Smuzhiyun 				  SISLANDS_SMC_SCLK_VALUE *sclk)
4837*4882a593Smuzhiyun {
4838*4882a593Smuzhiyun 	SISLANDS_SMC_SCLK_VALUE sclk_tmp;
4839*4882a593Smuzhiyun 	int ret;
4840*4882a593Smuzhiyun 
4841*4882a593Smuzhiyun 	ret = si_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
4842*4882a593Smuzhiyun 	if (!ret) {
4843*4882a593Smuzhiyun 		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
4844*4882a593Smuzhiyun 		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
4845*4882a593Smuzhiyun 		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
4846*4882a593Smuzhiyun 		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
4847*4882a593Smuzhiyun 		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
4848*4882a593Smuzhiyun 		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
4849*4882a593Smuzhiyun 		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
4850*4882a593Smuzhiyun 	}
4851*4882a593Smuzhiyun 
4852*4882a593Smuzhiyun 	return ret;
4853*4882a593Smuzhiyun }
4854*4882a593Smuzhiyun 
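/*
 * Build the MPLL register set for a memory clock.  The dividers come from
 * ATOM; for GDDR5 the DQ post divider is programmed as well.  Memory spread
 * spectrum, when available, is computed against a nominal frequency of 4x
 * the memory clock for GDDR5 (2x otherwise), and the MRDCK power-down bits
 * follow dll_state_on.  All values are stored big endian for the SMC.
 */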
4855*4882a593Smuzhiyun static int si_populate_mclk_value(struct radeon_device *rdev,
4856*4882a593Smuzhiyun 				  u32 engine_clock,
4857*4882a593Smuzhiyun 				  u32 memory_clock,
4858*4882a593Smuzhiyun 				  SISLANDS_SMC_MCLK_VALUE *mclk,
4859*4882a593Smuzhiyun 				  bool strobe_mode,
4860*4882a593Smuzhiyun 				  bool dll_state_on)
4861*4882a593Smuzhiyun {
4862*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4863*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4864*4882a593Smuzhiyun 	u32  dll_cntl = si_pi->clock_registers.dll_cntl;
4865*4882a593Smuzhiyun 	u32  mclk_pwrmgt_cntl = si_pi->clock_registers.mclk_pwrmgt_cntl;
4866*4882a593Smuzhiyun 	u32  mpll_ad_func_cntl = si_pi->clock_registers.mpll_ad_func_cntl;
4867*4882a593Smuzhiyun 	u32  mpll_dq_func_cntl = si_pi->clock_registers.mpll_dq_func_cntl;
4868*4882a593Smuzhiyun 	u32  mpll_func_cntl = si_pi->clock_registers.mpll_func_cntl;
4869*4882a593Smuzhiyun 	u32  mpll_func_cntl_1 = si_pi->clock_registers.mpll_func_cntl_1;
4870*4882a593Smuzhiyun 	u32  mpll_func_cntl_2 = si_pi->clock_registers.mpll_func_cntl_2;
4871*4882a593Smuzhiyun 	u32  mpll_ss1 = si_pi->clock_registers.mpll_ss1;
4872*4882a593Smuzhiyun 	u32  mpll_ss2 = si_pi->clock_registers.mpll_ss2;
4873*4882a593Smuzhiyun 	struct atom_mpll_param mpll_param;
4874*4882a593Smuzhiyun 	int ret;
4875*4882a593Smuzhiyun 
4876*4882a593Smuzhiyun 	ret = radeon_atom_get_memory_pll_dividers(rdev, memory_clock, strobe_mode, &mpll_param);
4877*4882a593Smuzhiyun 	if (ret)
4878*4882a593Smuzhiyun 		return ret;
4879*4882a593Smuzhiyun 
4880*4882a593Smuzhiyun 	mpll_func_cntl &= ~BWCTRL_MASK;
4881*4882a593Smuzhiyun 	mpll_func_cntl |= BWCTRL(mpll_param.bwcntl);
4882*4882a593Smuzhiyun 
4883*4882a593Smuzhiyun 	mpll_func_cntl_1 &= ~(CLKF_MASK | CLKFRAC_MASK | VCO_MODE_MASK);
4884*4882a593Smuzhiyun 	mpll_func_cntl_1 |= CLKF(mpll_param.clkf) |
4885*4882a593Smuzhiyun 		CLKFRAC(mpll_param.clkfrac) | VCO_MODE(mpll_param.vco_mode);
4886*4882a593Smuzhiyun 
4887*4882a593Smuzhiyun 	mpll_ad_func_cntl &= ~YCLK_POST_DIV_MASK;
4888*4882a593Smuzhiyun 	mpll_ad_func_cntl |= YCLK_POST_DIV(mpll_param.post_div);
4889*4882a593Smuzhiyun 
4890*4882a593Smuzhiyun 	if (pi->mem_gddr5) {
4891*4882a593Smuzhiyun 		mpll_dq_func_cntl &= ~(YCLK_SEL_MASK | YCLK_POST_DIV_MASK);
4892*4882a593Smuzhiyun 		mpll_dq_func_cntl |= YCLK_SEL(mpll_param.yclk_sel) |
4893*4882a593Smuzhiyun 			YCLK_POST_DIV(mpll_param.post_div);
4894*4882a593Smuzhiyun 	}
4895*4882a593Smuzhiyun 
4896*4882a593Smuzhiyun 	if (pi->mclk_ss) {
4897*4882a593Smuzhiyun 		struct radeon_atom_ss ss;
4898*4882a593Smuzhiyun 		u32 freq_nom;
4899*4882a593Smuzhiyun 		u32 tmp;
4900*4882a593Smuzhiyun 		u32 reference_clock = rdev->clock.mpll.reference_freq;
4901*4882a593Smuzhiyun 
4902*4882a593Smuzhiyun 		if (pi->mem_gddr5)
4903*4882a593Smuzhiyun 			freq_nom = memory_clock * 4;
4904*4882a593Smuzhiyun 		else
4905*4882a593Smuzhiyun 			freq_nom = memory_clock * 2;
4906*4882a593Smuzhiyun 
4907*4882a593Smuzhiyun 		tmp = freq_nom / reference_clock;
4908*4882a593Smuzhiyun 		tmp = tmp * tmp;
4909*4882a593Smuzhiyun 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
4910*4882a593Smuzhiyun 						     ASIC_INTERNAL_MEMORY_SS, freq_nom)) {
4911*4882a593Smuzhiyun 			u32 clks = reference_clock * 5 / ss.rate;
4912*4882a593Smuzhiyun 			u32 clkv = (u32)((((131 * ss.percentage * ss.rate) / 100) * tmp) / freq_nom);
4913*4882a593Smuzhiyun 
4914*4882a593Smuzhiyun 			mpll_ss1 &= ~CLKV_MASK;
4915*4882a593Smuzhiyun 			mpll_ss1 |= CLKV(clkv);
4916*4882a593Smuzhiyun 
4917*4882a593Smuzhiyun 			mpll_ss2 &= ~CLKS_MASK;
4918*4882a593Smuzhiyun 			mpll_ss2 |= CLKS(clks);
4919*4882a593Smuzhiyun 		}
4920*4882a593Smuzhiyun 	}
4921*4882a593Smuzhiyun 
4922*4882a593Smuzhiyun 	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
4923*4882a593Smuzhiyun 	mclk_pwrmgt_cntl |= DLL_SPEED(mpll_param.dll_speed);
4924*4882a593Smuzhiyun 
4925*4882a593Smuzhiyun 	if (dll_state_on)
4926*4882a593Smuzhiyun 		mclk_pwrmgt_cntl |= MRDCK0_PDNB | MRDCK1_PDNB;
4927*4882a593Smuzhiyun 	else
4928*4882a593Smuzhiyun 		mclk_pwrmgt_cntl &= ~(MRDCK0_PDNB | MRDCK1_PDNB);
4929*4882a593Smuzhiyun 
4930*4882a593Smuzhiyun 	mclk->mclk_value = cpu_to_be32(memory_clock);
4931*4882a593Smuzhiyun 	mclk->vMPLL_FUNC_CNTL = cpu_to_be32(mpll_func_cntl);
4932*4882a593Smuzhiyun 	mclk->vMPLL_FUNC_CNTL_1 = cpu_to_be32(mpll_func_cntl_1);
4933*4882a593Smuzhiyun 	mclk->vMPLL_FUNC_CNTL_2 = cpu_to_be32(mpll_func_cntl_2);
4934*4882a593Smuzhiyun 	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
4935*4882a593Smuzhiyun 	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
4936*4882a593Smuzhiyun 	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
4937*4882a593Smuzhiyun 	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
4938*4882a593Smuzhiyun 	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
4939*4882a593Smuzhiyun 	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
4940*4882a593Smuzhiyun 
4941*4882a593Smuzhiyun 	return 0;
4942*4882a593Smuzhiyun }
4943*4882a593Smuzhiyun 
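/*
 * Fill in the per-level bSP values: every level except the highest uses
 * pi->dsp, the highest level uses pi->psp.
 */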
4944*4882a593Smuzhiyun static void si_populate_smc_sp(struct radeon_device *rdev,
4945*4882a593Smuzhiyun 			       struct radeon_ps *radeon_state,
4946*4882a593Smuzhiyun 			       SISLANDS_SMC_SWSTATE *smc_state)
4947*4882a593Smuzhiyun {
4948*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(radeon_state);
4949*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4950*4882a593Smuzhiyun 	int i;
4951*4882a593Smuzhiyun 
4952*4882a593Smuzhiyun 	for (i = 0; i < ps->performance_level_count - 1; i++)
4953*4882a593Smuzhiyun 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
4954*4882a593Smuzhiyun 
4955*4882a593Smuzhiyun 	smc_state->levels[ps->performance_level_count - 1].bSP =
4956*4882a593Smuzhiyun 		cpu_to_be32(pi->psp);
4957*4882a593Smuzhiyun }
4958*4882a593Smuzhiyun 
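/*
 * Convert a single performance level into its SMC form: PCIe gen (possibly
 * forced), SCLK/MCLK register sets, memory stutter/EDC/strobe flags with
 * the matching DLL state, VDDC/VDDCI/MVDD voltages, optional phase
 * shedding, and the maximum powered-up CU count.
 */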
4959*4882a593Smuzhiyun static int si_convert_power_level_to_smc(struct radeon_device *rdev,
4960*4882a593Smuzhiyun 					 struct rv7xx_pl *pl,
4961*4882a593Smuzhiyun 					 SISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
4962*4882a593Smuzhiyun {
4963*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
4964*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4965*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
4966*4882a593Smuzhiyun 	int ret;
4967*4882a593Smuzhiyun 	bool dll_state_on;
4968*4882a593Smuzhiyun 	u16 std_vddc;
4969*4882a593Smuzhiyun 	bool gmc_pg = false;
4970*4882a593Smuzhiyun 
4971*4882a593Smuzhiyun 	if (eg_pi->pcie_performance_request &&
4972*4882a593Smuzhiyun 	    (si_pi->force_pcie_gen != RADEON_PCIE_GEN_INVALID))
4973*4882a593Smuzhiyun 		level->gen2PCIE = (u8)si_pi->force_pcie_gen;
4974*4882a593Smuzhiyun 	else
4975*4882a593Smuzhiyun 		level->gen2PCIE = (u8)pl->pcie_gen;
4976*4882a593Smuzhiyun 
4977*4882a593Smuzhiyun 	ret = si_populate_sclk_value(rdev, pl->sclk, &level->sclk);
4978*4882a593Smuzhiyun 	if (ret)
4979*4882a593Smuzhiyun 		return ret;
4980*4882a593Smuzhiyun 
4981*4882a593Smuzhiyun 	level->mcFlags =  0;
4982*4882a593Smuzhiyun 
4983*4882a593Smuzhiyun 	if (pi->mclk_stutter_mode_threshold &&
4984*4882a593Smuzhiyun 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
4985*4882a593Smuzhiyun 	    !eg_pi->uvd_enabled &&
4986*4882a593Smuzhiyun 	    (RREG32(DPG_PIPE_STUTTER_CONTROL) & STUTTER_ENABLE) &&
4987*4882a593Smuzhiyun 	    (rdev->pm.dpm.new_active_crtc_count <= 2)) {
4988*4882a593Smuzhiyun 		level->mcFlags |= SISLANDS_SMC_MC_STUTTER_EN;
4989*4882a593Smuzhiyun 
4990*4882a593Smuzhiyun 		if (gmc_pg)
4991*4882a593Smuzhiyun 			level->mcFlags |= SISLANDS_SMC_MC_PG_EN;
4992*4882a593Smuzhiyun 	}
4993*4882a593Smuzhiyun 
4994*4882a593Smuzhiyun 	if (pi->mem_gddr5) {
4995*4882a593Smuzhiyun 		if (pl->mclk > pi->mclk_edc_enable_threshold)
4996*4882a593Smuzhiyun 			level->mcFlags |= SISLANDS_SMC_MC_EDC_RD_FLAG;
4997*4882a593Smuzhiyun 
4998*4882a593Smuzhiyun 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
4999*4882a593Smuzhiyun 			level->mcFlags |= SISLANDS_SMC_MC_EDC_WR_FLAG;
5000*4882a593Smuzhiyun 
5001*4882a593Smuzhiyun 		level->strobeMode = si_get_strobe_mode_settings(rdev, pl->mclk);
5002*4882a593Smuzhiyun 
5003*4882a593Smuzhiyun 		if (level->strobeMode & SISLANDS_SMC_STROBE_ENABLE) {
5004*4882a593Smuzhiyun 			if (si_get_mclk_frequency_ratio(pl->mclk, true) >=
5005*4882a593Smuzhiyun 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
5006*4882a593Smuzhiyun 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
5007*4882a593Smuzhiyun 			else
5008*4882a593Smuzhiyun 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
5009*4882a593Smuzhiyun 		} else {
5010*4882a593Smuzhiyun 			dll_state_on = false;
5011*4882a593Smuzhiyun 		}
5012*4882a593Smuzhiyun 	} else {
5013*4882a593Smuzhiyun 		level->strobeMode = si_get_strobe_mode_settings(rdev,
5014*4882a593Smuzhiyun 								pl->mclk);
5015*4882a593Smuzhiyun 
5016*4882a593Smuzhiyun 		dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
5017*4882a593Smuzhiyun 	}
5018*4882a593Smuzhiyun 
5019*4882a593Smuzhiyun 	ret = si_populate_mclk_value(rdev,
5020*4882a593Smuzhiyun 				     pl->sclk,
5021*4882a593Smuzhiyun 				     pl->mclk,
5022*4882a593Smuzhiyun 				     &level->mclk,
5023*4882a593Smuzhiyun 				     (level->strobeMode & SISLANDS_SMC_STROBE_ENABLE) != 0, dll_state_on);
5024*4882a593Smuzhiyun 	if (ret)
5025*4882a593Smuzhiyun 		return ret;
5026*4882a593Smuzhiyun 
5027*4882a593Smuzhiyun 	ret = si_populate_voltage_value(rdev,
5028*4882a593Smuzhiyun 					&eg_pi->vddc_voltage_table,
5029*4882a593Smuzhiyun 					pl->vddc, &level->vddc);
5030*4882a593Smuzhiyun 	if (ret)
5031*4882a593Smuzhiyun 		return ret;
5032*4882a593Smuzhiyun 
5033*4882a593Smuzhiyun 
5034*4882a593Smuzhiyun 	ret = si_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
5035*4882a593Smuzhiyun 	if (ret)
5036*4882a593Smuzhiyun 		return ret;
5037*4882a593Smuzhiyun 
5038*4882a593Smuzhiyun 	ret = si_populate_std_voltage_value(rdev, std_vddc,
5039*4882a593Smuzhiyun 					    level->vddc.index, &level->std_vddc);
5040*4882a593Smuzhiyun 	if (ret)
5041*4882a593Smuzhiyun 		return ret;
5042*4882a593Smuzhiyun 
5043*4882a593Smuzhiyun 	if (eg_pi->vddci_control) {
5044*4882a593Smuzhiyun 		ret = si_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
5045*4882a593Smuzhiyun 						pl->vddci, &level->vddci);
5046*4882a593Smuzhiyun 		if (ret)
5047*4882a593Smuzhiyun 			return ret;
5048*4882a593Smuzhiyun 	}
5049*4882a593Smuzhiyun 
5050*4882a593Smuzhiyun 	if (si_pi->vddc_phase_shed_control) {
5051*4882a593Smuzhiyun 		ret = si_populate_phase_shedding_value(rdev,
5052*4882a593Smuzhiyun 						       &rdev->pm.dpm.dyn_state.phase_shedding_limits_table,
5053*4882a593Smuzhiyun 						       pl->vddc,
5054*4882a593Smuzhiyun 						       pl->sclk,
5055*4882a593Smuzhiyun 						       pl->mclk,
5056*4882a593Smuzhiyun 						       &level->vddc);
5057*4882a593Smuzhiyun 		if (ret)
5058*4882a593Smuzhiyun 			return ret;
5059*4882a593Smuzhiyun 	}
5060*4882a593Smuzhiyun 
5061*4882a593Smuzhiyun 	level->MaxPoweredUpCU = si_pi->max_cu;
5062*4882a593Smuzhiyun 
5063*4882a593Smuzhiyun 	ret = si_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
5064*4882a593Smuzhiyun 
5065*4882a593Smuzhiyun 	return ret;
5066*4882a593Smuzhiyun }
5067*4882a593Smuzhiyun 
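/*
 * Compute the aT (CG_R/CG_L) transition thresholds between adjacent
 * performance levels from their SCLKs via r600_calculate_at(); the final
 * transition uses pbsp instead of bsp.  States with more than eight levels
 * are rejected.
 */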
5068*4882a593Smuzhiyun static int si_populate_smc_t(struct radeon_device *rdev,
5069*4882a593Smuzhiyun 			     struct radeon_ps *radeon_state,
5070*4882a593Smuzhiyun 			     SISLANDS_SMC_SWSTATE *smc_state)
5071*4882a593Smuzhiyun {
5072*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
5073*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
5074*4882a593Smuzhiyun 	u32 a_t;
5075*4882a593Smuzhiyun 	u32 t_l, t_h;
5076*4882a593Smuzhiyun 	u32 high_bsp;
5077*4882a593Smuzhiyun 	int i, ret;
5078*4882a593Smuzhiyun 
5079*4882a593Smuzhiyun 	if (state->performance_level_count >= 9)
5080*4882a593Smuzhiyun 		return -EINVAL;
5081*4882a593Smuzhiyun 
5082*4882a593Smuzhiyun 	if (state->performance_level_count < 2) {
5083*4882a593Smuzhiyun 		a_t = CG_R(0xffff) | CG_L(0);
5084*4882a593Smuzhiyun 		smc_state->levels[0].aT = cpu_to_be32(a_t);
5085*4882a593Smuzhiyun 		return 0;
5086*4882a593Smuzhiyun 	}
5087*4882a593Smuzhiyun 
5088*4882a593Smuzhiyun 	smc_state->levels[0].aT = cpu_to_be32(0);
5089*4882a593Smuzhiyun 
5090*4882a593Smuzhiyun 	for (i = 0; i <= state->performance_level_count - 2; i++) {
5091*4882a593Smuzhiyun 		ret = r600_calculate_at(
5092*4882a593Smuzhiyun 			(50 / SISLANDS_MAX_HARDWARE_POWERLEVELS) * 100 * (i + 1),
5093*4882a593Smuzhiyun 			100 * R600_AH_DFLT,
5094*4882a593Smuzhiyun 			state->performance_levels[i + 1].sclk,
5095*4882a593Smuzhiyun 			state->performance_levels[i].sclk,
5096*4882a593Smuzhiyun 			&t_l,
5097*4882a593Smuzhiyun 			&t_h);
5098*4882a593Smuzhiyun 
5099*4882a593Smuzhiyun 		if (ret) {
5100*4882a593Smuzhiyun 			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
5101*4882a593Smuzhiyun 			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
5102*4882a593Smuzhiyun 		}
5103*4882a593Smuzhiyun 
5104*4882a593Smuzhiyun 		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
5105*4882a593Smuzhiyun 		a_t |= CG_R(t_l * pi->bsp / 20000);
5106*4882a593Smuzhiyun 		smc_state->levels[i].aT = cpu_to_be32(a_t);
5107*4882a593Smuzhiyun 
5108*4882a593Smuzhiyun 		high_bsp = (i == state->performance_level_count - 2) ?
5109*4882a593Smuzhiyun 			pi->pbsp : pi->bsp;
5110*4882a593Smuzhiyun 		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
5111*4882a593Smuzhiyun 		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
5112*4882a593Smuzhiyun 	}
5113*4882a593Smuzhiyun 
5114*4882a593Smuzhiyun 	return 0;
5115*4882a593Smuzhiyun }
5116*4882a593Smuzhiyun 
5117*4882a593Smuzhiyun static int si_disable_ulv(struct radeon_device *rdev)
5118*4882a593Smuzhiyun {
5119*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5120*4882a593Smuzhiyun 	struct si_ulv_param *ulv = &si_pi->ulv;
5121*4882a593Smuzhiyun 
5122*4882a593Smuzhiyun 	if (ulv->supported)
5123*4882a593Smuzhiyun 		return (si_send_msg_to_smc(rdev, PPSMC_MSG_DisableULV) == PPSMC_Result_OK) ?
5124*4882a593Smuzhiyun 			0 : -EINVAL;
5125*4882a593Smuzhiyun 
5126*4882a593Smuzhiyun 	return 0;
5127*4882a593Smuzhiyun }
5128*4882a593Smuzhiyun 
5129*4882a593Smuzhiyun static bool si_is_state_ulv_compatible(struct radeon_device *rdev,
5130*4882a593Smuzhiyun 				       struct radeon_ps *radeon_state)
5131*4882a593Smuzhiyun {
5132*4882a593Smuzhiyun 	const struct si_power_info *si_pi = si_get_pi(rdev);
5133*4882a593Smuzhiyun 	const struct si_ulv_param *ulv = &si_pi->ulv;
5134*4882a593Smuzhiyun 	const struct ni_ps *state = ni_get_ps(radeon_state);
5135*4882a593Smuzhiyun 	int i;
5136*4882a593Smuzhiyun 
5137*4882a593Smuzhiyun 	if (state->performance_levels[0].mclk != ulv->pl.mclk)
5138*4882a593Smuzhiyun 		return false;
5139*4882a593Smuzhiyun 
5140*4882a593Smuzhiyun 	/* XXX validate against display requirements! */
5141*4882a593Smuzhiyun 
5142*4882a593Smuzhiyun 	for (i = 0; i < rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count; i++) {
5143*4882a593Smuzhiyun 		if (rdev->clock.current_dispclk <=
5144*4882a593Smuzhiyun 		    rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].clk) {
5145*4882a593Smuzhiyun 			if (ulv->pl.vddc <
5146*4882a593Smuzhiyun 			    rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].v)
5147*4882a593Smuzhiyun 				return false;
5148*4882a593Smuzhiyun 		}
5149*4882a593Smuzhiyun 	}
5150*4882a593Smuzhiyun 
5151*4882a593Smuzhiyun 	if ((radeon_state->vclk != 0) || (radeon_state->dclk != 0))
5152*4882a593Smuzhiyun 		return false;
5153*4882a593Smuzhiyun 
5154*4882a593Smuzhiyun 	return true;
5155*4882a593Smuzhiyun }
5156*4882a593Smuzhiyun 
5157*4882a593Smuzhiyun static int si_set_power_state_conditionally_enable_ulv(struct radeon_device *rdev,
5158*4882a593Smuzhiyun 						       struct radeon_ps *radeon_new_state)
5159*4882a593Smuzhiyun {
5160*4882a593Smuzhiyun 	const struct si_power_info *si_pi = si_get_pi(rdev);
5161*4882a593Smuzhiyun 	const struct si_ulv_param *ulv = &si_pi->ulv;
5162*4882a593Smuzhiyun 
5163*4882a593Smuzhiyun 	if (ulv->supported) {
5164*4882a593Smuzhiyun 		if (si_is_state_ulv_compatible(rdev, radeon_new_state))
5165*4882a593Smuzhiyun 			return (si_send_msg_to_smc(rdev, PPSMC_MSG_EnableULV) == PPSMC_Result_OK) ?
5166*4882a593Smuzhiyun 				0 : -EINVAL;
5167*4882a593Smuzhiyun 	}
5168*4882a593Smuzhiyun 	return 0;
5169*4882a593Smuzhiyun }
5170*4882a593Smuzhiyun 
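/*
 * Translate a radeon power state into the SMC software state: set the
 * UVD/DC flags, convert each performance level (deep-sleep bypass/throttle
 * hints, ARB and AC-timing indices, display watermark), program the
 * watermark-threshold soft register, then add the SP, power containment,
 * SQ ramping and aT data.  Power containment or SQ ramping failures only
 * disable those features instead of failing the whole conversion.
 */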
5171*4882a593Smuzhiyun static int si_convert_power_state_to_smc(struct radeon_device *rdev,
5172*4882a593Smuzhiyun 					 struct radeon_ps *radeon_state,
5173*4882a593Smuzhiyun 					 SISLANDS_SMC_SWSTATE *smc_state)
5174*4882a593Smuzhiyun {
5175*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
5176*4882a593Smuzhiyun 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
5177*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5178*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
5179*4882a593Smuzhiyun 	int i, ret;
5180*4882a593Smuzhiyun 	u32 threshold;
5181*4882a593Smuzhiyun 	u32 sclk_in_sr = 1350; /* ??? */
5182*4882a593Smuzhiyun 
5183*4882a593Smuzhiyun 	if (state->performance_level_count > SISLANDS_MAX_HARDWARE_POWERLEVELS)
5184*4882a593Smuzhiyun 		return -EINVAL;
5185*4882a593Smuzhiyun 
5186*4882a593Smuzhiyun 	threshold = state->performance_levels[state->performance_level_count-1].sclk * 100 / 100;
5187*4882a593Smuzhiyun 
5188*4882a593Smuzhiyun 	if (radeon_state->vclk && radeon_state->dclk) {
5189*4882a593Smuzhiyun 		eg_pi->uvd_enabled = true;
5190*4882a593Smuzhiyun 		if (eg_pi->smu_uvd_hs)
5191*4882a593Smuzhiyun 			smc_state->flags |= PPSMC_SWSTATE_FLAG_UVD;
5192*4882a593Smuzhiyun 	} else {
5193*4882a593Smuzhiyun 		eg_pi->uvd_enabled = false;
5194*4882a593Smuzhiyun 	}
5195*4882a593Smuzhiyun 
5196*4882a593Smuzhiyun 	if (state->dc_compatible)
5197*4882a593Smuzhiyun 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
5198*4882a593Smuzhiyun 
5199*4882a593Smuzhiyun 	smc_state->levelCount = 0;
5200*4882a593Smuzhiyun 	for (i = 0; i < state->performance_level_count; i++) {
5201*4882a593Smuzhiyun 		if (eg_pi->sclk_deep_sleep) {
5202*4882a593Smuzhiyun 			if ((i == 0) || si_pi->sclk_deep_sleep_above_low) {
5203*4882a593Smuzhiyun 				if (sclk_in_sr <= SCLK_MIN_DEEPSLEEP_FREQ)
5204*4882a593Smuzhiyun 					smc_state->levels[i].stateFlags |= PPSMC_STATEFLAG_DEEPSLEEP_BYPASS;
5205*4882a593Smuzhiyun 				else
5206*4882a593Smuzhiyun 					smc_state->levels[i].stateFlags |= PPSMC_STATEFLAG_DEEPSLEEP_THROTTLE;
5207*4882a593Smuzhiyun 			}
5208*4882a593Smuzhiyun 		}
5209*4882a593Smuzhiyun 
5210*4882a593Smuzhiyun 		ret = si_convert_power_level_to_smc(rdev, &state->performance_levels[i],
5211*4882a593Smuzhiyun 						    &smc_state->levels[i]);
5212*4882a593Smuzhiyun 		smc_state->levels[i].arbRefreshState =
5213*4882a593Smuzhiyun 			(u8)(SISLANDS_DRIVER_STATE_ARB_INDEX + i);
5214*4882a593Smuzhiyun 
5215*4882a593Smuzhiyun 		if (ret)
5216*4882a593Smuzhiyun 			return ret;
5217*4882a593Smuzhiyun 
5218*4882a593Smuzhiyun 		if (ni_pi->enable_power_containment)
5219*4882a593Smuzhiyun 			smc_state->levels[i].displayWatermark =
5220*4882a593Smuzhiyun 				(state->performance_levels[i].sclk < threshold) ?
5221*4882a593Smuzhiyun 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
5222*4882a593Smuzhiyun 		else
5223*4882a593Smuzhiyun 			smc_state->levels[i].displayWatermark = (i < 2) ?
5224*4882a593Smuzhiyun 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
5225*4882a593Smuzhiyun 
5226*4882a593Smuzhiyun 		if (eg_pi->dynamic_ac_timing)
5227*4882a593Smuzhiyun 			smc_state->levels[i].ACIndex = SISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
5228*4882a593Smuzhiyun 		else
5229*4882a593Smuzhiyun 			smc_state->levels[i].ACIndex = 0;
5230*4882a593Smuzhiyun 
5231*4882a593Smuzhiyun 		smc_state->levelCount++;
5232*4882a593Smuzhiyun 	}
5233*4882a593Smuzhiyun 
5234*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev,
5235*4882a593Smuzhiyun 				   SI_SMC_SOFT_REGISTER_watermark_threshold,
5236*4882a593Smuzhiyun 				   threshold / 512);
5237*4882a593Smuzhiyun 
5238*4882a593Smuzhiyun 	si_populate_smc_sp(rdev, radeon_state, smc_state);
5239*4882a593Smuzhiyun 
5240*4882a593Smuzhiyun 	ret = si_populate_power_containment_values(rdev, radeon_state, smc_state);
5241*4882a593Smuzhiyun 	if (ret)
5242*4882a593Smuzhiyun 		ni_pi->enable_power_containment = false;
5243*4882a593Smuzhiyun 
5244*4882a593Smuzhiyun 	ret = si_populate_sq_ramping_values(rdev, radeon_state, smc_state);
5245*4882a593Smuzhiyun 	if (ret)
5246*4882a593Smuzhiyun 		ni_pi->enable_sq_ramping = false;
5247*4882a593Smuzhiyun 
5248*4882a593Smuzhiyun 	return si_populate_smc_t(rdev, radeon_state, smc_state);
5249*4882a593Smuzhiyun }
5250*4882a593Smuzhiyun 
5251*4882a593Smuzhiyun static int si_upload_sw_state(struct radeon_device *rdev,
5252*4882a593Smuzhiyun 			      struct radeon_ps *radeon_new_state)
5253*4882a593Smuzhiyun {
5254*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5255*4882a593Smuzhiyun 	struct ni_ps *new_state = ni_get_ps(radeon_new_state);
5256*4882a593Smuzhiyun 	int ret;
5257*4882a593Smuzhiyun 	u32 address = si_pi->state_table_start +
5258*4882a593Smuzhiyun 		offsetof(SISLANDS_SMC_STATETABLE, driverState);
5259*4882a593Smuzhiyun 	u32 state_size = sizeof(SISLANDS_SMC_SWSTATE) +
5260*4882a593Smuzhiyun 		((new_state->performance_level_count - 1) *
5261*4882a593Smuzhiyun 		 sizeof(SISLANDS_SMC_HW_PERFORMANCE_LEVEL));
5262*4882a593Smuzhiyun 	SISLANDS_SMC_SWSTATE *smc_state = &si_pi->smc_statetable.driverState;
5263*4882a593Smuzhiyun 
5264*4882a593Smuzhiyun 	memset(smc_state, 0, state_size);
5265*4882a593Smuzhiyun 
5266*4882a593Smuzhiyun 	ret = si_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
5267*4882a593Smuzhiyun 	if (ret)
5268*4882a593Smuzhiyun 		return ret;
5269*4882a593Smuzhiyun 
5270*4882a593Smuzhiyun 	ret = si_copy_bytes_to_smc(rdev, address, (u8 *)smc_state,
5271*4882a593Smuzhiyun 				   state_size, si_pi->sram_end);
5272*4882a593Smuzhiyun 
5273*4882a593Smuzhiyun 	return ret;
5274*4882a593Smuzhiyun }
5275*4882a593Smuzhiyun 
5276*4882a593Smuzhiyun static int si_upload_ulv_state(struct radeon_device *rdev)
5277*4882a593Smuzhiyun {
5278*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5279*4882a593Smuzhiyun 	struct si_ulv_param *ulv = &si_pi->ulv;
5280*4882a593Smuzhiyun 	int ret = 0;
5281*4882a593Smuzhiyun 
5282*4882a593Smuzhiyun 	if (ulv->supported && ulv->pl.vddc) {
5283*4882a593Smuzhiyun 		u32 address = si_pi->state_table_start +
5284*4882a593Smuzhiyun 			offsetof(SISLANDS_SMC_STATETABLE, ULVState);
5285*4882a593Smuzhiyun 		SISLANDS_SMC_SWSTATE *smc_state = &si_pi->smc_statetable.ULVState;
5286*4882a593Smuzhiyun 		u32 state_size = sizeof(SISLANDS_SMC_SWSTATE);
5287*4882a593Smuzhiyun 
5288*4882a593Smuzhiyun 		memset(smc_state, 0, state_size);
5289*4882a593Smuzhiyun 
5290*4882a593Smuzhiyun 		ret = si_populate_ulv_state(rdev, smc_state);
5291*4882a593Smuzhiyun 		if (!ret)
5292*4882a593Smuzhiyun 			ret = si_copy_bytes_to_smc(rdev, address, (u8 *)smc_state,
5293*4882a593Smuzhiyun 						   state_size, si_pi->sram_end);
5294*4882a593Smuzhiyun 	}
5295*4882a593Smuzhiyun 
5296*4882a593Smuzhiyun 	return ret;
5297*4882a593Smuzhiyun }
5298*4882a593Smuzhiyun 
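/*
 * Push per-CRTC timing hints to the SMC soft registers: the index of the
 * first active CRTC and the mclk-change block window derived from its
 * high/low watermarks divided by the line time.  Failures are ignored and
 * the function always returns 0.
 */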
5299*4882a593Smuzhiyun static int si_upload_smc_data(struct radeon_device *rdev)
5300*4882a593Smuzhiyun {
5301*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = NULL;
5302*4882a593Smuzhiyun 	int i;
5303*4882a593Smuzhiyun 
5304*4882a593Smuzhiyun 	if (rdev->pm.dpm.new_active_crtc_count == 0)
5305*4882a593Smuzhiyun 		return 0;
5306*4882a593Smuzhiyun 
5307*4882a593Smuzhiyun 	for (i = 0; i < rdev->num_crtc; i++) {
5308*4882a593Smuzhiyun 		if (rdev->pm.dpm.new_active_crtcs & (1 << i)) {
5309*4882a593Smuzhiyun 			radeon_crtc = rdev->mode_info.crtcs[i];
5310*4882a593Smuzhiyun 			break;
5311*4882a593Smuzhiyun 		}
5312*4882a593Smuzhiyun 	}
5313*4882a593Smuzhiyun 
5314*4882a593Smuzhiyun 	if (radeon_crtc == NULL)
5315*4882a593Smuzhiyun 		return 0;
5316*4882a593Smuzhiyun 
5317*4882a593Smuzhiyun 	if (radeon_crtc->line_time <= 0)
5318*4882a593Smuzhiyun 		return 0;
5319*4882a593Smuzhiyun 
5320*4882a593Smuzhiyun 	if (si_write_smc_soft_register(rdev,
5321*4882a593Smuzhiyun 				       SI_SMC_SOFT_REGISTER_crtc_index,
5322*4882a593Smuzhiyun 				       radeon_crtc->crtc_id) != PPSMC_Result_OK)
5323*4882a593Smuzhiyun 		return 0;
5324*4882a593Smuzhiyun 
5325*4882a593Smuzhiyun 	if (si_write_smc_soft_register(rdev,
5326*4882a593Smuzhiyun 				       SI_SMC_SOFT_REGISTER_mclk_change_block_cp_min,
5327*4882a593Smuzhiyun 				       radeon_crtc->wm_high / radeon_crtc->line_time) != PPSMC_Result_OK)
5328*4882a593Smuzhiyun 		return 0;
5329*4882a593Smuzhiyun 
5330*4882a593Smuzhiyun 	if (si_write_smc_soft_register(rdev,
5331*4882a593Smuzhiyun 				       SI_SMC_SOFT_REGISTER_mclk_change_block_cp_max,
5332*4882a593Smuzhiyun 				       radeon_crtc->wm_low / radeon_crtc->line_time) != PPSMC_Result_OK)
5333*4882a593Smuzhiyun 		return 0;
5334*4882a593Smuzhiyun 
5335*4882a593Smuzhiyun 	return 0;
5336*4882a593Smuzhiyun }
5337*4882a593Smuzhiyun 
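/*
 * Append derived entries to the MC register table: MC_SEQ_MISC1 yields
 * EMRS/MRS (and, for non-GDDR5 memory, MC_PMG_AUTO_CMD) command values
 * built from the current MC_PMG_CMD_* registers and the existing table
 * data, while MC_SEQ_RESERVE_M yields the MRS1 command entry.  Returns
 * -EINVAL if the table would exceed SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE.
 */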
5338*4882a593Smuzhiyun static int si_set_mc_special_registers(struct radeon_device *rdev,
5339*4882a593Smuzhiyun 				       struct si_mc_reg_table *table)
5340*4882a593Smuzhiyun {
5341*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
5342*4882a593Smuzhiyun 	u8 i, j, k;
5343*4882a593Smuzhiyun 	u32 temp_reg;
5344*4882a593Smuzhiyun 
5345*4882a593Smuzhiyun 	for (i = 0, j = table->last; i < table->last; i++) {
5346*4882a593Smuzhiyun 		if (j >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5347*4882a593Smuzhiyun 			return -EINVAL;
5348*4882a593Smuzhiyun 		switch (table->mc_reg_address[i].s1 << 2) {
5349*4882a593Smuzhiyun 		case MC_SEQ_MISC1:
5350*4882a593Smuzhiyun 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
5351*4882a593Smuzhiyun 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
5352*4882a593Smuzhiyun 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
5353*4882a593Smuzhiyun 			for (k = 0; k < table->num_entries; k++)
5354*4882a593Smuzhiyun 				table->mc_reg_table_entry[k].mc_data[j] =
5355*4882a593Smuzhiyun 					((temp_reg & 0xffff0000)) |
5356*4882a593Smuzhiyun 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
5357*4882a593Smuzhiyun 			j++;
5358*4882a593Smuzhiyun 			if (j >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5359*4882a593Smuzhiyun 				return -EINVAL;
5360*4882a593Smuzhiyun 
5361*4882a593Smuzhiyun 			temp_reg = RREG32(MC_PMG_CMD_MRS);
5362*4882a593Smuzhiyun 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
5363*4882a593Smuzhiyun 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
5364*4882a593Smuzhiyun 			for (k = 0; k < table->num_entries; k++) {
5365*4882a593Smuzhiyun 				table->mc_reg_table_entry[k].mc_data[j] =
5366*4882a593Smuzhiyun 					(temp_reg & 0xffff0000) |
5367*4882a593Smuzhiyun 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
5368*4882a593Smuzhiyun 				if (!pi->mem_gddr5)
5369*4882a593Smuzhiyun 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
5370*4882a593Smuzhiyun 			}
5371*4882a593Smuzhiyun 			j++;
5372*4882a593Smuzhiyun 			if (j >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5373*4882a593Smuzhiyun 				return -EINVAL;
5374*4882a593Smuzhiyun 
5375*4882a593Smuzhiyun 			if (!pi->mem_gddr5) {
5376*4882a593Smuzhiyun 				table->mc_reg_address[j].s1 = MC_PMG_AUTO_CMD >> 2;
5377*4882a593Smuzhiyun 				table->mc_reg_address[j].s0 = MC_PMG_AUTO_CMD >> 2;
5378*4882a593Smuzhiyun 				for (k = 0; k < table->num_entries; k++)
5379*4882a593Smuzhiyun 					table->mc_reg_table_entry[k].mc_data[j] =
5380*4882a593Smuzhiyun 						(table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16;
5381*4882a593Smuzhiyun 				j++;
5382*4882a593Smuzhiyun 				if (j >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5383*4882a593Smuzhiyun 					return -EINVAL;
5384*4882a593Smuzhiyun 			}
5385*4882a593Smuzhiyun 			break;
5386*4882a593Smuzhiyun 		case MC_SEQ_RESERVE_M:
5387*4882a593Smuzhiyun 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
5388*4882a593Smuzhiyun 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
5389*4882a593Smuzhiyun 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
5390*4882a593Smuzhiyun 			for(k = 0; k < table->num_entries; k++)
5391*4882a593Smuzhiyun 				table->mc_reg_table_entry[k].mc_data[j] =
5392*4882a593Smuzhiyun 					(temp_reg & 0xffff0000) |
5393*4882a593Smuzhiyun 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
5394*4882a593Smuzhiyun 			j++;
5395*4882a593Smuzhiyun 			if (j >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5396*4882a593Smuzhiyun 				return -EINVAL;
5397*4882a593Smuzhiyun 			break;
5398*4882a593Smuzhiyun 		default:
5399*4882a593Smuzhiyun 			break;
5400*4882a593Smuzhiyun 		}
5401*4882a593Smuzhiyun 	}
5402*4882a593Smuzhiyun 
5403*4882a593Smuzhiyun 	table->last = j;
5404*4882a593Smuzhiyun 
5405*4882a593Smuzhiyun 	return 0;
5406*4882a593Smuzhiyun }
5407*4882a593Smuzhiyun 
5408*4882a593Smuzhiyun static bool si_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
5409*4882a593Smuzhiyun {
5410*4882a593Smuzhiyun 	bool result = true;
5411*4882a593Smuzhiyun 
5412*4882a593Smuzhiyun 	switch (in_reg) {
5413*4882a593Smuzhiyun 	case  MC_SEQ_RAS_TIMING >> 2:
5414*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
5415*4882a593Smuzhiyun 		break;
5416*4882a593Smuzhiyun 	case MC_SEQ_CAS_TIMING >> 2:
5417*4882a593Smuzhiyun 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
5418*4882a593Smuzhiyun 		break;
5419*4882a593Smuzhiyun 	case MC_SEQ_MISC_TIMING >> 2:
5420*4882a593Smuzhiyun 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
5421*4882a593Smuzhiyun 		break;
5422*4882a593Smuzhiyun 	case MC_SEQ_MISC_TIMING2 >> 2:
5423*4882a593Smuzhiyun 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
5424*4882a593Smuzhiyun 		break;
5425*4882a593Smuzhiyun 	case MC_SEQ_RD_CTL_D0 >> 2:
5426*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
5427*4882a593Smuzhiyun 		break;
5428*4882a593Smuzhiyun 	case MC_SEQ_RD_CTL_D1 >> 2:
5429*4882a593Smuzhiyun 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
5430*4882a593Smuzhiyun 		break;
5431*4882a593Smuzhiyun 	case MC_SEQ_WR_CTL_D0 >> 2:
5432*4882a593Smuzhiyun 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
5433*4882a593Smuzhiyun 		break;
5434*4882a593Smuzhiyun 	case MC_SEQ_WR_CTL_D1 >> 2:
5435*4882a593Smuzhiyun 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
5436*4882a593Smuzhiyun 		break;
5437*4882a593Smuzhiyun 	case MC_PMG_CMD_EMRS >> 2:
5438*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
5439*4882a593Smuzhiyun 		break;
5440*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS >> 2:
5441*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
5442*4882a593Smuzhiyun 		break;
5443*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS1 >> 2:
5444*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
5445*4882a593Smuzhiyun 		break;
5446*4882a593Smuzhiyun 	case MC_SEQ_PMG_TIMING >> 2:
5447*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
5448*4882a593Smuzhiyun 		break;
5449*4882a593Smuzhiyun 	case MC_PMG_CMD_MRS2 >> 2:
5450*4882a593Smuzhiyun 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
5451*4882a593Smuzhiyun 		break;
5452*4882a593Smuzhiyun 	case MC_SEQ_WR_CTL_2 >> 2:
5453*4882a593Smuzhiyun 		*out_reg = MC_SEQ_WR_CTL_2_LP >> 2;
5454*4882a593Smuzhiyun 		break;
5455*4882a593Smuzhiyun 	default:
5456*4882a593Smuzhiyun 		result = false;
5457*4882a593Smuzhiyun 		break;
5458*4882a593Smuzhiyun 	}
5459*4882a593Smuzhiyun 
5460*4882a593Smuzhiyun 	return result;
5461*4882a593Smuzhiyun }
5462*4882a593Smuzhiyun 
5463*4882a593Smuzhiyun static void si_set_valid_flag(struct si_mc_reg_table *table)
5464*4882a593Smuzhiyun {
5465*4882a593Smuzhiyun 	u8 i, j;
5466*4882a593Smuzhiyun 
5467*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++) {
5468*4882a593Smuzhiyun 		for (j = 1; j < table->num_entries; j++) {
5469*4882a593Smuzhiyun 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
5470*4882a593Smuzhiyun 				table->valid_flag |= 1 << i;
5471*4882a593Smuzhiyun 				break;
5472*4882a593Smuzhiyun 			}
5473*4882a593Smuzhiyun 		}
5474*4882a593Smuzhiyun 	}
5475*4882a593Smuzhiyun }
5476*4882a593Smuzhiyun 
5477*4882a593Smuzhiyun static void si_set_s0_mc_reg_index(struct si_mc_reg_table *table)
5478*4882a593Smuzhiyun {
5479*4882a593Smuzhiyun 	u32 i;
5480*4882a593Smuzhiyun 	u16 address;
5481*4882a593Smuzhiyun 
5482*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++)
5483*4882a593Smuzhiyun 		table->mc_reg_address[i].s0 = si_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
5484*4882a593Smuzhiyun 			address : table->mc_reg_address[i].s1;
5485*4882a593Smuzhiyun 
5486*4882a593Smuzhiyun }
5487*4882a593Smuzhiyun 
5488*4882a593Smuzhiyun static int si_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
5489*4882a593Smuzhiyun 				      struct si_mc_reg_table *si_table)
5490*4882a593Smuzhiyun {
5491*4882a593Smuzhiyun 	u8 i, j;
5492*4882a593Smuzhiyun 
5493*4882a593Smuzhiyun 	if (table->last > SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5494*4882a593Smuzhiyun 		return -EINVAL;
5495*4882a593Smuzhiyun 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
5496*4882a593Smuzhiyun 		return -EINVAL;
5497*4882a593Smuzhiyun 
5498*4882a593Smuzhiyun 	for (i = 0; i < table->last; i++)
5499*4882a593Smuzhiyun 		si_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
5500*4882a593Smuzhiyun 	si_table->last = table->last;
5501*4882a593Smuzhiyun 
5502*4882a593Smuzhiyun 	for (i = 0; i < table->num_entries; i++) {
5503*4882a593Smuzhiyun 		si_table->mc_reg_table_entry[i].mclk_max =
5504*4882a593Smuzhiyun 			table->mc_reg_table_entry[i].mclk_max;
5505*4882a593Smuzhiyun 		for (j = 0; j < table->last; j++) {
5506*4882a593Smuzhiyun 			si_table->mc_reg_table_entry[i].mc_data[j] =
5507*4882a593Smuzhiyun 				table->mc_reg_table_entry[i].mc_data[j];
5508*4882a593Smuzhiyun 		}
5509*4882a593Smuzhiyun 	}
5510*4882a593Smuzhiyun 	si_table->num_entries = table->num_entries;
5511*4882a593Smuzhiyun 
5512*4882a593Smuzhiyun 	return 0;
5513*4882a593Smuzhiyun }
5514*4882a593Smuzhiyun 
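/*
 * Build the driver's MC register table: mirror the live MC sequencer
 * registers into their _LP shadows, read the VBIOS MC register table for
 * the installed memory module via ATOM, copy it, remap the s0 addresses to
 * the shadow registers, add the special command entries, and flag the
 * columns whose values actually differ between entries.
 */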
5515*4882a593Smuzhiyun static int si_initialize_mc_reg_table(struct radeon_device *rdev)
5516*4882a593Smuzhiyun {
5517*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5518*4882a593Smuzhiyun 	struct atom_mc_reg_table *table;
5519*4882a593Smuzhiyun 	struct si_mc_reg_table *si_table = &si_pi->mc_reg_table;
5520*4882a593Smuzhiyun 	u8 module_index = rv770_get_memory_module_index(rdev);
5521*4882a593Smuzhiyun 	int ret;
5522*4882a593Smuzhiyun 
5523*4882a593Smuzhiyun 	table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
5524*4882a593Smuzhiyun 	if (!table)
5525*4882a593Smuzhiyun 		return -ENOMEM;
5526*4882a593Smuzhiyun 
5527*4882a593Smuzhiyun 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
5528*4882a593Smuzhiyun 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
5529*4882a593Smuzhiyun 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
5530*4882a593Smuzhiyun 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
5531*4882a593Smuzhiyun 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
5532*4882a593Smuzhiyun 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
5533*4882a593Smuzhiyun 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
5534*4882a593Smuzhiyun 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
5535*4882a593Smuzhiyun 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
5536*4882a593Smuzhiyun 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
5537*4882a593Smuzhiyun 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
5538*4882a593Smuzhiyun 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
5539*4882a593Smuzhiyun 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
5540*4882a593Smuzhiyun 	WREG32(MC_SEQ_WR_CTL_2_LP, RREG32(MC_SEQ_WR_CTL_2));
5541*4882a593Smuzhiyun 
5542*4882a593Smuzhiyun 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
5543*4882a593Smuzhiyun 	if (ret)
5544*4882a593Smuzhiyun 		goto init_mc_done;
5545*4882a593Smuzhiyun 
5546*4882a593Smuzhiyun 	ret = si_copy_vbios_mc_reg_table(table, si_table);
5547*4882a593Smuzhiyun 	if (ret)
5548*4882a593Smuzhiyun 		goto init_mc_done;
5549*4882a593Smuzhiyun 
5550*4882a593Smuzhiyun 	si_set_s0_mc_reg_index(si_table);
5551*4882a593Smuzhiyun 
5552*4882a593Smuzhiyun 	ret = si_set_mc_special_registers(rdev, si_table);
5553*4882a593Smuzhiyun 	if (ret)
5554*4882a593Smuzhiyun 		goto init_mc_done;
5555*4882a593Smuzhiyun 
5556*4882a593Smuzhiyun 	si_set_valid_flag(si_table);
5557*4882a593Smuzhiyun 
5558*4882a593Smuzhiyun init_mc_done:
5559*4882a593Smuzhiyun 	kfree(table);
5560*4882a593Smuzhiyun 
5561*4882a593Smuzhiyun 	return ret;
5562*4882a593Smuzhiyun 
5563*4882a593Smuzhiyun }
5564*4882a593Smuzhiyun 
5565*4882a593Smuzhiyun static void si_populate_mc_reg_addresses(struct radeon_device *rdev,
5566*4882a593Smuzhiyun 					 SMC_SIslands_MCRegisters *mc_reg_table)
5567*4882a593Smuzhiyun {
5568*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5569*4882a593Smuzhiyun 	u32 i, j;
5570*4882a593Smuzhiyun 
5571*4882a593Smuzhiyun 	for (i = 0, j = 0; j < si_pi->mc_reg_table.last; j++) {
5572*4882a593Smuzhiyun 		if (si_pi->mc_reg_table.valid_flag & (1 << j)) {
5573*4882a593Smuzhiyun 			if (i >= SMC_SISLANDS_MC_REGISTER_ARRAY_SIZE)
5574*4882a593Smuzhiyun 				break;
5575*4882a593Smuzhiyun 			mc_reg_table->address[i].s0 =
5576*4882a593Smuzhiyun 				cpu_to_be16(si_pi->mc_reg_table.mc_reg_address[j].s0);
5577*4882a593Smuzhiyun 			mc_reg_table->address[i].s1 =
5578*4882a593Smuzhiyun 				cpu_to_be16(si_pi->mc_reg_table.mc_reg_address[j].s1);
5579*4882a593Smuzhiyun 			i++;
5580*4882a593Smuzhiyun 		}
5581*4882a593Smuzhiyun 	}
5582*4882a593Smuzhiyun 	mc_reg_table->last = (u8)i;
5583*4882a593Smuzhiyun }
5584*4882a593Smuzhiyun 
5585*4882a593Smuzhiyun static void si_convert_mc_registers(const struct si_mc_reg_entry *entry,
5586*4882a593Smuzhiyun 				    SMC_SIslands_MCRegisterSet *data,
5587*4882a593Smuzhiyun 				    u32 num_entries, u32 valid_flag)
5588*4882a593Smuzhiyun {
5589*4882a593Smuzhiyun 	u32 i, j;
5590*4882a593Smuzhiyun 
5591*4882a593Smuzhiyun 	for(i = 0, j = 0; j < num_entries; j++) {
5592*4882a593Smuzhiyun 		if (valid_flag & (1 << j)) {
5593*4882a593Smuzhiyun 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
5594*4882a593Smuzhiyun 			i++;
5595*4882a593Smuzhiyun 		}
5596*4882a593Smuzhiyun 	}
5597*4882a593Smuzhiyun }
5598*4882a593Smuzhiyun 
5599*4882a593Smuzhiyun static void si_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
5600*4882a593Smuzhiyun 						 struct rv7xx_pl *pl,
5601*4882a593Smuzhiyun 						 SMC_SIslands_MCRegisterSet *mc_reg_table_data)
5602*4882a593Smuzhiyun {
5603*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5604*4882a593Smuzhiyun 	u32 i = 0;
5605*4882a593Smuzhiyun 
5606*4882a593Smuzhiyun 	for (i = 0; i < si_pi->mc_reg_table.num_entries; i++) {
5607*4882a593Smuzhiyun 		if (pl->mclk <= si_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
5608*4882a593Smuzhiyun 			break;
5609*4882a593Smuzhiyun 	}
5610*4882a593Smuzhiyun 
5611*4882a593Smuzhiyun 	if ((i == si_pi->mc_reg_table.num_entries) && (i > 0))
5612*4882a593Smuzhiyun 		--i;
5613*4882a593Smuzhiyun 
5614*4882a593Smuzhiyun 	si_convert_mc_registers(&si_pi->mc_reg_table.mc_reg_table_entry[i],
5615*4882a593Smuzhiyun 				mc_reg_table_data, si_pi->mc_reg_table.last,
5616*4882a593Smuzhiyun 				si_pi->mc_reg_table.valid_flag);
5617*4882a593Smuzhiyun }
5618*4882a593Smuzhiyun 
5619*4882a593Smuzhiyun static void si_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
5620*4882a593Smuzhiyun 					   struct radeon_ps *radeon_state,
5621*4882a593Smuzhiyun 					   SMC_SIslands_MCRegisters *mc_reg_table)
5622*4882a593Smuzhiyun {
5623*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
5624*4882a593Smuzhiyun 	int i;
5625*4882a593Smuzhiyun 
5626*4882a593Smuzhiyun 	for (i = 0; i < state->performance_level_count; i++) {
5627*4882a593Smuzhiyun 		si_convert_mc_reg_table_entry_to_smc(rdev,
5628*4882a593Smuzhiyun 						     &state->performance_levels[i],
5629*4882a593Smuzhiyun 						     &mc_reg_table->data[SISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
5630*4882a593Smuzhiyun 	}
5631*4882a593Smuzhiyun }
5632*4882a593Smuzhiyun 
5633*4882a593Smuzhiyun static int si_populate_mc_reg_table(struct radeon_device *rdev,
5634*4882a593Smuzhiyun 				    struct radeon_ps *radeon_boot_state)
5635*4882a593Smuzhiyun {
5636*4882a593Smuzhiyun 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
5637*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5638*4882a593Smuzhiyun 	struct si_ulv_param *ulv = &si_pi->ulv;
5639*4882a593Smuzhiyun 	SMC_SIslands_MCRegisters *smc_mc_reg_table = &si_pi->smc_mc_reg_table;
5640*4882a593Smuzhiyun 
5641*4882a593Smuzhiyun 	memset(smc_mc_reg_table, 0, sizeof(SMC_SIslands_MCRegisters));
5642*4882a593Smuzhiyun 
5643*4882a593Smuzhiyun 	si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_seq_index, 1);
5644*4882a593Smuzhiyun 
5645*4882a593Smuzhiyun 	si_populate_mc_reg_addresses(rdev, smc_mc_reg_table);
5646*4882a593Smuzhiyun 
5647*4882a593Smuzhiyun 	si_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
5648*4882a593Smuzhiyun 					     &smc_mc_reg_table->data[SISLANDS_MCREGISTERTABLE_INITIAL_SLOT]);
5649*4882a593Smuzhiyun 
5650*4882a593Smuzhiyun 	si_convert_mc_registers(&si_pi->mc_reg_table.mc_reg_table_entry[0],
5651*4882a593Smuzhiyun 				&smc_mc_reg_table->data[SISLANDS_MCREGISTERTABLE_ACPI_SLOT],
5652*4882a593Smuzhiyun 				si_pi->mc_reg_table.last,
5653*4882a593Smuzhiyun 				si_pi->mc_reg_table.valid_flag);
5654*4882a593Smuzhiyun 
5655*4882a593Smuzhiyun 	if (ulv->supported && ulv->pl.vddc != 0)
5656*4882a593Smuzhiyun 		si_convert_mc_reg_table_entry_to_smc(rdev, &ulv->pl,
5657*4882a593Smuzhiyun 						     &smc_mc_reg_table->data[SISLANDS_MCREGISTERTABLE_ULV_SLOT]);
5658*4882a593Smuzhiyun 	else
5659*4882a593Smuzhiyun 		si_convert_mc_registers(&si_pi->mc_reg_table.mc_reg_table_entry[0],
5660*4882a593Smuzhiyun 					&smc_mc_reg_table->data[SISLANDS_MCREGISTERTABLE_ULV_SLOT],
5661*4882a593Smuzhiyun 					si_pi->mc_reg_table.last,
5662*4882a593Smuzhiyun 					si_pi->mc_reg_table.valid_flag);
5663*4882a593Smuzhiyun 
5664*4882a593Smuzhiyun 	si_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, smc_mc_reg_table);
5665*4882a593Smuzhiyun 
5666*4882a593Smuzhiyun 	return si_copy_bytes_to_smc(rdev, si_pi->mc_reg_table_start,
5667*4882a593Smuzhiyun 				    (u8 *)smc_mc_reg_table,
5668*4882a593Smuzhiyun 				    sizeof(SMC_SIslands_MCRegisters), si_pi->sram_end);
5669*4882a593Smuzhiyun }
5670*4882a593Smuzhiyun 
5671*4882a593Smuzhiyun static int si_upload_mc_reg_table(struct radeon_device *rdev,
5672*4882a593Smuzhiyun 				  struct radeon_ps *radeon_new_state)
5673*4882a593Smuzhiyun {
5674*4882a593Smuzhiyun 	struct ni_ps *new_state = ni_get_ps(radeon_new_state);
5675*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5676*4882a593Smuzhiyun 	u32 address = si_pi->mc_reg_table_start +
5677*4882a593Smuzhiyun 		offsetof(SMC_SIslands_MCRegisters,
5678*4882a593Smuzhiyun 			 data[SISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
5679*4882a593Smuzhiyun 	SMC_SIslands_MCRegisters *smc_mc_reg_table = &si_pi->smc_mc_reg_table;
5680*4882a593Smuzhiyun 
5681*4882a593Smuzhiyun 	memset(smc_mc_reg_table, 0, sizeof(SMC_SIslands_MCRegisters));
5682*4882a593Smuzhiyun 
5683*4882a593Smuzhiyun 	si_convert_mc_reg_table_to_smc(rdev, radeon_new_state, smc_mc_reg_table);
5684*4882a593Smuzhiyun 
5685*4882a593Smuzhiyun 
5686*4882a593Smuzhiyun 	return si_copy_bytes_to_smc(rdev, address,
5687*4882a593Smuzhiyun 				    (u8 *)&smc_mc_reg_table->data[SISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
5688*4882a593Smuzhiyun 				    sizeof(SMC_SIslands_MCRegisterSet) * new_state->performance_level_count,
5689*4882a593Smuzhiyun 				    si_pi->sram_end);
5690*4882a593Smuzhiyun 
5691*4882a593Smuzhiyun }
5692*4882a593Smuzhiyun 
5693*4882a593Smuzhiyun static void si_enable_voltage_control(struct radeon_device *rdev, bool enable)
5694*4882a593Smuzhiyun {
5695*4882a593Smuzhiyun 	if (enable)
5696*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
5697*4882a593Smuzhiyun 	else
5698*4882a593Smuzhiyun 		WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
5699*4882a593Smuzhiyun }
5700*4882a593Smuzhiyun 
5701*4882a593Smuzhiyun static enum radeon_pcie_gen si_get_maximum_link_speed(struct radeon_device *rdev,
5702*4882a593Smuzhiyun 						      struct radeon_ps *radeon_state)
5703*4882a593Smuzhiyun {
5704*4882a593Smuzhiyun 	struct ni_ps *state = ni_get_ps(radeon_state);
5705*4882a593Smuzhiyun 	int i;
5706*4882a593Smuzhiyun 	u16 pcie_speed, max_speed = 0;
5707*4882a593Smuzhiyun 
5708*4882a593Smuzhiyun 	for (i = 0; i < state->performance_level_count; i++) {
5709*4882a593Smuzhiyun 		pcie_speed = state->performance_levels[i].pcie_gen;
5710*4882a593Smuzhiyun 		if (max_speed < pcie_speed)
5711*4882a593Smuzhiyun 			max_speed = pcie_speed;
5712*4882a593Smuzhiyun 	}
5713*4882a593Smuzhiyun 	return max_speed;
5714*4882a593Smuzhiyun }
5715*4882a593Smuzhiyun 
5716*4882a593Smuzhiyun static u16 si_get_current_pcie_speed(struct radeon_device *rdev)
5717*4882a593Smuzhiyun {
5718*4882a593Smuzhiyun 	u32 speed_cntl;
5719*4882a593Smuzhiyun 
5720*4882a593Smuzhiyun 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL) & LC_CURRENT_DATA_RATE_MASK;
5721*4882a593Smuzhiyun 	speed_cntl >>= LC_CURRENT_DATA_RATE_SHIFT;
5722*4882a593Smuzhiyun 
5723*4882a593Smuzhiyun 	return (u16)speed_cntl;
5724*4882a593Smuzhiyun }
5725*4882a593Smuzhiyun 
5726*4882a593Smuzhiyun static void si_request_link_speed_change_before_state_change(struct radeon_device *rdev,
5727*4882a593Smuzhiyun 							     struct radeon_ps *radeon_new_state,
5728*4882a593Smuzhiyun 							     struct radeon_ps *radeon_current_state)
5729*4882a593Smuzhiyun {
5730*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5731*4882a593Smuzhiyun 	enum radeon_pcie_gen target_link_speed = si_get_maximum_link_speed(rdev, radeon_new_state);
5732*4882a593Smuzhiyun 	enum radeon_pcie_gen current_link_speed;
5733*4882a593Smuzhiyun 
5734*4882a593Smuzhiyun 	if (si_pi->force_pcie_gen == RADEON_PCIE_GEN_INVALID)
5735*4882a593Smuzhiyun 		current_link_speed = si_get_maximum_link_speed(rdev, radeon_current_state);
5736*4882a593Smuzhiyun 	else
5737*4882a593Smuzhiyun 		current_link_speed = si_pi->force_pcie_gen;
5738*4882a593Smuzhiyun 
5739*4882a593Smuzhiyun 	si_pi->force_pcie_gen = RADEON_PCIE_GEN_INVALID;
5740*4882a593Smuzhiyun 	si_pi->pspp_notify_required = false;
5741*4882a593Smuzhiyun 	if (target_link_speed > current_link_speed) {
5742*4882a593Smuzhiyun 		switch (target_link_speed) {
5743*4882a593Smuzhiyun #if defined(CONFIG_ACPI)
5744*4882a593Smuzhiyun 		case RADEON_PCIE_GEN3:
5745*4882a593Smuzhiyun 			if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN3, false) == 0)
5746*4882a593Smuzhiyun 				break;
5747*4882a593Smuzhiyun 			si_pi->force_pcie_gen = RADEON_PCIE_GEN2;
5748*4882a593Smuzhiyun 			if (current_link_speed == RADEON_PCIE_GEN2)
5749*4882a593Smuzhiyun 				break;
5750*4882a593Smuzhiyun 			fallthrough;
5751*4882a593Smuzhiyun 		case RADEON_PCIE_GEN2:
5752*4882a593Smuzhiyun 			if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, false) == 0)
5753*4882a593Smuzhiyun 				break;
5754*4882a593Smuzhiyun #endif
5755*4882a593Smuzhiyun 			/* fall through */
5756*4882a593Smuzhiyun 		default:
5757*4882a593Smuzhiyun 			si_pi->force_pcie_gen = si_get_current_pcie_speed(rdev);
5758*4882a593Smuzhiyun 			break;
5759*4882a593Smuzhiyun 		}
5760*4882a593Smuzhiyun 	} else {
5761*4882a593Smuzhiyun 		if (target_link_speed < current_link_speed)
5762*4882a593Smuzhiyun 			si_pi->pspp_notify_required = true;
5763*4882a593Smuzhiyun 	}
5764*4882a593Smuzhiyun }
5765*4882a593Smuzhiyun 
5766*4882a593Smuzhiyun static void si_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
5767*4882a593Smuzhiyun 							   struct radeon_ps *radeon_new_state,
5768*4882a593Smuzhiyun 							   struct radeon_ps *radeon_current_state)
5769*4882a593Smuzhiyun {
5770*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5771*4882a593Smuzhiyun 	enum radeon_pcie_gen target_link_speed = si_get_maximum_link_speed(rdev, radeon_new_state);
5772*4882a593Smuzhiyun 	u8 request;
5773*4882a593Smuzhiyun 
5774*4882a593Smuzhiyun 	if (si_pi->pspp_notify_required) {
5775*4882a593Smuzhiyun 		if (target_link_speed == RADEON_PCIE_GEN3)
5776*4882a593Smuzhiyun 			request = PCIE_PERF_REQ_PECI_GEN3;
5777*4882a593Smuzhiyun 		else if (target_link_speed == RADEON_PCIE_GEN2)
5778*4882a593Smuzhiyun 			request = PCIE_PERF_REQ_PECI_GEN2;
5779*4882a593Smuzhiyun 		else
5780*4882a593Smuzhiyun 			request = PCIE_PERF_REQ_PECI_GEN1;
5781*4882a593Smuzhiyun 
5782*4882a593Smuzhiyun 		if ((request == PCIE_PERF_REQ_PECI_GEN1) &&
5783*4882a593Smuzhiyun 		    (si_get_current_pcie_speed(rdev) > 0))
5784*4882a593Smuzhiyun 			return;
5785*4882a593Smuzhiyun 
5786*4882a593Smuzhiyun #if defined(CONFIG_ACPI)
5787*4882a593Smuzhiyun 		radeon_acpi_pcie_performance_request(rdev, request, false);
5788*4882a593Smuzhiyun #endif
5789*4882a593Smuzhiyun 	}
5790*4882a593Smuzhiyun }
5791*4882a593Smuzhiyun 
5792*4882a593Smuzhiyun #if 0
5793*4882a593Smuzhiyun static int si_ds_request(struct radeon_device *rdev,
5794*4882a593Smuzhiyun 			 bool ds_status_on, u32 count_write)
5795*4882a593Smuzhiyun {
5796*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
5797*4882a593Smuzhiyun 
5798*4882a593Smuzhiyun 	if (eg_pi->sclk_deep_sleep) {
5799*4882a593Smuzhiyun 		if (ds_status_on)
5800*4882a593Smuzhiyun 			return (si_send_msg_to_smc(rdev, PPSMC_MSG_CancelThrottleOVRDSCLKDS) ==
5801*4882a593Smuzhiyun 				PPSMC_Result_OK) ?
5802*4882a593Smuzhiyun 				0 : -EINVAL;
5803*4882a593Smuzhiyun 		else
5804*4882a593Smuzhiyun 			return (si_send_msg_to_smc(rdev, PPSMC_MSG_ThrottleOVRDSCLKDS) ==
5805*4882a593Smuzhiyun 				PPSMC_Result_OK) ? 0 : -EINVAL;
5806*4882a593Smuzhiyun 	}
5807*4882a593Smuzhiyun 	return 0;
5808*4882a593Smuzhiyun }
5809*4882a593Smuzhiyun #endif
5810*4882a593Smuzhiyun 
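/*
 * Cap the number of powered-up CUs on certain Cape Verde variants (10 or 8
 * depending on the PCI device ID); every other part gets max_cu = 0, which
 * presumably means no CU limit is applied.
 */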
5811*4882a593Smuzhiyun static void si_set_max_cu_value(struct radeon_device *rdev)
5812*4882a593Smuzhiyun {
5813*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5814*4882a593Smuzhiyun 
5815*4882a593Smuzhiyun 	if (rdev->family == CHIP_VERDE) {
5816*4882a593Smuzhiyun 		switch (rdev->pdev->device) {
5817*4882a593Smuzhiyun 		case 0x6820:
5818*4882a593Smuzhiyun 		case 0x6825:
5819*4882a593Smuzhiyun 		case 0x6821:
5820*4882a593Smuzhiyun 		case 0x6823:
5821*4882a593Smuzhiyun 		case 0x6827:
5822*4882a593Smuzhiyun 			si_pi->max_cu = 10;
5823*4882a593Smuzhiyun 			break;
5824*4882a593Smuzhiyun 		case 0x682D:
5825*4882a593Smuzhiyun 		case 0x6824:
5826*4882a593Smuzhiyun 		case 0x682F:
5827*4882a593Smuzhiyun 		case 0x6826:
5828*4882a593Smuzhiyun 			si_pi->max_cu = 8;
5829*4882a593Smuzhiyun 			break;
5830*4882a593Smuzhiyun 		case 0x6828:
5831*4882a593Smuzhiyun 		case 0x6830:
5832*4882a593Smuzhiyun 		case 0x6831:
5833*4882a593Smuzhiyun 		case 0x6838:
5834*4882a593Smuzhiyun 		case 0x6839:
5835*4882a593Smuzhiyun 		case 0x683D:
5836*4882a593Smuzhiyun 			si_pi->max_cu = 10;
5837*4882a593Smuzhiyun 			break;
5838*4882a593Smuzhiyun 		case 0x683B:
5839*4882a593Smuzhiyun 		case 0x683F:
5840*4882a593Smuzhiyun 		case 0x6829:
5841*4882a593Smuzhiyun 			si_pi->max_cu = 8;
5842*4882a593Smuzhiyun 			break;
5843*4882a593Smuzhiyun 		default:
5844*4882a593Smuzhiyun 			si_pi->max_cu = 0;
5845*4882a593Smuzhiyun 			break;
5846*4882a593Smuzhiyun 		}
5847*4882a593Smuzhiyun 	} else {
5848*4882a593Smuzhiyun 		si_pi->max_cu = 0;
5849*4882a593Smuzhiyun 	}
5850*4882a593Smuzhiyun }
5851*4882a593Smuzhiyun 
5852*4882a593Smuzhiyun static int si_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
5853*4882a593Smuzhiyun 							     struct radeon_clock_voltage_dependency_table *table)
5854*4882a593Smuzhiyun {
5855*4882a593Smuzhiyun 	u32 i;
5856*4882a593Smuzhiyun 	int j;
5857*4882a593Smuzhiyun 	u16 leakage_voltage;
5858*4882a593Smuzhiyun 
5859*4882a593Smuzhiyun 	if (table) {
5860*4882a593Smuzhiyun 		for (i = 0; i < table->count; i++) {
5861*4882a593Smuzhiyun 			switch (si_get_leakage_voltage_from_leakage_index(rdev,
5862*4882a593Smuzhiyun 									  table->entries[i].v,
5863*4882a593Smuzhiyun 									  &leakage_voltage)) {
5864*4882a593Smuzhiyun 			case 0:
5865*4882a593Smuzhiyun 				table->entries[i].v = leakage_voltage;
5866*4882a593Smuzhiyun 				break;
5867*4882a593Smuzhiyun 			case -EAGAIN:
5868*4882a593Smuzhiyun 				return -EINVAL;
5869*4882a593Smuzhiyun 			case -EINVAL:
5870*4882a593Smuzhiyun 			default:
5871*4882a593Smuzhiyun 				break;
5872*4882a593Smuzhiyun 			}
5873*4882a593Smuzhiyun 		}
5874*4882a593Smuzhiyun 
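		/*
		 * After the leakage fix-ups, walk the table backwards and clamp each
		 * voltage to the entry above it so the column stays monotonically
		 * non-decreasing with clock.
		 */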
5875*4882a593Smuzhiyun 		for (j = (table->count - 2); j >= 0; j--) {
5876*4882a593Smuzhiyun 			table->entries[j].v = (table->entries[j].v <= table->entries[j + 1].v) ?
5877*4882a593Smuzhiyun 				table->entries[j].v : table->entries[j + 1].v;
5878*4882a593Smuzhiyun 		}
5879*4882a593Smuzhiyun 	}
5880*4882a593Smuzhiyun 	return 0;
5881*4882a593Smuzhiyun }
5882*4882a593Smuzhiyun 
5883*4882a593Smuzhiyun static int si_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
5884*4882a593Smuzhiyun {
5885*4882a593Smuzhiyun 	int ret;
5886*4882a593Smuzhiyun 
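	/*
	 * Note: as in upstream radeon, only the status of the last call below is
	 * returned; errors from the first two tables are not propagated.
	 */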
5887*4882a593Smuzhiyun 	ret = si_patch_single_dependency_table_based_on_leakage(rdev,
5888*4882a593Smuzhiyun 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
5889*4882a593Smuzhiyun 	ret = si_patch_single_dependency_table_based_on_leakage(rdev,
5890*4882a593Smuzhiyun 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
5891*4882a593Smuzhiyun 	ret = si_patch_single_dependency_table_based_on_leakage(rdev,
5892*4882a593Smuzhiyun 								&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk);
5893*4882a593Smuzhiyun 	return ret;
5894*4882a593Smuzhiyun }
5895*4882a593Smuzhiyun 
5896*4882a593Smuzhiyun static void si_set_pcie_lane_width_in_smc(struct radeon_device *rdev,
5897*4882a593Smuzhiyun 					  struct radeon_ps *radeon_new_state,
5898*4882a593Smuzhiyun 					  struct radeon_ps *radeon_current_state)
5899*4882a593Smuzhiyun {
5900*4882a593Smuzhiyun 	u32 lane_width;
5901*4882a593Smuzhiyun 	u32 new_lane_width =
5902*4882a593Smuzhiyun 		((radeon_new_state->caps & ATOM_PPLIB_PCIE_LINK_WIDTH_MASK) >> ATOM_PPLIB_PCIE_LINK_WIDTH_SHIFT) + 1;
5903*4882a593Smuzhiyun 	u32 current_lane_width =
5904*4882a593Smuzhiyun 		((radeon_current_state->caps & ATOM_PPLIB_PCIE_LINK_WIDTH_MASK) >> ATOM_PPLIB_PCIE_LINK_WIDTH_SHIFT) + 1;
5905*4882a593Smuzhiyun 
5906*4882a593Smuzhiyun 	if (new_lane_width != current_lane_width) {
5907*4882a593Smuzhiyun 		radeon_set_pcie_lanes(rdev, new_lane_width);
5908*4882a593Smuzhiyun 		lane_width = radeon_get_pcie_lanes(rdev);
5909*4882a593Smuzhiyun 		si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_non_ulv_pcie_link_width, lane_width);
5910*4882a593Smuzhiyun 	}
5911*4882a593Smuzhiyun }
5912*4882a593Smuzhiyun 
5913*4882a593Smuzhiyun static void si_set_vce_clock(struct radeon_device *rdev,
5914*4882a593Smuzhiyun 			     struct radeon_ps *new_rps,
5915*4882a593Smuzhiyun 			     struct radeon_ps *old_rps)
5916*4882a593Smuzhiyun {
5917*4882a593Smuzhiyun 	if ((old_rps->evclk != new_rps->evclk) ||
5918*4882a593Smuzhiyun 	    (old_rps->ecclk != new_rps->ecclk)) {
5919*4882a593Smuzhiyun 		/* turn the clocks on when encoding, off otherwise */
5920*4882a593Smuzhiyun 		if (new_rps->evclk || new_rps->ecclk)
5921*4882a593Smuzhiyun 			vce_v1_0_enable_mgcg(rdev, false);
5922*4882a593Smuzhiyun 		else
5923*4882a593Smuzhiyun 			vce_v1_0_enable_mgcg(rdev, true);
5924*4882a593Smuzhiyun 		radeon_set_vce_clocks(rdev, new_rps->evclk, new_rps->ecclk);
5925*4882a593Smuzhiyun 	}
5926*4882a593Smuzhiyun }
5927*4882a593Smuzhiyun 
5928*4882a593Smuzhiyun void si_dpm_setup_asic(struct radeon_device *rdev)
5929*4882a593Smuzhiyun {
5930*4882a593Smuzhiyun 	int r;
5931*4882a593Smuzhiyun 
5932*4882a593Smuzhiyun 	r = si_mc_load_microcode(rdev);
5933*4882a593Smuzhiyun 	if (r)
5934*4882a593Smuzhiyun 		DRM_ERROR("Failed to load MC firmware!\n");
5935*4882a593Smuzhiyun 	rv770_get_memory_type(rdev);
5936*4882a593Smuzhiyun 	si_read_clock_registers(rdev);
5937*4882a593Smuzhiyun 	si_enable_acpi_power_management(rdev);
5938*4882a593Smuzhiyun }
5939*4882a593Smuzhiyun 
5940*4882a593Smuzhiyun static int si_thermal_enable_alert(struct radeon_device *rdev,
5941*4882a593Smuzhiyun 				   bool enable)
5942*4882a593Smuzhiyun {
5943*4882a593Smuzhiyun 	u32 thermal_int = RREG32(CG_THERMAL_INT);
5944*4882a593Smuzhiyun 
5945*4882a593Smuzhiyun 	if (enable) {
5946*4882a593Smuzhiyun 		PPSMC_Result result;
5947*4882a593Smuzhiyun 
5948*4882a593Smuzhiyun 		thermal_int &= ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
5949*4882a593Smuzhiyun 		WREG32(CG_THERMAL_INT, thermal_int);
5950*4882a593Smuzhiyun 		rdev->irq.dpm_thermal = false;
5951*4882a593Smuzhiyun 		result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
5952*4882a593Smuzhiyun 		if (result != PPSMC_Result_OK) {
5953*4882a593Smuzhiyun 			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
5954*4882a593Smuzhiyun 			return -EINVAL;
5955*4882a593Smuzhiyun 		}
5956*4882a593Smuzhiyun 	} else {
5957*4882a593Smuzhiyun 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
5958*4882a593Smuzhiyun 		WREG32(CG_THERMAL_INT, thermal_int);
5959*4882a593Smuzhiyun 		rdev->irq.dpm_thermal = true;
5960*4882a593Smuzhiyun 	}
5961*4882a593Smuzhiyun 
5962*4882a593Smuzhiyun 	return 0;
5963*4882a593Smuzhiyun }
5964*4882a593Smuzhiyun 
5965*4882a593Smuzhiyun static int si_thermal_set_temperature_range(struct radeon_device *rdev,
5966*4882a593Smuzhiyun 					    int min_temp, int max_temp)
5967*4882a593Smuzhiyun {
5968*4882a593Smuzhiyun 	int low_temp = 0 * 1000;
5969*4882a593Smuzhiyun 	int high_temp = 255 * 1000;
5970*4882a593Smuzhiyun 
5971*4882a593Smuzhiyun 	if (low_temp < min_temp)
5972*4882a593Smuzhiyun 		low_temp = min_temp;
5973*4882a593Smuzhiyun 	if (high_temp > max_temp)
5974*4882a593Smuzhiyun 		high_temp = max_temp;
5975*4882a593Smuzhiyun 	if (high_temp < low_temp) {
5976*4882a593Smuzhiyun 		DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
5977*4882a593Smuzhiyun 		return -EINVAL;
5978*4882a593Smuzhiyun 	}
5979*4882a593Smuzhiyun 
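	/* min_temp/max_temp are in millidegrees C; the DIG_THERM fields take whole degrees */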
5980*4882a593Smuzhiyun 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
5981*4882a593Smuzhiyun 	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
5982*4882a593Smuzhiyun 	WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);
5983*4882a593Smuzhiyun 
5984*4882a593Smuzhiyun 	rdev->pm.dpm.thermal.min_temp = low_temp;
5985*4882a593Smuzhiyun 	rdev->pm.dpm.thermal.max_temp = high_temp;
5986*4882a593Smuzhiyun 
5987*4882a593Smuzhiyun 	return 0;
5988*4882a593Smuzhiyun }
5989*4882a593Smuzhiyun 
5990*4882a593Smuzhiyun static void si_fan_ctrl_set_static_mode(struct radeon_device *rdev, u32 mode)
5991*4882a593Smuzhiyun {
5992*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
5993*4882a593Smuzhiyun 	u32 tmp;
5994*4882a593Smuzhiyun 
5995*4882a593Smuzhiyun 	if (si_pi->fan_ctrl_is_in_default_mode) {
5996*4882a593Smuzhiyun 		tmp = (RREG32(CG_FDO_CTRL2) & FDO_PWM_MODE_MASK) >> FDO_PWM_MODE_SHIFT;
5997*4882a593Smuzhiyun 		si_pi->fan_ctrl_default_mode = tmp;
5998*4882a593Smuzhiyun 		tmp = (RREG32(CG_FDO_CTRL2) & TMIN_MASK) >> TMIN_SHIFT;
5999*4882a593Smuzhiyun 		si_pi->t_min = tmp;
6000*4882a593Smuzhiyun 		si_pi->fan_ctrl_is_in_default_mode = false;
6001*4882a593Smuzhiyun 	}
6002*4882a593Smuzhiyun 
6003*4882a593Smuzhiyun 	tmp = RREG32(CG_FDO_CTRL2) & ~TMIN_MASK;
6004*4882a593Smuzhiyun 	tmp |= TMIN(0);
6005*4882a593Smuzhiyun 	WREG32(CG_FDO_CTRL2, tmp);
6006*4882a593Smuzhiyun 
6007*4882a593Smuzhiyun 	tmp = RREG32(CG_FDO_CTRL2) & ~FDO_PWM_MODE_MASK;
6008*4882a593Smuzhiyun 	tmp |= FDO_PWM_MODE(mode);
6009*4882a593Smuzhiyun 	WREG32(CG_FDO_CTRL2, tmp);
6010*4882a593Smuzhiyun }
6011*4882a593Smuzhiyun 
6012*4882a593Smuzhiyun static int si_thermal_setup_fan_table(struct radeon_device *rdev)
6013*4882a593Smuzhiyun {
6014*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6015*4882a593Smuzhiyun 	PP_SIslands_FanTable fan_table = { FDO_MODE_HARDWARE };
6016*4882a593Smuzhiyun 	u32 duty100;
6017*4882a593Smuzhiyun 	u32 t_diff1, t_diff2, pwm_diff1, pwm_diff2;
6018*4882a593Smuzhiyun 	u16 fdo_min, slope1, slope2;
6019*4882a593Smuzhiyun 	u32 reference_clock, tmp;
6020*4882a593Smuzhiyun 	int ret;
6021*4882a593Smuzhiyun 	u64 tmp64;
6022*4882a593Smuzhiyun 
6023*4882a593Smuzhiyun 	if (!si_pi->fan_table_start) {
6024*4882a593Smuzhiyun 		rdev->pm.dpm.fan.ucode_fan_control = false;
6025*4882a593Smuzhiyun 		return 0;
6026*4882a593Smuzhiyun 	}
6027*4882a593Smuzhiyun 
6028*4882a593Smuzhiyun 	duty100 = (RREG32(CG_FDO_CTRL1) & FMAX_DUTY100_MASK) >> FMAX_DUTY100_SHIFT;
6029*4882a593Smuzhiyun 
6030*4882a593Smuzhiyun 	if (duty100 == 0) {
6031*4882a593Smuzhiyun 		rdev->pm.dpm.fan.ucode_fan_control = false;
6032*4882a593Smuzhiyun 		return 0;
6033*4882a593Smuzhiyun 	}
6034*4882a593Smuzhiyun 
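	/*
	 * The powerplay fan parameters appear to be stored in hundredths
	 * (0.01% for PWM, 0.01 degC for temperatures): fdo_min rescales pwm_min
	 * to a duty value out of duty100, the "+ 50 ... / 100" terms round to
	 * whole units, and slope1/slope2 encode the PWM-vs-temperature ramps in
	 * the fixed-point form the SMC fan controller expects.
	 */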
6035*4882a593Smuzhiyun 	tmp64 = (u64)rdev->pm.dpm.fan.pwm_min * duty100;
6036*4882a593Smuzhiyun 	do_div(tmp64, 10000);
6037*4882a593Smuzhiyun 	fdo_min = (u16)tmp64;
6038*4882a593Smuzhiyun 
6039*4882a593Smuzhiyun 	t_diff1 = rdev->pm.dpm.fan.t_med - rdev->pm.dpm.fan.t_min;
6040*4882a593Smuzhiyun 	t_diff2 = rdev->pm.dpm.fan.t_high - rdev->pm.dpm.fan.t_med;
6041*4882a593Smuzhiyun 
6042*4882a593Smuzhiyun 	pwm_diff1 = rdev->pm.dpm.fan.pwm_med - rdev->pm.dpm.fan.pwm_min;
6043*4882a593Smuzhiyun 	pwm_diff2 = rdev->pm.dpm.fan.pwm_high - rdev->pm.dpm.fan.pwm_med;
6044*4882a593Smuzhiyun 
6045*4882a593Smuzhiyun 	slope1 = (u16)((50 + ((16 * duty100 * pwm_diff1) / t_diff1)) / 100);
6046*4882a593Smuzhiyun 	slope2 = (u16)((50 + ((16 * duty100 * pwm_diff2) / t_diff2)) / 100);
6047*4882a593Smuzhiyun 
6048*4882a593Smuzhiyun 	fan_table.temp_min = cpu_to_be16((50 + rdev->pm.dpm.fan.t_min) / 100);
6049*4882a593Smuzhiyun 	fan_table.temp_med = cpu_to_be16((50 + rdev->pm.dpm.fan.t_med) / 100);
6050*4882a593Smuzhiyun 	fan_table.temp_max = cpu_to_be16((50 + rdev->pm.dpm.fan.t_max) / 100);
6051*4882a593Smuzhiyun 
6052*4882a593Smuzhiyun 	fan_table.slope1 = cpu_to_be16(slope1);
6053*4882a593Smuzhiyun 	fan_table.slope2 = cpu_to_be16(slope2);
6054*4882a593Smuzhiyun 
6055*4882a593Smuzhiyun 	fan_table.fdo_min = cpu_to_be16(fdo_min);
6056*4882a593Smuzhiyun 
6057*4882a593Smuzhiyun 	fan_table.hys_down = cpu_to_be16(rdev->pm.dpm.fan.t_hyst);
6058*4882a593Smuzhiyun 
6059*4882a593Smuzhiyun 	fan_table.hys_up = cpu_to_be16(1);
6060*4882a593Smuzhiyun 
6061*4882a593Smuzhiyun 	fan_table.hys_slope = cpu_to_be16(1);
6062*4882a593Smuzhiyun 
6063*4882a593Smuzhiyun 	fan_table.temp_resp_lim = cpu_to_be16(5);
6064*4882a593Smuzhiyun 
6065*4882a593Smuzhiyun 	reference_clock = radeon_get_xclk(rdev);
6066*4882a593Smuzhiyun 
6067*4882a593Smuzhiyun 	fan_table.refresh_period = cpu_to_be32((rdev->pm.dpm.fan.cycle_delay *
6068*4882a593Smuzhiyun 						reference_clock) / 1600);
6069*4882a593Smuzhiyun 
6070*4882a593Smuzhiyun 	fan_table.fdo_max = cpu_to_be16((u16)duty100);
6071*4882a593Smuzhiyun 
6072*4882a593Smuzhiyun 	tmp = (RREG32(CG_MULT_THERMAL_CTRL) & TEMP_SEL_MASK) >> TEMP_SEL_SHIFT;
6073*4882a593Smuzhiyun 	fan_table.temp_src = (uint8_t)tmp;
6074*4882a593Smuzhiyun 
6075*4882a593Smuzhiyun 	ret = si_copy_bytes_to_smc(rdev,
6076*4882a593Smuzhiyun 				   si_pi->fan_table_start,
6077*4882a593Smuzhiyun 				   (u8 *)(&fan_table),
6078*4882a593Smuzhiyun 				   sizeof(fan_table),
6079*4882a593Smuzhiyun 				   si_pi->sram_end);
6080*4882a593Smuzhiyun 
6081*4882a593Smuzhiyun 	if (ret) {
6082*4882a593Smuzhiyun 		DRM_ERROR("Failed to load fan table to the SMC.\n");
6083*4882a593Smuzhiyun 		rdev->pm.dpm.fan.ucode_fan_control = false;
6084*4882a593Smuzhiyun 	}
6085*4882a593Smuzhiyun 
6086*4882a593Smuzhiyun 	return 0;
6087*4882a593Smuzhiyun }
6088*4882a593Smuzhiyun 
6089*4882a593Smuzhiyun static int si_fan_ctrl_start_smc_fan_control(struct radeon_device *rdev)
6090*4882a593Smuzhiyun {
6091*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6092*4882a593Smuzhiyun 	PPSMC_Result ret;
6093*4882a593Smuzhiyun 
6094*4882a593Smuzhiyun 	ret = si_send_msg_to_smc(rdev, PPSMC_StartFanControl);
6095*4882a593Smuzhiyun 	if (ret == PPSMC_Result_OK) {
6096*4882a593Smuzhiyun 		si_pi->fan_is_controlled_by_smc = true;
6097*4882a593Smuzhiyun 		return 0;
6098*4882a593Smuzhiyun 	} else {
6099*4882a593Smuzhiyun 		return -EINVAL;
6100*4882a593Smuzhiyun 	}
6101*4882a593Smuzhiyun }
6102*4882a593Smuzhiyun 
6103*4882a593Smuzhiyun static int si_fan_ctrl_stop_smc_fan_control(struct radeon_device *rdev)
6104*4882a593Smuzhiyun {
6105*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6106*4882a593Smuzhiyun 	PPSMC_Result ret;
6107*4882a593Smuzhiyun 
6108*4882a593Smuzhiyun 	ret = si_send_msg_to_smc(rdev, PPSMC_StopFanControl);
6109*4882a593Smuzhiyun 
6110*4882a593Smuzhiyun 	if (ret == PPSMC_Result_OK) {
6111*4882a593Smuzhiyun 		si_pi->fan_is_controlled_by_smc = false;
6112*4882a593Smuzhiyun 		return 0;
6113*4882a593Smuzhiyun 	} else {
6114*4882a593Smuzhiyun 		return -EINVAL;
6115*4882a593Smuzhiyun 	}
6116*4882a593Smuzhiyun }
6117*4882a593Smuzhiyun 
6118*4882a593Smuzhiyun int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
6119*4882a593Smuzhiyun 				      u32 *speed)
6120*4882a593Smuzhiyun {
6121*4882a593Smuzhiyun 	u32 duty, duty100;
6122*4882a593Smuzhiyun 	u64 tmp64;
6123*4882a593Smuzhiyun 
6124*4882a593Smuzhiyun 	if (rdev->pm.no_fan)
6125*4882a593Smuzhiyun 		return -ENOENT;
6126*4882a593Smuzhiyun 
6127*4882a593Smuzhiyun 	duty100 = (RREG32(CG_FDO_CTRL1) & FMAX_DUTY100_MASK) >> FMAX_DUTY100_SHIFT;
6128*4882a593Smuzhiyun 	duty = (RREG32(CG_THERMAL_STATUS) & FDO_PWM_DUTY_MASK) >> FDO_PWM_DUTY_SHIFT;
6129*4882a593Smuzhiyun 
6130*4882a593Smuzhiyun 	if (duty100 == 0)
6131*4882a593Smuzhiyun 		return -EINVAL;
6132*4882a593Smuzhiyun 
6133*4882a593Smuzhiyun 	tmp64 = (u64)duty * 100;
6134*4882a593Smuzhiyun 	do_div(tmp64, duty100);
6135*4882a593Smuzhiyun 	*speed = (u32)tmp64;
6136*4882a593Smuzhiyun 
6137*4882a593Smuzhiyun 	if (*speed > 100)
6138*4882a593Smuzhiyun 		*speed = 100;
6139*4882a593Smuzhiyun 
6140*4882a593Smuzhiyun 	return 0;
6141*4882a593Smuzhiyun }
6142*4882a593Smuzhiyun 
6143*4882a593Smuzhiyun int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
6144*4882a593Smuzhiyun 				      u32 speed)
6145*4882a593Smuzhiyun {
6146*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6147*4882a593Smuzhiyun 	u32 tmp;
6148*4882a593Smuzhiyun 	u32 duty, duty100;
6149*4882a593Smuzhiyun 	u64 tmp64;
6150*4882a593Smuzhiyun 
6151*4882a593Smuzhiyun 	if (rdev->pm.no_fan)
6152*4882a593Smuzhiyun 		return -ENOENT;
6153*4882a593Smuzhiyun 
6154*4882a593Smuzhiyun 	if (si_pi->fan_is_controlled_by_smc)
6155*4882a593Smuzhiyun 		return -EINVAL;
6156*4882a593Smuzhiyun 
6157*4882a593Smuzhiyun 	if (speed > 100)
6158*4882a593Smuzhiyun 		return -EINVAL;
6159*4882a593Smuzhiyun 
6160*4882a593Smuzhiyun 	duty100 = (RREG32(CG_FDO_CTRL1) & FMAX_DUTY100_MASK) >> FMAX_DUTY100_SHIFT;
6161*4882a593Smuzhiyun 
6162*4882a593Smuzhiyun 	if (duty100 == 0)
6163*4882a593Smuzhiyun 		return -EINVAL;
6164*4882a593Smuzhiyun 
6165*4882a593Smuzhiyun 	tmp64 = (u64)speed * duty100;
6166*4882a593Smuzhiyun 	do_div(tmp64, 100);
6167*4882a593Smuzhiyun 	duty = (u32)tmp64;
6168*4882a593Smuzhiyun 
6169*4882a593Smuzhiyun 	tmp = RREG32(CG_FDO_CTRL0) & ~FDO_STATIC_DUTY_MASK;
6170*4882a593Smuzhiyun 	tmp |= FDO_STATIC_DUTY(duty);
6171*4882a593Smuzhiyun 	WREG32(CG_FDO_CTRL0, tmp);
6172*4882a593Smuzhiyun 
6173*4882a593Smuzhiyun 	return 0;
6174*4882a593Smuzhiyun }
6175*4882a593Smuzhiyun 
6176*4882a593Smuzhiyun void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode)
6177*4882a593Smuzhiyun {
6178*4882a593Smuzhiyun 	if (mode) {
6179*4882a593Smuzhiyun 		/* stop auto-manage */
6180*4882a593Smuzhiyun 		if (rdev->pm.dpm.fan.ucode_fan_control)
6181*4882a593Smuzhiyun 			si_fan_ctrl_stop_smc_fan_control(rdev);
6182*4882a593Smuzhiyun 		si_fan_ctrl_set_static_mode(rdev, mode);
6183*4882a593Smuzhiyun 	} else {
6184*4882a593Smuzhiyun 		/* restart auto-manage */
6185*4882a593Smuzhiyun 		if (rdev->pm.dpm.fan.ucode_fan_control)
6186*4882a593Smuzhiyun 			si_thermal_start_smc_fan_control(rdev);
6187*4882a593Smuzhiyun 		else
6188*4882a593Smuzhiyun 			si_fan_ctrl_set_default_mode(rdev);
6189*4882a593Smuzhiyun 	}
6190*4882a593Smuzhiyun }
6191*4882a593Smuzhiyun 
6192*4882a593Smuzhiyun u32 si_fan_ctrl_get_mode(struct radeon_device *rdev)
6193*4882a593Smuzhiyun {
6194*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6195*4882a593Smuzhiyun 	u32 tmp;
6196*4882a593Smuzhiyun 
6197*4882a593Smuzhiyun 	if (si_pi->fan_is_controlled_by_smc)
6198*4882a593Smuzhiyun 		return 0;
6199*4882a593Smuzhiyun 
6200*4882a593Smuzhiyun 	tmp = RREG32(CG_FDO_CTRL2) & FDO_PWM_MODE_MASK;
6201*4882a593Smuzhiyun 	return (tmp >> FDO_PWM_MODE_SHIFT);
6202*4882a593Smuzhiyun }
6203*4882a593Smuzhiyun 
6204*4882a593Smuzhiyun #if 0
6205*4882a593Smuzhiyun static int si_fan_ctrl_get_fan_speed_rpm(struct radeon_device *rdev,
6206*4882a593Smuzhiyun 					 u32 *speed)
6207*4882a593Smuzhiyun {
6208*4882a593Smuzhiyun 	u32 tach_period;
6209*4882a593Smuzhiyun 	u32 xclk = radeon_get_xclk(rdev);
6210*4882a593Smuzhiyun 
6211*4882a593Smuzhiyun 	if (rdev->pm.no_fan)
6212*4882a593Smuzhiyun 		return -ENOENT;
6213*4882a593Smuzhiyun 
6214*4882a593Smuzhiyun 	if (rdev->pm.fan_pulses_per_revolution == 0)
6215*4882a593Smuzhiyun 		return -ENOENT;
6216*4882a593Smuzhiyun 
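	/*
	 * tach_period seems to be the measured revolution period in ticks of the
	 * 10 kHz-unit reference clock, so 60 * xclk * 10000 / tach_period yields RPM.
	 */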
6217*4882a593Smuzhiyun 	tach_period = (RREG32(CG_TACH_STATUS) & TACH_PERIOD_MASK) >> TACH_PERIOD_SHIFT;
6218*4882a593Smuzhiyun 	if (tach_period == 0)
6219*4882a593Smuzhiyun 		return -ENOENT;
6220*4882a593Smuzhiyun 
6221*4882a593Smuzhiyun 	*speed = 60 * xclk * 10000 / tach_period;
6222*4882a593Smuzhiyun 
6223*4882a593Smuzhiyun 	return 0;
6224*4882a593Smuzhiyun }
6225*4882a593Smuzhiyun 
6226*4882a593Smuzhiyun static int si_fan_ctrl_set_fan_speed_rpm(struct radeon_device *rdev,
6227*4882a593Smuzhiyun 					 u32 speed)
6228*4882a593Smuzhiyun {
6229*4882a593Smuzhiyun 	u32 tach_period, tmp;
6230*4882a593Smuzhiyun 	u32 xclk = radeon_get_xclk(rdev);
6231*4882a593Smuzhiyun 
6232*4882a593Smuzhiyun 	if (rdev->pm.no_fan)
6233*4882a593Smuzhiyun 		return -ENOENT;
6234*4882a593Smuzhiyun 
6235*4882a593Smuzhiyun 	if (rdev->pm.fan_pulses_per_revolution == 0)
6236*4882a593Smuzhiyun 		return -ENOENT;
6237*4882a593Smuzhiyun 
6238*4882a593Smuzhiyun 	if ((speed < rdev->pm.fan_min_rpm) ||
6239*4882a593Smuzhiyun 	    (speed > rdev->pm.fan_max_rpm))
6240*4882a593Smuzhiyun 		return -EINVAL;
6241*4882a593Smuzhiyun 
6242*4882a593Smuzhiyun 	if (rdev->pm.dpm.fan.ucode_fan_control)
6243*4882a593Smuzhiyun 		si_fan_ctrl_stop_smc_fan_control(rdev);
6244*4882a593Smuzhiyun 
6245*4882a593Smuzhiyun 	tach_period = 60 * xclk * 10000 / (8 * speed);
6246*4882a593Smuzhiyun 	tmp = RREG32(CG_TACH_CTRL) & ~TARGET_PERIOD_MASK;
6247*4882a593Smuzhiyun 	tmp |= TARGET_PERIOD(tach_period);
6248*4882a593Smuzhiyun 	WREG32(CG_TACH_CTRL, tmp);
6249*4882a593Smuzhiyun 
6250*4882a593Smuzhiyun 	si_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC_RPM);
6251*4882a593Smuzhiyun 
6252*4882a593Smuzhiyun 	return 0;
6253*4882a593Smuzhiyun }
6254*4882a593Smuzhiyun #endif
6255*4882a593Smuzhiyun 
6256*4882a593Smuzhiyun static void si_fan_ctrl_set_default_mode(struct radeon_device *rdev)
6257*4882a593Smuzhiyun {
6258*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6259*4882a593Smuzhiyun 	u32 tmp;
6260*4882a593Smuzhiyun 
6261*4882a593Smuzhiyun 	if (!si_pi->fan_ctrl_is_in_default_mode) {
6262*4882a593Smuzhiyun 		tmp = RREG32(CG_FDO_CTRL2) & ~FDO_PWM_MODE_MASK;
6263*4882a593Smuzhiyun 		tmp |= FDO_PWM_MODE(si_pi->fan_ctrl_default_mode);
6264*4882a593Smuzhiyun 		WREG32(CG_FDO_CTRL2, tmp);
6265*4882a593Smuzhiyun 
6266*4882a593Smuzhiyun 		tmp = RREG32(CG_FDO_CTRL2) & ~TMIN_MASK;
6267*4882a593Smuzhiyun 		tmp |= TMIN(si_pi->t_min);
6268*4882a593Smuzhiyun 		WREG32(CG_FDO_CTRL2, tmp);
6269*4882a593Smuzhiyun 		si_pi->fan_ctrl_is_in_default_mode = true;
6270*4882a593Smuzhiyun 	}
6271*4882a593Smuzhiyun }
6272*4882a593Smuzhiyun 
6273*4882a593Smuzhiyun static void si_thermal_start_smc_fan_control(struct radeon_device *rdev)
6274*4882a593Smuzhiyun {
6275*4882a593Smuzhiyun 	if (rdev->pm.dpm.fan.ucode_fan_control) {
6276*4882a593Smuzhiyun 		si_fan_ctrl_start_smc_fan_control(rdev);
6277*4882a593Smuzhiyun 		si_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC);
6278*4882a593Smuzhiyun 	}
6279*4882a593Smuzhiyun }
6280*4882a593Smuzhiyun 
6281*4882a593Smuzhiyun static void si_thermal_initialize(struct radeon_device *rdev)
6282*4882a593Smuzhiyun {
6283*4882a593Smuzhiyun 	u32 tmp;
6284*4882a593Smuzhiyun 
6285*4882a593Smuzhiyun 	if (rdev->pm.fan_pulses_per_revolution) {
6286*4882a593Smuzhiyun 		tmp = RREG32(CG_TACH_CTRL) & ~EDGE_PER_REV_MASK;
6287*4882a593Smuzhiyun 		tmp |= EDGE_PER_REV(rdev->pm.fan_pulses_per_revolution - 1);
6288*4882a593Smuzhiyun 		WREG32(CG_TACH_CTRL, tmp);
6289*4882a593Smuzhiyun 	}
6290*4882a593Smuzhiyun 
6291*4882a593Smuzhiyun 	tmp = RREG32(CG_FDO_CTRL2) & ~TACH_PWM_RESP_RATE_MASK;
6292*4882a593Smuzhiyun 	tmp |= TACH_PWM_RESP_RATE(0x28);
6293*4882a593Smuzhiyun 	WREG32(CG_FDO_CTRL2, tmp);
6294*4882a593Smuzhiyun }
6295*4882a593Smuzhiyun 
6296*4882a593Smuzhiyun static int si_thermal_start_thermal_controller(struct radeon_device *rdev)
6297*4882a593Smuzhiyun {
6298*4882a593Smuzhiyun 	int ret;
6299*4882a593Smuzhiyun 
6300*4882a593Smuzhiyun 	si_thermal_initialize(rdev);
6301*4882a593Smuzhiyun 	ret = si_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
6302*4882a593Smuzhiyun 	if (ret)
6303*4882a593Smuzhiyun 		return ret;
6304*4882a593Smuzhiyun 	ret = si_thermal_enable_alert(rdev, true);
6305*4882a593Smuzhiyun 	if (ret)
6306*4882a593Smuzhiyun 		return ret;
6307*4882a593Smuzhiyun 	if (rdev->pm.dpm.fan.ucode_fan_control) {
6308*4882a593Smuzhiyun 		ret = si_halt_smc(rdev);
6309*4882a593Smuzhiyun 		if (ret)
6310*4882a593Smuzhiyun 			return ret;
6311*4882a593Smuzhiyun 		ret = si_thermal_setup_fan_table(rdev);
6312*4882a593Smuzhiyun 		if (ret)
6313*4882a593Smuzhiyun 			return ret;
6314*4882a593Smuzhiyun 		ret = si_resume_smc(rdev);
6315*4882a593Smuzhiyun 		if (ret)
6316*4882a593Smuzhiyun 			return ret;
6317*4882a593Smuzhiyun 		si_thermal_start_smc_fan_control(rdev);
6318*4882a593Smuzhiyun 	}
6319*4882a593Smuzhiyun 
6320*4882a593Smuzhiyun 	return 0;
6321*4882a593Smuzhiyun }
6322*4882a593Smuzhiyun 
6323*4882a593Smuzhiyun static void si_thermal_stop_thermal_controller(struct radeon_device *rdev)
6324*4882a593Smuzhiyun {
6325*4882a593Smuzhiyun 	if (!rdev->pm.no_fan) {
6326*4882a593Smuzhiyun 		si_fan_ctrl_set_default_mode(rdev);
6327*4882a593Smuzhiyun 		si_fan_ctrl_stop_smc_fan_control(rdev);
6328*4882a593Smuzhiyun 	}
6329*4882a593Smuzhiyun }
6330*4882a593Smuzhiyun 
6331*4882a593Smuzhiyun int si_dpm_enable(struct radeon_device *rdev)
6332*4882a593Smuzhiyun {
6333*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
6334*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
6335*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6336*4882a593Smuzhiyun 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
6337*4882a593Smuzhiyun 	int ret;
6338*4882a593Smuzhiyun 
6339*4882a593Smuzhiyun 	if (si_is_smc_running(rdev))
6340*4882a593Smuzhiyun 		return -EINVAL;
6341*4882a593Smuzhiyun 	if (pi->voltage_control || si_pi->voltage_control_svi2)
6342*4882a593Smuzhiyun 		si_enable_voltage_control(rdev, true);
6343*4882a593Smuzhiyun 	if (pi->mvdd_control)
6344*4882a593Smuzhiyun 		si_get_mvdd_configuration(rdev);
6345*4882a593Smuzhiyun 	if (pi->voltage_control || si_pi->voltage_control_svi2) {
6346*4882a593Smuzhiyun 		ret = si_construct_voltage_tables(rdev);
6347*4882a593Smuzhiyun 		if (ret) {
6348*4882a593Smuzhiyun 			DRM_ERROR("si_construct_voltage_tables failed\n");
6349*4882a593Smuzhiyun 			return ret;
6350*4882a593Smuzhiyun 		}
6351*4882a593Smuzhiyun 	}
6352*4882a593Smuzhiyun 	if (eg_pi->dynamic_ac_timing) {
6353*4882a593Smuzhiyun 		ret = si_initialize_mc_reg_table(rdev);
6354*4882a593Smuzhiyun 		if (ret)
6355*4882a593Smuzhiyun 			eg_pi->dynamic_ac_timing = false;
6356*4882a593Smuzhiyun 	}
6357*4882a593Smuzhiyun 	if (pi->dynamic_ss)
6358*4882a593Smuzhiyun 		si_enable_spread_spectrum(rdev, true);
6359*4882a593Smuzhiyun 	if (pi->thermal_protection)
6360*4882a593Smuzhiyun 		si_enable_thermal_protection(rdev, true);
6361*4882a593Smuzhiyun 	si_setup_bsp(rdev);
6362*4882a593Smuzhiyun 	si_program_git(rdev);
6363*4882a593Smuzhiyun 	si_program_tp(rdev);
6364*4882a593Smuzhiyun 	si_program_tpp(rdev);
6365*4882a593Smuzhiyun 	si_program_sstp(rdev);
6366*4882a593Smuzhiyun 	si_enable_display_gap(rdev);
6367*4882a593Smuzhiyun 	si_program_vc(rdev);
6368*4882a593Smuzhiyun 	ret = si_upload_firmware(rdev);
6369*4882a593Smuzhiyun 	if (ret) {
6370*4882a593Smuzhiyun 		DRM_ERROR("si_upload_firmware failed\n");
6371*4882a593Smuzhiyun 		return ret;
6372*4882a593Smuzhiyun 	}
6373*4882a593Smuzhiyun 	ret = si_process_firmware_header(rdev);
6374*4882a593Smuzhiyun 	if (ret) {
6375*4882a593Smuzhiyun 		DRM_ERROR("si_process_firmware_header failed\n");
6376*4882a593Smuzhiyun 		return ret;
6377*4882a593Smuzhiyun 	}
6378*4882a593Smuzhiyun 	ret = si_initial_switch_from_arb_f0_to_f1(rdev);
6379*4882a593Smuzhiyun 	if (ret) {
6380*4882a593Smuzhiyun 		DRM_ERROR("si_initial_switch_from_arb_f0_to_f1 failed\n");
6381*4882a593Smuzhiyun 		return ret;
6382*4882a593Smuzhiyun 	}
6383*4882a593Smuzhiyun 	ret = si_init_smc_table(rdev);
6384*4882a593Smuzhiyun 	if (ret) {
6385*4882a593Smuzhiyun 		DRM_ERROR("si_init_smc_table failed\n");
6386*4882a593Smuzhiyun 		return ret;
6387*4882a593Smuzhiyun 	}
6388*4882a593Smuzhiyun 	ret = si_init_smc_spll_table(rdev);
6389*4882a593Smuzhiyun 	if (ret) {
6390*4882a593Smuzhiyun 		DRM_ERROR("si_init_smc_spll_table failed\n");
6391*4882a593Smuzhiyun 		return ret;
6392*4882a593Smuzhiyun 	}
6393*4882a593Smuzhiyun 	ret = si_init_arb_table_index(rdev);
6394*4882a593Smuzhiyun 	if (ret) {
6395*4882a593Smuzhiyun 		DRM_ERROR("si_init_arb_table_index failed\n");
6396*4882a593Smuzhiyun 		return ret;
6397*4882a593Smuzhiyun 	}
6398*4882a593Smuzhiyun 	if (eg_pi->dynamic_ac_timing) {
6399*4882a593Smuzhiyun 		ret = si_populate_mc_reg_table(rdev, boot_ps);
6400*4882a593Smuzhiyun 		if (ret) {
6401*4882a593Smuzhiyun 			DRM_ERROR("si_populate_mc_reg_table failed\n");
6402*4882a593Smuzhiyun 			return ret;
6403*4882a593Smuzhiyun 		}
6404*4882a593Smuzhiyun 	}
6405*4882a593Smuzhiyun 	ret = si_initialize_smc_cac_tables(rdev);
6406*4882a593Smuzhiyun 	if (ret) {
6407*4882a593Smuzhiyun 		DRM_ERROR("si_initialize_smc_cac_tables failed\n");
6408*4882a593Smuzhiyun 		return ret;
6409*4882a593Smuzhiyun 	}
6410*4882a593Smuzhiyun 	ret = si_initialize_hardware_cac_manager(rdev);
6411*4882a593Smuzhiyun 	if (ret) {
6412*4882a593Smuzhiyun 		DRM_ERROR("si_initialize_hardware_cac_manager failed\n");
6413*4882a593Smuzhiyun 		return ret;
6414*4882a593Smuzhiyun 	}
6415*4882a593Smuzhiyun 	ret = si_initialize_smc_dte_tables(rdev);
6416*4882a593Smuzhiyun 	if (ret) {
6417*4882a593Smuzhiyun 		DRM_ERROR("si_initialize_smc_dte_tables failed\n");
6418*4882a593Smuzhiyun 		return ret;
6419*4882a593Smuzhiyun 	}
6420*4882a593Smuzhiyun 	ret = si_populate_smc_tdp_limits(rdev, boot_ps);
6421*4882a593Smuzhiyun 	if (ret) {
6422*4882a593Smuzhiyun 		DRM_ERROR("si_populate_smc_tdp_limits failed\n");
6423*4882a593Smuzhiyun 		return ret;
6424*4882a593Smuzhiyun 	}
6425*4882a593Smuzhiyun 	ret = si_populate_smc_tdp_limits_2(rdev, boot_ps);
6426*4882a593Smuzhiyun 	if (ret) {
6427*4882a593Smuzhiyun 		DRM_ERROR("si_populate_smc_tdp_limits_2 failed\n");
6428*4882a593Smuzhiyun 		return ret;
6429*4882a593Smuzhiyun 	}
6430*4882a593Smuzhiyun 	si_program_response_times(rdev);
6431*4882a593Smuzhiyun 	si_program_ds_registers(rdev);
6432*4882a593Smuzhiyun 	si_dpm_start_smc(rdev);
6433*4882a593Smuzhiyun 	ret = si_notify_smc_display_change(rdev, false);
6434*4882a593Smuzhiyun 	if (ret) {
6435*4882a593Smuzhiyun 		DRM_ERROR("si_notify_smc_display_change failed\n");
6436*4882a593Smuzhiyun 		return ret;
6437*4882a593Smuzhiyun 	}
6438*4882a593Smuzhiyun 	si_enable_sclk_control(rdev, true);
6439*4882a593Smuzhiyun 	si_start_dpm(rdev);
6440*4882a593Smuzhiyun 
6441*4882a593Smuzhiyun 	si_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
6442*4882a593Smuzhiyun 
6443*4882a593Smuzhiyun 	si_thermal_start_thermal_controller(rdev);
6444*4882a593Smuzhiyun 
6445*4882a593Smuzhiyun 	ni_update_current_ps(rdev, boot_ps);
6446*4882a593Smuzhiyun 
6447*4882a593Smuzhiyun 	return 0;
6448*4882a593Smuzhiyun }
6449*4882a593Smuzhiyun 
6450*4882a593Smuzhiyun static int si_set_temperature_range(struct radeon_device *rdev)
6451*4882a593Smuzhiyun {
6452*4882a593Smuzhiyun 	int ret;
6453*4882a593Smuzhiyun 
6454*4882a593Smuzhiyun 	ret = si_thermal_enable_alert(rdev, false);
6455*4882a593Smuzhiyun 	if (ret)
6456*4882a593Smuzhiyun 		return ret;
6457*4882a593Smuzhiyun 	ret = si_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
6458*4882a593Smuzhiyun 	if (ret)
6459*4882a593Smuzhiyun 		return ret;
6460*4882a593Smuzhiyun 	ret = si_thermal_enable_alert(rdev, true);
6461*4882a593Smuzhiyun 	if (ret)
6462*4882a593Smuzhiyun 		return ret;
6463*4882a593Smuzhiyun 
6464*4882a593Smuzhiyun 	return ret;
6465*4882a593Smuzhiyun }
6466*4882a593Smuzhiyun 
6467*4882a593Smuzhiyun int si_dpm_late_enable(struct radeon_device *rdev)
6468*4882a593Smuzhiyun {
6469*4882a593Smuzhiyun 	int ret;
6470*4882a593Smuzhiyun 
6471*4882a593Smuzhiyun 	ret = si_set_temperature_range(rdev);
6472*4882a593Smuzhiyun 	if (ret)
6473*4882a593Smuzhiyun 		return ret;
6474*4882a593Smuzhiyun 
6475*4882a593Smuzhiyun 	return ret;
6476*4882a593Smuzhiyun }
6477*4882a593Smuzhiyun 
6478*4882a593Smuzhiyun void si_dpm_disable(struct radeon_device *rdev)
6479*4882a593Smuzhiyun {
6480*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
6481*4882a593Smuzhiyun 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
6482*4882a593Smuzhiyun 
6483*4882a593Smuzhiyun 	if (!si_is_smc_running(rdev))
6484*4882a593Smuzhiyun 		return;
6485*4882a593Smuzhiyun 	si_thermal_stop_thermal_controller(rdev);
6486*4882a593Smuzhiyun 	si_disable_ulv(rdev);
6487*4882a593Smuzhiyun 	si_clear_vc(rdev);
6488*4882a593Smuzhiyun 	if (pi->thermal_protection)
6489*4882a593Smuzhiyun 		si_enable_thermal_protection(rdev, false);
6490*4882a593Smuzhiyun 	si_enable_power_containment(rdev, boot_ps, false);
6491*4882a593Smuzhiyun 	si_enable_smc_cac(rdev, boot_ps, false);
6492*4882a593Smuzhiyun 	si_enable_spread_spectrum(rdev, false);
6493*4882a593Smuzhiyun 	si_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
6494*4882a593Smuzhiyun 	si_stop_dpm(rdev);
6495*4882a593Smuzhiyun 	si_reset_to_default(rdev);
6496*4882a593Smuzhiyun 	si_dpm_stop_smc(rdev);
6497*4882a593Smuzhiyun 	si_force_switch_to_arb_f0(rdev);
6498*4882a593Smuzhiyun 
6499*4882a593Smuzhiyun 	ni_update_current_ps(rdev, boot_ps);
6500*4882a593Smuzhiyun }
6501*4882a593Smuzhiyun 
6502*4882a593Smuzhiyun int si_dpm_pre_set_power_state(struct radeon_device *rdev)
6503*4882a593Smuzhiyun {
6504*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
6505*4882a593Smuzhiyun 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
6506*4882a593Smuzhiyun 	struct radeon_ps *new_ps = &requested_ps;
6507*4882a593Smuzhiyun 
6508*4882a593Smuzhiyun 	ni_update_requested_ps(rdev, new_ps);
6509*4882a593Smuzhiyun 
6510*4882a593Smuzhiyun 	si_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
6511*4882a593Smuzhiyun 
6512*4882a593Smuzhiyun 	return 0;
6513*4882a593Smuzhiyun }
6514*4882a593Smuzhiyun 
6515*4882a593Smuzhiyun static int si_power_control_set_level(struct radeon_device *rdev)
6516*4882a593Smuzhiyun {
6517*4882a593Smuzhiyun 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
6518*4882a593Smuzhiyun 	int ret;
6519*4882a593Smuzhiyun 
6520*4882a593Smuzhiyun 	ret = si_restrict_performance_levels_before_switch(rdev);
6521*4882a593Smuzhiyun 	if (ret)
6522*4882a593Smuzhiyun 		return ret;
6523*4882a593Smuzhiyun 	ret = si_halt_smc(rdev);
6524*4882a593Smuzhiyun 	if (ret)
6525*4882a593Smuzhiyun 		return ret;
6526*4882a593Smuzhiyun 	ret = si_populate_smc_tdp_limits(rdev, new_ps);
6527*4882a593Smuzhiyun 	if (ret)
6528*4882a593Smuzhiyun 		return ret;
6529*4882a593Smuzhiyun 	ret = si_populate_smc_tdp_limits_2(rdev, new_ps);
6530*4882a593Smuzhiyun 	if (ret)
6531*4882a593Smuzhiyun 		return ret;
6532*4882a593Smuzhiyun 	ret = si_resume_smc(rdev);
6533*4882a593Smuzhiyun 	if (ret)
6534*4882a593Smuzhiyun 		return ret;
6535*4882a593Smuzhiyun 	ret = si_set_sw_state(rdev);
6536*4882a593Smuzhiyun 	if (ret)
6537*4882a593Smuzhiyun 		return ret;
6538*4882a593Smuzhiyun 	return 0;
6539*4882a593Smuzhiyun }
6540*4882a593Smuzhiyun 
6541*4882a593Smuzhiyun int si_dpm_set_power_state(struct radeon_device *rdev)
6542*4882a593Smuzhiyun {
6543*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
6544*4882a593Smuzhiyun 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
6545*4882a593Smuzhiyun 	struct radeon_ps *old_ps = &eg_pi->current_rps;
6546*4882a593Smuzhiyun 	int ret;
6547*4882a593Smuzhiyun 
6548*4882a593Smuzhiyun 	ret = si_disable_ulv(rdev);
6549*4882a593Smuzhiyun 	if (ret) {
6550*4882a593Smuzhiyun 		DRM_ERROR("si_disable_ulv failed\n");
6551*4882a593Smuzhiyun 		return ret;
6552*4882a593Smuzhiyun 	}
6553*4882a593Smuzhiyun 	ret = si_restrict_performance_levels_before_switch(rdev);
6554*4882a593Smuzhiyun 	if (ret) {
6555*4882a593Smuzhiyun 		DRM_ERROR("si_restrict_performance_levels_before_switch failed\n");
6556*4882a593Smuzhiyun 		return ret;
6557*4882a593Smuzhiyun 	}
6558*4882a593Smuzhiyun 	if (eg_pi->pcie_performance_request)
6559*4882a593Smuzhiyun 		si_request_link_speed_change_before_state_change(rdev, new_ps, old_ps);
6560*4882a593Smuzhiyun 	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
6561*4882a593Smuzhiyun 	ret = si_enable_power_containment(rdev, new_ps, false);
6562*4882a593Smuzhiyun 	if (ret) {
6563*4882a593Smuzhiyun 		DRM_ERROR("si_enable_power_containment failed\n");
6564*4882a593Smuzhiyun 		return ret;
6565*4882a593Smuzhiyun 	}
6566*4882a593Smuzhiyun 	ret = si_enable_smc_cac(rdev, new_ps, false);
6567*4882a593Smuzhiyun 	if (ret) {
6568*4882a593Smuzhiyun 		DRM_ERROR("si_enable_smc_cac failed\n");
6569*4882a593Smuzhiyun 		return ret;
6570*4882a593Smuzhiyun 	}
6571*4882a593Smuzhiyun 	ret = si_halt_smc(rdev);
6572*4882a593Smuzhiyun 	if (ret) {
6573*4882a593Smuzhiyun 		DRM_ERROR("si_halt_smc failed\n");
6574*4882a593Smuzhiyun 		return ret;
6575*4882a593Smuzhiyun 	}
6576*4882a593Smuzhiyun 	ret = si_upload_sw_state(rdev, new_ps);
6577*4882a593Smuzhiyun 	if (ret) {
6578*4882a593Smuzhiyun 		DRM_ERROR("si_upload_sw_state failed\n");
6579*4882a593Smuzhiyun 		return ret;
6580*4882a593Smuzhiyun 	}
6581*4882a593Smuzhiyun 	ret = si_upload_smc_data(rdev);
6582*4882a593Smuzhiyun 	if (ret) {
6583*4882a593Smuzhiyun 		DRM_ERROR("si_upload_smc_data failed\n");
6584*4882a593Smuzhiyun 		return ret;
6585*4882a593Smuzhiyun 	}
6586*4882a593Smuzhiyun 	ret = si_upload_ulv_state(rdev);
6587*4882a593Smuzhiyun 	if (ret) {
6588*4882a593Smuzhiyun 		DRM_ERROR("si_upload_ulv_state failed\n");
6589*4882a593Smuzhiyun 		return ret;
6590*4882a593Smuzhiyun 	}
6591*4882a593Smuzhiyun 	if (eg_pi->dynamic_ac_timing) {
6592*4882a593Smuzhiyun 		ret = si_upload_mc_reg_table(rdev, new_ps);
6593*4882a593Smuzhiyun 		if (ret) {
6594*4882a593Smuzhiyun 			DRM_ERROR("si_upload_mc_reg_table failed\n");
6595*4882a593Smuzhiyun 			return ret;
6596*4882a593Smuzhiyun 		}
6597*4882a593Smuzhiyun 	}
6598*4882a593Smuzhiyun 	ret = si_program_memory_timing_parameters(rdev, new_ps);
6599*4882a593Smuzhiyun 	if (ret) {
6600*4882a593Smuzhiyun 		DRM_ERROR("si_program_memory_timing_parameters failed\n");
6601*4882a593Smuzhiyun 		return ret;
6602*4882a593Smuzhiyun 	}
6603*4882a593Smuzhiyun 	si_set_pcie_lane_width_in_smc(rdev, new_ps, old_ps);
6604*4882a593Smuzhiyun 
6605*4882a593Smuzhiyun 	ret = si_resume_smc(rdev);
6606*4882a593Smuzhiyun 	if (ret) {
6607*4882a593Smuzhiyun 		DRM_ERROR("si_resume_smc failed\n");
6608*4882a593Smuzhiyun 		return ret;
6609*4882a593Smuzhiyun 	}
6610*4882a593Smuzhiyun 	ret = si_set_sw_state(rdev);
6611*4882a593Smuzhiyun 	if (ret) {
6612*4882a593Smuzhiyun 		DRM_ERROR("si_set_sw_state failed\n");
6613*4882a593Smuzhiyun 		return ret;
6614*4882a593Smuzhiyun 	}
6615*4882a593Smuzhiyun 	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
6616*4882a593Smuzhiyun 	si_set_vce_clock(rdev, new_ps, old_ps);
6617*4882a593Smuzhiyun 	if (eg_pi->pcie_performance_request)
6618*4882a593Smuzhiyun 		si_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
6619*4882a593Smuzhiyun 	ret = si_set_power_state_conditionally_enable_ulv(rdev, new_ps);
6620*4882a593Smuzhiyun 	if (ret) {
6621*4882a593Smuzhiyun 		DRM_ERROR("si_set_power_state_conditionally_enable_ulv failed\n");
6622*4882a593Smuzhiyun 		return ret;
6623*4882a593Smuzhiyun 	}
6624*4882a593Smuzhiyun 	ret = si_enable_smc_cac(rdev, new_ps, true);
6625*4882a593Smuzhiyun 	if (ret) {
6626*4882a593Smuzhiyun 		DRM_ERROR("si_enable_smc_cac failed\n");
6627*4882a593Smuzhiyun 		return ret;
6628*4882a593Smuzhiyun 	}
6629*4882a593Smuzhiyun 	ret = si_enable_power_containment(rdev, new_ps, true);
6630*4882a593Smuzhiyun 	if (ret) {
6631*4882a593Smuzhiyun 		DRM_ERROR("si_enable_power_containment failed\n");
6632*4882a593Smuzhiyun 		return ret;
6633*4882a593Smuzhiyun 	}
6634*4882a593Smuzhiyun 
6635*4882a593Smuzhiyun 	ret = si_power_control_set_level(rdev);
6636*4882a593Smuzhiyun 	if (ret) {
6637*4882a593Smuzhiyun 		DRM_ERROR("si_power_control_set_level failed\n");
6638*4882a593Smuzhiyun 		return ret;
6639*4882a593Smuzhiyun 	}
6640*4882a593Smuzhiyun 
6641*4882a593Smuzhiyun 	return 0;
6642*4882a593Smuzhiyun }
6643*4882a593Smuzhiyun 
6644*4882a593Smuzhiyun void si_dpm_post_set_power_state(struct radeon_device *rdev)
6645*4882a593Smuzhiyun {
6646*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
6647*4882a593Smuzhiyun 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
6648*4882a593Smuzhiyun 
6649*4882a593Smuzhiyun 	ni_update_current_ps(rdev, new_ps);
6650*4882a593Smuzhiyun }
6651*4882a593Smuzhiyun 
6652*4882a593Smuzhiyun #if 0
6653*4882a593Smuzhiyun void si_dpm_reset_asic(struct radeon_device *rdev)
6654*4882a593Smuzhiyun {
6655*4882a593Smuzhiyun 	si_restrict_performance_levels_before_switch(rdev);
6656*4882a593Smuzhiyun 	si_disable_ulv(rdev);
6657*4882a593Smuzhiyun 	si_set_boot_state(rdev);
6658*4882a593Smuzhiyun }
6659*4882a593Smuzhiyun #endif
6660*4882a593Smuzhiyun 
6661*4882a593Smuzhiyun void si_dpm_display_configuration_changed(struct radeon_device *rdev)
6662*4882a593Smuzhiyun {
6663*4882a593Smuzhiyun 	si_program_display_gap(rdev);
6664*4882a593Smuzhiyun }
6665*4882a593Smuzhiyun 
6666*4882a593Smuzhiyun union power_info {
6667*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO info;
6668*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO_V2 info_2;
6669*4882a593Smuzhiyun 	struct _ATOM_POWERPLAY_INFO_V3 info_3;
6670*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
6671*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
6672*4882a593Smuzhiyun 	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
6673*4882a593Smuzhiyun };
6674*4882a593Smuzhiyun 
6675*4882a593Smuzhiyun union pplib_clock_info {
6676*4882a593Smuzhiyun 	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
6677*4882a593Smuzhiyun 	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
6678*4882a593Smuzhiyun 	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
6679*4882a593Smuzhiyun 	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
6680*4882a593Smuzhiyun 	struct _ATOM_PPLIB_SI_CLOCK_INFO si;
6681*4882a593Smuzhiyun };
6682*4882a593Smuzhiyun 
6683*4882a593Smuzhiyun union pplib_power_state {
6684*4882a593Smuzhiyun 	struct _ATOM_PPLIB_STATE v1;
6685*4882a593Smuzhiyun 	struct _ATOM_PPLIB_STATE_V2 v2;
6686*4882a593Smuzhiyun };
6687*4882a593Smuzhiyun 
6688*4882a593Smuzhiyun static void si_parse_pplib_non_clock_info(struct radeon_device *rdev,
6689*4882a593Smuzhiyun 					  struct radeon_ps *rps,
6690*4882a593Smuzhiyun 					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
6691*4882a593Smuzhiyun 					  u8 table_rev)
6692*4882a593Smuzhiyun {
6693*4882a593Smuzhiyun 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
6694*4882a593Smuzhiyun 	rps->class = le16_to_cpu(non_clock_info->usClassification);
6695*4882a593Smuzhiyun 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
6696*4882a593Smuzhiyun 
6697*4882a593Smuzhiyun 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
6698*4882a593Smuzhiyun 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
6699*4882a593Smuzhiyun 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
6700*4882a593Smuzhiyun 	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
6701*4882a593Smuzhiyun 		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
6702*4882a593Smuzhiyun 		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
6703*4882a593Smuzhiyun 	} else {
6704*4882a593Smuzhiyun 		rps->vclk = 0;
6705*4882a593Smuzhiyun 		rps->dclk = 0;
6706*4882a593Smuzhiyun 	}
6707*4882a593Smuzhiyun 
6708*4882a593Smuzhiyun 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
6709*4882a593Smuzhiyun 		rdev->pm.dpm.boot_ps = rps;
6710*4882a593Smuzhiyun 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
6711*4882a593Smuzhiyun 		rdev->pm.dpm.uvd_ps = rps;
6712*4882a593Smuzhiyun }
6713*4882a593Smuzhiyun 
6714*4882a593Smuzhiyun static void si_parse_pplib_clock_info(struct radeon_device *rdev,
6715*4882a593Smuzhiyun 				      struct radeon_ps *rps, int index,
6716*4882a593Smuzhiyun 				      union pplib_clock_info *clock_info)
6717*4882a593Smuzhiyun {
6718*4882a593Smuzhiyun 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
6719*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
6720*4882a593Smuzhiyun 	struct si_power_info *si_pi = si_get_pi(rdev);
6721*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
6722*4882a593Smuzhiyun 	u16 leakage_voltage;
6723*4882a593Smuzhiyun 	struct rv7xx_pl *pl = &ps->performance_levels[index];
6724*4882a593Smuzhiyun 	int ret;
6725*4882a593Smuzhiyun 
6726*4882a593Smuzhiyun 	ps->performance_level_count = index + 1;
6727*4882a593Smuzhiyun 
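	/* engine/memory clocks are split into 16-bit low and 8-bit high fields in the ATOM table */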
6728*4882a593Smuzhiyun 	pl->sclk = le16_to_cpu(clock_info->si.usEngineClockLow);
6729*4882a593Smuzhiyun 	pl->sclk |= clock_info->si.ucEngineClockHigh << 16;
6730*4882a593Smuzhiyun 	pl->mclk = le16_to_cpu(clock_info->si.usMemoryClockLow);
6731*4882a593Smuzhiyun 	pl->mclk |= clock_info->si.ucMemoryClockHigh << 16;
6732*4882a593Smuzhiyun 
6733*4882a593Smuzhiyun 	pl->vddc = le16_to_cpu(clock_info->si.usVDDC);
6734*4882a593Smuzhiyun 	pl->vddci = le16_to_cpu(clock_info->si.usVDDCI);
6735*4882a593Smuzhiyun 	pl->flags = le32_to_cpu(clock_info->si.ulFlags);
6736*4882a593Smuzhiyun 	pl->pcie_gen = r600_get_pcie_gen_support(rdev,
6737*4882a593Smuzhiyun 						 si_pi->sys_pcie_mask,
6738*4882a593Smuzhiyun 						 si_pi->boot_pcie_gen,
6739*4882a593Smuzhiyun 						 clock_info->si.ucPCIEGen);
6740*4882a593Smuzhiyun 
6741*4882a593Smuzhiyun 	/* patch up vddc if necessary */
6742*4882a593Smuzhiyun 	ret = si_get_leakage_voltage_from_leakage_index(rdev, pl->vddc,
6743*4882a593Smuzhiyun 							&leakage_voltage);
6744*4882a593Smuzhiyun 	if (ret == 0)
6745*4882a593Smuzhiyun 		pl->vddc = leakage_voltage;
6746*4882a593Smuzhiyun 
6747*4882a593Smuzhiyun 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
6748*4882a593Smuzhiyun 		pi->acpi_vddc = pl->vddc;
6749*4882a593Smuzhiyun 		eg_pi->acpi_vddci = pl->vddci;
6750*4882a593Smuzhiyun 		si_pi->acpi_pcie_gen = pl->pcie_gen;
6751*4882a593Smuzhiyun 	}
6752*4882a593Smuzhiyun 
6753*4882a593Smuzhiyun 	if ((rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) &&
6754*4882a593Smuzhiyun 	    index == 0) {
6755*4882a593Smuzhiyun 		/* XXX disable for A0 tahiti */
6756*4882a593Smuzhiyun 		si_pi->ulv.supported = false;
6757*4882a593Smuzhiyun 		si_pi->ulv.pl = *pl;
6758*4882a593Smuzhiyun 		si_pi->ulv.one_pcie_lane_in_ulv = false;
6759*4882a593Smuzhiyun 		si_pi->ulv.volt_change_delay = SISLANDS_ULVVOLTAGECHANGEDELAY_DFLT;
6760*4882a593Smuzhiyun 		si_pi->ulv.cg_ulv_parameter = SISLANDS_CGULVPARAMETER_DFLT;
6761*4882a593Smuzhiyun 		si_pi->ulv.cg_ulv_control = SISLANDS_CGULVCONTROL_DFLT;
6762*4882a593Smuzhiyun 	}
6763*4882a593Smuzhiyun 
6764*4882a593Smuzhiyun 	if (pi->min_vddc_in_table > pl->vddc)
6765*4882a593Smuzhiyun 		pi->min_vddc_in_table = pl->vddc;
6766*4882a593Smuzhiyun 
6767*4882a593Smuzhiyun 	if (pi->max_vddc_in_table < pl->vddc)
6768*4882a593Smuzhiyun 		pi->max_vddc_in_table = pl->vddc;
6769*4882a593Smuzhiyun 
6770*4882a593Smuzhiyun 	/* patch up boot state */
6771*4882a593Smuzhiyun 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
6772*4882a593Smuzhiyun 		u16 vddc, vddci, mvdd;
6773*4882a593Smuzhiyun 		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
6774*4882a593Smuzhiyun 		pl->mclk = rdev->clock.default_mclk;
6775*4882a593Smuzhiyun 		pl->sclk = rdev->clock.default_sclk;
6776*4882a593Smuzhiyun 		pl->vddc = vddc;
6777*4882a593Smuzhiyun 		pl->vddci = vddci;
6778*4882a593Smuzhiyun 		si_pi->mvdd_bootup_value = mvdd;
6779*4882a593Smuzhiyun 	}
6780*4882a593Smuzhiyun 
6781*4882a593Smuzhiyun 	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
6782*4882a593Smuzhiyun 	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
6783*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
6784*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
6785*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
6786*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
6787*4882a593Smuzhiyun 	}
6788*4882a593Smuzhiyun }
6789*4882a593Smuzhiyun 
6790*4882a593Smuzhiyun static int si_parse_power_table(struct radeon_device *rdev)
6791*4882a593Smuzhiyun {
6792*4882a593Smuzhiyun 	struct radeon_mode_info *mode_info = &rdev->mode_info;
6793*4882a593Smuzhiyun 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
6794*4882a593Smuzhiyun 	union pplib_power_state *power_state;
6795*4882a593Smuzhiyun 	int i, j, k, non_clock_array_index, clock_array_index;
6796*4882a593Smuzhiyun 	union pplib_clock_info *clock_info;
6797*4882a593Smuzhiyun 	struct _StateArray *state_array;
6798*4882a593Smuzhiyun 	struct _ClockInfoArray *clock_info_array;
6799*4882a593Smuzhiyun 	struct _NonClockInfoArray *non_clock_info_array;
6800*4882a593Smuzhiyun 	union power_info *power_info;
6801*4882a593Smuzhiyun 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
6802*4882a593Smuzhiyun 	u16 data_offset;
6803*4882a593Smuzhiyun 	u8 frev, crev;
6804*4882a593Smuzhiyun 	u8 *power_state_offset;
6805*4882a593Smuzhiyun 	struct ni_ps *ps;
6806*4882a593Smuzhiyun 
6807*4882a593Smuzhiyun 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
6808*4882a593Smuzhiyun 				   &frev, &crev, &data_offset))
6809*4882a593Smuzhiyun 		return -EINVAL;
6810*4882a593Smuzhiyun 	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
6811*4882a593Smuzhiyun 
6812*4882a593Smuzhiyun 	state_array = (struct _StateArray *)
6813*4882a593Smuzhiyun 		(mode_info->atom_context->bios + data_offset +
6814*4882a593Smuzhiyun 		 le16_to_cpu(power_info->pplib.usStateArrayOffset));
6815*4882a593Smuzhiyun 	clock_info_array = (struct _ClockInfoArray *)
6816*4882a593Smuzhiyun 		(mode_info->atom_context->bios + data_offset +
6817*4882a593Smuzhiyun 		 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset));
6818*4882a593Smuzhiyun 	non_clock_info_array = (struct _NonClockInfoArray *)
6819*4882a593Smuzhiyun 		(mode_info->atom_context->bios + data_offset +
6820*4882a593Smuzhiyun 		 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset));
6821*4882a593Smuzhiyun 
6822*4882a593Smuzhiyun 	rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries,
6823*4882a593Smuzhiyun 				  sizeof(struct radeon_ps),
6824*4882a593Smuzhiyun 				  GFP_KERNEL);
6825*4882a593Smuzhiyun 	if (!rdev->pm.dpm.ps)
6826*4882a593Smuzhiyun 		return -ENOMEM;
6827*4882a593Smuzhiyun 	power_state_offset = (u8 *)state_array->states;
6828*4882a593Smuzhiyun 	for (i = 0; i < state_array->ucNumEntries; i++) {
6829*4882a593Smuzhiyun 		u8 *idx;
6830*4882a593Smuzhiyun 		power_state = (union pplib_power_state *)power_state_offset;
6831*4882a593Smuzhiyun 		non_clock_array_index = power_state->v2.nonClockInfoIndex;
6832*4882a593Smuzhiyun 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
6833*4882a593Smuzhiyun 			&non_clock_info_array->nonClockInfo[non_clock_array_index];
6834*4882a593Smuzhiyun 		if (!rdev->pm.power_state[i].clock_info)
6835*4882a593Smuzhiyun 			return -EINVAL;
6836*4882a593Smuzhiyun 		ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
6837*4882a593Smuzhiyun 		if (ps == NULL) {
6838*4882a593Smuzhiyun 			kfree(rdev->pm.dpm.ps);
6839*4882a593Smuzhiyun 			return -ENOMEM;
6840*4882a593Smuzhiyun 		}
6841*4882a593Smuzhiyun 		rdev->pm.dpm.ps[i].ps_priv = ps;
6842*4882a593Smuzhiyun 		si_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
6843*4882a593Smuzhiyun 					      non_clock_info,
6844*4882a593Smuzhiyun 					      non_clock_info_array->ucEntrySize);
6845*4882a593Smuzhiyun 		k = 0;
6846*4882a593Smuzhiyun 		idx = (u8 *)&power_state->v2.clockInfoIndex[0];
6847*4882a593Smuzhiyun 		for (j = 0; j < power_state->v2.ucNumDPMLevels; j++) {
6848*4882a593Smuzhiyun 			clock_array_index = idx[j];
6849*4882a593Smuzhiyun 			if (clock_array_index >= clock_info_array->ucNumEntries)
6850*4882a593Smuzhiyun 				continue;
6851*4882a593Smuzhiyun 			if (k >= SISLANDS_MAX_HARDWARE_POWERLEVELS)
6852*4882a593Smuzhiyun 				break;
6853*4882a593Smuzhiyun 			clock_info = (union pplib_clock_info *)
6854*4882a593Smuzhiyun 				((u8 *)&clock_info_array->clockInfo[0] +
6855*4882a593Smuzhiyun 				 (clock_array_index * clock_info_array->ucEntrySize));
6856*4882a593Smuzhiyun 			si_parse_pplib_clock_info(rdev,
6857*4882a593Smuzhiyun 						  &rdev->pm.dpm.ps[i], k,
6858*4882a593Smuzhiyun 						  clock_info);
6859*4882a593Smuzhiyun 			k++;
6860*4882a593Smuzhiyun 		}
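		/* a _ATOM_PPLIB_STATE_V2 entry is 2 header bytes plus one clockInfoIndex byte per DPM level */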
6861*4882a593Smuzhiyun 		power_state_offset += 2 + power_state->v2.ucNumDPMLevels;
6862*4882a593Smuzhiyun 	}
6863*4882a593Smuzhiyun 	rdev->pm.dpm.num_ps = state_array->ucNumEntries;
6864*4882a593Smuzhiyun 
6865*4882a593Smuzhiyun 	/* fill in the vce power states */
6866*4882a593Smuzhiyun 	for (i = 0; i < RADEON_MAX_VCE_LEVELS; i++) {
6867*4882a593Smuzhiyun 		u32 sclk, mclk;
6868*4882a593Smuzhiyun 		clock_array_index = rdev->pm.dpm.vce_states[i].clk_idx;
6869*4882a593Smuzhiyun 		clock_info = (union pplib_clock_info *)
6870*4882a593Smuzhiyun 			&clock_info_array->clockInfo[clock_array_index * clock_info_array->ucEntrySize];
6871*4882a593Smuzhiyun 		sclk = le16_to_cpu(clock_info->si.usEngineClockLow);
6872*4882a593Smuzhiyun 		sclk |= clock_info->si.ucEngineClockHigh << 16;
6873*4882a593Smuzhiyun 		mclk = le16_to_cpu(clock_info->si.usMemoryClockLow);
6874*4882a593Smuzhiyun 		mclk |= clock_info->si.ucMemoryClockHigh << 16;
6875*4882a593Smuzhiyun 		rdev->pm.dpm.vce_states[i].sclk = sclk;
6876*4882a593Smuzhiyun 		rdev->pm.dpm.vce_states[i].mclk = mclk;
6877*4882a593Smuzhiyun 	}
6878*4882a593Smuzhiyun 
6879*4882a593Smuzhiyun 	return 0;
6880*4882a593Smuzhiyun }
6881*4882a593Smuzhiyun 
6882*4882a593Smuzhiyun int si_dpm_init(struct radeon_device *rdev)
6883*4882a593Smuzhiyun {
6884*4882a593Smuzhiyun 	struct rv7xx_power_info *pi;
6885*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi;
6886*4882a593Smuzhiyun 	struct ni_power_info *ni_pi;
6887*4882a593Smuzhiyun 	struct si_power_info *si_pi;
6888*4882a593Smuzhiyun 	struct atom_clock_dividers dividers;
6889*4882a593Smuzhiyun 	enum pci_bus_speed speed_cap = PCI_SPEED_UNKNOWN;
6890*4882a593Smuzhiyun 	struct pci_dev *root = rdev->pdev->bus->self;
6891*4882a593Smuzhiyun 	int ret;
6892*4882a593Smuzhiyun 
6893*4882a593Smuzhiyun 	si_pi = kzalloc(sizeof(struct si_power_info), GFP_KERNEL);
6894*4882a593Smuzhiyun 	if (si_pi == NULL)
6895*4882a593Smuzhiyun 		return -ENOMEM;
6896*4882a593Smuzhiyun 	rdev->pm.dpm.priv = si_pi;
6897*4882a593Smuzhiyun 	ni_pi = &si_pi->ni;
6898*4882a593Smuzhiyun 	eg_pi = &ni_pi->eg;
6899*4882a593Smuzhiyun 	pi = &eg_pi->rv7xx;
6900*4882a593Smuzhiyun 
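	/*
	 * Build the supported PCIe speed mask from the root port's capability;
	 * r600_get_pcie_gen_support() later uses it to cap each state's PCIe gen.
	 */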
6901*4882a593Smuzhiyun 	if (!pci_is_root_bus(rdev->pdev->bus))
6902*4882a593Smuzhiyun 		speed_cap = pcie_get_speed_cap(root);
6903*4882a593Smuzhiyun 	if (speed_cap == PCI_SPEED_UNKNOWN) {
6904*4882a593Smuzhiyun 		si_pi->sys_pcie_mask = 0;
6905*4882a593Smuzhiyun 	} else {
6906*4882a593Smuzhiyun 		if (speed_cap == PCIE_SPEED_8_0GT)
6907*4882a593Smuzhiyun 			si_pi->sys_pcie_mask = RADEON_PCIE_SPEED_25 |
6908*4882a593Smuzhiyun 				RADEON_PCIE_SPEED_50 |
6909*4882a593Smuzhiyun 				RADEON_PCIE_SPEED_80;
6910*4882a593Smuzhiyun 		else if (speed_cap == PCIE_SPEED_5_0GT)
6911*4882a593Smuzhiyun 			si_pi->sys_pcie_mask = RADEON_PCIE_SPEED_25 |
6912*4882a593Smuzhiyun 				RADEON_PCIE_SPEED_50;
6913*4882a593Smuzhiyun 		else
6914*4882a593Smuzhiyun 			si_pi->sys_pcie_mask = RADEON_PCIE_SPEED_25;
6915*4882a593Smuzhiyun 	}
6916*4882a593Smuzhiyun 	si_pi->force_pcie_gen = RADEON_PCIE_GEN_INVALID;
6917*4882a593Smuzhiyun 	si_pi->boot_pcie_gen = si_get_current_pcie_speed(rdev);
6918*4882a593Smuzhiyun 
6919*4882a593Smuzhiyun 	si_set_max_cu_value(rdev);
6920*4882a593Smuzhiyun 
6921*4882a593Smuzhiyun 	rv770_get_max_vddc(rdev);
6922*4882a593Smuzhiyun 	si_get_leakage_vddc(rdev);
6923*4882a593Smuzhiyun 	si_patch_dependency_tables_based_on_leakage(rdev);
6924*4882a593Smuzhiyun 
6925*4882a593Smuzhiyun 	pi->acpi_vddc = 0;
6926*4882a593Smuzhiyun 	eg_pi->acpi_vddci = 0;
6927*4882a593Smuzhiyun 	pi->min_vddc_in_table = 0;
6928*4882a593Smuzhiyun 	pi->max_vddc_in_table = 0;
6929*4882a593Smuzhiyun 
6930*4882a593Smuzhiyun 	ret = r600_get_platform_caps(rdev);
6931*4882a593Smuzhiyun 	if (ret)
6932*4882a593Smuzhiyun 		return ret;
6933*4882a593Smuzhiyun 
6934*4882a593Smuzhiyun 	ret = r600_parse_extended_power_table(rdev);
6935*4882a593Smuzhiyun 	if (ret)
6936*4882a593Smuzhiyun 		return ret;
6937*4882a593Smuzhiyun 
6938*4882a593Smuzhiyun 	ret = si_parse_power_table(rdev);
6939*4882a593Smuzhiyun 	if (ret)
6940*4882a593Smuzhiyun 		return ret;
6941*4882a593Smuzhiyun 
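	/*
	 * Populate a small built-in dispclk -> vddc dependency table (four
	 * entries).  The values below appear to follow the driver's usual
	 * conventions of 10 kHz clock units and mV voltages, e.g. the last
	 * entry would mean a 720 MHz display clock needs at least 900 mV.
	 */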
6942*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
6943*4882a593Smuzhiyun 		kcalloc(4,
6944*4882a593Smuzhiyun 			sizeof(struct radeon_clock_voltage_dependency_entry),
6945*4882a593Smuzhiyun 			GFP_KERNEL);
6946*4882a593Smuzhiyun 	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
6947*4882a593Smuzhiyun 		r600_free_extended_power_table(rdev);
6948*4882a593Smuzhiyun 		return -ENOMEM;
6949*4882a593Smuzhiyun 	}
6950*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
6951*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
6952*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
6953*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
6954*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
6955*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
6956*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
6957*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
6958*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
6959*4882a593Smuzhiyun 
6960*4882a593Smuzhiyun 	if (rdev->pm.dpm.voltage_response_time == 0)
6961*4882a593Smuzhiyun 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
6962*4882a593Smuzhiyun 	if (rdev->pm.dpm.backbias_response_time == 0)
6963*4882a593Smuzhiyun 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
6964*4882a593Smuzhiyun 
6965*4882a593Smuzhiyun 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
6966*4882a593Smuzhiyun 					     0, false, &dividers);
6967*4882a593Smuzhiyun 	if (ret)
6968*4882a593Smuzhiyun 		pi->ref_div = dividers.ref_div + 1;
6969*4882a593Smuzhiyun 	else
6970*4882a593Smuzhiyun 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
6971*4882a593Smuzhiyun 
6972*4882a593Smuzhiyun 	eg_pi->smu_uvd_hs = false;
6973*4882a593Smuzhiyun 
6974*4882a593Smuzhiyun 	pi->mclk_strobe_mode_threshold = 40000;
6975*4882a593Smuzhiyun 	if (si_is_special_1gb_platform(rdev))
6976*4882a593Smuzhiyun 		pi->mclk_stutter_mode_threshold = 0;
6977*4882a593Smuzhiyun 	else
6978*4882a593Smuzhiyun 		pi->mclk_stutter_mode_threshold = pi->mclk_strobe_mode_threshold;
6979*4882a593Smuzhiyun 	pi->mclk_edc_enable_threshold = 40000;
6980*4882a593Smuzhiyun 	eg_pi->mclk_edc_wr_enable_threshold = 40000;
6981*4882a593Smuzhiyun 
6982*4882a593Smuzhiyun 	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
6983*4882a593Smuzhiyun 
6984*4882a593Smuzhiyun 	pi->voltage_control =
6985*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
6986*4882a593Smuzhiyun 					    VOLTAGE_OBJ_GPIO_LUT);
6987*4882a593Smuzhiyun 	if (!pi->voltage_control) {
6988*4882a593Smuzhiyun 		si_pi->voltage_control_svi2 =
6989*4882a593Smuzhiyun 			radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
6990*4882a593Smuzhiyun 						    VOLTAGE_OBJ_SVID2);
6991*4882a593Smuzhiyun 		if (si_pi->voltage_control_svi2)
6992*4882a593Smuzhiyun 			radeon_atom_get_svi2_info(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
6993*4882a593Smuzhiyun 						  &si_pi->svd_gpio_id, &si_pi->svc_gpio_id);
6994*4882a593Smuzhiyun 	}
6995*4882a593Smuzhiyun 
6996*4882a593Smuzhiyun 	pi->mvdd_control =
6997*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC,
6998*4882a593Smuzhiyun 					    VOLTAGE_OBJ_GPIO_LUT);
6999*4882a593Smuzhiyun 
7000*4882a593Smuzhiyun 	eg_pi->vddci_control =
7001*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI,
7002*4882a593Smuzhiyun 					    VOLTAGE_OBJ_GPIO_LUT);
7003*4882a593Smuzhiyun 	if (!eg_pi->vddci_control)
7004*4882a593Smuzhiyun 		si_pi->vddci_control_svi2 =
7005*4882a593Smuzhiyun 			radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI,
7006*4882a593Smuzhiyun 						    VOLTAGE_OBJ_SVID2);
7007*4882a593Smuzhiyun 
7008*4882a593Smuzhiyun 	si_pi->vddc_phase_shed_control =
7009*4882a593Smuzhiyun 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC,
7010*4882a593Smuzhiyun 					    VOLTAGE_OBJ_PHASE_LUT);
7011*4882a593Smuzhiyun 
7012*4882a593Smuzhiyun 	rv770_get_engine_memory_ss(rdev);
7013*4882a593Smuzhiyun 
7014*4882a593Smuzhiyun 	pi->asi = RV770_ASI_DFLT;
7015*4882a593Smuzhiyun 	pi->pasi = CYPRESS_HASI_DFLT;
7016*4882a593Smuzhiyun 	pi->vrc = SISLANDS_VRC_DFLT;
7017*4882a593Smuzhiyun 
7018*4882a593Smuzhiyun 	pi->gfx_clock_gating = true;
7019*4882a593Smuzhiyun 
7020*4882a593Smuzhiyun 	eg_pi->sclk_deep_sleep = true;
7021*4882a593Smuzhiyun 	si_pi->sclk_deep_sleep_above_low = false;
7022*4882a593Smuzhiyun 
7023*4882a593Smuzhiyun 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
7024*4882a593Smuzhiyun 		pi->thermal_protection = true;
7025*4882a593Smuzhiyun 	else
7026*4882a593Smuzhiyun 		pi->thermal_protection = false;
7027*4882a593Smuzhiyun 
7028*4882a593Smuzhiyun 	eg_pi->dynamic_ac_timing = true;
7029*4882a593Smuzhiyun 
7030*4882a593Smuzhiyun 	eg_pi->light_sleep = true;
7031*4882a593Smuzhiyun #if defined(CONFIG_ACPI)
7032*4882a593Smuzhiyun 	eg_pi->pcie_performance_request =
7033*4882a593Smuzhiyun 		radeon_acpi_is_pcie_performance_request_supported(rdev);
7034*4882a593Smuzhiyun #else
7035*4882a593Smuzhiyun 	eg_pi->pcie_performance_request = false;
7036*4882a593Smuzhiyun #endif
7037*4882a593Smuzhiyun 
7038*4882a593Smuzhiyun 	si_pi->sram_end = SMC_RAM_END;
7039*4882a593Smuzhiyun 
7040*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 4;
7041*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 15000;
7042*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
7043*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_sclk_values.count = 0;
7044*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_sclk_values.values = NULL;
7045*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
7046*4882a593Smuzhiyun 	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
7047*4882a593Smuzhiyun 
7048*4882a593Smuzhiyun 	si_initialize_powertune_defaults(rdev);
7049*4882a593Smuzhiyun 
7050*4882a593Smuzhiyun 	/* make sure dc limits are valid */
7051*4882a593Smuzhiyun 	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
7052*4882a593Smuzhiyun 	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
7053*4882a593Smuzhiyun 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
7054*4882a593Smuzhiyun 			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
7055*4882a593Smuzhiyun 
7056*4882a593Smuzhiyun 	si_pi->fan_ctrl_is_in_default_mode = true;
7057*4882a593Smuzhiyun 
7058*4882a593Smuzhiyun 	return 0;
7059*4882a593Smuzhiyun }
7060*4882a593Smuzhiyun 
7061*4882a593Smuzhiyun void si_dpm_fini(struct radeon_device *rdev)
7062*4882a593Smuzhiyun {
7063*4882a593Smuzhiyun 	int i;
7064*4882a593Smuzhiyun 
7065*4882a593Smuzhiyun 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
7066*4882a593Smuzhiyun 		kfree(rdev->pm.dpm.ps[i].ps_priv);
7067*4882a593Smuzhiyun 	}
7068*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.ps);
7069*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.priv);
7070*4882a593Smuzhiyun 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
7071*4882a593Smuzhiyun 	r600_free_extended_power_table(rdev);
7072*4882a593Smuzhiyun }
7073*4882a593Smuzhiyun 
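/*
 * The helpers below read TARGET_AND_CURRENT_PROFILE_INDEX, extract the
 * CURRENT_STATE_INDEX field to find the performance level the SMC is
 * currently running, and report that level's clocks (plus voltages and
 * PCIe gen for the debugfs output).
 */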
7074*4882a593Smuzhiyun void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
7075*4882a593Smuzhiyun 						    struct seq_file *m)
7076*4882a593Smuzhiyun {
7077*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
7078*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
7079*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
7080*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
7081*4882a593Smuzhiyun 	u32 current_index =
7082*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
7083*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
7084*4882a593Smuzhiyun 
7085*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
7086*4882a593Smuzhiyun 		seq_printf(m, "invalid dpm profile %d\n", current_index);
7087*4882a593Smuzhiyun 	} else {
7088*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
7089*4882a593Smuzhiyun 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
7090*4882a593Smuzhiyun 		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
7091*4882a593Smuzhiyun 			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
7092*4882a593Smuzhiyun 	}
7093*4882a593Smuzhiyun }
7094*4882a593Smuzhiyun 
7095*4882a593Smuzhiyun u32 si_dpm_get_current_sclk(struct radeon_device *rdev)
7096*4882a593Smuzhiyun {
7097*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
7098*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
7099*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
7100*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
7101*4882a593Smuzhiyun 	u32 current_index =
7102*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
7103*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
7104*4882a593Smuzhiyun 
7105*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
7106*4882a593Smuzhiyun 		return 0;
7107*4882a593Smuzhiyun 	} else {
7108*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
7109*4882a593Smuzhiyun 		return pl->sclk;
7110*4882a593Smuzhiyun 	}
7111*4882a593Smuzhiyun }
7112*4882a593Smuzhiyun 
7113*4882a593Smuzhiyun u32 si_dpm_get_current_mclk(struct radeon_device *rdev)
7114*4882a593Smuzhiyun {
7115*4882a593Smuzhiyun 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
7116*4882a593Smuzhiyun 	struct radeon_ps *rps = &eg_pi->current_rps;
7117*4882a593Smuzhiyun 	struct ni_ps *ps = ni_get_ps(rps);
7118*4882a593Smuzhiyun 	struct rv7xx_pl *pl;
7119*4882a593Smuzhiyun 	u32 current_index =
7120*4882a593Smuzhiyun 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
7121*4882a593Smuzhiyun 		CURRENT_STATE_INDEX_SHIFT;
7122*4882a593Smuzhiyun 
7123*4882a593Smuzhiyun 	if (current_index >= ps->performance_level_count) {
7124*4882a593Smuzhiyun 		return 0;
7125*4882a593Smuzhiyun 	} else {
7126*4882a593Smuzhiyun 		pl = &ps->performance_levels[current_index];
7127*4882a593Smuzhiyun 		return pl->mclk;
7128*4882a593Smuzhiyun 	}
7129*4882a593Smuzhiyun }
7130