// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

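/* Write one RF register via the RF CSR interface (MMIO path). @offset
 * encodes the bank and register index (MT_RF_BANK()/MT_RF_REG()); the
 * transaction is started by setting MT_RF_CSR_CFG_KICK once the previous
 * one has completed.
 */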
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
        int ret = 0;
        u8 bank, reg;

        if (test_bit(MT76_REMOVED, &dev->mphy.state))
                return -ENODEV;

        bank = MT_RF_BANK(offset);
        reg = MT_RF_REG(offset);

        if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
                return -EINVAL;

        mutex_lock(&dev->phy_mutex);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
                ret = -ETIMEDOUT;
                goto out;
        }

        mt76_wr(dev, MT_RF_CSR_CFG,
                FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
                MT_RF_CSR_CFG_WR |
                MT_RF_CSR_CFG_KICK);

out:
        mutex_unlock(&dev->phy_mutex);

        if (ret < 0)
                dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
                        bank, reg, ret);

        return ret;
}

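/* Read one RF register via the RF CSR interface (MMIO path). Returns the
 * 8-bit register value on success, or a negative error code on timeout or
 * bank/register mismatch.
 */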
static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
        int ret = -ETIMEDOUT;
        u32 val;
        u8 bank, reg;

        if (test_bit(MT76_REMOVED, &dev->mphy.state))
                return -ENODEV;

        bank = MT_RF_BANK(offset);
        reg = MT_RF_REG(offset);

        if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
                return -EINVAL;

        mutex_lock(&dev->phy_mutex);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
                goto out;

        mt76_wr(dev, MT_RF_CSR_CFG,
                FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
                FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
                MT_RF_CSR_CFG_KICK);

        if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
                goto out;

        val = mt76_rr(dev, MT_RF_CSR_CFG);
        if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
            FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
                ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
        mutex_unlock(&dev->phy_mutex);

        if (ret < 0)
                dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
                        bank, reg, ret);

        return ret;
}

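/* Generic RF register accessors: USB devices tunnel the access through the
 * MCU register-pair interface (MT_MCU_MEMMAP_RF), MMIO devices use the RF
 * CSR registers directly.
 */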
static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
        if (mt76_is_usb(&dev->mt76)) {
                struct mt76_reg_pair pair = {
                        .reg = offset,
                        .value = val,
                };

                WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
                                       &dev->mphy.state));
                return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
        } else {
                return mt76x0_rf_csr_wr(dev, offset, val);
        }
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
        int ret;
        u32 val;

        if (mt76_is_usb(&dev->mt76)) {
                struct mt76_reg_pair pair = {
                        .reg = offset,
                };

                WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
                                       &dev->mphy.state));
                ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
                val = pair.value;
        } else {
                ret = val = mt76x0_rf_csr_rr(dev, offset);
        }

        return (ret < 0) ? ret : val;
}

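/* Read-modify-write helper: clear the bits in @mask, OR in @val and write
 * the result back. Returns the value written, or a negative error code.
 * mt76x0_rf_set()/mt76x0_rf_clear() below are thin wrappers around it.
 */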
static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
        int ret;

        ret = mt76x0_rf_rr(dev, offset);
        if (ret < 0)
                return ret;

        val |= ret & ~mask;

        ret = mt76x0_rf_wr(dev, offset, val);
        return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
        return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
        return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
                        const struct mt76_reg_pair *data,
                        int n)
{
        while (n-- > 0) {
                mt76x0_rf_csr_wr(dev, data->reg, data->value);
                data++;
        }
}

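/* Write a table of RF register/value pairs: MMIO devices go through the RF
 * CSR helper above, USB devices through the MCU register-pair command.
 */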
#define RF_RANDOM_WRITE(dev, tab) do {                                  \
        if (mt76_is_mmio(&dev->mt76))                                   \
                mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));     \
        else                                                            \
                mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

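/* Poll the BBP version register until it reads back something other than
 * all-zeros or all-ones, i.e. until the baseband processor is ready.
 */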
int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
        int i = 20;
        u32 val;

        do {
                val = mt76_rr(dev, MT_BBP(CORE, 0));
                if (val && ~val)
                        break;
        } while (--i);

        if (!i) {
                dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
                return -EIO;
        }

        dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
        return 0;
}

static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
        switch (band) {
        case NL80211_BAND_2GHZ:
                RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

                mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
                mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

                mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
                mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
                break;
        case NL80211_BAND_5GHZ:
                RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

                mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
                mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

                mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
                mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
                break;
        default:
                break;
        }
}

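/* Program the channel-dependent RF settings: the synthesizer/PLL values
 * from the (SDM or integer) frequency plan, the bandwidth- and
 * band-dependent RF switch tables, the external PA enables and the
 * per-band TX gain/attenuation registers.
 */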
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel,
                              u16 rf_bw_band)
{
        const struct mt76x0_freq_item *freq_item;
        u16 rf_band = rf_bw_band & 0xff00;
        u16 rf_bw = rf_bw_band & 0x00ff;
        enum nl80211_band band;
        bool b_sdm = false;
        u32 mac_reg;
        int i;

        for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
                if (channel == mt76x0_sdm_channel[i]) {
                        b_sdm = true;
                        break;
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
                if (channel == mt76x0_frequency_plan[i].channel) {
                        rf_band = mt76x0_frequency_plan[i].band;

                        if (b_sdm)
                                freq_item = &mt76x0_sdm_frequency_plan[i];
                        else
                                freq_item = &mt76x0_frequency_plan[i];

                        mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
                        mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
                        mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
                        mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
                        mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

                        mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
                                      freq_item->pllR32_b7b5);

                        /* R32<4:0> pll_den: (Denominator - 8) */
                        mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
                                      freq_item->pllR32_b4b0);

                        /* R31<7:5> */
                        mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
                                      freq_item->pllR31_b7b5);

                        /* R31<4:0> pll_k (numerator) */
                        mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
                                      freq_item->pllR31_b4b0);

                        /* R30<7> sdm_reset_n */
                        if (b_sdm) {
                                mt76x0_rf_clear(dev, MT_RF(0, 30),
                                                MT_RF_SDM_RESET_MASK);
                                mt76x0_rf_set(dev, MT_RF(0, 30),
                                              MT_RF_SDM_RESET_MASK);
                        } else {
                                mt76x0_rf_rmw(dev, MT_RF(0, 30),
                                              MT_RF_SDM_RESET_MASK,
                                              freq_item->pllR30_b7);
                        }

                        /* R30<6:2> sdmmash_prbs,sin */
                        mt76x0_rf_rmw(dev, MT_RF(0, 30),
                                      MT_RF_SDM_MASH_PRBS_MASK,
                                      freq_item->pllR30_b6b2);

                        /* R30<1> sdm_bp */
                        mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
                                      freq_item->pllR30_b1 << 1);

                        /* R30<0> R29<7:0> (hex) pll_n */
                        mt76x0_rf_wr(dev, MT_RF(0, 29),
                                     freq_item->pll_n & 0xff);

                        mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
                                      (freq_item->pll_n >> 8) & 0x1);

                        /* R28<7:6> isi_iso */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
                                      freq_item->pllR28_b7b6);

                        /* R28<5:4> pfd_dly */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
                                      freq_item->pllR28_b5b4);

                        /* R28<3:2> clksel option */
                        mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
                                      freq_item->pllR28_b3b2);

                        /* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
                        mt76x0_rf_wr(dev, MT_RF(0, 26),
                                     freq_item->pll_sdm_k & 0xff);
                        mt76x0_rf_wr(dev, MT_RF(0, 27),
                                     (freq_item->pll_sdm_k >> 8) & 0xff);

                        mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
                                      (freq_item->pll_sdm_k >> 16) & 0x3);

                        /* R24<1:0> xo_div */
                        mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
                                      freq_item->pllR24_b1b0);

                        break;
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
                if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_bw_switch_tab[i].value);
                } else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
                           (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_bw_switch_tab[i].value);
                }
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
                if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_band_switch_tab[i].value);
                }
        }

        mt76_clear(dev, MT_RF_MISC, 0xc);

        band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
        if (mt76x02_ext_pa_enabled(dev, band)) {
                /* MT_RF_MISC (offset: 0x0518)
                 * [2]1'b1: enable external A band PA
                 *    1'b0: disable external A band PA
                 * [3]1'b1: enable external G band PA
                 *    1'b0: disable external G band PA
                 */
                if (rf_band & RF_A_BAND)
                        mt76_set(dev, MT_RF_MISC, BIT(2));
                else
                        mt76_set(dev, MT_RF_MISC, BIT(3));

                /* External PA */
                for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
                        if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
                                mt76x0_rf_wr(dev,
                                             mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
                                             mt76x0_rf_ext_pa_tab[i].value);
        }

        if (rf_band & RF_G_BAND) {
                mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
                /* Set Atten mode = 2 for G band, disable TX inc dcoc. */
                mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
                mac_reg &= 0x896400FF;
                mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
        } else {
                mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
                /* Set Atten mode = 0
                 * for ext A band, disable TX inc dcoc cal.
                 */
                mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
                mac_reg &= 0x890400FF;
                mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
        }
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
                const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
                const struct mt76_reg_pair *pair = &item->reg_pair;

                if ((rf_bw_band & item->bw_band) != rf_bw_band)
                        continue;

                if (pair->reg == MT_BBP(AGC, 8)) {
                        u32 val = pair->value;
                        u8 gain;

                        gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
                        gain -= dev->cal.rx.lna_gain * 2;
                        val &= ~MT_BBP_AGC_GAIN;
                        val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
                        mt76_wr(dev, pair->reg, val);
                } else {
                        mt76_wr(dev, pair->reg, pair->value);
                }
        }
}

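/* Select the antenna configuration (single/dual, optional diversity) and
 * the related coexistence settings based on the EEPROM antenna, CFG1 and
 * NIC_CONF_2 fields.
 */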
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
        u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
        u16 ee_cfg1 = mt76x02_eeprom_get(dev, MT_EE_CFG1_INIT);
        u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
        u32 wlan, coex3;
        bool ant_div;

        wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
        coex3 = mt76_rr(dev, MT_COEXCFG3);

        ee_ant &= ~(BIT(14) | BIT(12));
        wlan &= ~(BIT(6) | BIT(5));
        coex3 &= ~GENMASK(5, 2);

        if (ee_ant & MT_EE_ANTENNA_DUAL) {
                /* dual antenna mode */
                ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
                          (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
                if (ant_div)
                        ee_ant |= BIT(12);
                else
                        coex3 |= BIT(4);
                coex3 |= BIT(3);
                if (dev->mt76.cap.has_2ghz)
                        wlan |= BIT(6);
        } else {
                /* single antenna mode */
                if (dev->mt76.cap.has_5ghz) {
                        coex3 |= BIT(3) | BIT(4);
                } else {
                        wlan |= BIT(6);
                        coex3 |= BIT(1);
                }
        }

        if (is_mt7630(dev))
                ee_ant |= BIT(14) | BIT(11);

        mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
        mt76_rmw(dev, MT_CMB_CTRL, GENMASK(15, 0), ee_ant);
        mt76_rmw(dev, MT_CSR_EE_CFG1, GENMASK(15, 0), ee_cfg1);
        mt76_clear(dev, MT_COEXCFG0, BIT(2));
        mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
        enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
        int bw;

        switch (width) {
        default:
        case NL80211_CHAN_WIDTH_20_NOHT:
        case NL80211_CHAN_WIDTH_20:
                bw = BW_20;
                break;
        case NL80211_CHAN_WIDTH_40:
                bw = BW_40;
                break;
        case NL80211_CHAN_WIDTH_80:
                bw = BW_80;
                break;
        case NL80211_CHAN_WIDTH_10:
                bw = BW_10;
                break;
        case NL80211_CHAN_WIDTH_80P80:
        case NL80211_CHAN_WIDTH_160:
        case NL80211_CHAN_WIDTH_5:
                /* TODO error */
                return;
        }

        mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

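/* Measure the TSSI DC offset: temporarily bypass the ADDA control path,
 * reset the BBP, enable TX from DAC0 and latch the resulting TSSI sample
 * into dev->cal.tssi_dc before restoring the original state.
 */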
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        u32 val;

        if (chan->band == NL80211_BAND_5GHZ)
                mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

        /* bypass ADDA control */
        mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
        mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

        /* bbp sw reset */
        mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
        usleep_range(500, 1000);
        mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

        val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
        mt76_wr(dev, MT_BBP(CORE, 34), val);

        /* enable TX with DAC0 input */
        mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

        mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
        dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* stop bypass ADDA */
        mt76_wr(dev, MT_RF_BYPASS_0, 0);
        /* stop TX */
        mt76_wr(dev, MT_BBP(TXBE, 6), 0);
        /* bbp sw reset */
        mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
        usleep_range(500, 1000);
        mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

        if (chan->band == NL80211_BAND_5GHZ)
                mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
                              u8 *info)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        u32 val;

        val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
        mt76_wr(dev, MT_BBP(CORE, 34), val);

        if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
                mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
                return -ETIMEDOUT;
        }

        *ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
        if (chan->band == NL80211_BAND_5GHZ)
                *ltssi += 128;

        /* set packet info#1 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
        info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* set packet info#2 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
        info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        /* set packet info#3 mode */
        mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
        info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

        return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
                                    int index, u8 tx_rate)
{
        u32 val, reg;

        reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
        val = mt76_rr(dev, reg);
        return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
                            u8 *info, s8 *target_power,
                            s8 *target_pa_power)
{
        u8 tx_rate, cur_power;

        cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
        switch (tx_mode) {
        case 0:
                /* cck rates */
                tx_rate = (info[0] & 0x60) >> 5;
                if (tx_rate > 3)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
                break;
        case 1: {
                u8 index;

                /* ofdm rates */
                tx_rate = (info[0] & 0xf0) >> 4;
                switch (tx_rate) {
                case 0xb:
                        index = 0;
                        break;
                case 0xf:
                        index = 1;
                        break;
                case 0xa:
                        index = 2;
                        break;
                case 0xe:
                        index = 3;
                        break;
                case 0x9:
                        index = 4;
                        break;
                case 0xd:
                        index = 5;
                        break;
                case 0x8:
                        index = 6;
                        break;
                case 0xc:
                        index = 7;
                        break;
                default:
                        return -EINVAL;
                }

                *target_power = cur_power + dev->mt76.rate_power.ofdm[index];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
                break;
        }
        case 4:
                /* vht rates */
                tx_rate = info[1] & 0xf;
                if (tx_rate > 9)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
                break;
        default:
                /* ht rates */
                tx_rate = info[1] & 0x7f;
                if (tx_rate > 9)
                        return -EINVAL;

                *target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
                *target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
                break;
        }

        return 0;
}

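/* Approximate log conversion of a linear TSSI sample into the fixed-point,
 * dB-like scale used by the TSSI compensation math below; out-of-range
 * inputs saturate to -10000.
 */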
static s16 mt76x0_phy_lin2db(u16 val)
{
        u32 mantissa = val << 4;
        int ret, data;
        s16 exp = -4;

        while (mantissa < BIT(15)) {
                mantissa <<= 1;
                if (--exp < -20)
                        return -10000;
        }
        while (mantissa > 0xffff) {
                mantissa >>= 1;
                if (++exp > 20)
                        return -10000;
        }

        /* s(15,0) */
        if (mantissa <= 47104)
                data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
        else
                data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
        data = max_t(int, 0, data);

        ret = ((15 + exp) << 15) + data;
        ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
        return ret >> 10;
}

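/* Derive the TSSI temperature-compensation delta: compare the measured
 * TSSI (minus its DC offset) against the EEPROM slope/offset and the
 * rate-dependent target power, then fold the result into the current
 * MT_TX_ALC_CFG_1 temperature compensation value, clamped to [-32, 31].
 */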
static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
                           s8 target_power, s8 target_pa_power,
                           s16 ltssi)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        int tssi_target = target_power << 12, tssi_slope;
        int tssi_offset, tssi_db, ret;
        u32 data;
        u16 val;

        if (chan->band == NL80211_BAND_5GHZ) {
                u8 bound[7];
                int i, err;

                err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
                                          sizeof(bound));
                if (err < 0)
                        return err;

                for (i = 0; i < ARRAY_SIZE(bound); i++) {
                        if (chan->hw_value <= bound[i] || !bound[i])
                                break;
                }
                val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

                tssi_offset = val >> 8;
                if ((tssi_offset >= 64 && tssi_offset <= 127) ||
                    (tssi_offset & BIT(7)))
                        tssi_offset -= BIT(8);
        } else {
                val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

                tssi_offset = val >> 8;
                if (tssi_offset & BIT(7))
                        tssi_offset -= BIT(8);
        }
        tssi_slope = val & 0xff;

        switch (target_pa_power) {
        case 1:
                if (chan->band == NL80211_BAND_2GHZ)
                        tssi_target += 29491; /* 3.6 * 8192 */
                fallthrough;
        case 0:
                break;
        default:
                tssi_target += 4424; /* 0.54 * 8192 */
                break;
        }

        if (!tx_mode) {
                data = mt76_rr(dev, MT_BBP(CORE, 1));
                if (is_mt7630(dev) && mt76_is_mmio(&dev->mt76)) {
                        int offset;

                        /* 2.3 * 8192 or 1.5 * 8192 */
                        offset = (data & BIT(5)) ? 18841 : 12288;
                        tssi_target += offset;
                } else if (data & BIT(5)) {
                        /* 0.8 * 8192 */
                        tssi_target += 6554;
                }
        }

        data = mt76_rr(dev, MT_BBP(TXBE, 4));
        switch (data & 0x3) {
        case 1:
                tssi_target -= 49152; /* -6db * 8192 */
                break;
        case 2:
                tssi_target -= 98304; /* -12db * 8192 */
                break;
        case 3:
                tssi_target += 49152; /* 6db * 8192 */
                break;
        default:
                break;
        }

        tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
        if (chan->band == NL80211_BAND_5GHZ) {
                tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
                tssi_target -= tssi_db;
                if (ltssi > 254 && tssi_target > 0) {
                        /* upper saturate */
                        tssi_target = 0;
                }
        } else {
                tssi_db += (tssi_offset << 9); /* offset s3.4 */
                tssi_target -= tssi_db;
                /* upper-lower saturate */
                if ((ltssi > 126 && tssi_target > 0) ||
                    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
                        tssi_target = 0;
                }
        }

        if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
            dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
            tssi_target > -4096 && tssi_target < 4096) {
                if ((tssi_target < 0 &&
                     tssi_target + dev->cal.tssi_target > 0) ||
                    (tssi_target > 0 &&
                     tssi_target + dev->cal.tssi_target <= 0))
                        tssi_target = 0;
                else
                        dev->cal.tssi_target = tssi_target;
        } else {
                dev->cal.tssi_target = tssi_target;
        }

        /* round the compensation value to the nearest compensation code */
        if (tssi_target > 0)
                tssi_target += 2048;
        else
                tssi_target -= 2048;
        tssi_target >>= 12;

        ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
        if (ret & BIT(5))
                ret -= BIT(6);
        ret += tssi_target;

        ret = min_t(int, 31, ret);
        return max_t(int, -32, ret);
}

static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
        s8 target_power, target_pa_power;
        u8 tssi_info[3], tx_mode;
        s16 ltssi;
        s8 val;

        if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
                return;

        tx_mode = tssi_info[0] & 0x7;
        if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
                                        &target_power, &target_pa_power) < 0)
                return;

        val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
                                         target_pa_power, ltssi);
        mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
        struct mt76_rate_power *t = &dev->mt76.rate_power;
        s8 info;

        mt76x0_get_tx_power_per_rate(dev, dev->mphy.chandef.chan, t);
        mt76x0_get_power_info(dev, dev->mphy.chandef.chan, &info);

        mt76x02_add_rate_power_offset(t, info);
        mt76x02_limit_rate_power(t, dev->txpower_conf);
        dev->mphy.txpower_cur = mt76x02_get_max_rate_power(t);
        mt76x02_add_rate_power_offset(t, -info);

        dev->target_power = info;
        mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
        struct ieee80211_channel *chan = dev->mphy.chandef.chan;
        int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
        u32 val, tx_alc, reg_val;

        if (is_mt7630(dev))
                return;

        if (power_on) {
                mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
                mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
                usleep_range(10, 20);

                if (mt76x0_tssi_enabled(dev)) {
                        mt76_wr(dev, MT_MAC_SYS_CTRL,
                                MT_MAC_SYS_CTRL_ENABLE_RX);
                        mt76x0_phy_tssi_dc_calibrate(dev);
                        mt76_wr(dev, MT_MAC_SYS_CTRL,
                                MT_MAC_SYS_CTRL_ENABLE_TX |
                                MT_MAC_SYS_CTRL_ENABLE_RX);
                }
        }

        tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
        mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
        usleep_range(500, 700);

        reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
        mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

        if (is_5ghz) {
                if (chan->hw_value < 100)
                        val = 0x701;
                else if (chan->hw_value < 140)
                        val = 0x801;
                else
                        val = 0x901;
        } else {
                val = 0x600;
        }

        mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
        mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
        usleep_range(15000, 20000);

        mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
        mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
        mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

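/* Switch to a new channel: derive the primary-channel group index for
 * 40/80 MHz operation, program the bandwidth, band and extended CCA
 * configuration, load the channel-specific RF/BBP tables and, unless a
 * scan is in progress, run calibration and update the TX power.
 */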
void mt76x0_phy_set_channel(struct mt76x02_dev *dev,
                            struct cfg80211_chan_def *chandef)
{
        u32 ext_cca_chan[4] = {
                [0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
                [1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
                [2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
                [3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
                      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
        };
        bool scan = test_bit(MT76_SCANNING, &dev->mphy.state);
        int ch_group_index, freq, freq1;
        u8 channel;
        u32 val;
        u16 rf_bw_band;

        freq = chandef->chan->center_freq;
        freq1 = chandef->center_freq1;
        channel = chandef->chan->hw_value;
        rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;

        switch (chandef->width) {
        case NL80211_CHAN_WIDTH_40:
                if (freq1 > freq)
                        ch_group_index = 0;
                else
                        ch_group_index = 1;
                channel += 2 - ch_group_index * 4;
                rf_bw_band |= RF_BW_40;
                break;
        case NL80211_CHAN_WIDTH_80:
                ch_group_index = (freq - freq1 + 30) / 20;
                if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
                        ch_group_index = 0;
                channel += 6 - ch_group_index * 4;
                rf_bw_band |= RF_BW_80;
                break;
        default:
                ch_group_index = 0;
                rf_bw_band |= RF_BW_20;
                break;
        }

        if (mt76_is_usb(&dev->mt76)) {
                mt76x0_phy_bbp_set_bw(dev, chandef->width);
        } else {
                if (chandef->width == NL80211_CHAN_WIDTH_80 ||
                    chandef->width == NL80211_CHAN_WIDTH_40)
                        val = 0x201;
                else
                        val = 0x601;
                mt76_wr(dev, MT_TX_SW_CFG0, val);
        }
        mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
        mt76x02_phy_set_band(dev, chandef->chan->band,
                             ch_group_index & 1);

        mt76_rmw(dev, MT_EXT_CCA_CFG,
                 (MT_EXT_CCA_CFG_CCA0 |
                  MT_EXT_CCA_CFG_CCA1 |
                  MT_EXT_CCA_CFG_CCA2 |
                  MT_EXT_CCA_CFG_CCA3 |
                  MT_EXT_CCA_CFG_CCA_MASK),
                 ext_cca_chan[ch_group_index]);

        mt76x0_phy_set_band(dev, chandef->chan->band);
        mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

        /* set Japan Tx filter at channel 14 */
        if (channel == 14)
                mt76_set(dev, MT_BBP(CORE, 1), 0x20);
        else
                mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

        mt76x0_read_rx_gain(dev);
        mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

        /* enable vco */
        mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
        if (scan)
                return;

        mt76x02_init_agc_gain(dev);
        mt76x0_phy_calibrate(dev, false);
        mt76x0_phy_set_txpower(dev);

        ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
                                     MT_CALIBRATE_INTERVAL);
}

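/* Read the on-chip temperature sensor through the RF/BBP core registers and
 * trigger a VCO recalibration or a full PHY recalibration when the reading
 * drifts too far from the values stored at the previous calibrations.
 */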
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
        u8 rf_b7_73, rf_b0_66, rf_b0_67;
        s8 val;

        rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
        rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
        rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

        mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
        mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
        mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

        mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
        if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
                mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
                goto done;
        }

        val = mt76_rr(dev, MT_BBP(CORE, 35));
        val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

        if (abs(val - dev->cal.temp_vco) > 20) {
                mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
                                      dev->mphy.chandef.chan->hw_value);
                dev->cal.temp_vco = val;
        }
        if (abs(val - dev->cal.temp) > 30) {
                mt76x0_phy_calibrate(dev, false);
                dev->cal.temp = val;
        }

done:
        mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
        mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
        mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
        u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

        mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

        if ((dev->mphy.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
            !is_mt7630(dev))
                mt76x02_phy_dfs_adjust_agc(dev);
}

static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
        bool gain_change;
        u8 gain_delta;
        int low_gain;

        dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76, false);
        if (!dev->cal.avg_rssi_all)
                dev->cal.avg_rssi_all = -75;

        low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
                   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

        gain_change = dev->cal.low_gain < 0 ||
                      (dev->cal.low_gain & 2) ^ (low_gain & 2);
        dev->cal.low_gain = low_gain;

        if (!gain_change) {
                if (mt76x02_phy_adjust_vga_gain(dev))
                        mt76x0_phy_set_gain_val(dev);
                return;
        }

        dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
        gain_delta = (low_gain == 2) ? 10 : 0;

        dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
        mt76x0_phy_set_gain_val(dev);

        /* clear false CCA counters */
        mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
        struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
                                               cal_work.work);

        mt76x0_phy_update_channel_gain(dev);
        if (mt76x0_tssi_enabled(dev))
                mt76x0_phy_tssi_calibrate(dev);
        else
                mt76x0_phy_temp_sensor(dev);

        ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
                                     4 * MT_CALIBRATE_INTERVAL);
}

static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
                                      const struct mt76_reg_pair *rp, int len)
{
        int i;

        for (i = 0; i < len; i++) {
                u32 reg = rp[i].reg;
                u8 val = rp[i].value;

                switch (reg) {
                case MT_RF(0, 3):
                        if (mt76_is_mmio(&dev->mt76)) {
                                if (is_mt7630(dev))
                                        val = 0x70;
                                else
                                        val = 0x63;
                        } else {
                                val = 0x73;
                        }
                        break;
                case MT_RF(0, 21):
                        if (is_mt7610e(dev))
                                val = 0x10;
                        else
                                val = 0x12;
                        break;
                case MT_RF(5, 2):
                        if (is_mt7630(dev))
                                val = 0x1d;
                        else if (is_mt7610e(dev))
                                val = 0x00;
                        else
                                val = 0x0c;
                        break;
                default:
                        break;
                }
                mt76x0_rf_wr(dev, reg, val);
        }
}

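/* One-time RF initialization: load the static RF tables (with per-chip
 * value patches), apply the EEPROM frequency offset, pulse the DAC reset
 * bit and kick off an initial VCO calibration.
 */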
static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
        int i;

        mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
                                  ARRAY_SIZE(mt76x0_rf_central_tab));
        mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
                                  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
        RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
        RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
                const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

                if (item->bw_band == RF_BW_20)
                        mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
                else if (((RF_G_BAND | RF_BW_20) & item->bw_band) ==
                         (RF_G_BAND | RF_BW_20))
                        mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
        }

        for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
                if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
                        mt76x0_rf_wr(dev,
                                     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
                                     mt76x0_rf_band_switch_tab[i].value);
                }
        }

        /* Frequency calibration
         * E1: B0.R22<6:0>: xo_cxo<6:0>
         * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
         */
        mt76x0_rf_wr(dev, MT_RF(0, 22),
                     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
        mt76x0_rf_rr(dev, MT_RF(0, 22));

        /* Reset the DAC during power-up:
         * - set B0.R73<7>
         * - clear B0.R73<7>
         * - set B0.R73<7>
         */
        mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
        mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
        mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

        /* vcocal_en: initiate VCO calibration (reset after completion) */
        mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
        INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

        mt76x0_phy_ant_select(dev);
        mt76x0_phy_rf_init(dev);
        mt76x02_phy_set_rxpath(dev);
        mt76x02_phy_set_txdac(dev);
}