// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include "mt76x02.h"
#include "mt76x02_trace.h"
#include "trace.h"

void mt76x02_mac_reset_counters(struct mt76x02_dev *dev)
{
	int i;

	mt76_rr(dev, MT_RX_STAT_0);
	mt76_rr(dev, MT_RX_STAT_1);
	mt76_rr(dev, MT_RX_STAT_2);
	mt76_rr(dev, MT_TX_STA_0);
	mt76_rr(dev, MT_TX_STA_1);
	mt76_rr(dev, MT_TX_STA_2);

	for (i = 0; i < 16; i++)
		mt76_rr(dev, MT_TX_AGG_CNT(i));

	for (i = 0; i < 16; i++)
		mt76_rr(dev, MT_TX_STAT_FIFO);

	memset(dev->mt76.aggr_stats, 0, sizeof(dev->mt76.aggr_stats));
}
EXPORT_SYMBOL_GPL(mt76x02_mac_reset_counters);

static enum mt76x02_cipher_type
mt76x02_mac_get_key_info(struct ieee80211_key_conf *key, u8 *key_data)
{
	memset(key_data, 0, 32);
	if (!key)
		return MT_CIPHER_NONE;

	if (key->keylen > 32)
		return MT_CIPHER_NONE;

	memcpy(key_data, key->key, key->keylen);

	switch (key->cipher) {
	case WLAN_CIPHER_SUITE_WEP40:
		return MT_CIPHER_WEP40;
	case WLAN_CIPHER_SUITE_WEP104:
		return MT_CIPHER_WEP104;
	case WLAN_CIPHER_SUITE_TKIP:
		return MT_CIPHER_TKIP;
	case WLAN_CIPHER_SUITE_CCMP:
		return MT_CIPHER_AES_CCMP;
	default:
		return MT_CIPHER_NONE;
	}
}

int mt76x02_mac_shared_key_setup(struct mt76x02_dev *dev, u8 vif_idx,
				 u8 key_idx, struct ieee80211_key_conf *key)
{
	enum mt76x02_cipher_type cipher;
	u8 key_data[32];
	u32 val;

	cipher = mt76x02_mac_get_key_info(key, key_data);
	if (cipher == MT_CIPHER_NONE && key)
		return -EOPNOTSUPP;

	val = mt76_rr(dev, MT_SKEY_MODE(vif_idx));
	val &= ~(MT_SKEY_MODE_MASK << MT_SKEY_MODE_SHIFT(vif_idx, key_idx));
	val |= cipher << MT_SKEY_MODE_SHIFT(vif_idx, key_idx);
	mt76_wr(dev, MT_SKEY_MODE(vif_idx), val);

	mt76_wr_copy(dev, MT_SKEY(vif_idx, key_idx), key_data,
		     sizeof(key_data));

	return 0;
}
EXPORT_SYMBOL_GPL(mt76x02_mac_shared_key_setup);

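/*
 * Read back the IV/EIV words the hardware keeps for this WCID entry and
 * rebuild the 48-bit packet number from them, so that the PN tracked in
 * mac80211's key->tx_pn matches the hardware state.
 */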
void mt76x02_mac_wcid_sync_pn(struct mt76x02_dev *dev, u8 idx,
			      struct ieee80211_key_conf *key)
{
	enum mt76x02_cipher_type cipher;
	u8 key_data[32];
	u32 iv, eiv;
	u64 pn;

	cipher = mt76x02_mac_get_key_info(key, key_data);
	iv = mt76_rr(dev, MT_WCID_IV(idx));
	eiv = mt76_rr(dev, MT_WCID_IV(idx) + 4);

	pn = (u64)eiv << 16;
	if (cipher == MT_CIPHER_TKIP) {
		pn |= (iv >> 16) & 0xff;
		pn |= (iv & 0xff) << 8;
	} else if (cipher >= MT_CIPHER_AES_CCMP) {
		pn |= iv & 0xffff;
	} else {
		return;
	}

	atomic64_set(&key->tx_pn, pn);
}

int mt76x02_mac_wcid_set_key(struct mt76x02_dev *dev, u8 idx,
			     struct ieee80211_key_conf *key)
{
	enum mt76x02_cipher_type cipher;
	u8 key_data[32];
	u8 iv_data[8];
	u64 pn;

	cipher = mt76x02_mac_get_key_info(key, key_data);
	if (cipher == MT_CIPHER_NONE && key)
		return -EOPNOTSUPP;

	mt76_wr_copy(dev, MT_WCID_KEY(idx), key_data, sizeof(key_data));
	mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PKEY_MODE, cipher);

	memset(iv_data, 0, sizeof(iv_data));
	if (key) {
		mt76_rmw_field(dev, MT_WCID_ATTR(idx), MT_WCID_ATTR_PAIRWISE,
			       !!(key->flags & IEEE80211_KEY_FLAG_PAIRWISE));

		pn = atomic64_read(&key->tx_pn);

		iv_data[3] = key->keyidx << 6;
		if (cipher >= MT_CIPHER_TKIP) {
			iv_data[3] |= 0x20;
			put_unaligned_le32(pn >> 16, &iv_data[4]);
		}

		if (cipher == MT_CIPHER_TKIP) {
			iv_data[0] = (pn >> 8) & 0xff;
			iv_data[1] = (iv_data[0] | 0x20) & 0x7f;
			iv_data[2] = pn & 0xff;
		} else if (cipher >= MT_CIPHER_AES_CCMP) {
			put_unaligned_le16((pn & 0xffff), &iv_data[0]);
		}
	}

	mt76_wr_copy(dev, MT_WCID_IV(idx), iv_data, sizeof(iv_data));

	return 0;
}

void mt76x02_mac_wcid_setup(struct mt76x02_dev *dev, u8 idx,
			    u8 vif_idx, u8 *mac)
{
	struct mt76_wcid_addr addr = {};
	u32 attr;

	attr = FIELD_PREP(MT_WCID_ATTR_BSS_IDX, vif_idx & 7) |
	       FIELD_PREP(MT_WCID_ATTR_BSS_IDX_EXT, !!(vif_idx & 8));

	mt76_wr(dev, MT_WCID_ATTR(idx), attr);

	if (idx >= 128)
		return;

	if (mac)
		memcpy(addr.macaddr, mac, ETH_ALEN);

	mt76_wr_copy(dev, MT_WCID_ADDR(idx), &addr, sizeof(addr));
}
EXPORT_SYMBOL_GPL(mt76x02_mac_wcid_setup);

void mt76x02_mac_wcid_set_drop(struct mt76x02_dev *dev, u8 idx, bool drop)
{
	u32 val = mt76_rr(dev, MT_WCID_DROP(idx));
	u32 bit = MT_WCID_DROP_MASK(idx);

	/* prevent unnecessary writes */
	if ((val & bit) != (bit * drop))
		mt76_wr(dev, MT_WCID_DROP(idx), (val & ~bit) | (bit * drop));
}

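/*
 * Pack an ieee80211_tx_rate into the 16-bit hardware rate word shared by the
 * TXWI and the per-WCID rate registers: rate index, PHY type, bandwidth and
 * short GI, with the number of spatial streams returned via @nss_val.
 * For example, HT MCS 9 at 40 MHz with short GI becomes phy=MT_PHY_TYPE_HT,
 * index 9, bw=1 with MT_RXWI_RATE_SGI set, and *nss_val = 2.
 */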
static u16
mt76x02_mac_tx_rate_val(struct mt76x02_dev *dev,
			const struct ieee80211_tx_rate *rate, u8 *nss_val)
{
	u8 phy, rate_idx, nss, bw = 0;
	u16 rateval;

	if (rate->flags & IEEE80211_TX_RC_VHT_MCS) {
		rate_idx = rate->idx;
		nss = 1 + (rate->idx >> 4);
		phy = MT_PHY_TYPE_VHT;
		if (rate->flags & IEEE80211_TX_RC_80_MHZ_WIDTH)
			bw = 2;
		else if (rate->flags & IEEE80211_TX_RC_40_MHZ_WIDTH)
			bw = 1;
	} else if (rate->flags & IEEE80211_TX_RC_MCS) {
		rate_idx = rate->idx;
		nss = 1 + (rate->idx >> 3);
		phy = MT_PHY_TYPE_HT;
		if (rate->flags & IEEE80211_TX_RC_GREEN_FIELD)
			phy = MT_PHY_TYPE_HT_GF;
		if (rate->flags & IEEE80211_TX_RC_40_MHZ_WIDTH)
			bw = 1;
	} else {
		const struct ieee80211_rate *r;
		int band = dev->mphy.chandef.chan->band;
		u16 val;

		r = &dev->mt76.hw->wiphy->bands[band]->bitrates[rate->idx];
		if (rate->flags & IEEE80211_TX_RC_USE_SHORT_PREAMBLE)
			val = r->hw_value_short;
		else
			val = r->hw_value;

		phy = val >> 8;
		rate_idx = val & 0xff;
		nss = 1;
	}

	rateval = FIELD_PREP(MT_RXWI_RATE_INDEX, rate_idx);
	rateval |= FIELD_PREP(MT_RXWI_RATE_PHY, phy);
	rateval |= FIELD_PREP(MT_RXWI_RATE_BW, bw);
	if (rate->flags & IEEE80211_TX_RC_SHORT_GI)
		rateval |= MT_RXWI_RATE_SGI;

	*nss_val = nss;
	return rateval;
}

void mt76x02_mac_wcid_set_rate(struct mt76x02_dev *dev, struct mt76_wcid *wcid,
			       const struct ieee80211_tx_rate *rate)
{
	s8 max_txpwr_adj = mt76x02_tx_get_max_txpwr_adj(dev, rate);
	u16 rateval;
	u32 tx_info;
	s8 nss;

	rateval = mt76x02_mac_tx_rate_val(dev, rate, &nss);
	tx_info = FIELD_PREP(MT_WCID_TX_INFO_RATE, rateval) |
		  FIELD_PREP(MT_WCID_TX_INFO_NSS, nss) |
		  FIELD_PREP(MT_WCID_TX_INFO_TXPWR_ADJ, max_txpwr_adj) |
		  MT_WCID_TX_INFO_SET;
	wcid->tx_info = tx_info;
}

void mt76x02_mac_set_short_preamble(struct mt76x02_dev *dev, bool enable)
{
	if (enable)
		mt76_set(dev, MT_AUTO_RSP_CFG, MT_AUTO_RSP_PREAMB_SHORT);
	else
		mt76_clear(dev, MT_AUTO_RSP_CFG, MT_AUTO_RSP_PREAMB_SHORT);
}

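/*
 * Fetch one entry from the hardware TX status FIFO and unpack it into a
 * struct mt76x02_tx_status; returns false once the FIFO runs empty (the
 * valid bit in MT_TX_STAT_FIFO is clear).
 */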
bool mt76x02_mac_load_tx_status(struct mt76x02_dev *dev,
				struct mt76x02_tx_status *stat)
{
	u32 stat1, stat2;

	stat2 = mt76_rr(dev, MT_TX_STAT_FIFO_EXT);
	stat1 = mt76_rr(dev, MT_TX_STAT_FIFO);

	stat->valid = !!(stat1 & MT_TX_STAT_FIFO_VALID);
	if (!stat->valid)
		return false;

	stat->success = !!(stat1 & MT_TX_STAT_FIFO_SUCCESS);
	stat->aggr = !!(stat1 & MT_TX_STAT_FIFO_AGGR);
	stat->ack_req = !!(stat1 & MT_TX_STAT_FIFO_ACKREQ);
	stat->wcid = FIELD_GET(MT_TX_STAT_FIFO_WCID, stat1);
	stat->rate = FIELD_GET(MT_TX_STAT_FIFO_RATE, stat1);

	stat->retry = FIELD_GET(MT_TX_STAT_FIFO_EXT_RETRY, stat2);
	stat->pktid = FIELD_GET(MT_TX_STAT_FIFO_EXT_PKTID, stat2);

	trace_mac_txstat_fetch(dev, stat);

	return true;
}

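/*
 * Convert a hardware rate word (from the TX status FIFO or a cached WCID
 * tx_info) back into an ieee80211_tx_rate for status reporting.
 */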
static int
mt76x02_mac_process_tx_rate(struct ieee80211_tx_rate *txrate, u16 rate,
			    enum nl80211_band band)
{
	u8 idx = FIELD_GET(MT_RXWI_RATE_INDEX, rate);

	txrate->idx = 0;
	txrate->flags = 0;
	txrate->count = 1;

	switch (FIELD_GET(MT_RXWI_RATE_PHY, rate)) {
	case MT_PHY_TYPE_OFDM:
		if (band == NL80211_BAND_2GHZ)
			idx += 4;

		txrate->idx = idx;
		return 0;
	case MT_PHY_TYPE_CCK:
		if (idx >= 8)
			idx -= 8;

		txrate->idx = idx;
		return 0;
	case MT_PHY_TYPE_HT_GF:
		txrate->flags |= IEEE80211_TX_RC_GREEN_FIELD;
		fallthrough;
	case MT_PHY_TYPE_HT:
		txrate->flags |= IEEE80211_TX_RC_MCS;
		txrate->idx = idx;
		break;
	case MT_PHY_TYPE_VHT:
		txrate->flags |= IEEE80211_TX_RC_VHT_MCS;
		txrate->idx = idx;
		break;
	default:
		return -EINVAL;
	}

	switch (FIELD_GET(MT_RXWI_RATE_BW, rate)) {
	case MT_PHY_BW_20:
		break;
	case MT_PHY_BW_40:
		txrate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
		break;
	case MT_PHY_BW_80:
		txrate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
		break;
	default:
		return -EINVAL;
	}

	if (rate & MT_RXWI_RATE_SGI)
		txrate->flags |= IEEE80211_TX_RC_SHORT_GI;

	return 0;
}

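/*
 * Fill the TXWI for one frame: WCID index, optional software-generated
 * TKIP/CCMP IV, rate word, TX power adjustment, per-revision txstream
 * setup, and the flags controlling ACK policy, sequence numbering and
 * A-MPDU parameters.
 */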
void mt76x02_mac_write_txwi(struct mt76x02_dev *dev, struct mt76x02_txwi *txwi,
			    struct sk_buff *skb, struct mt76_wcid *wcid,
			    struct ieee80211_sta *sta, int len)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct ieee80211_tx_rate *rate = &info->control.rates[0];
	struct ieee80211_key_conf *key = info->control.hw_key;
	u32 wcid_tx_info;
	u16 rate_ht_mask = FIELD_PREP(MT_RXWI_RATE_PHY, BIT(1) | BIT(2));
	u16 txwi_flags = 0, rateval;
	u8 nss;
	s8 txpwr_adj, max_txpwr_adj;
	u8 ccmp_pn[8], nstreams = dev->chainmask & 0xf;

	memset(txwi, 0, sizeof(*txwi));

	mt76_tx_check_agg_ssn(sta, skb);

	if (!info->control.hw_key && wcid && wcid->hw_key_idx != 0xff &&
	    ieee80211_has_protected(hdr->frame_control)) {
		wcid = NULL;
		ieee80211_get_tx_rates(info->control.vif, sta, skb,
				       info->control.rates, 1);
	}

	if (wcid)
		txwi->wcid = wcid->idx;
	else
		txwi->wcid = 0xff;

	if (wcid && wcid->sw_iv && key) {
		u64 pn = atomic64_inc_return(&key->tx_pn);

		ccmp_pn[0] = pn;
		ccmp_pn[1] = pn >> 8;
		ccmp_pn[2] = 0;
		ccmp_pn[3] = 0x20 | (key->keyidx << 6);
		ccmp_pn[4] = pn >> 16;
		ccmp_pn[5] = pn >> 24;
		ccmp_pn[6] = pn >> 32;
		ccmp_pn[7] = pn >> 40;
		txwi->iv = *((__le32 *)&ccmp_pn[0]);
		txwi->eiv = *((__le32 *)&ccmp_pn[4]);
	}

	if (wcid && (rate->idx < 0 || !rate->count)) {
		wcid_tx_info = wcid->tx_info;
		rateval = FIELD_GET(MT_WCID_TX_INFO_RATE, wcid_tx_info);
		max_txpwr_adj = FIELD_GET(MT_WCID_TX_INFO_TXPWR_ADJ,
					  wcid_tx_info);
		nss = FIELD_GET(MT_WCID_TX_INFO_NSS, wcid_tx_info);
	} else {
		rateval = mt76x02_mac_tx_rate_val(dev, rate, &nss);
		max_txpwr_adj = mt76x02_tx_get_max_txpwr_adj(dev, rate);
	}
	txwi->rate = cpu_to_le16(rateval);

	txpwr_adj = mt76x02_tx_get_txpwr_adj(dev, dev->txpower_conf,
					     max_txpwr_adj);
	txwi->ctl2 = FIELD_PREP(MT_TX_PWR_ADJ, txpwr_adj);

	if (nstreams > 1 && mt76_rev(&dev->mt76) >= MT76XX_REV_E4)
		txwi->txstream = 0x13;
	else if (nstreams > 1 && mt76_rev(&dev->mt76) >= MT76XX_REV_E3 &&
		 !(txwi->rate & cpu_to_le16(rate_ht_mask)))
		txwi->txstream = 0x93;

	if (is_mt76x2(dev) && (info->flags & IEEE80211_TX_CTL_LDPC))
		txwi->rate |= cpu_to_le16(MT_RXWI_RATE_LDPC);
	if ((info->flags & IEEE80211_TX_CTL_STBC) && nss == 1)
		txwi->rate |= cpu_to_le16(MT_RXWI_RATE_STBC);
	if (nss > 1 && sta && sta->smps_mode == IEEE80211_SMPS_DYNAMIC)
		txwi_flags |= MT_TXWI_FLAGS_MMPS;
	if (!(info->flags & IEEE80211_TX_CTL_NO_ACK))
		txwi->ack_ctl |= MT_TXWI_ACK_CTL_REQ;
	if (info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)
		txwi->ack_ctl |= MT_TXWI_ACK_CTL_NSEQ;
	if ((info->flags & IEEE80211_TX_CTL_AMPDU) && sta) {
		u8 ba_size = IEEE80211_MIN_AMPDU_BUF;
		u8 ampdu_density = sta->ht_cap.ampdu_density;

		ba_size <<= sta->ht_cap.ampdu_factor;
		ba_size = min_t(int, 63, ba_size - 1);
		if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)
			ba_size = 0;
		txwi->ack_ctl |= FIELD_PREP(MT_TXWI_ACK_CTL_BA_WINDOW, ba_size);

		if (ampdu_density < IEEE80211_HT_MPDU_DENSITY_4)
			ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

		txwi_flags |= MT_TXWI_FLAGS_AMPDU |
			      FIELD_PREP(MT_TXWI_FLAGS_MPDU_DENSITY, ampdu_density);
	}

	if (ieee80211_is_probe_resp(hdr->frame_control) ||
	    ieee80211_is_beacon(hdr->frame_control))
		txwi_flags |= MT_TXWI_FLAGS_TS;

	txwi->flags |= cpu_to_le16(txwi_flags);
	txwi->len_ctl = cpu_to_le16(len);
}
EXPORT_SYMBOL_GPL(mt76x02_mac_write_txwi);

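/*
 * Fill status rate entry @idx with a one-step fallback from the previous
 * entry: one MCS lower for HT/VHT (HT MCS 8 drops to MCS 0, VHT MCS 0 drops
 * a spatial stream instead), one index lower for legacy rates.
 */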
static void
mt76x02_tx_rate_fallback(struct ieee80211_tx_rate *rates, int idx, int phy)
{
	u8 mcs, nss;

	if (!idx)
		return;

	rates += idx - 1;
	rates[1] = rates[0];
	switch (phy) {
	case MT_PHY_TYPE_VHT:
		mcs = ieee80211_rate_get_vht_mcs(rates);
		nss = ieee80211_rate_get_vht_nss(rates);

		if (mcs == 0)
			nss = max_t(int, nss - 1, 1);
		else
			mcs--;

		ieee80211_rate_set_vht(rates + 1, mcs, nss);
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		/* MCS 8 falls back to MCS 0 */
		if (rates[0].idx == 8) {
			rates[1].idx = 0;
			break;
		}
		fallthrough;
	default:
		rates[1].idx = max_t(int, rates[0].idx - 1, 0);
		break;
	}
}

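/*
 * Expand one TX status entry (covering @n_frames aggregated MPDUs) into the
 * rate table and A-MPDU counters of an ieee80211_tx_info.
 */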
static void
mt76x02_mac_fill_tx_status(struct mt76x02_dev *dev, struct mt76x02_sta *msta,
			   struct ieee80211_tx_info *info,
			   struct mt76x02_tx_status *st, int n_frames)
{
	struct ieee80211_tx_rate *rate = info->status.rates;
	struct ieee80211_tx_rate last_rate;
	u16 first_rate;
	int retry = st->retry;
	int phy;
	int i;

	if (!n_frames)
		return;

	phy = FIELD_GET(MT_RXWI_RATE_PHY, st->rate);

	if (st->pktid & MT_PACKET_ID_HAS_RATE) {
		first_rate = st->rate & ~MT_PKTID_RATE;
		first_rate |= st->pktid & MT_PKTID_RATE;

		mt76x02_mac_process_tx_rate(&rate[0], first_rate,
					    dev->mphy.chandef.chan->band);
	} else if (rate[0].idx < 0) {
		if (!msta)
			return;

		mt76x02_mac_process_tx_rate(&rate[0], msta->wcid.tx_info,
					    dev->mphy.chandef.chan->band);
	}

	mt76x02_mac_process_tx_rate(&last_rate, st->rate,
				    dev->mphy.chandef.chan->band);

	for (i = 0; i < ARRAY_SIZE(info->status.rates); i++) {
		retry--;
		if (i + 1 == ARRAY_SIZE(info->status.rates)) {
			info->status.rates[i] = last_rate;
			info->status.rates[i].count = max_t(int, retry, 1);
			break;
		}

		mt76x02_tx_rate_fallback(info->status.rates, i, phy);
		if (info->status.rates[i].idx == last_rate.idx)
			break;
	}

	if (i + 1 < ARRAY_SIZE(info->status.rates)) {
		info->status.rates[i + 1].idx = -1;
		info->status.rates[i + 1].count = 0;
	}

	info->status.ampdu_len = n_frames;
	info->status.ampdu_ack_len = st->success ? n_frames : 0;

	if (st->aggr)
		info->flags |= IEEE80211_TX_CTL_AMPDU |
			       IEEE80211_TX_STAT_AMPDU;

	if (!st->ack_req)
		info->flags |= IEEE80211_TX_CTL_NO_ACK;
	else if (st->success)
		info->flags |= IEEE80211_TX_STAT_ACK;
}

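/*
 * Report TX status to mac80211. Identical status entries for the same
 * station are batched in msta->status and only flushed once the rate/retry
 * info changes or 32 frames have accumulated, which keeps this path cheap
 * under aggregation; TX airtime is accounted from the reported length.
 */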
void mt76x02_send_tx_status(struct mt76x02_dev *dev,
			    struct mt76x02_tx_status *stat, u8 *update)
{
	struct ieee80211_tx_info info = {};
	struct ieee80211_tx_status status = {
		.info = &info
	};
	static const u8 ac_to_tid[4] = {
		[IEEE80211_AC_BE] = 0,
		[IEEE80211_AC_BK] = 1,
		[IEEE80211_AC_VI] = 4,
		[IEEE80211_AC_VO] = 6
	};
	struct mt76_wcid *wcid = NULL;
	struct mt76x02_sta *msta = NULL;
	struct mt76_dev *mdev = &dev->mt76;
	struct sk_buff_head list;
	u32 duration = 0;
	u8 cur_pktid;
	u32 ac = 0;
	int len = 0;

	if (stat->pktid == MT_PACKET_ID_NO_ACK)
		return;

	rcu_read_lock();

	if (stat->wcid < MT76x02_N_WCIDS)
		wcid = rcu_dereference(dev->mt76.wcid[stat->wcid]);

	if (wcid && wcid->sta) {
		void *priv;

		priv = msta = container_of(wcid, struct mt76x02_sta, wcid);
		status.sta = container_of(priv, struct ieee80211_sta,
					  drv_priv);
	}

	mt76_tx_status_lock(mdev, &list);

	if (wcid) {
		if (mt76_is_skb_pktid(stat->pktid))
			status.skb = mt76_tx_status_skb_get(mdev, wcid,
							    stat->pktid, &list);
		if (status.skb)
			status.info = IEEE80211_SKB_CB(status.skb);
	}

	if (!status.skb && !(stat->pktid & MT_PACKET_ID_HAS_RATE)) {
		mt76_tx_status_unlock(mdev, &list);
		goto out;
	}

	if (msta && stat->aggr && !status.skb) {
		u32 stat_val, stat_cache;

		stat_val = stat->rate;
		stat_val |= ((u32)stat->retry) << 16;
		stat_cache = msta->status.rate;
		stat_cache |= ((u32)msta->status.retry) << 16;

		if (*update == 0 && stat_val == stat_cache &&
		    stat->wcid == msta->status.wcid && msta->n_frames < 32) {
			msta->n_frames++;
			mt76_tx_status_unlock(mdev, &list);
			goto out;
		}

		cur_pktid = msta->status.pktid;
		mt76x02_mac_fill_tx_status(dev, msta, status.info,
					   &msta->status, msta->n_frames);

		msta->status = *stat;
		msta->n_frames = 1;
		*update = 0;
	} else {
		cur_pktid = stat->pktid;
		mt76x02_mac_fill_tx_status(dev, msta, status.info, stat, 1);
		*update = 1;
	}

	if (status.skb) {
		info = *status.info;
		len = status.skb->len;
		ac = skb_get_queue_mapping(status.skb);
		mt76_tx_status_skb_done(mdev, status.skb, &list);
	} else if (msta) {
		len = status.info->status.ampdu_len * ewma_pktlen_read(&msta->pktlen);
		ac = FIELD_GET(MT_PKTID_AC, cur_pktid);
	}

	mt76_tx_status_unlock(mdev, &list);

	if (!status.skb)
		ieee80211_tx_status_ext(mt76_hw(dev), &status);

	if (!len)
		goto out;

	duration = ieee80211_calc_tx_airtime(mt76_hw(dev), &info, len);

	spin_lock_bh(&dev->mt76.cc_lock);
	dev->tx_airtime += duration;
	spin_unlock_bh(&dev->mt76.cc_lock);

	if (msta)
		ieee80211_sta_register_airtime(status.sta, ac_to_tid[ac], duration, 0);

out:
	rcu_read_unlock();
}

static int
mt76x02_mac_process_rate(struct mt76x02_dev *dev,
			 struct mt76_rx_status *status,
			 u16 rate)
{
	u8 idx = FIELD_GET(MT_RXWI_RATE_INDEX, rate);

	switch (FIELD_GET(MT_RXWI_RATE_PHY, rate)) {
	case MT_PHY_TYPE_OFDM:
		if (idx >= 8)
			idx = 0;

		if (status->band == NL80211_BAND_2GHZ)
			idx += 4;

		status->rate_idx = idx;
		return 0;
	case MT_PHY_TYPE_CCK:
		if (idx >= 8) {
			idx -= 8;
			status->enc_flags |= RX_ENC_FLAG_SHORTPRE;
		}

		if (idx >= 4)
			idx = 0;

		status->rate_idx = idx;
		return 0;
	case MT_PHY_TYPE_HT_GF:
		status->enc_flags |= RX_ENC_FLAG_HT_GF;
		fallthrough;
	case MT_PHY_TYPE_HT:
		status->encoding = RX_ENC_HT;
		status->rate_idx = idx;
		break;
	case MT_PHY_TYPE_VHT: {
		u8 n_rxstream = dev->chainmask & 0xf;

		status->encoding = RX_ENC_VHT;
		status->rate_idx = FIELD_GET(MT_RATE_INDEX_VHT_IDX, idx);
		status->nss = min_t(u8, n_rxstream,
				    FIELD_GET(MT_RATE_INDEX_VHT_NSS, idx) + 1);
		break;
	}
	default:
		return -EINVAL;
	}

	if (rate & MT_RXWI_RATE_LDPC)
		status->enc_flags |= RX_ENC_FLAG_LDPC;

	if (rate & MT_RXWI_RATE_SGI)
		status->enc_flags |= RX_ENC_FLAG_SHORT_GI;

	if (rate & MT_RXWI_RATE_STBC)
		status->enc_flags |= 1 << RX_ENC_FLAG_STBC_SHIFT;

	switch (FIELD_GET(MT_RXWI_RATE_BW, rate)) {
	case MT_PHY_BW_20:
		break;
	case MT_PHY_BW_40:
		status->bw = RATE_INFO_BW_40;
		break;
	case MT_PHY_BW_80:
		status->bw = RATE_INFO_BW_80;
		break;
	default:
		break;
	}

	return 0;
}

void mt76x02_mac_setaddr(struct mt76x02_dev *dev, const u8 *addr)
{
	static const u8 null_addr[ETH_ALEN] = {};
	int i;

	ether_addr_copy(dev->mt76.macaddr, addr);

	if (!is_valid_ether_addr(dev->mt76.macaddr)) {
		eth_random_addr(dev->mt76.macaddr);
		dev_info(dev->mt76.dev,
			 "Invalid MAC address, using random address %pM\n",
			 dev->mt76.macaddr);
	}

	mt76_wr(dev, MT_MAC_ADDR_DW0, get_unaligned_le32(dev->mt76.macaddr));
	mt76_wr(dev, MT_MAC_ADDR_DW1,
		get_unaligned_le16(dev->mt76.macaddr + 4) |
		FIELD_PREP(MT_MAC_ADDR_DW1_U2ME_MASK, 0xff));

	mt76_wr(dev, MT_MAC_BSSID_DW0,
		get_unaligned_le32(dev->mt76.macaddr));
	mt76_wr(dev, MT_MAC_BSSID_DW1,
		get_unaligned_le16(dev->mt76.macaddr + 4) |
		FIELD_PREP(MT_MAC_BSSID_DW1_MBSS_MODE, 3) | /* 8 APs + 8 STAs */
		MT_MAC_BSSID_DW1_MBSS_LOCAL_BIT);
	/* enable 7 additional beacon slots and control them with bypass mask */
	mt76_rmw_field(dev, MT_MAC_BSSID_DW1, MT_MAC_BSSID_DW1_MBEACON_N, 7);

	for (i = 0; i < 16; i++)
		mt76x02_mac_set_bssid(dev, i, null_addr);
}
EXPORT_SYMBOL_GPL(mt76x02_mac_setaddr);

static int
mt76x02_mac_get_rssi(struct mt76x02_dev *dev, s8 rssi, int chain)
{
	struct mt76x02_rx_freq_cal *cal = &dev->cal.rx;

	rssi += cal->rssi_offset[chain];
	rssi -= cal->lna_gain;

	return rssi;
}

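/*
 * Parse the RXWI of a received frame: strip the L2 pad and hardware-reported
 * IV (saving the PN in status->iv), fill in per-chain signal, A-MPDU
 * reference and TID/sequence number, then decode the rate word.
 */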
int mt76x02_mac_process_rx(struct mt76x02_dev *dev, struct sk_buff *skb,
			   void *rxi)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76x02_rxwi *rxwi = rxi;
	struct mt76x02_sta *sta;
	u32 rxinfo = le32_to_cpu(rxwi->rxinfo);
	u32 ctl = le32_to_cpu(rxwi->ctl);
	u16 rate = le16_to_cpu(rxwi->rate);
	u16 tid_sn = le16_to_cpu(rxwi->tid_sn);
	bool unicast = rxwi->rxinfo & cpu_to_le32(MT_RXINFO_UNICAST);
	int pad_len = 0, nstreams = dev->chainmask & 0xf;
	s8 signal;
	u8 pn_len;
	u8 wcid;
	int len;

	if (!test_bit(MT76_STATE_RUNNING, &dev->mphy.state))
		return -EINVAL;

	if (rxinfo & MT_RXINFO_L2PAD)
		pad_len += 2;

	if (rxinfo & MT_RXINFO_DECRYPT) {
		status->flag |= RX_FLAG_DECRYPTED;
		status->flag |= RX_FLAG_MMIC_STRIPPED;
		status->flag |= RX_FLAG_MIC_STRIPPED;
		status->flag |= RX_FLAG_IV_STRIPPED;
	}

	wcid = FIELD_GET(MT_RXWI_CTL_WCID, ctl);
	sta = mt76x02_rx_get_sta(&dev->mt76, wcid);
	status->wcid = mt76x02_rx_get_sta_wcid(sta, unicast);

	len = FIELD_GET(MT_RXWI_CTL_MPDU_LEN, ctl);
	pn_len = FIELD_GET(MT_RXINFO_PN_LEN, rxinfo);
	if (pn_len) {
		int offset = ieee80211_get_hdrlen_from_skb(skb) + pad_len;
		u8 *data = skb->data + offset;

		status->iv[0] = data[7];
		status->iv[1] = data[6];
		status->iv[2] = data[5];
		status->iv[3] = data[4];
		status->iv[4] = data[1];
		status->iv[5] = data[0];

		/*
		 * Driver CCMP validation can't deal with fragments.
		 * Let mac80211 take care of it.
		 */
		if (rxinfo & MT_RXINFO_FRAG) {
			status->flag &= ~RX_FLAG_IV_STRIPPED;
		} else {
			pad_len += pn_len << 2;
			len -= pn_len << 2;
		}
	}

	mt76x02_remove_hdr_pad(skb, pad_len);

	if ((rxinfo & MT_RXINFO_BA) && !(rxinfo & MT_RXINFO_NULL))
		status->aggr = true;

	if (rxinfo & MT_RXINFO_AMPDU) {
		status->flag |= RX_FLAG_AMPDU_DETAILS;
		status->ampdu_ref = dev->ampdu_ref;

		/*
		 * When receiving an A-MPDU subframe and RSSI info is not valid,
		 * we can assume that more subframes belonging to the same A-MPDU
		 * are coming. The last one will have valid RSSI info
		 */
		if (rxinfo & MT_RXINFO_RSSI) {
			if (!++dev->ampdu_ref)
				dev->ampdu_ref++;
		}
	}

	if (WARN_ON_ONCE(len > skb->len))
		return -EINVAL;

	pskb_trim(skb, len);

	status->chains = BIT(0);
	signal = mt76x02_mac_get_rssi(dev, rxwi->rssi[0], 0);
	status->chain_signal[0] = signal;
	if (nstreams > 1) {
		status->chains |= BIT(1);
		status->chain_signal[1] = mt76x02_mac_get_rssi(dev,
							       rxwi->rssi[1],
							       1);
		signal = max_t(s8, signal, status->chain_signal[1]);
	}
	status->signal = signal;
	status->freq = dev->mphy.chandef.chan->center_freq;
	status->band = dev->mphy.chandef.chan->band;

	status->tid = FIELD_GET(MT_RXWI_TID, tid_sn);
	status->seqno = FIELD_GET(MT_RXWI_SN, tid_sn);

	return mt76x02_mac_process_rate(dev, status, rate);
}

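/*
 * Drain the hardware TX status FIFO. From the interrupt path (@irq set)
 * entries are only queued into dev->txstatus_fifo for later processing;
 * from process context they are reported to mac80211 right away.
 */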
void mt76x02_mac_poll_tx_status(struct mt76x02_dev *dev, bool irq)
{
	struct mt76x02_tx_status stat = {};
	u8 update = 1;
	bool ret;

	if (!test_bit(MT76_STATE_RUNNING, &dev->mphy.state))
		return;

	trace_mac_txstat_poll(dev);

	while (!irq || !kfifo_is_full(&dev->txstatus_fifo)) {
		if (!spin_trylock(&dev->txstatus_fifo_lock))
			break;

		ret = mt76x02_mac_load_tx_status(dev, &stat);
		spin_unlock(&dev->txstatus_fifo_lock);

		if (!ret)
			break;

		if (!irq) {
			mt76x02_send_tx_status(dev, &stat, &update);
			continue;
		}

		kfifo_put(&dev->txstatus_fifo, stat);
	}
}

void mt76x02_tx_complete_skb(struct mt76_dev *mdev, struct mt76_queue_entry *e)
{
	struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
	struct mt76x02_txwi *txwi;
	u8 *txwi_ptr;

	if (!e->txwi) {
		dev_kfree_skb_any(e->skb);
		return;
	}

	mt76x02_mac_poll_tx_status(dev, false);

	txwi_ptr = mt76_get_txwi_ptr(mdev, e->txwi);
	txwi = (struct mt76x02_txwi *)txwi_ptr;
	trace_mac_txdone(mdev, txwi->wcid, txwi->pktid);

	mt76_tx_complete_skb(mdev, e->wcid, e->skb);
}
EXPORT_SYMBOL_GPL(mt76x02_tx_complete_skb);

void mt76x02_mac_set_rts_thresh(struct mt76x02_dev *dev, u32 val)
{
	u32 data = 0;

	if (val != ~0)
		data = FIELD_PREP(MT_PROT_CFG_CTRL, 1) |
		       MT_PROT_CFG_RTS_THRESH;

	mt76_rmw_field(dev, MT_TX_RTS_CFG, MT_TX_RTS_CFG_THRESH, val);

	mt76_rmw(dev, MT_CCK_PROT_CFG,
		 MT_PROT_CFG_CTRL | MT_PROT_CFG_RTS_THRESH, data);
	mt76_rmw(dev, MT_OFDM_PROT_CFG,
		 MT_PROT_CFG_CTRL | MT_PROT_CFG_RTS_THRESH, data);
}

void mt76x02_mac_set_tx_protection(struct mt76x02_dev *dev, bool legacy_prot,
				   int ht_mode)
{
	int mode = ht_mode & IEEE80211_HT_OP_MODE_PROTECTION;
	bool non_gf = !!(ht_mode & IEEE80211_HT_OP_MODE_NON_GF_STA_PRSNT);
	u32 prot[6];
	u32 vht_prot[3];
	int i;
	u16 rts_thr;

	for (i = 0; i < ARRAY_SIZE(prot); i++) {
		prot[i] = mt76_rr(dev, MT_CCK_PROT_CFG + i * 4);
		prot[i] &= ~MT_PROT_CFG_CTRL;
		if (i >= 2)
			prot[i] &= ~MT_PROT_CFG_RATE;
	}

	for (i = 0; i < ARRAY_SIZE(vht_prot); i++) {
		vht_prot[i] = mt76_rr(dev, MT_TX_PROT_CFG6 + i * 4);
		vht_prot[i] &= ~(MT_PROT_CFG_CTRL | MT_PROT_CFG_RATE);
	}

	rts_thr = mt76_get_field(dev, MT_TX_RTS_CFG, MT_TX_RTS_CFG_THRESH);

	if (rts_thr != 0xffff)
		prot[0] |= MT_PROT_CTRL_RTS_CTS;

	if (legacy_prot) {
		prot[1] |= MT_PROT_CTRL_CTS2SELF;

		prot[2] |= MT_PROT_RATE_CCK_11;
		prot[3] |= MT_PROT_RATE_CCK_11;
		prot[4] |= MT_PROT_RATE_CCK_11;
		prot[5] |= MT_PROT_RATE_CCK_11;

		vht_prot[0] |= MT_PROT_RATE_CCK_11;
		vht_prot[1] |= MT_PROT_RATE_CCK_11;
		vht_prot[2] |= MT_PROT_RATE_CCK_11;
	} else {
		if (rts_thr != 0xffff)
			prot[1] |= MT_PROT_CTRL_RTS_CTS;

		prot[2] |= MT_PROT_RATE_OFDM_24;
		prot[3] |= MT_PROT_RATE_DUP_OFDM_24;
		prot[4] |= MT_PROT_RATE_OFDM_24;
		prot[5] |= MT_PROT_RATE_DUP_OFDM_24;

		vht_prot[0] |= MT_PROT_RATE_OFDM_24;
		vht_prot[1] |= MT_PROT_RATE_DUP_OFDM_24;
		vht_prot[2] |= MT_PROT_RATE_SGI_OFDM_24;
	}

	switch (mode) {
	case IEEE80211_HT_OP_MODE_PROTECTION_NONMEMBER:
	case IEEE80211_HT_OP_MODE_PROTECTION_NONHT_MIXED:
		prot[2] |= MT_PROT_CTRL_RTS_CTS;
		prot[3] |= MT_PROT_CTRL_RTS_CTS;
		prot[4] |= MT_PROT_CTRL_RTS_CTS;
		prot[5] |= MT_PROT_CTRL_RTS_CTS;
		vht_prot[0] |= MT_PROT_CTRL_RTS_CTS;
		vht_prot[1] |= MT_PROT_CTRL_RTS_CTS;
		vht_prot[2] |= MT_PROT_CTRL_RTS_CTS;
		break;
	case IEEE80211_HT_OP_MODE_PROTECTION_20MHZ:
		prot[3] |= MT_PROT_CTRL_RTS_CTS;
		prot[5] |= MT_PROT_CTRL_RTS_CTS;
		vht_prot[1] |= MT_PROT_CTRL_RTS_CTS;
		vht_prot[2] |= MT_PROT_CTRL_RTS_CTS;
		break;
	}

	if (non_gf) {
		prot[4] |= MT_PROT_CTRL_RTS_CTS;
		prot[5] |= MT_PROT_CTRL_RTS_CTS;
	}

	for (i = 0; i < ARRAY_SIZE(prot); i++)
		mt76_wr(dev, MT_CCK_PROT_CFG + i * 4, prot[i]);

	for (i = 0; i < ARRAY_SIZE(vht_prot); i++)
		mt76_wr(dev, MT_TX_PROT_CFG6 + i * 4, vht_prot[i]);
}

void mt76x02_update_channel(struct mt76_dev *mdev)
{
	struct mt76x02_dev *dev = container_of(mdev, struct mt76x02_dev, mt76);
	struct mt76_channel_state *state;

	state = mdev->phy.chan_state;
	state->cc_busy += mt76_rr(dev, MT_CH_BUSY);

	spin_lock_bh(&dev->mt76.cc_lock);
	state->cc_tx += dev->tx_airtime;
	dev->tx_airtime = 0;
	spin_unlock_bh(&dev->mt76.cc_lock);
}
EXPORT_SYMBOL_GPL(mt76x02_update_channel);

static void mt76x02_check_mac_err(struct mt76x02_dev *dev)
{
	u32 val = mt76_rr(dev, 0x10f4);

	if (!(val & BIT(29)) || !(val & (BIT(7) | BIT(5))))
		return;

	dev_err(dev->mt76.dev, "mac specific condition occurred\n");

	mt76_set(dev, MT_MAC_SYS_CTRL, MT_MAC_SYS_CTRL_RESET_CSR);
	udelay(10);
	mt76_wr(dev, MT_MAC_SYS_CTRL,
		MT_MAC_SYS_CTRL_ENABLE_TX | MT_MAC_SYS_CTRL_ENABLE_RX);
}

static void
mt76x02_edcca_tx_enable(struct mt76x02_dev *dev, bool enable)
{
	if (enable) {
		u32 data;

		mt76_set(dev, MT_MAC_SYS_CTRL, MT_MAC_SYS_CTRL_ENABLE_TX);
		mt76_set(dev, MT_AUTO_RSP_CFG, MT_AUTO_RSP_EN);
		/* enable pa-lna */
		data = mt76_rr(dev, MT_TX_PIN_CFG);
		data |= MT_TX_PIN_CFG_TXANT |
			MT_TX_PIN_CFG_RXANT |
			MT_TX_PIN_RFTR_EN |
			MT_TX_PIN_TRSW_EN;
		mt76_wr(dev, MT_TX_PIN_CFG, data);
	} else {
		mt76_clear(dev, MT_MAC_SYS_CTRL, MT_MAC_SYS_CTRL_ENABLE_TX);
		mt76_clear(dev, MT_AUTO_RSP_CFG, MT_AUTO_RSP_EN);
		/* disable pa-lna */
		mt76_clear(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT);
		mt76_clear(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_RXANT);
	}
	dev->ed_tx_blocked = !enable;
}

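/*
 * (Re)initialize energy-detect CCA state: reset the trigger/silence counters,
 * program a band specific detection threshold into the AGC block when the ED
 * monitor is active (default thresholds otherwise), re-enable TX and restart
 * the learning phase.
 */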
void mt76x02_edcca_init(struct mt76x02_dev *dev)
{
	dev->ed_trigger = 0;
	dev->ed_silent = 0;

	if (dev->ed_monitor) {
		struct ieee80211_channel *chan = dev->mphy.chandef.chan;
		u8 ed_th = chan->band == NL80211_BAND_5GHZ ? 0x0e : 0x20;

		mt76_clear(dev, MT_TX_LINK_CFG, MT_TX_CFACK_EN);
		mt76_set(dev, MT_TXOP_CTRL_CFG, MT_TXOP_ED_CCA_EN);
		mt76_rmw(dev, MT_BBP(AGC, 2), GENMASK(15, 0),
			 ed_th << 8 | ed_th);
		mt76_set(dev, MT_TXOP_HLDR_ET, MT_TXOP_HLDR_TX40M_BLK_EN);
	} else {
		mt76_set(dev, MT_TX_LINK_CFG, MT_TX_CFACK_EN);
		mt76_clear(dev, MT_TXOP_CTRL_CFG, MT_TXOP_ED_CCA_EN);
		if (is_mt76x2(dev)) {
			mt76_wr(dev, MT_BBP(AGC, 2), 0x00007070);
			mt76_set(dev, MT_TXOP_HLDR_ET,
				 MT_TXOP_HLDR_TX40M_BLK_EN);
		} else {
			mt76_wr(dev, MT_BBP(AGC, 2), 0x003a6464);
			mt76_clear(dev, MT_TXOP_HLDR_ET,
				   MT_TXOP_HLDR_TX40M_BLK_EN);
		}
	}
	mt76x02_edcca_tx_enable(dev, true);
	dev->ed_monitor_learning = true;

	/* clear previous CCA timer value */
	mt76_rr(dev, MT_ED_CCA_TIMER);
	dev->ed_time = ktime_get_boottime();
}
EXPORT_SYMBOL_GPL(mt76x02_edcca_init);

#define MT_EDCCA_TH		92
#define MT_EDCCA_BLOCK_TH	2
#define MT_EDCCA_LEARN_TH	50
#define MT_EDCCA_LEARN_CCA	180
#define MT_EDCCA_LEARN_TIMEOUT	(20 * HZ)

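/*
 * Periodic ED/CCA check: derive the busy percentage of the last interval from
 * the ED CCA timer and count consecutive busy/silent intervals. Outside of
 * the learning phase, TX is blocked or re-enabled once either count exceeds
 * MT_EDCCA_BLOCK_TH.
 */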
static void mt76x02_edcca_check(struct mt76x02_dev *dev)
{
	ktime_t cur_time;
	u32 active, val, busy;

	cur_time = ktime_get_boottime();
	val = mt76_rr(dev, MT_ED_CCA_TIMER);

	active = ktime_to_us(ktime_sub(cur_time, dev->ed_time));
	dev->ed_time = cur_time;

	busy = (val * 100) / active;
	busy = min_t(u32, busy, 100);

	if (busy > MT_EDCCA_TH) {
		dev->ed_trigger++;
		dev->ed_silent = 0;
	} else {
		dev->ed_silent++;
		dev->ed_trigger = 0;
	}

	if (dev->cal.agc_lowest_gain &&
	    dev->cal.false_cca > MT_EDCCA_LEARN_CCA &&
	    dev->ed_trigger > MT_EDCCA_LEARN_TH) {
		dev->ed_monitor_learning = false;
		dev->ed_trigger_timeout = jiffies + 20 * HZ;
	} else if (!dev->ed_monitor_learning &&
		   time_is_after_jiffies(dev->ed_trigger_timeout)) {
		dev->ed_monitor_learning = true;
		mt76x02_edcca_tx_enable(dev, true);
	}

	if (dev->ed_monitor_learning)
		return;

	if (dev->ed_trigger > MT_EDCCA_BLOCK_TH && !dev->ed_tx_blocked)
		mt76x02_edcca_tx_enable(dev, false);
	else if (dev->ed_silent > MT_EDCCA_BLOCK_TH && dev->ed_tx_blocked)
		mt76x02_edcca_tx_enable(dev, true);
}

void mt76x02_mac_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       mt76.mac_work.work);
	int i, idx;

	mutex_lock(&dev->mt76.mutex);

	mt76_update_survey(&dev->mt76);
	for (i = 0, idx = 0; i < 16; i++) {
		u32 val = mt76_rr(dev, MT_TX_AGG_CNT(i));

		dev->mt76.aggr_stats[idx++] += val & 0xffff;
		dev->mt76.aggr_stats[idx++] += val >> 16;
	}

	if (!dev->mt76.beacon_mask)
		mt76x02_check_mac_err(dev);

	if (dev->ed_monitor)
		mt76x02_edcca_check(dev);

	mutex_unlock(&dev->mt76.mutex);

	mt76_tx_status_check(&dev->mt76, NULL, false);

	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->mt76.mac_work,
				     MT_MAC_WORK_INTERVAL);
}

void mt76x02_mac_cc_reset(struct mt76x02_dev *dev)
{
	dev->mphy.survey_time = ktime_get_boottime();

	mt76_wr(dev, MT_CH_TIME_CFG,
		MT_CH_TIME_CFG_TIMER_EN |
		MT_CH_TIME_CFG_TX_AS_BUSY |
		MT_CH_TIME_CFG_RX_AS_BUSY |
		MT_CH_TIME_CFG_NAV_AS_BUSY |
		MT_CH_TIME_CFG_EIFS_AS_BUSY |
		MT_CH_CCA_RC_EN |
		FIELD_PREP(MT_CH_TIME_CFG_CH_TIMER_CLR, 1));

	/* channel cycle counters read-and-clear */
	mt76_rr(dev, MT_CH_BUSY);
	mt76_rr(dev, MT_CH_IDLE);
}
EXPORT_SYMBOL_GPL(mt76x02_mac_cc_reset);

void mt76x02_mac_set_bssid(struct mt76x02_dev *dev, u8 idx, const u8 *addr)
{
	idx &= 7;
	mt76_wr(dev, MT_MAC_APC_BSSID_L(idx), get_unaligned_le32(addr));
	mt76_rmw_field(dev, MT_MAC_APC_BSSID_H(idx), MT_MAC_APC_BSSID_H_ADDR,
		       get_unaligned_le16(addr + 4));
}