// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include <linux/etherdevice.h>
#include <linux/timekeeping.h>
#include "mt7915.h"
#include "../dma.h"
#include "mac.h"

#define to_rssi(field, rxv)	((FIELD_GET(field, rxv) - 220) / 2)

#define HE_BITS(f)		cpu_to_le16(IEEE80211_RADIOTAP_HE_##f)
#define HE_PREP(f, m, v)	le16_encode_bits(le32_get_bits(v, MT_CRXV_HE_##m),\
						 IEEE80211_RADIOTAP_HE_##f)

static const struct mt7915_dfs_radar_spec etsi_radar_specs = {
	.pulse_th = { 110, -10, -80, 40, 5200, 128, 5200 },
	.radar_pattern = {
		[5] =  { 1, 0,  6, 32, 28, 0,  990, 5010, 17, 1,  1 },
		[6] =  { 1, 0,  9, 32, 28, 0,  615, 5010, 27, 1,  1 },
		[7] =  { 1, 0, 15, 32, 28, 0,  240,  445, 27, 1,  1 },
		[8] =  { 1, 0, 12, 32, 28, 0,  240,  510, 42, 1,  1 },
		[9] =  { 1, 1,  0,  0,  0, 0, 2490, 3343, 14, 0, 0, 12, 32, 28, { }, 126 },
		[10] = { 1, 1,  0,  0,  0, 0, 2490, 3343, 14, 0, 0, 15, 32, 24, { }, 126 },
		[11] = { 1, 1,  0,  0,  0, 0,  823, 2510, 14, 0, 0, 18, 32, 28, { },  54 },
		[12] = { 1, 1,  0,  0,  0, 0,  823, 2510, 14, 0, 0, 27, 32, 24, { },  54 },
	},
};

static const struct mt7915_dfs_radar_spec fcc_radar_specs = {
	.pulse_th = { 110, -10, -80, 40, 5200, 128, 5200 },
	.radar_pattern = {
		[0] = { 1, 0,  8,  32, 28, 0, 508, 3076, 13, 1,  1 },
		[1] = { 1, 0, 12,  32, 28, 0, 140,  240, 17, 1,  1 },
		[2] = { 1, 0,  8,  32, 28, 0, 190,  510, 22, 1,  1 },
		[3] = { 1, 0,  6,  32, 28, 0, 190,  510, 32, 1,  1 },
		[4] = { 1, 0,  9, 255, 28, 0, 323,  343, 13, 1, 32 },
	},
};

static const struct mt7915_dfs_radar_spec jp_radar_specs = {
	.pulse_th = { 110, -10, -80, 40, 5200, 128, 5200 },
	.radar_pattern = {
		[0] =  { 1, 0,  8,  32, 28, 0,  508, 3076,  13, 1,  1 },
		[1] =  { 1, 0, 12,  32, 28, 0,  140,  240,  17, 1,  1 },
		[2] =  { 1, 0,  8,  32, 28, 0,  190,  510,  22, 1,  1 },
		[3] =  { 1, 0,  6,  32, 28, 0,  190,  510,  32, 1,  1 },
		[4] =  { 1, 0,  9, 255, 28, 0,  323,  343,  13, 1, 32 },
		[13] = { 1, 0,  7,  32, 28, 0, 3836, 3856,  14, 1,  1 },
		[14] = { 1, 0,  6,  32, 28, 0,  615, 5010, 110, 1,  1 },
		[15] = { 1, 1,  0,   0,  0, 0,   15, 5010, 110, 0,  0, 12, 32, 28 },
	},
};

static struct mt76_wcid *mt7915_rx_get_wcid(struct mt7915_dev *dev,
					    u16 idx, bool unicast)
{
	struct mt7915_sta *sta;
	struct mt76_wcid *wcid;

	if (idx >= ARRAY_SIZE(dev->mt76.wcid))
		return NULL;

	wcid = rcu_dereference(dev->mt76.wcid[idx]);
	if (unicast || !wcid)
		return wcid;

	if (!wcid->sta)
		return NULL;

	sta = container_of(wcid, struct mt7915_sta, wcid);
	if (!sta->vif)
		return NULL;

	return &sta->vif->sta.wcid;
}

void mt7915_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps)
{
}

bool mt7915_mac_wtbl_update(struct mt7915_dev *dev, int idx, u32 mask)
{
	mt76_rmw(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_WLAN_IDX,
		 FIELD_PREP(MT_WTBL_UPDATE_WLAN_IDX, idx) | mask);

	return mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY,
			 0, 5000);
}

static u32 mt7915_mac_wtbl_lmac_addr(struct mt7915_dev *dev, u16 wcid)
{
	mt76_wr(dev, MT_WTBLON_TOP_WDUCR,
		FIELD_PREP(MT_WTBLON_TOP_WDUCR_GROUP, (wcid >> 7)));

	return MT_WTBL_LMAC_OFFS(wcid, 0);
}

/* TODO: use txfree airtime info to avoid runtime accessing in the long run */
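/* Read the per-AC TX/RX admission (airtime) counters from each queued
 * station's WTBL entry and report the deltas to mac80211 for airtime
 * fairness accounting.
 */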
static void mt7915_mac_sta_poll(struct mt7915_dev *dev)
{
	static const u8 ac_to_tid[] = {
		[IEEE80211_AC_BE] = 0,
		[IEEE80211_AC_BK] = 1,
		[IEEE80211_AC_VI] = 4,
		[IEEE80211_AC_VO] = 6
	};
	struct ieee80211_sta *sta;
	struct mt7915_sta *msta;
	u32 tx_time[IEEE80211_NUM_ACS], rx_time[IEEE80211_NUM_ACS];
	LIST_HEAD(sta_poll_list);
	int i;

	spin_lock_bh(&dev->sta_poll_lock);
	list_splice_init(&dev->sta_poll_list, &sta_poll_list);
	spin_unlock_bh(&dev->sta_poll_lock);

	rcu_read_lock();

	while (true) {
		bool clear = false;
		u32 addr;
		u16 idx;

		spin_lock_bh(&dev->sta_poll_lock);
		if (list_empty(&sta_poll_list)) {
			spin_unlock_bh(&dev->sta_poll_lock);
			break;
		}
		msta = list_first_entry(&sta_poll_list,
					struct mt7915_sta, poll_list);
		list_del_init(&msta->poll_list);
		spin_unlock_bh(&dev->sta_poll_lock);

		idx = msta->wcid.idx;
		addr = mt7915_mac_wtbl_lmac_addr(dev, idx) + 20 * 4;

		for (i = 0; i < IEEE80211_NUM_ACS; i++) {
			u32 tx_last = msta->airtime_ac[i];
			u32 rx_last = msta->airtime_ac[i + 4];

			msta->airtime_ac[i] = mt76_rr(dev, addr);
			msta->airtime_ac[i + 4] = mt76_rr(dev, addr + 4);

			tx_time[i] = msta->airtime_ac[i] - tx_last;
			rx_time[i] = msta->airtime_ac[i + 4] - rx_last;

			if ((tx_last | rx_last) & BIT(30))
				clear = true;

			addr += 8;
		}

		if (clear) {
			mt7915_mac_wtbl_update(dev, idx,
					       MT_WTBL_UPDATE_ADM_COUNT_CLEAR);
			memset(msta->airtime_ac, 0, sizeof(msta->airtime_ac));
		}

		if (!msta->wcid.sta)
			continue;

		sta = container_of((void *)msta, struct ieee80211_sta,
				   drv_priv);
		for (i = 0; i < IEEE80211_NUM_ACS; i++) {
			u8 q = mt7915_lmac_mapping(dev, i);
			u32 tx_cur = tx_time[q];
			u32 rx_cur = rx_time[q];
			u8 tid = ac_to_tid[i];

			if (!tx_cur && !rx_cur)
				continue;

			ieee80211_sta_register_airtime(sta, tid, tx_cur,
						       rx_cur);
		}
	}

	rcu_read_unlock();
}

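/* Translate the RU allocation index carried in the P-RXV words into the
 * nl80211 RU size and offset used for HE radiotap reporting.
 */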
static void
mt7915_mac_decode_he_radiotap_ru(struct mt76_rx_status *status,
				 struct ieee80211_radiotap_he *he,
				 __le32 *rxv)
{
	u32 ru_h, ru_l;
	u8 ru, offs = 0;

	ru_l = FIELD_GET(MT_PRXV_HE_RU_ALLOC_L, le32_to_cpu(rxv[0]));
	ru_h = FIELD_GET(MT_PRXV_HE_RU_ALLOC_H, le32_to_cpu(rxv[1]));
	ru = (u8)(ru_l | ru_h << 4);

	status->bw = RATE_INFO_BW_HE_RU;

	switch (ru) {
	case 0 ... 36:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_26;
		offs = ru;
		break;
	case 37 ... 52:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_52;
		offs = ru - 37;
		break;
	case 53 ... 60:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_106;
		offs = ru - 53;
		break;
	case 61 ... 64:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_242;
		offs = ru - 61;
		break;
	case 65 ... 66:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_484;
		offs = ru - 65;
		break;
	case 67:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_996;
		break;
	case 68:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_2x996;
		break;
	}

	he->data1 |= HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);
	he->data2 |= HE_BITS(DATA2_RU_OFFSET_KNOWN) |
		     le16_encode_bits(offs,
				      IEEE80211_RADIOTAP_HE_DATA2_RU_OFFSET);
}

static void
mt7915_mac_decode_he_radiotap(struct sk_buff *skb,
			      struct mt76_rx_status *status,
			      __le32 *rxv, u32 phy)
{
	/* TODO: struct ieee80211_radiotap_he_mu */
	static const struct ieee80211_radiotap_he known = {
		.data1 = HE_BITS(DATA1_DATA_MCS_KNOWN) |
			 HE_BITS(DATA1_DATA_DCM_KNOWN) |
			 HE_BITS(DATA1_STBC_KNOWN) |
			 HE_BITS(DATA1_CODING_KNOWN) |
			 HE_BITS(DATA1_LDPC_XSYMSEG_KNOWN) |
			 HE_BITS(DATA1_DOPPLER_KNOWN) |
			 HE_BITS(DATA1_BSS_COLOR_KNOWN),
		.data2 = HE_BITS(DATA2_GI_KNOWN) |
			 HE_BITS(DATA2_TXBF_KNOWN) |
			 HE_BITS(DATA2_PE_DISAMBIG_KNOWN) |
			 HE_BITS(DATA2_TXOP_KNOWN),
	};
	struct ieee80211_radiotap_he *he = NULL;
	u32 ltf_size = le32_get_bits(rxv[2], MT_CRXV_HE_LTF_SIZE) + 1;

	he = skb_push(skb, sizeof(known));
	memcpy(he, &known, sizeof(known));

	he->data3 = HE_PREP(DATA3_BSS_COLOR, BSS_COLOR, rxv[14]) |
		    HE_PREP(DATA3_LDPC_XSYMSEG, LDPC_EXT_SYM, rxv[2]);
	he->data5 = HE_PREP(DATA5_PE_DISAMBIG, PE_DISAMBIG, rxv[2]) |
		    le16_encode_bits(ltf_size,
				     IEEE80211_RADIOTAP_HE_DATA5_LTF_SIZE);
	he->data6 = HE_PREP(DATA6_TXOP, TXOP_DUR, rxv[14]) |
		    HE_PREP(DATA6_DOPPLER, DOPPLER, rxv[14]);

	switch (phy) {
	case MT_PHY_TYPE_HE_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BEAM_CHANGE_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE_KNOWN);

		he->data3 |= HE_PREP(DATA3_BEAM_CHANGE, BEAM_CHNG, rxv[14]) |
			     HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);
		break;
	case MT_PHY_TYPE_HE_EXT_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_EXT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_MU:
		he->data1 |= HE_BITS(DATA1_FORMAT_MU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);

		mt7915_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	case MT_PHY_TYPE_HE_TB:
		he->data1 |= HE_BITS(DATA1_FORMAT_TRIG) |
			     HE_BITS(DATA1_SPTL_REUSE_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE2_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE3_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE4_KNOWN);

		he->data4 |= HE_PREP(DATA4_TB_SPTL_REUSE1, SR_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE2, SR1_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE3, SR2_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE4, SR3_MASK, rxv[11]);

		mt7915_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	default:
		break;
	}
}

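/* Parse the RX descriptor and its optional groups (including the P-RXV and
 * C-RXV vectors) of a received frame into struct mt76_rx_status, then strip
 * the descriptor from the skb.
 */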
int mt7915_mac_fill_rx(struct mt7915_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *mphy = &dev->mt76.phy;
	struct mt7915_phy *phy = &dev->phy;
	struct ieee80211_supported_band *sband;
	struct ieee80211_hdr *hdr;
	__le32 *rxd = (__le32 *)skb->data;
	__le32 *rxv = NULL;
	u32 mode = 0;
	u32 rxd1 = le32_to_cpu(rxd[1]);
	u32 rxd2 = le32_to_cpu(rxd[2]);
	u32 rxd3 = le32_to_cpu(rxd[3]);
	bool unicast, insert_ccmp_hdr = false;
	u8 remove_pad;
	int i, idx;

	memset(status, 0, sizeof(*status));

	if (rxd1 & MT_RXD1_NORMAL_BAND_IDX) {
		mphy = dev->mt76.phy2;
		if (!mphy)
			return -EINVAL;

		phy = mphy->priv;
		status->ext_phy = true;
	}

	if (!test_bit(MT76_STATE_RUNNING, &mphy->state))
		return -EINVAL;

	unicast = FIELD_GET(MT_RXD3_NORMAL_ADDR_TYPE, rxd3) == MT_RXD3_NORMAL_U2M;
	idx = FIELD_GET(MT_RXD1_NORMAL_WLAN_IDX, rxd1);
	status->wcid = mt7915_rx_get_wcid(dev, idx, unicast);

	if (status->wcid) {
		struct mt7915_sta *msta;

		msta = container_of(status->wcid, struct mt7915_sta, wcid);
		spin_lock_bh(&dev->sta_poll_lock);
		if (list_empty(&msta->poll_list))
			list_add_tail(&msta->poll_list, &dev->sta_poll_list);
		spin_unlock_bh(&dev->sta_poll_lock);
	}

	status->freq = mphy->chandef.chan->center_freq;
	status->band = mphy->chandef.chan->band;
	if (status->band == NL80211_BAND_5GHZ)
		sband = &mphy->sband_5g.sband;
	else
		sband = &mphy->sband_2g.sband;

	if (!sband->channels)
		return -EINVAL;

	if (rxd1 & MT_RXD1_NORMAL_FCS_ERR)
		status->flag |= RX_FLAG_FAILED_FCS_CRC;

	if (rxd1 & MT_RXD1_NORMAL_TKIP_MIC_ERR)
		status->flag |= RX_FLAG_MMIC_ERROR;

	if (FIELD_GET(MT_RXD1_NORMAL_SEC_MODE, rxd1) != 0 &&
	    !(rxd1 & (MT_RXD1_NORMAL_CLM | MT_RXD1_NORMAL_CM))) {
		status->flag |= RX_FLAG_DECRYPTED;
		status->flag |= RX_FLAG_IV_STRIPPED;
		status->flag |= RX_FLAG_MMIC_STRIPPED | RX_FLAG_MIC_STRIPPED;
	}

	if (!(rxd2 & MT_RXD2_NORMAL_NON_AMPDU)) {
		status->flag |= RX_FLAG_AMPDU_DETAILS;

		/* all subframes of an A-MPDU have the same timestamp */
		if (phy->rx_ampdu_ts != rxd[14]) {
			if (!++phy->ampdu_ref)
				phy->ampdu_ref++;
		}
		phy->rx_ampdu_ts = rxd[14];

		status->ampdu_ref = phy->ampdu_ref;
	}

	remove_pad = FIELD_GET(MT_RXD2_NORMAL_HDR_OFFSET, rxd2);

	if (rxd2 & MT_RXD2_NORMAL_MAX_LEN_ERROR)
		return -EINVAL;

	rxd += 6;
	if (rxd1 & MT_RXD1_NORMAL_GROUP_4) {
		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	if (rxd1 & MT_RXD1_NORMAL_GROUP_1) {
		u8 *data = (u8 *)rxd;

		if (status->flag & RX_FLAG_DECRYPTED) {
			status->iv[0] = data[5];
			status->iv[1] = data[4];
			status->iv[2] = data[3];
			status->iv[3] = data[2];
			status->iv[4] = data[1];
			status->iv[5] = data[0];

			insert_ccmp_hdr = FIELD_GET(MT_RXD2_NORMAL_FRAG, rxd2);
		}
		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	if (rxd1 & MT_RXD1_NORMAL_GROUP_2) {
		rxd += 2;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}

	/* RXD Group 3 - P-RXV */
	if (rxd1 & MT_RXD1_NORMAL_GROUP_3) {
		u32 v0, v1, v2;

		rxv = rxd;
		rxd += 2;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;

		v0 = le32_to_cpu(rxv[0]);
		v1 = le32_to_cpu(rxv[1]);
		v2 = le32_to_cpu(rxv[2]);

		if (v0 & MT_PRXV_HT_AD_CODE)
			status->enc_flags |= RX_ENC_FLAG_LDPC;

		status->chains = mphy->antenna_mask;
		status->chain_signal[0] = to_rssi(MT_PRXV_RCPI0, v1);
		status->chain_signal[1] = to_rssi(MT_PRXV_RCPI1, v1);
		status->chain_signal[2] = to_rssi(MT_PRXV_RCPI2, v1);
		status->chain_signal[3] = to_rssi(MT_PRXV_RCPI3, v1);
		status->signal = status->chain_signal[0];

		for (i = 1; i < hweight8(mphy->antenna_mask); i++) {
			if (!(status->chains & BIT(i)))
				continue;

			status->signal = max(status->signal,
					     status->chain_signal[i]);
		}

		/* RXD Group 5 - C-RXV */
		if (rxd1 & MT_RXD1_NORMAL_GROUP_5) {
			u8 stbc = FIELD_GET(MT_CRXV_HT_STBC, v2);
			u8 gi = FIELD_GET(MT_CRXV_HT_SHORT_GI, v2);
			bool cck = false;

			rxd += 18;
			if ((u8 *)rxd - skb->data >= skb->len)
				return -EINVAL;

			idx = i = FIELD_GET(MT_PRXV_TX_RATE, v0);
			mode = FIELD_GET(MT_CRXV_TX_MODE, v2);

			switch (mode) {
			case MT_PHY_TYPE_CCK:
				cck = true;
				fallthrough;
			case MT_PHY_TYPE_OFDM:
				i = mt76_get_rate(&dev->mt76, sband, i, cck);
				break;
			case MT_PHY_TYPE_HT_GF:
			case MT_PHY_TYPE_HT:
				status->encoding = RX_ENC_HT;
				if (i > 31)
					return -EINVAL;
				break;
			case MT_PHY_TYPE_VHT:
				status->nss =
					FIELD_GET(MT_PRXV_NSTS, v0) + 1;
				status->encoding = RX_ENC_VHT;
				if (i > 9)
					return -EINVAL;
				break;
			case MT_PHY_TYPE_HE_MU:
				status->flag |= RX_FLAG_RADIOTAP_HE_MU;
				fallthrough;
			case MT_PHY_TYPE_HE_SU:
			case MT_PHY_TYPE_HE_EXT_SU:
			case MT_PHY_TYPE_HE_TB:
				status->nss =
					FIELD_GET(MT_PRXV_NSTS, v0) + 1;
				status->encoding = RX_ENC_HE;
				status->flag |= RX_FLAG_RADIOTAP_HE;
				i &= GENMASK(3, 0);

				if (gi <= NL80211_RATE_INFO_HE_GI_3_2)
					status->he_gi = gi;

				status->he_dcm = !!(idx & MT_PRXV_TX_DCM);
				break;
			default:
				return -EINVAL;
			}
			status->rate_idx = i;

			switch (FIELD_GET(MT_CRXV_FRAME_MODE, v2)) {
			case IEEE80211_STA_RX_BW_20:
				break;
			case IEEE80211_STA_RX_BW_40:
				if (mode & MT_PHY_TYPE_HE_EXT_SU &&
				    (idx & MT_PRXV_TX_ER_SU_106T)) {
					status->bw = RATE_INFO_BW_HE_RU;
					status->he_ru =
						NL80211_RATE_INFO_HE_RU_ALLOC_106;
				} else {
					status->bw = RATE_INFO_BW_40;
				}
				break;
			case IEEE80211_STA_RX_BW_80:
				status->bw = RATE_INFO_BW_80;
				break;
			case IEEE80211_STA_RX_BW_160:
				status->bw = RATE_INFO_BW_160;
				break;
			default:
				return -EINVAL;
			}

			status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;
			if (mode < MT_PHY_TYPE_HE_SU && gi)
				status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		}
	}

	skb_pull(skb, (u8 *)rxd - skb->data + 2 * remove_pad);

	if (insert_ccmp_hdr) {
		u8 key_id = FIELD_GET(MT_RXD1_NORMAL_KEY_ID, rxd1);

		mt76_insert_ccmp_hdr(skb, key_id);
	}

	if (rxv && status->flag & RX_FLAG_RADIOTAP_HE)
		mt7915_mac_decode_he_radiotap(skb, status, rxv, mode);

	hdr = mt76_skb_get_hdr(skb);
	if (!status->wcid || !ieee80211_is_data_qos(hdr->frame_control))
		return 0;

	status->aggr = unicast &&
		       !ieee80211_is_qos_nullfunc(hdr->frame_control);
	status->tid = *ieee80211_get_qos_ctl(hdr) & IEEE80211_QOS_CTL_TID_MASK;
	status->seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(hdr->seq_ctrl));

	return 0;
}

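/* Fill the eight-dword TX descriptor (TXWI) for a frame: packet format and
 * queue index, header format, security bits, fixed-rate selection for
 * non-data/multicast frames, and the remaining-TX-count / sequence-number
 * fields.
 */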
void mt7915_mac_write_txwi(struct mt7915_dev *dev, __le32 *txwi,
			   struct sk_buff *skb, struct mt76_wcid *wcid,
			   struct ieee80211_key_conf *key, bool beacon)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data;
	bool multicast = is_multicast_ether_addr(hdr->addr1);
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_phy *mphy = &dev->mphy;
	bool ext_phy = info->hw_queue & MT_TX_HW_QUEUE_EXT_PHY;
	u8 fc_type, fc_stype, p_fmt, q_idx, omac_idx = 0, wmm_idx = 0;
	__le16 fc = hdr->frame_control;
	u16 tx_count = 15, seqno = 0;
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	u32 val;

	if (vif) {
		struct mt7915_vif *mvif = (struct mt7915_vif *)vif->drv_priv;

		omac_idx = mvif->omac_idx;
		wmm_idx = mvif->wmm_idx;
	}

	if (ext_phy && dev->mt76.phy2)
		mphy = dev->mt76.phy2;

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	txwi[4] = 0;
	txwi[5] = 0;
	txwi[6] = 0;

	if (beacon) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_BCN0;
	} else if (skb_get_queue_mapping(skb) >= MT_TXQ_PSD) {
		p_fmt = MT_TX_TYPE_CT;
		q_idx = MT_LMAC_ALTX0;
	} else {
		p_fmt = MT_TX_TYPE_CT;
		q_idx = wmm_idx * MT7915_MAX_WMM_SETS +
			mt7915_lmac_mapping(dev, skb_get_queue_mapping(skb));
	}

	if (ieee80211_is_action(fc) &&
	    mgmt->u.action.category == WLAN_CATEGORY_BACK &&
	    mgmt->u.action.u.addba_req.action_code == WLAN_ACTION_ADDBA_REQ) {
		u16 capab = le16_to_cpu(mgmt->u.action.u.addba_req.capab);

		txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA);
		tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr;
		u16 control = le16_to_cpu(bar->control);

		tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control);
	}

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + MT_TXD_SIZE) |
	      FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
	      FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID, tid) |
	      FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);

	if (ext_phy && q_idx >= MT_LMAC_ALTX0 && q_idx <= MT_LMAC_BCN0)
		val |= MT_TXD1_TGID;

	txwi[1] = cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST, multicast);
	if (key) {
		if (multicast && ieee80211_is_robust_mgmt_frame(skb) &&
		    key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) {
			val |= MT_TXD2_BIP;
			txwi[3] = 0;
		} else {
			txwi[3] = cpu_to_le32(MT_TXD3_PROTECT_FRAME);
		}
	} else {
		txwi[3] = 0;
	}
	txwi[2] = cpu_to_le32(val);

	if (!ieee80211_is_data(fc) || multicast) {
		u16 rate;

		/* hardware won't add HTC for mgmt/ctrl frame */
		txwi[2] |= cpu_to_le32(MT_TXD2_FIX_RATE | MT_TXD2_HTC_VLD);

		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			rate = MT7915_5G_RATE_DEFAULT;
		else
			rate = MT7915_2G_RATE_DEFAULT;

		val = MT_TXD6_FIXED_BW |
		      FIELD_PREP(MT_TXD6_TX_RATE, rate);
		txwi[6] |= cpu_to_le32(val);
		txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE);
	}

	if (!ieee80211_is_beacon(fc))
		txwi[3] |= cpu_to_le32(MT_TXD3_SW_POWER_MGMT);
	else
		tx_count = 0x1f;

	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		txwi[3] |= cpu_to_le32(MT_TXD3_NO_ACK);

	val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
	if (wcid->amsdu)
		val |= MT_TXD7_HW_AMSDU;
	txwi[7] = cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, tx_count);
	if (info->flags & IEEE80211_TX_CTL_INJECTED) {
		seqno = le16_to_cpu(hdr->seq_ctrl);

		if (ieee80211_is_back_req(hdr->frame_control)) {
			struct ieee80211_bar *bar;

			bar = (struct ieee80211_bar *)skb->data;
			seqno = le16_to_cpu(bar->start_seq_num);
		}

		val |= MT_TXD3_SN_VALID |
		       FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno));
	}
	txwi[3] |= cpu_to_le32(val);
}

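/* Prepare a frame for cut-through (CT) TX: write the TXWI, fill the TXP
 * buffer descriptors for the payload fragments, pass only the partial header
 * to the firmware, and allocate an idr token so the frame can be matched
 * against the TX-free event on completion.
 */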
int mt7915_tx_prepare_skb(struct mt76_dev *mdev, void *txwi_ptr,
			  enum mt76_txq_id qid, struct mt76_wcid *wcid,
			  struct ieee80211_sta *sta,
			  struct mt76_tx_info *tx_info)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx_info->skb->data;
	struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76);
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx_info->skb);
	struct ieee80211_key_conf *key = info->control.hw_key;
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_tx_cb *cb = mt76_tx_skb_cb(tx_info->skb);
	struct mt76_txwi_cache *t;
	struct mt7915_txp *txp;
	int id, i, nbuf = tx_info->nbuf - 1;
	u8 *txwi = (u8 *)txwi_ptr;

	if (!wcid)
		wcid = &dev->mt76.global_wcid;

	cb->wcid = wcid->idx;

	mt7915_mac_write_txwi(dev, txwi_ptr, tx_info->skb, wcid, key,
			      false);

	txp = (struct mt7915_txp *)(txwi + MT_TXD_SIZE);
	for (i = 0; i < nbuf; i++) {
		txp->buf[i] = cpu_to_le32(tx_info->buf[i + 1].addr);
		txp->len[i] = cpu_to_le16(tx_info->buf[i + 1].len);
	}
	txp->nbuf = nbuf;

	/* pass partial skb header to fw */
	tx_info->buf[1].len = MT_CT_PARSE_LEN;
	tx_info->buf[1].skip_unmap = true;
	tx_info->nbuf = MT_CT_DMA_BUF_NUM;

	txp->flags = cpu_to_le16(MT_CT_INFO_APPLY_TXD);

	if (!key)
		txp->flags |= cpu_to_le16(MT_CT_INFO_NONE_CIPHER_FRAME);

	if (ieee80211_is_mgmt(hdr->frame_control))
		txp->flags |= cpu_to_le16(MT_CT_INFO_MGMT_FRAME);

	if (vif) {
		struct mt7915_vif *mvif = (struct mt7915_vif *)vif->drv_priv;

		txp->bss_idx = mvif->idx;
	}

	t = (struct mt76_txwi_cache *)(txwi + mdev->drv->txwi_size);
	t->skb = tx_info->skb;

	spin_lock_bh(&dev->token_lock);
	id = idr_alloc(&dev->token, t, 0, MT7915_TOKEN_SIZE, GFP_ATOMIC);
	spin_unlock_bh(&dev->token_lock);
	if (id < 0)
		return id;

	txp->token = cpu_to_le16(id);
	txp->rept_wds_wcid = 0xff;
	tx_info->skb = DMA_DUMMY_DATA;

	return 0;
}

static void
mt7915_tx_check_aggr(struct ieee80211_sta *sta, __le32 *txwi)
{
	struct mt7915_sta *msta;
	u16 fc, tid;
	u32 val;

	if (!sta || !sta->ht_cap.ht_supported)
		return;

	tid = FIELD_GET(MT_TXD1_TID, le32_to_cpu(txwi[1]));
	if (tid >= 6) /* skip VO queue */
		return;

	val = le32_to_cpu(txwi[2]);
	fc = FIELD_GET(MT_TXD2_FRAME_TYPE, val) << 2 |
	     FIELD_GET(MT_TXD2_SUB_TYPE, val) << 4;
	if (unlikely(fc != (IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_DATA)))
		return;

	msta = (struct mt7915_sta *)sta->drv_priv;
	if (!test_and_set_bit(tid, &msta->ampdu_state))
		ieee80211_start_tx_ba_session(sta, tid, 0);
}

static inline void
mt7915_tx_status(struct ieee80211_sta *sta, struct ieee80211_hw *hw,
		 struct ieee80211_tx_info *info, struct sk_buff *skb)
{
	struct ieee80211_tx_status status = {
		.sta = sta,
		.info = info,
	};

	if (skb)
		status.skb = skb;

	if (sta) {
		struct mt7915_sta *msta;

		msta = (struct mt7915_sta *)sta->drv_priv;
		status.rate = &msta->stats.tx_rate;
	}

	/* use status_ext to report HE rate */
	ieee80211_tx_status_ext(hw, &status);
}

static void
mt7915_tx_complete_status(struct mt76_dev *mdev, struct sk_buff *skb,
			  struct ieee80211_sta *sta, u8 stat)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct ieee80211_hw *hw;

	hw = mt76_tx_status_get_hw(mdev, skb);

	if (info->flags & IEEE80211_TX_CTL_AMPDU)
		info->flags |= IEEE80211_TX_STAT_AMPDU;

	if (stat)
		ieee80211_tx_info_clear_status(info);

	if (!(info->flags & IEEE80211_TX_CTL_NO_ACK))
		info->flags |= IEEE80211_TX_STAT_ACK;

	info->status.tx_time = 0;

	if (info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) {
		mt7915_tx_status(sta, hw, info, skb);
		return;
	}

	if (sta || !(info->flags & IEEE80211_TX_CTL_NO_ACK))
		mt7915_tx_status(sta, hw, info, NULL);

	ieee80211_free_txskb(hw, skb);
}

void mt7915_txp_skb_unmap(struct mt76_dev *dev,
			  struct mt76_txwi_cache *t)
{
	struct mt7915_txp *txp;
	int i;

	txp = mt7915_txwi_to_txp(dev, t);
	for (i = 0; i < txp->nbuf; i++)
		dma_unmap_single(dev->dev, le32_to_cpu(txp->buf[i]),
				 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);
}

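/* Handle a TX-free event from the firmware: look up each completed MSDU by
 * its token, unmap the payload buffers, report TX status to mac80211 and
 * recycle the txwi cache entries.
 */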
void mt7915_mac_tx_free(struct mt7915_dev *dev, struct sk_buff *skb)
{
	struct mt7915_tx_free *free = (struct mt7915_tx_free *)skb->data;
	struct mt76_dev *mdev = &dev->mt76;
	struct mt76_txwi_cache *txwi;
	struct ieee80211_sta *sta = NULL;
	u8 i, count;

	/* clean DMA queues and unmap buffers first */
	mt76_queue_tx_cleanup(dev, MT_TXQ_PSD, false);
	mt76_queue_tx_cleanup(dev, MT_TXQ_BE, false);

	/*
	 * TODO: MT_TX_FREE_LATENCY is the MSDU time from when the TXD is queued
	 * into the PLE until the ACK is received or the frame is dropped by the
	 * hw (air + hw queue time). Avoid accessing the WTBL for Tx airtime and
	 * use that field instead.
	 */
	count = FIELD_GET(MT_TX_FREE_MSDU_CNT, le16_to_cpu(free->ctrl));
	for (i = 0; i < count; i++) {
		u32 msdu, info = le32_to_cpu(free->info[i]);
		u8 stat;

		/*
		 * 1'b1: new wcid pair.
		 * 1'b0: msdu_id with the same 'wcid pair' as above.
		 */
		if (info & MT_TX_FREE_PAIR) {
			struct mt7915_sta *msta;
			struct mt7915_phy *phy;
			struct mt76_wcid *wcid;
			u16 idx;

			count++;
			idx = FIELD_GET(MT_TX_FREE_WLAN_ID, info);
			wcid = rcu_dereference(dev->mt76.wcid[idx]);
			sta = wcid_to_sta(wcid);
			if (!sta)
				continue;

			msta = container_of(wcid, struct mt7915_sta, wcid);
			phy = msta->vif->phy;
			spin_lock_bh(&dev->sta_poll_lock);
			if (list_empty(&msta->stats_list))
				list_add_tail(&msta->stats_list, &phy->stats_list);
			if (list_empty(&msta->poll_list))
				list_add_tail(&msta->poll_list, &dev->sta_poll_list);
			spin_unlock_bh(&dev->sta_poll_lock);
		}

		msdu = FIELD_GET(MT_TX_FREE_MSDU_ID, info);
		stat = FIELD_GET(MT_TX_FREE_STATUS, info);

		spin_lock_bh(&dev->token_lock);
		txwi = idr_remove(&dev->token, msdu);
		spin_unlock_bh(&dev->token_lock);

		if (!txwi)
			continue;

		mt7915_txp_skb_unmap(mdev, txwi);
		if (txwi->skb) {
			struct ieee80211_tx_info *info = IEEE80211_SKB_CB(txwi->skb);
			void *txwi_ptr = mt76_get_txwi_ptr(mdev, txwi);

			if (likely(txwi->skb->protocol != cpu_to_be16(ETH_P_PAE)))
				mt7915_tx_check_aggr(sta, txwi_ptr);

			if (sta && !info->tx_time_est) {
				struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
				int pending;

				pending = atomic_dec_return(&wcid->non_aql_packets);
				if (pending < 0)
					atomic_cmpxchg(&wcid->non_aql_packets, pending, 0);
			}

			mt7915_tx_complete_status(mdev, txwi->skb, sta, stat);
			txwi->skb = NULL;
		}

		mt76_put_txwi(mdev, txwi);
	}
	dev_kfree_skb(skb);

	mt7915_mac_sta_poll(dev);
	mt76_worker_schedule(&dev->mt76.tx_worker);
}

void mt7915_tx_complete_skb(struct mt76_dev *mdev, struct mt76_queue_entry *e)
{
	struct mt7915_dev *dev;

	if (!e->txwi) {
		dev_kfree_skb_any(e->skb);
		return;
	}

	dev = container_of(mdev, struct mt7915_dev, mt76);

	/* error path */
	if (e->skb == DMA_DUMMY_DATA) {
		struct mt76_txwi_cache *t;
		struct mt7915_txp *txp;

		txp = mt7915_txwi_to_txp(mdev, e->txwi);

		spin_lock_bh(&dev->token_lock);
		t = idr_remove(&dev->token, le16_to_cpu(txp->token));
		spin_unlock_bh(&dev->token_lock);
		e->skb = t ? t->skb : NULL;
	}

	if (e->skb) {
		struct mt76_tx_cb *cb = mt76_tx_skb_cb(e->skb);
		struct mt76_wcid *wcid;

		wcid = rcu_dereference(dev->mt76.wcid[cb->wcid]);

		mt7915_tx_complete_status(mdev, e->skb, wcid_to_sta(wcid), 0);
	}
}

void mt7915_mac_cca_stats_reset(struct mt7915_phy *phy)
{
	struct mt7915_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;
	u32 reg = MT_WF_PHY_RX_CTRL1(ext_phy);

	mt7915_l2_clear(dev, reg, MT_WF_PHY_RX_CTRL1_STSCNT_EN);
	mt7915_l2_set(dev, reg, BIT(11) | BIT(9));
}

void mt7915_mac_reset_counters(struct mt7915_phy *phy)
{
	struct mt7915_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;
	int i;

	for (i = 0; i < 4; i++) {
		mt76_rr(dev, MT_TX_AGG_CNT(ext_phy, i));
		mt76_rr(dev, MT_TX_AGG_CNT2(ext_phy, i));
	}

	if (ext_phy) {
		dev->mt76.phy2->survey_time = ktime_get_boottime();
		i = ARRAY_SIZE(dev->mt76.aggr_stats) / 2;
	} else {
		dev->mt76.phy.survey_time = ktime_get_boottime();
		i = 0;
	}
	memset(&dev->mt76.aggr_stats[i], 0, sizeof(dev->mt76.aggr_stats) / 2);

	/* reset airtime counters */
	mt76_rr(dev, MT_MIB_SDR9(ext_phy));
	mt76_rr(dev, MT_MIB_SDR36(ext_phy));
	mt76_rr(dev, MT_MIB_SDR37(ext_phy));

	mt76_set(dev, MT_WF_RMAC_MIB_TIME0(ext_phy),
		 MT_WF_RMAC_MIB_RXTIME_CLR);
	mt76_set(dev, MT_WF_RMAC_MIB_AIRTIME0(ext_phy),
		 MT_WF_RMAC_MIB_RXTIME_CLR);
}

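/* Program SIFS, slot time and the CCK/OFDM CCA timeouts for a band; the
 * timeout offset scales with the coverage class, and both bands use the
 * larger of the two configured coverage classes.
 */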
void mt7915_mac_set_timing(struct mt7915_phy *phy)
{
	s16 coverage_class = phy->coverage_class;
	struct mt7915_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;
	u32 val, reg_offset;
	u32 cck = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, 231) |
		  FIELD_PREP(MT_TIMEOUT_VAL_CCA, 48);
	u32 ofdm = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, 60) |
		   FIELD_PREP(MT_TIMEOUT_VAL_CCA, 28);
	int sifs, offset;
	bool is_5ghz = phy->mt76->chandef.chan->band == NL80211_BAND_5GHZ;

	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
		return;

	if (is_5ghz)
		sifs = 16;
	else
		sifs = 10;

	if (ext_phy) {
		coverage_class = max_t(s16, dev->phy.coverage_class,
				       coverage_class);
	} else {
		struct mt7915_phy *phy_ext = mt7915_ext_phy(dev);

		if (phy_ext)
			coverage_class = max_t(s16, phy_ext->coverage_class,
					       coverage_class);
	}
	mt76_set(dev, MT_ARB_SCR(ext_phy),
		 MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
	udelay(1);

	offset = 3 * coverage_class;
	reg_offset = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, offset) |
		     FIELD_PREP(MT_TIMEOUT_VAL_CCA, offset);

	mt76_wr(dev, MT_TMAC_CDTR(ext_phy), cck + reg_offset);
	mt76_wr(dev, MT_TMAC_ODTR(ext_phy), ofdm + reg_offset);
	mt76_wr(dev, MT_TMAC_ICR0(ext_phy),
		FIELD_PREP(MT_IFS_EIFS, 360) |
		FIELD_PREP(MT_IFS_RIFS, 2) |
		FIELD_PREP(MT_IFS_SIFS, sifs) |
		FIELD_PREP(MT_IFS_SLOT, phy->slottime));

	if (phy->slottime < 20 || is_5ghz)
		val = MT7915_CFEND_RATE_DEFAULT;
	else
		val = MT7915_CFEND_RATE_11B;

	mt76_rmw_field(dev, MT_AGG_ACR0(ext_phy), MT_AGG_ACR_CFEND_RATE, val);
	mt76_clear(dev, MT_ARB_SCR(ext_phy),
		   MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
}

/*
 * TODO: the mib counters are read-clear and many HE features need this info,
 * so the firmware prepares a task that reads the fields out to a shared
 * structure. Users should switch to the event format to avoid race
 * conditions.
 */
1089*4882a593Smuzhiyun static void
mt7915_phy_update_channel(struct mt76_phy * mphy,int idx)1090*4882a593Smuzhiyun mt7915_phy_update_channel(struct mt76_phy *mphy, int idx)
1091*4882a593Smuzhiyun {
1092*4882a593Smuzhiyun struct mt7915_dev *dev = container_of(mphy->dev, struct mt7915_dev, mt76);
1093*4882a593Smuzhiyun struct mt76_channel_state *state;
1094*4882a593Smuzhiyun u64 busy_time, tx_time, rx_time, obss_time;
1095*4882a593Smuzhiyun
1096*4882a593Smuzhiyun busy_time = mt76_get_field(dev, MT_MIB_SDR9(idx),
1097*4882a593Smuzhiyun MT_MIB_SDR9_BUSY_MASK);
1098*4882a593Smuzhiyun tx_time = mt76_get_field(dev, MT_MIB_SDR36(idx),
1099*4882a593Smuzhiyun MT_MIB_SDR36_TXTIME_MASK);
1100*4882a593Smuzhiyun rx_time = mt76_get_field(dev, MT_MIB_SDR37(idx),
1101*4882a593Smuzhiyun MT_MIB_SDR37_RXTIME_MASK);
1102*4882a593Smuzhiyun obss_time = mt76_get_field(dev, MT_WF_RMAC_MIB_AIRTIME14(idx),
1103*4882a593Smuzhiyun MT_MIB_OBSSTIME_MASK);
1104*4882a593Smuzhiyun
1105*4882a593Smuzhiyun /* TODO: state->noise */
1106*4882a593Smuzhiyun state = mphy->chan_state;
1107*4882a593Smuzhiyun state->cc_busy += busy_time;
1108*4882a593Smuzhiyun state->cc_tx += tx_time;
1109*4882a593Smuzhiyun state->cc_rx += rx_time + obss_time;
1110*4882a593Smuzhiyun state->cc_bss_rx += rx_time;
1111*4882a593Smuzhiyun }
1112*4882a593Smuzhiyun
mt7915_update_channel(struct mt76_dev * mdev)1113*4882a593Smuzhiyun void mt7915_update_channel(struct mt76_dev *mdev)
1114*4882a593Smuzhiyun {
1115*4882a593Smuzhiyun struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76);
1116*4882a593Smuzhiyun
1117*4882a593Smuzhiyun mt7915_phy_update_channel(&mdev->phy, 0);
1118*4882a593Smuzhiyun if (mdev->phy2)
1119*4882a593Smuzhiyun mt7915_phy_update_channel(mdev->phy2, 1);
1120*4882a593Smuzhiyun
1121*4882a593Smuzhiyun /* reset obss airtime */
1122*4882a593Smuzhiyun mt76_set(dev, MT_WF_RMAC_MIB_TIME0(0), MT_WF_RMAC_MIB_RXTIME_CLR);
1123*4882a593Smuzhiyun if (mdev->phy2)
1124*4882a593Smuzhiyun mt76_set(dev, MT_WF_RMAC_MIB_TIME0(1),
1125*4882a593Smuzhiyun MT_WF_RMAC_MIB_RXTIME_CLR);
1126*4882a593Smuzhiyun }
1127*4882a593Smuzhiyun
1128*4882a593Smuzhiyun static bool
mt7915_wait_reset_state(struct mt7915_dev * dev,u32 state)1129*4882a593Smuzhiyun mt7915_wait_reset_state(struct mt7915_dev *dev, u32 state)
1130*4882a593Smuzhiyun {
1131*4882a593Smuzhiyun bool ret;
1132*4882a593Smuzhiyun
1133*4882a593Smuzhiyun ret = wait_event_timeout(dev->reset_wait,
1134*4882a593Smuzhiyun (READ_ONCE(dev->reset_state) & state),
1135*4882a593Smuzhiyun MT7915_RESET_TIMEOUT);
1136*4882a593Smuzhiyun
1137*4882a593Smuzhiyun WARN(!ret, "Timeout waiting for MCU reset state %x\n", state);
1138*4882a593Smuzhiyun return ret;
1139*4882a593Smuzhiyun }
1140*4882a593Smuzhiyun
1141*4882a593Smuzhiyun static void
mt7915_update_vif_beacon(void * priv,u8 * mac,struct ieee80211_vif * vif)1142*4882a593Smuzhiyun mt7915_update_vif_beacon(void *priv, u8 *mac, struct ieee80211_vif *vif)
1143*4882a593Smuzhiyun {
1144*4882a593Smuzhiyun struct ieee80211_hw *hw = priv;
1145*4882a593Smuzhiyun
1146*4882a593Smuzhiyun mt7915_mcu_add_beacon(hw, vif, vif->bss_conf.enable_beacon);
1147*4882a593Smuzhiyun }
1148*4882a593Smuzhiyun
1149*4882a593Smuzhiyun static void
mt7915_update_beacons(struct mt7915_dev * dev)1150*4882a593Smuzhiyun mt7915_update_beacons(struct mt7915_dev *dev)
1151*4882a593Smuzhiyun {
1152*4882a593Smuzhiyun ieee80211_iterate_active_interfaces(dev->mt76.hw,
1153*4882a593Smuzhiyun IEEE80211_IFACE_ITER_RESUME_ALL,
1154*4882a593Smuzhiyun mt7915_update_vif_beacon, dev->mt76.hw);
1155*4882a593Smuzhiyun
1156*4882a593Smuzhiyun if (!dev->mt76.phy2)
1157*4882a593Smuzhiyun return;
1158*4882a593Smuzhiyun
1159*4882a593Smuzhiyun ieee80211_iterate_active_interfaces(dev->mt76.phy2->hw,
1160*4882a593Smuzhiyun IEEE80211_IFACE_ITER_RESUME_ALL,
1161*4882a593Smuzhiyun mt7915_update_vif_beacon, dev->mt76.phy2->hw);
1162*4882a593Smuzhiyun }
1163*4882a593Smuzhiyun
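/* Quiesce WFDMA around a MAC reset: stop TX/RX DMA on both engines,
 * clean up the TX queues, reset the RX queues, restore the prefetch
 * configuration and re-enable DMA.
 */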
1164*4882a593Smuzhiyun static void
1165*4882a593Smuzhiyun mt7915_dma_reset(struct mt7915_dev *dev)
1166*4882a593Smuzhiyun {
1167*4882a593Smuzhiyun int i;
1168*4882a593Smuzhiyun
1169*4882a593Smuzhiyun mt76_clear(dev, MT_WFDMA0_GLO_CFG,
1170*4882a593Smuzhiyun MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);
1171*4882a593Smuzhiyun mt76_clear(dev, MT_WFDMA1_GLO_CFG,
1172*4882a593Smuzhiyun MT_WFDMA1_GLO_CFG_TX_DMA_EN | MT_WFDMA1_GLO_CFG_RX_DMA_EN);
1173*4882a593Smuzhiyun usleep_range(1000, 2000);
1174*4882a593Smuzhiyun
1175*4882a593Smuzhiyun for (i = 0; i < __MT_TXQ_MAX; i++)
1176*4882a593Smuzhiyun mt76_queue_tx_cleanup(dev, i, true);
1177*4882a593Smuzhiyun
1178*4882a593Smuzhiyun mt76_for_each_q_rx(&dev->mt76, i) {
1179*4882a593Smuzhiyun mt76_queue_rx_reset(dev, i);
1180*4882a593Smuzhiyun }
1181*4882a593Smuzhiyun
1182*4882a593Smuzhiyun /* re-init prefetch settings after reset */
1183*4882a593Smuzhiyun mt7915_dma_prefetch(dev);
1184*4882a593Smuzhiyun
1185*4882a593Smuzhiyun mt76_set(dev, MT_WFDMA0_GLO_CFG,
1186*4882a593Smuzhiyun MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);
1187*4882a593Smuzhiyun mt76_set(dev, MT_WFDMA1_GLO_CFG,
1188*4882a593Smuzhiyun MT_WFDMA1_GLO_CFG_TX_DMA_EN | MT_WFDMA1_GLO_CFG_RX_DMA_EN);
1189*4882a593Smuzhiyun }
1190*4882a593Smuzhiyun
1191*4882a593Smuzhiyun /* system error recovery */
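/* Runs from dev->reset_work once dev->reset_state reports
 * MT_MCU_CMD_STOP_DMA.  mac80211 queues, the TX worker and all NAPI
 * contexts are quiesced, then the recovery handshake is walked:
 * ack DMA_STOPPED, wait for RESET_DONE, reset WFDMA, ack DMA_INIT,
 * wait for RECOVERY_DONE, and finally signal RESET_DONE and wait for
 * NORMAL_STATE before queues, beacons and the MAC work are restored.
 */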
1192*4882a593Smuzhiyun void mt7915_mac_reset_work(struct work_struct *work)
1193*4882a593Smuzhiyun {
1194*4882a593Smuzhiyun struct mt7915_phy *phy2;
1195*4882a593Smuzhiyun struct mt76_phy *ext_phy;
1196*4882a593Smuzhiyun struct mt7915_dev *dev;
1197*4882a593Smuzhiyun
1198*4882a593Smuzhiyun dev = container_of(work, struct mt7915_dev, reset_work);
1199*4882a593Smuzhiyun ext_phy = dev->mt76.phy2;
1200*4882a593Smuzhiyun phy2 = ext_phy ? ext_phy->priv : NULL;
1201*4882a593Smuzhiyun
1202*4882a593Smuzhiyun if (!(READ_ONCE(dev->reset_state) & MT_MCU_CMD_STOP_DMA))
1203*4882a593Smuzhiyun return;
1204*4882a593Smuzhiyun
1205*4882a593Smuzhiyun ieee80211_stop_queues(mt76_hw(dev));
1206*4882a593Smuzhiyun if (ext_phy)
1207*4882a593Smuzhiyun ieee80211_stop_queues(ext_phy->hw);
1208*4882a593Smuzhiyun
1209*4882a593Smuzhiyun set_bit(MT76_RESET, &dev->mphy.state);
1210*4882a593Smuzhiyun set_bit(MT76_MCU_RESET, &dev->mphy.state);
1211*4882a593Smuzhiyun wake_up(&dev->mt76.mcu.wait);
1212*4882a593Smuzhiyun cancel_delayed_work_sync(&dev->phy.mac_work);
1213*4882a593Smuzhiyun if (phy2)
1214*4882a593Smuzhiyun cancel_delayed_work_sync(&phy2->mac_work);
1215*4882a593Smuzhiyun
1216*4882a593Smuzhiyun /* lock/unlock all queues to ensure that no tx is pending */
1217*4882a593Smuzhiyun mt76_txq_schedule_all(&dev->mphy);
1218*4882a593Smuzhiyun if (ext_phy)
1219*4882a593Smuzhiyun mt76_txq_schedule_all(ext_phy);
1220*4882a593Smuzhiyun
1221*4882a593Smuzhiyun mt76_worker_disable(&dev->mt76.tx_worker);
1222*4882a593Smuzhiyun napi_disable(&dev->mt76.napi[0]);
1223*4882a593Smuzhiyun napi_disable(&dev->mt76.napi[1]);
1224*4882a593Smuzhiyun napi_disable(&dev->mt76.napi[2]);
1225*4882a593Smuzhiyun napi_disable(&dev->mt76.tx_napi);
1226*4882a593Smuzhiyun
1227*4882a593Smuzhiyun mutex_lock(&dev->mt76.mutex);
1228*4882a593Smuzhiyun
1229*4882a593Smuzhiyun mt76_wr(dev, MT_MCU_INT_EVENT, MT_MCU_INT_EVENT_DMA_STOPPED);
1230*4882a593Smuzhiyun
1231*4882a593Smuzhiyun if (mt7915_wait_reset_state(dev, MT_MCU_CMD_RESET_DONE)) {
1232*4882a593Smuzhiyun mt7915_dma_reset(dev);
1233*4882a593Smuzhiyun
1234*4882a593Smuzhiyun mt76_wr(dev, MT_MCU_INT_EVENT, MT_MCU_INT_EVENT_DMA_INIT);
1235*4882a593Smuzhiyun mt7915_wait_reset_state(dev, MT_MCU_CMD_RECOVERY_DONE);
1236*4882a593Smuzhiyun }
1237*4882a593Smuzhiyun
1238*4882a593Smuzhiyun clear_bit(MT76_MCU_RESET, &dev->mphy.state);
1239*4882a593Smuzhiyun clear_bit(MT76_RESET, &dev->mphy.state);
1240*4882a593Smuzhiyun
1241*4882a593Smuzhiyun mt76_worker_enable(&dev->mt76.tx_worker);
1242*4882a593Smuzhiyun napi_enable(&dev->mt76.tx_napi);
1243*4882a593Smuzhiyun napi_schedule(&dev->mt76.tx_napi);
1244*4882a593Smuzhiyun
1245*4882a593Smuzhiyun napi_enable(&dev->mt76.napi[0]);
1246*4882a593Smuzhiyun napi_schedule(&dev->mt76.napi[0]);
1247*4882a593Smuzhiyun
1248*4882a593Smuzhiyun napi_enable(&dev->mt76.napi[1]);
1249*4882a593Smuzhiyun napi_schedule(&dev->mt76.napi[1]);
1250*4882a593Smuzhiyun
1251*4882a593Smuzhiyun napi_enable(&dev->mt76.napi[2]);
1252*4882a593Smuzhiyun napi_schedule(&dev->mt76.napi[2]);
1253*4882a593Smuzhiyun
1254*4882a593Smuzhiyun ieee80211_wake_queues(mt76_hw(dev));
1255*4882a593Smuzhiyun if (ext_phy)
1256*4882a593Smuzhiyun ieee80211_wake_queues(ext_phy->hw);
1257*4882a593Smuzhiyun
1258*4882a593Smuzhiyun mt76_wr(dev, MT_MCU_INT_EVENT, MT_MCU_INT_EVENT_RESET_DONE);
1259*4882a593Smuzhiyun mt7915_wait_reset_state(dev, MT_MCU_CMD_NORMAL_STATE);
1260*4882a593Smuzhiyun
1261*4882a593Smuzhiyun mutex_unlock(&dev->mt76.mutex);
1262*4882a593Smuzhiyun
1263*4882a593Smuzhiyun mt7915_update_beacons(dev);
1264*4882a593Smuzhiyun
1265*4882a593Smuzhiyun ieee80211_queue_delayed_work(mt76_hw(dev), &dev->phy.mac_work,
1266*4882a593Smuzhiyun MT7915_WATCHDOG_TIME);
1267*4882a593Smuzhiyun if (phy2)
1268*4882a593Smuzhiyun ieee80211_queue_delayed_work(ext_phy->hw, &phy2->mac_work,
1269*4882a593Smuzhiyun MT7915_WATCHDOG_TIME);
1270*4882a593Smuzhiyun }
1271*4882a593Smuzhiyun
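/* Fold the hardware MIB counters (FCS errors, BA misses, ACK failures,
 * RTS and RTS-retry counts, TX A-MPDU length histogram) into the
 * software accumulators; the second half of aggr_stats is reserved for
 * the extended PHY.
 */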
1272*4882a593Smuzhiyun static void
1273*4882a593Smuzhiyun mt7915_mac_update_mib_stats(struct mt7915_phy *phy)
1274*4882a593Smuzhiyun {
1275*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1276*4882a593Smuzhiyun struct mib_stats *mib = &phy->mib;
1277*4882a593Smuzhiyun bool ext_phy = phy != &dev->phy;
1278*4882a593Smuzhiyun int i, aggr0, aggr1;
1279*4882a593Smuzhiyun
1280*4882a593Smuzhiyun mib->fcs_err_cnt += mt76_get_field(dev, MT_MIB_SDR3(ext_phy),
1281*4882a593Smuzhiyun MT_MIB_SDR3_FCS_ERR_MASK);
1282*4882a593Smuzhiyun
1283*4882a593Smuzhiyun aggr0 = ext_phy ? ARRAY_SIZE(dev->mt76.aggr_stats) / 2 : 0;
1284*4882a593Smuzhiyun for (i = 0, aggr1 = aggr0 + 4; i < 4; i++) {
1285*4882a593Smuzhiyun u32 val;
1286*4882a593Smuzhiyun
1287*4882a593Smuzhiyun val = mt76_rr(dev, MT_MIB_MB_SDR1(ext_phy, i));
1288*4882a593Smuzhiyun mib->ba_miss_cnt += FIELD_GET(MT_MIB_BA_MISS_COUNT_MASK, val);
1289*4882a593Smuzhiyun mib->ack_fail_cnt +=
1290*4882a593Smuzhiyun FIELD_GET(MT_MIB_ACK_FAIL_COUNT_MASK, val);
1291*4882a593Smuzhiyun
1292*4882a593Smuzhiyun val = mt76_rr(dev, MT_MIB_MB_SDR0(ext_phy, i));
1293*4882a593Smuzhiyun mib->rts_cnt += FIELD_GET(MT_MIB_RTS_COUNT_MASK, val);
1294*4882a593Smuzhiyun mib->rts_retries_cnt +=
1295*4882a593Smuzhiyun FIELD_GET(MT_MIB_RTS_RETRIES_COUNT_MASK, val);
1296*4882a593Smuzhiyun
1297*4882a593Smuzhiyun val = mt76_rr(dev, MT_TX_AGG_CNT(ext_phy, i));
1298*4882a593Smuzhiyun dev->mt76.aggr_stats[aggr0++] += val & 0xffff;
1299*4882a593Smuzhiyun dev->mt76.aggr_stats[aggr0++] += val >> 16;
1300*4882a593Smuzhiyun
1301*4882a593Smuzhiyun val = mt76_rr(dev, MT_TX_AGG_CNT2(ext_phy, i));
1302*4882a593Smuzhiyun dev->mt76.aggr_stats[aggr1++] += val & 0xffff;
1303*4882a593Smuzhiyun dev->mt76.aggr_stats[aggr1++] += val >> 16;
1304*4882a593Smuzhiyun }
1305*4882a593Smuzhiyun }
1306*4882a593Smuzhiyun
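/* Drain phy->stats_list and request updated rate info from the MCU for
 * each station; the poll lock is dropped while the MCU command runs.
 */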
1307*4882a593Smuzhiyun static void
1308*4882a593Smuzhiyun mt7915_mac_sta_stats_work(struct mt7915_phy *phy)
1309*4882a593Smuzhiyun {
1310*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1311*4882a593Smuzhiyun struct mt7915_sta *msta;
1312*4882a593Smuzhiyun LIST_HEAD(list);
1313*4882a593Smuzhiyun
1314*4882a593Smuzhiyun spin_lock_bh(&dev->sta_poll_lock);
1315*4882a593Smuzhiyun list_splice_init(&phy->stats_list, &list);
1316*4882a593Smuzhiyun
1317*4882a593Smuzhiyun while (!list_empty(&list)) {
1318*4882a593Smuzhiyun msta = list_first_entry(&list, struct mt7915_sta, stats_list);
1319*4882a593Smuzhiyun list_del_init(&msta->stats_list);
1320*4882a593Smuzhiyun spin_unlock_bh(&dev->sta_poll_lock);
1321*4882a593Smuzhiyun
1322*4882a593Smuzhiyun /* use MT_TX_FREE_RATE to report the Tx rate on newer devices */
1323*4882a593Smuzhiyun mt7915_mcu_get_rate_info(dev, RATE_CTRL_RU_INFO, msta->wcid.idx);
1324*4882a593Smuzhiyun
1325*4882a593Smuzhiyun spin_lock_bh(&dev->sta_poll_lock);
1326*4882a593Smuzhiyun }
1327*4882a593Smuzhiyun
1328*4882a593Smuzhiyun spin_unlock_bh(&dev->sta_poll_lock);
1329*4882a593Smuzhiyun }
1330*4882a593Smuzhiyun
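/* Apply the rate-control updates queued on dev->sta_rc_list: supported
 * rates/NSS/bandwidth changes trigger a new rate-ctrl command, SMPS
 * changes an SMPS update, both issued with the poll lock dropped.
 */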
1331*4882a593Smuzhiyun void mt7915_mac_sta_rc_work(struct work_struct *work)
1332*4882a593Smuzhiyun {
1333*4882a593Smuzhiyun struct mt7915_dev *dev = container_of(work, struct mt7915_dev, rc_work);
1334*4882a593Smuzhiyun struct ieee80211_sta *sta;
1335*4882a593Smuzhiyun struct ieee80211_vif *vif;
1336*4882a593Smuzhiyun struct mt7915_sta *msta;
1337*4882a593Smuzhiyun u32 changed;
1338*4882a593Smuzhiyun LIST_HEAD(list);
1339*4882a593Smuzhiyun
1340*4882a593Smuzhiyun spin_lock_bh(&dev->sta_poll_lock);
1341*4882a593Smuzhiyun list_splice_init(&dev->sta_rc_list, &list);
1342*4882a593Smuzhiyun
1343*4882a593Smuzhiyun while (!list_empty(&list)) {
1344*4882a593Smuzhiyun msta = list_first_entry(&list, struct mt7915_sta, rc_list);
1345*4882a593Smuzhiyun list_del_init(&msta->rc_list);
1346*4882a593Smuzhiyun changed = msta->stats.changed;
1347*4882a593Smuzhiyun msta->stats.changed = 0;
1348*4882a593Smuzhiyun spin_unlock_bh(&dev->sta_poll_lock);
1349*4882a593Smuzhiyun
1350*4882a593Smuzhiyun sta = container_of((void *)msta, struct ieee80211_sta, drv_priv);
1351*4882a593Smuzhiyun vif = container_of((void *)msta->vif, struct ieee80211_vif, drv_priv);
1352*4882a593Smuzhiyun
1353*4882a593Smuzhiyun if (changed & (IEEE80211_RC_SUPP_RATES_CHANGED |
1354*4882a593Smuzhiyun IEEE80211_RC_NSS_CHANGED |
1355*4882a593Smuzhiyun IEEE80211_RC_BW_CHANGED))
1356*4882a593Smuzhiyun mt7915_mcu_add_rate_ctrl(dev, vif, sta);
1357*4882a593Smuzhiyun
1358*4882a593Smuzhiyun if (changed & IEEE80211_RC_SMPS_CHANGED)
1359*4882a593Smuzhiyun mt7915_mcu_add_smps(dev, vif, sta);
1360*4882a593Smuzhiyun
1361*4882a593Smuzhiyun spin_lock_bh(&dev->sta_poll_lock);
1362*4882a593Smuzhiyun }
1363*4882a593Smuzhiyun
1364*4882a593Smuzhiyun spin_unlock_bh(&dev->sta_poll_lock);
1365*4882a593Smuzhiyun }
1366*4882a593Smuzhiyun
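/* Periodic per-PHY housekeeping, re-armed every MT7915_WATCHDOG_TIME:
 * the channel survey is updated on every run, MIB counters every 5th
 * run and per-station statistics every 10th run.
 */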
1367*4882a593Smuzhiyun void mt7915_mac_work(struct work_struct *work)
1368*4882a593Smuzhiyun {
1369*4882a593Smuzhiyun struct mt7915_phy *phy;
1370*4882a593Smuzhiyun struct mt76_dev *mdev;
1371*4882a593Smuzhiyun
1372*4882a593Smuzhiyun phy = (struct mt7915_phy *)container_of(work, struct mt7915_phy,
1373*4882a593Smuzhiyun mac_work.work);
1374*4882a593Smuzhiyun mdev = &phy->dev->mt76;
1375*4882a593Smuzhiyun
1376*4882a593Smuzhiyun mutex_lock(&mdev->mutex);
1377*4882a593Smuzhiyun
1378*4882a593Smuzhiyun mt76_update_survey(mdev);
1379*4882a593Smuzhiyun if (++phy->mac_work_count == 5) {
1380*4882a593Smuzhiyun phy->mac_work_count = 0;
1381*4882a593Smuzhiyun
1382*4882a593Smuzhiyun mt7915_mac_update_mib_stats(phy);
1383*4882a593Smuzhiyun }
1384*4882a593Smuzhiyun
1385*4882a593Smuzhiyun if (++phy->sta_work_count == 10) {
1386*4882a593Smuzhiyun phy->sta_work_count = 0;
1387*4882a593Smuzhiyun mt7915_mac_sta_stats_work(phy);
1388*4882a593Smuzhiyun }
1389*4882a593Smuzhiyun
1390*4882a593Smuzhiyun mutex_unlock(&mdev->mutex);
1391*4882a593Smuzhiyun
1392*4882a593Smuzhiyun ieee80211_queue_delayed_work(phy->mt76->hw, &phy->mac_work,
1393*4882a593Smuzhiyun MT7915_WATCHDOG_TIME);
1394*4882a593Smuzhiyun }
1395*4882a593Smuzhiyun
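/* phy->rdd_state tracks which RDD chains (BIT(0)/BIT(1)) are running,
 * so only the detectors that were actually started get stopped here.
 */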
1396*4882a593Smuzhiyun static void mt7915_dfs_stop_radar_detector(struct mt7915_phy *phy)
1397*4882a593Smuzhiyun {
1398*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1399*4882a593Smuzhiyun
1400*4882a593Smuzhiyun if (phy->rdd_state & BIT(0))
1401*4882a593Smuzhiyun mt7915_mcu_rdd_cmd(dev, RDD_STOP, 0, MT_RX_SEL0, 0);
1402*4882a593Smuzhiyun if (phy->rdd_state & BIT(1))
1403*4882a593Smuzhiyun mt7915_mcu_rdd_cmd(dev, RDD_STOP, 1, MT_RX_SEL0, 0);
1404*4882a593Smuzhiyun }
1405*4882a593Smuzhiyun
1406*4882a593Smuzhiyun static int mt7915_dfs_start_rdd(struct mt7915_dev *dev, int chain)
1407*4882a593Smuzhiyun {
1408*4882a593Smuzhiyun int err;
1409*4882a593Smuzhiyun
1410*4882a593Smuzhiyun err = mt7915_mcu_rdd_cmd(dev, RDD_START, chain, MT_RX_SEL0, 0);
1411*4882a593Smuzhiyun if (err < 0)
1412*4882a593Smuzhiyun return err;
1413*4882a593Smuzhiyun
1414*4882a593Smuzhiyun return mt7915_mcu_rdd_cmd(dev, RDD_DET_MODE, chain, MT_RX_SEL0, 1);
1415*4882a593Smuzhiyun }
1416*4882a593Smuzhiyun
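/* Start CAC and RDD on the PHY's own chain; for 160 and 80+80 MHz
 * channels the second chain is started as well, with the active chains
 * recorded in phy->rdd_state.
 */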
1417*4882a593Smuzhiyun static int mt7915_dfs_start_radar_detector(struct mt7915_phy *phy)
1418*4882a593Smuzhiyun {
1419*4882a593Smuzhiyun struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
1420*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1421*4882a593Smuzhiyun bool ext_phy = phy != &dev->phy;
1422*4882a593Smuzhiyun int err;
1423*4882a593Smuzhiyun
1424*4882a593Smuzhiyun /* start CAC */
1425*4882a593Smuzhiyun err = mt7915_mcu_rdd_cmd(dev, RDD_CAC_START, ext_phy, MT_RX_SEL0, 0);
1426*4882a593Smuzhiyun if (err < 0)
1427*4882a593Smuzhiyun return err;
1428*4882a593Smuzhiyun
1429*4882a593Smuzhiyun err = mt7915_dfs_start_rdd(dev, ext_phy);
1430*4882a593Smuzhiyun if (err < 0)
1431*4882a593Smuzhiyun return err;
1432*4882a593Smuzhiyun
1433*4882a593Smuzhiyun phy->rdd_state |= BIT(ext_phy);
1434*4882a593Smuzhiyun
1435*4882a593Smuzhiyun if (chandef->width == NL80211_CHAN_WIDTH_160 ||
1436*4882a593Smuzhiyun chandef->width == NL80211_CHAN_WIDTH_80P80) {
1437*4882a593Smuzhiyun err = mt7915_dfs_start_rdd(dev, 1);
1438*4882a593Smuzhiyun if (err < 0)
1439*4882a593Smuzhiyun return err;
1440*4882a593Smuzhiyun
1441*4882a593Smuzhiyun phy->rdd_state |= BIT(1);
1442*4882a593Smuzhiyun }
1443*4882a593Smuzhiyun
1444*4882a593Smuzhiyun return 0;
1445*4882a593Smuzhiyun }
1446*4882a593Smuzhiyun
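/* Pick the radar pattern table for the configured DFS region (plus the
 * FCC5 long-pulse threshold for FCC) and program every pattern and the
 * common pulse threshold into the MCU.
 */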
1447*4882a593Smuzhiyun static int
1448*4882a593Smuzhiyun mt7915_dfs_init_radar_specs(struct mt7915_phy *phy)
1449*4882a593Smuzhiyun {
1450*4882a593Smuzhiyun const struct mt7915_dfs_radar_spec *radar_specs;
1451*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1452*4882a593Smuzhiyun int err, i;
1453*4882a593Smuzhiyun
1454*4882a593Smuzhiyun switch (dev->mt76.region) {
1455*4882a593Smuzhiyun case NL80211_DFS_FCC:
1456*4882a593Smuzhiyun radar_specs = &fcc_radar_specs;
1457*4882a593Smuzhiyun err = mt7915_mcu_set_fcc5_lpn(dev, 8);
1458*4882a593Smuzhiyun if (err < 0)
1459*4882a593Smuzhiyun return err;
1460*4882a593Smuzhiyun break;
1461*4882a593Smuzhiyun case NL80211_DFS_ETSI:
1462*4882a593Smuzhiyun radar_specs = &etsi_radar_specs;
1463*4882a593Smuzhiyun break;
1464*4882a593Smuzhiyun case NL80211_DFS_JP:
1465*4882a593Smuzhiyun radar_specs = &jp_radar_specs;
1466*4882a593Smuzhiyun break;
1467*4882a593Smuzhiyun default:
1468*4882a593Smuzhiyun return -EINVAL;
1469*4882a593Smuzhiyun }
1470*4882a593Smuzhiyun
1471*4882a593Smuzhiyun for (i = 0; i < ARRAY_SIZE(radar_specs->radar_pattern); i++) {
1472*4882a593Smuzhiyun err = mt7915_mcu_set_radar_th(dev, i,
1473*4882a593Smuzhiyun &radar_specs->radar_pattern[i]);
1474*4882a593Smuzhiyun if (err < 0)
1475*4882a593Smuzhiyun return err;
1476*4882a593Smuzhiyun }
1477*4882a593Smuzhiyun
1478*4882a593Smuzhiyun return mt7915_mcu_set_pulse_th(dev, &radar_specs->pulse_th);
1479*4882a593Smuzhiyun }
1480*4882a593Smuzhiyun
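/* Per-PHY DFS state machine: with no DFS region configured, detection
 * is torn down; updates are skipped while scanning or when the
 * channel's DFS state is unchanged; otherwise the radar specs are
 * reprogrammed and the detector is started, CAC is ended on an already
 * available channel, or the PHY falls back to normal RX with all
 * detectors stopped.
 */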
1481*4882a593Smuzhiyun int mt7915_dfs_init_radar_detector(struct mt7915_phy *phy)
1482*4882a593Smuzhiyun {
1483*4882a593Smuzhiyun struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
1484*4882a593Smuzhiyun struct mt7915_dev *dev = phy->dev;
1485*4882a593Smuzhiyun bool ext_phy = phy != &dev->phy;
1486*4882a593Smuzhiyun int err;
1487*4882a593Smuzhiyun
1488*4882a593Smuzhiyun if (dev->mt76.region == NL80211_DFS_UNSET) {
1489*4882a593Smuzhiyun phy->dfs_state = -1;
1490*4882a593Smuzhiyun if (phy->rdd_state)
1491*4882a593Smuzhiyun goto stop;
1492*4882a593Smuzhiyun
1493*4882a593Smuzhiyun return 0;
1494*4882a593Smuzhiyun }
1495*4882a593Smuzhiyun
1496*4882a593Smuzhiyun if (test_bit(MT76_SCANNING, &phy->mt76->state))
1497*4882a593Smuzhiyun return 0;
1498*4882a593Smuzhiyun
1499*4882a593Smuzhiyun if (phy->dfs_state == chandef->chan->dfs_state)
1500*4882a593Smuzhiyun return 0;
1501*4882a593Smuzhiyun
1502*4882a593Smuzhiyun err = mt7915_dfs_init_radar_specs(phy);
1503*4882a593Smuzhiyun if (err < 0) {
1504*4882a593Smuzhiyun phy->dfs_state = -1;
1505*4882a593Smuzhiyun goto stop;
1506*4882a593Smuzhiyun }
1507*4882a593Smuzhiyun
1508*4882a593Smuzhiyun phy->dfs_state = chandef->chan->dfs_state;
1509*4882a593Smuzhiyun
1510*4882a593Smuzhiyun if (chandef->chan->flags & IEEE80211_CHAN_RADAR) {
1511*4882a593Smuzhiyun if (chandef->chan->dfs_state != NL80211_DFS_AVAILABLE)
1512*4882a593Smuzhiyun return mt7915_dfs_start_radar_detector(phy);
1513*4882a593Smuzhiyun
1514*4882a593Smuzhiyun return mt7915_mcu_rdd_cmd(dev, RDD_CAC_END, ext_phy,
1515*4882a593Smuzhiyun MT_RX_SEL0, 0);
1516*4882a593Smuzhiyun }
1517*4882a593Smuzhiyun
1518*4882a593Smuzhiyun stop:
1519*4882a593Smuzhiyun err = mt7915_mcu_rdd_cmd(dev, RDD_NORMAL_START, ext_phy,
1520*4882a593Smuzhiyun MT_RX_SEL0, 0);
1521*4882a593Smuzhiyun if (err < 0)
1522*4882a593Smuzhiyun return err;
1523*4882a593Smuzhiyun
1524*4882a593Smuzhiyun mt7915_dfs_stop_radar_detector(phy);
1525*4882a593Smuzhiyun return 0;
1526*4882a593Smuzhiyun }