xref: /OK3568_Linux_fs/kernel/drivers/net/wireless/mediatek/mt76/mt7603/mac.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: ISC

#include <linux/etherdevice.h>
#include <linux/timekeeping.h>
#include "mt7603.h"
#include "mac.h"
#include "../trace.h"

#define MT_PSE_PAGE_SIZE	128

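/*
 * Expand a per-AC bitmask (one bit per WMM AC) into the layout used by the
 * MT_WF_ARB_TX_START_0/MT_WF_ARB_TX_STOP_0 registers, where each AC occupies
 * a four-bit group.
 */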
static u32
mt7603_ac_queue_mask0(u32 mask)
{
	u32 ret = 0;

	ret |= GENMASK(3, 0) * !!(mask & BIT(0));
	ret |= GENMASK(8, 5) * !!(mask & BIT(1));
	ret |= GENMASK(13, 10) * !!(mask & BIT(2));
	ret |= GENMASK(19, 16) * !!(mask & BIT(3));
	return ret;
}

static void
mt76_stop_tx_ac(struct mt7603_dev *dev, u32 mask)
{
	mt76_set(dev, MT_WF_ARB_TX_STOP_0, mt7603_ac_queue_mask0(mask));
}

static void
mt76_start_tx_ac(struct mt7603_dev *dev, u32 mask)
{
	mt76_set(dev, MT_WF_ARB_TX_START_0, mt7603_ac_queue_mask0(mask));
}

void mt7603_mac_reset_counters(struct mt7603_dev *dev)
{
	int i;

	for (i = 0; i < 2; i++)
		mt76_rr(dev, MT_TX_AGG_CNT(i));

	memset(dev->mt76.aggr_stats, 0, sizeof(dev->mt76.aggr_stats));
}

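/*
 * Program MAC timing: CCK/OFDM PLCP and CCA timeouts (adjusted by the
 * coverage class), SIFS/RIFS/EIFS/slot time and the CF-End rate. TX/RX are
 * disabled via MT_ARB_SCR while the registers are updated.
 */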
void mt7603_mac_set_timing(struct mt7603_dev *dev)
{
	u32 cck = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, 231) |
		  FIELD_PREP(MT_TIMEOUT_VAL_CCA, 48);
	u32 ofdm = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, 60) |
		   FIELD_PREP(MT_TIMEOUT_VAL_CCA, 24);
	int offset = 3 * dev->coverage_class;
	u32 reg_offset = FIELD_PREP(MT_TIMEOUT_VAL_PLCP, offset) |
			 FIELD_PREP(MT_TIMEOUT_VAL_CCA, offset);
	bool is_5ghz = dev->mphy.chandef.chan->band == NL80211_BAND_5GHZ;
	int sifs;
	u32 val;

	if (is_5ghz)
		sifs = 16;
	else
		sifs = 10;

	mt76_set(dev, MT_ARB_SCR,
		 MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
	udelay(1);

	mt76_wr(dev, MT_TIMEOUT_CCK, cck + reg_offset);
	mt76_wr(dev, MT_TIMEOUT_OFDM, ofdm + reg_offset);
	mt76_wr(dev, MT_IFS,
		FIELD_PREP(MT_IFS_EIFS, 360) |
		FIELD_PREP(MT_IFS_RIFS, 2) |
		FIELD_PREP(MT_IFS_SIFS, sifs) |
		FIELD_PREP(MT_IFS_SLOT, dev->slottime));

	if (dev->slottime < 20 || is_5ghz)
		val = MT7603_CFEND_RATE_DEFAULT;
	else
		val = MT7603_CFEND_RATE_11B;

	mt76_rmw_field(dev, MT_AGG_CONTROL, MT_AGG_CONTROL_CFEND_RATE, val);

	mt76_clear(dev, MT_ARB_SCR,
		   MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
}

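/* Issue a WTBL update command for the given entry and wait for completion */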
static void
mt7603_wtbl_update(struct mt7603_dev *dev, int idx, u32 mask)
{
	mt76_rmw(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_WLAN_IDX,
		 FIELD_PREP(MT_WTBL_UPDATE_WLAN_IDX, idx) | mask);

	mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY, 0, 5000);
}

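/*
 * Base address helpers for the four per-station WTBL segments; WTBL2/3/4 are
 * accessed through the PCIe remap window (MT_PCIE_REMAP_BASE_1).
 */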
static u32
mt7603_wtbl1_addr(int idx)
{
	return MT_WTBL1_BASE + idx * MT_WTBL1_SIZE;
}

static u32
mt7603_wtbl2_addr(int idx)
{
	/* Mapped to WTBL2 */
	return MT_PCIE_REMAP_BASE_1 + idx * MT_WTBL2_SIZE;
}

static u32
mt7603_wtbl3_addr(int idx)
{
	u32 base = mt7603_wtbl2_addr(MT7603_WTBL_SIZE);

	return base + idx * MT_WTBL3_SIZE;
}

static u32
mt7603_wtbl4_addr(int idx)
{
	u32 base = mt7603_wtbl3_addr(MT7603_WTBL_SIZE);

	return base + idx * MT_WTBL4_SIZE;
}

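/*
 * Initialize a WTBL entry: program the station MAC address and MUAR index
 * into WTBL1 and zero out the corresponding WTBL2/3/4 segments.
 */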
void mt7603_wtbl_init(struct mt7603_dev *dev, int idx, int vif,
		      const u8 *mac_addr)
{
	const void *_mac = mac_addr;
	u32 addr = mt7603_wtbl1_addr(idx);
	u32 w0 = 0, w1 = 0;
	int i;

	if (_mac) {
		w0 = FIELD_PREP(MT_WTBL1_W0_ADDR_HI,
				get_unaligned_le16(_mac + 4));
		w1 = FIELD_PREP(MT_WTBL1_W1_ADDR_LO,
				get_unaligned_le32(_mac));
	}

	if (vif < 0)
		vif = 0;
	else
		w0 |= MT_WTBL1_W0_RX_CHECK_A1;
	w0 |= FIELD_PREP(MT_WTBL1_W0_MUAR_IDX, vif);

	mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY, 0, 5000);

	mt76_set(dev, addr + 0 * 4, w0);
	mt76_set(dev, addr + 1 * 4, w1);
	mt76_set(dev, addr + 2 * 4, MT_WTBL1_W2_ADMISSION_CONTROL);

	mt76_stop_tx_ac(dev, GENMASK(3, 0));
	addr = mt7603_wtbl2_addr(idx);
	for (i = 0; i < MT_WTBL2_SIZE; i += 4)
		mt76_wr(dev, addr + i, 0);
	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_WTBL2);
	mt76_start_tx_ac(dev, GENMASK(3, 0));

	addr = mt7603_wtbl3_addr(idx);
	for (i = 0; i < MT_WTBL3_SIZE; i += 4)
		mt76_wr(dev, addr + i, 0);

	addr = mt7603_wtbl4_addr(idx);
	for (i = 0; i < MT_WTBL4_SIZE; i += 4)
		mt76_wr(dev, addr + i, 0);

	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_ADM_COUNT_CLEAR);
}

static void
mt7603_wtbl_set_skip_tx(struct mt7603_dev *dev, int idx, bool enabled)
{
	u32 addr = mt7603_wtbl1_addr(idx);
	u32 val = mt76_rr(dev, addr + 3 * 4);

	val &= ~MT_WTBL1_W3_SKIP_TX;
	val |= enabled * MT_WTBL1_W3_SKIP_TX;

	mt76_wr(dev, addr + 3 * 4, val);
}

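/*
 * Flush the pending hardware queues of a WCID entry. With abort set, frames
 * are redirected to the PSE free queue; otherwise they are handed to the HIF
 * MCU queue. The entry is temporarily marked skip-TX while the flush runs.
 */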
void mt7603_filter_tx(struct mt7603_dev *dev, int idx, bool abort)
{
	int i, port, queue;

	if (abort) {
		port = 3; /* PSE */
		queue = 8; /* free queue */
	} else {
		port = 0; /* HIF */
		queue = 1; /* MCU queue */
	}

	mt7603_wtbl_set_skip_tx(dev, idx, true);

	mt76_wr(dev, MT_TX_ABORT, MT_TX_ABORT_EN |
			FIELD_PREP(MT_TX_ABORT_WCID, idx));

	for (i = 0; i < 4; i++) {
		mt76_wr(dev, MT_DMA_FQCR0, MT_DMA_FQCR0_BUSY |
			FIELD_PREP(MT_DMA_FQCR0_TARGET_WCID, idx) |
			FIELD_PREP(MT_DMA_FQCR0_TARGET_QID, i) |
			FIELD_PREP(MT_DMA_FQCR0_DEST_PORT_ID, port) |
			FIELD_PREP(MT_DMA_FQCR0_DEST_QUEUE_ID, queue));

		WARN_ON_ONCE(!mt76_poll(dev, MT_DMA_FQCR0, MT_DMA_FQCR0_BUSY,
					0, 5000));
	}

	mt76_wr(dev, MT_TX_ABORT, 0);

	mt7603_wtbl_set_skip_tx(dev, idx, false);
}

void mt7603_wtbl_set_smps(struct mt7603_dev *dev, struct mt7603_sta *sta,
			  bool enabled)
{
	u32 addr = mt7603_wtbl1_addr(sta->wcid.idx);

	if (sta->smps == enabled)
		return;

	mt76_rmw_field(dev, addr + 2 * 4, MT_WTBL1_W2_SMPS, enabled);
	sta->smps = enabled;
}

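/*
 * Update the power-save state of a station: toggle PSE redirection for its
 * frames, filter pending tx when entering powersave and mirror the new state
 * into the WTBL1 power-save flag. Serialized by dev->ps_lock.
 */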
void mt7603_wtbl_set_ps(struct mt7603_dev *dev, struct mt7603_sta *sta,
			bool enabled)
{
	int idx = sta->wcid.idx;
	u32 addr;

	spin_lock_bh(&dev->ps_lock);

	if (sta->ps == enabled)
		goto out;

	mt76_wr(dev, MT_PSE_RTA,
		FIELD_PREP(MT_PSE_RTA_TAG_ID, idx) |
		FIELD_PREP(MT_PSE_RTA_PORT_ID, 0) |
		FIELD_PREP(MT_PSE_RTA_QUEUE_ID, 1) |
		FIELD_PREP(MT_PSE_RTA_REDIRECT_EN, enabled) |
		MT_PSE_RTA_WRITE | MT_PSE_RTA_BUSY);

	mt76_poll(dev, MT_PSE_RTA, MT_PSE_RTA_BUSY, 0, 5000);

	if (enabled)
		mt7603_filter_tx(dev, idx, false);

	addr = mt7603_wtbl1_addr(idx);
	mt76_set(dev, MT_WTBL1_OR, MT_WTBL1_OR_PSM_WRITE);
	mt76_rmw(dev, addr + 3 * 4, MT_WTBL1_W3_POWER_SAVE,
		 enabled * MT_WTBL1_W3_POWER_SAVE);
	mt76_clear(dev, MT_WTBL1_OR, MT_WTBL1_OR_PSM_WRITE);
	sta->ps = enabled;

out:
	spin_unlock_bh(&dev->ps_lock);
}

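/*
 * Reset a WTBL entry to its default state and re-link it to its WTBL2/3/4
 * frame/entry slots (derived from the index and the PSE page size), then
 * clear the RX/TX/admission counters.
 */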
void mt7603_wtbl_clear(struct mt7603_dev *dev, int idx)
{
	int wtbl2_frame_size = MT_PSE_PAGE_SIZE / MT_WTBL2_SIZE;
	int wtbl2_frame = idx / wtbl2_frame_size;
	int wtbl2_entry = idx % wtbl2_frame_size;

	int wtbl3_base_frame = MT_WTBL3_OFFSET / MT_PSE_PAGE_SIZE;
	int wtbl3_frame_size = MT_PSE_PAGE_SIZE / MT_WTBL3_SIZE;
	int wtbl3_frame = wtbl3_base_frame + idx / wtbl3_frame_size;
	int wtbl3_entry = (idx % wtbl3_frame_size) * 2;

	int wtbl4_base_frame = MT_WTBL4_OFFSET / MT_PSE_PAGE_SIZE;
	int wtbl4_frame_size = MT_PSE_PAGE_SIZE / MT_WTBL4_SIZE;
	int wtbl4_frame = wtbl4_base_frame + idx / wtbl4_frame_size;
	int wtbl4_entry = idx % wtbl4_frame_size;

	u32 addr = MT_WTBL1_BASE + idx * MT_WTBL1_SIZE;
	int i;

	mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY, 0, 5000);

	mt76_wr(dev, addr + 0 * 4,
		MT_WTBL1_W0_RX_CHECK_A1 |
		MT_WTBL1_W0_RX_CHECK_A2 |
		MT_WTBL1_W0_RX_VALID);
	mt76_wr(dev, addr + 1 * 4, 0);
	mt76_wr(dev, addr + 2 * 4, 0);

	mt76_set(dev, MT_WTBL1_OR, MT_WTBL1_OR_PSM_WRITE);

	mt76_wr(dev, addr + 3 * 4,
		FIELD_PREP(MT_WTBL1_W3_WTBL2_FRAME_ID, wtbl2_frame) |
		FIELD_PREP(MT_WTBL1_W3_WTBL2_ENTRY_ID, wtbl2_entry) |
		FIELD_PREP(MT_WTBL1_W3_WTBL4_FRAME_ID, wtbl4_frame) |
		MT_WTBL1_W3_I_PSM | MT_WTBL1_W3_KEEP_I_PSM);
	mt76_wr(dev, addr + 4 * 4,
		FIELD_PREP(MT_WTBL1_W4_WTBL3_FRAME_ID, wtbl3_frame) |
		FIELD_PREP(MT_WTBL1_W4_WTBL3_ENTRY_ID, wtbl3_entry) |
		FIELD_PREP(MT_WTBL1_W4_WTBL4_ENTRY_ID, wtbl4_entry));

	mt76_clear(dev, MT_WTBL1_OR, MT_WTBL1_OR_PSM_WRITE);

	addr = mt7603_wtbl2_addr(idx);

	/* Clear BA information */
	mt76_wr(dev, addr + (15 * 4), 0);

	mt76_stop_tx_ac(dev, GENMASK(3, 0));
	for (i = 2; i <= 4; i++)
		mt76_wr(dev, addr + (i * 4), 0);
	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_WTBL2);
	mt76_start_tx_ac(dev, GENMASK(3, 0));

	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_RX_COUNT_CLEAR);
	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_TX_COUNT_CLEAR);
	mt7603_wtbl_update(dev, idx, MT_WTBL_UPDATE_ADM_COUNT_CLEAR);
}

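/*
 * Sync HT/VHT capabilities from mac80211 into the WTBL: A-MPDU factor and
 * density in WTBL1 word 2, short GI support in WTBL2 word 9.
 */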
void mt7603_wtbl_update_cap(struct mt7603_dev *dev, struct ieee80211_sta *sta)
{
	struct mt7603_sta *msta = (struct mt7603_sta *)sta->drv_priv;
	int idx = msta->wcid.idx;
	u8 ampdu_density;
	u32 addr;
	u32 val;

	addr = mt7603_wtbl1_addr(idx);

	ampdu_density = sta->ht_cap.ampdu_density;
	if (ampdu_density < IEEE80211_HT_MPDU_DENSITY_4)
		ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

	val = mt76_rr(dev, addr + 2 * 4);
	val &= MT_WTBL1_W2_KEY_TYPE | MT_WTBL1_W2_ADMISSION_CONTROL;
	val |= FIELD_PREP(MT_WTBL1_W2_AMPDU_FACTOR, sta->ht_cap.ampdu_factor) |
	       FIELD_PREP(MT_WTBL1_W2_MPDU_DENSITY, sta->ht_cap.ampdu_density) |
	       MT_WTBL1_W2_TXS_BAF_REPORT;

	if (sta->ht_cap.cap)
		val |= MT_WTBL1_W2_HT;
	if (sta->vht_cap.cap)
		val |= MT_WTBL1_W2_VHT;

	mt76_wr(dev, addr + 2 * 4, val);

	addr = mt7603_wtbl2_addr(idx);
	val = mt76_rr(dev, addr + 9 * 4);
	val &= ~(MT_WTBL2_W9_SHORT_GI_20 | MT_WTBL2_W9_SHORT_GI_40 |
		 MT_WTBL2_W9_SHORT_GI_80);
	if (sta->ht_cap.cap & IEEE80211_HT_CAP_SGI_20)
		val |= MT_WTBL2_W9_SHORT_GI_20;
	if (sta->ht_cap.cap & IEEE80211_HT_CAP_SGI_40)
		val |= MT_WTBL2_W9_SHORT_GI_40;
	mt76_wr(dev, addr + 9 * 4, val);
}

void mt7603_mac_rx_ba_reset(struct mt7603_dev *dev, void *addr, u8 tid)
{
	mt76_wr(dev, MT_BA_CONTROL_0, get_unaligned_le32(addr));
	mt76_wr(dev, MT_BA_CONTROL_1,
		(get_unaligned_le16(addr + 4) |
		 FIELD_PREP(MT_BA_CONTROL_1_TID, tid) |
		 MT_BA_CONTROL_1_RESET));
}

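/*
 * Enable or disable a tx BA session in WTBL2 word 15. A negative ba_size
 * disables the session; otherwise the window size is mapped to the largest
 * MT_AGG_SIZE_LIMIT() index that fits.
 */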
void mt7603_mac_tx_ba_reset(struct mt7603_dev *dev, int wcid, int tid,
			    int ba_size)
{
	u32 addr = mt7603_wtbl2_addr(wcid);
	u32 tid_mask = FIELD_PREP(MT_WTBL2_W15_BA_EN_TIDS, BIT(tid)) |
		       (MT_WTBL2_W15_BA_WIN_SIZE <<
			(tid * MT_WTBL2_W15_BA_WIN_SIZE_SHIFT));
	u32 tid_val;
	int i;

	if (ba_size < 0) {
		/* disable */
		mt76_clear(dev, addr + (15 * 4), tid_mask);
		return;
	}

	for (i = 7; i > 0; i--) {
		if (ba_size >= MT_AGG_SIZE_LIMIT(i))
			break;
	}

	tid_val = FIELD_PREP(MT_WTBL2_W15_BA_EN_TIDS, BIT(tid)) |
		  i << (tid * MT_WTBL2_W15_BA_WIN_SIZE_SHIFT);

	mt76_rmw(dev, addr + (15 * 4), tid_mask, tid_val);
}

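/*
 * Poll per-station tx airtime counters from WTBL4, report the per-TID
 * airtime to mac80211 and accumulate the total into the channel busy time
 * statistics. Counters are cleared once bit 22 is set.
 */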
void mt7603_mac_sta_poll(struct mt7603_dev *dev)
{
	static const u8 ac_to_tid[4] = {
		[IEEE80211_AC_BE] = 0,
		[IEEE80211_AC_BK] = 1,
		[IEEE80211_AC_VI] = 4,
		[IEEE80211_AC_VO] = 6
	};
	struct ieee80211_sta *sta;
	struct mt7603_sta *msta;
	u32 total_airtime = 0;
	u32 airtime[4];
	u32 addr;
	int i;

	rcu_read_lock();

	while (1) {
		bool clear = false;

		spin_lock_bh(&dev->sta_poll_lock);
		if (list_empty(&dev->sta_poll_list)) {
			spin_unlock_bh(&dev->sta_poll_lock);
			break;
		}

		msta = list_first_entry(&dev->sta_poll_list, struct mt7603_sta,
					poll_list);
		list_del_init(&msta->poll_list);
		spin_unlock_bh(&dev->sta_poll_lock);

		addr = mt7603_wtbl4_addr(msta->wcid.idx);
		for (i = 0; i < 4; i++) {
			u32 airtime_last = msta->tx_airtime_ac[i];

			msta->tx_airtime_ac[i] = mt76_rr(dev, addr + i * 8);
			airtime[i] = msta->tx_airtime_ac[i] - airtime_last;
			airtime[i] *= 32;
			total_airtime += airtime[i];

			if (msta->tx_airtime_ac[i] & BIT(22))
				clear = true;
		}

		if (clear) {
			mt7603_wtbl_update(dev, msta->wcid.idx,
					   MT_WTBL_UPDATE_ADM_COUNT_CLEAR);
			memset(msta->tx_airtime_ac, 0,
			       sizeof(msta->tx_airtime_ac));
		}

		if (!msta->wcid.sta)
			continue;

		sta = container_of((void *)msta, struct ieee80211_sta, drv_priv);
		for (i = 0; i < 4; i++) {
			struct mt76_queue *q = dev->mt76.q_tx[i];
			u8 qidx = q->hw_idx;
			u8 tid = ac_to_tid[i];
			u32 txtime = airtime[qidx];

			if (!txtime)
				continue;

			ieee80211_sta_register_airtime(sta, tid, txtime, 0);
		}
	}

	rcu_read_unlock();

	if (!total_airtime)
		return;

	spin_lock_bh(&dev->mt76.cc_lock);
	dev->mphy.chan_state->cc_tx += total_airtime;
	spin_unlock_bh(&dev->mt76.cc_lock);
}

static struct mt76_wcid *
mt7603_rx_get_wcid(struct mt7603_dev *dev, u8 idx, bool unicast)
{
	struct mt7603_sta *sta;
	struct mt76_wcid *wcid;

	if (idx >= MT7603_WTBL_SIZE)
		return NULL;

	wcid = rcu_dereference(dev->mt76.wcid[idx]);
	if (unicast || !wcid)
		return wcid;

	if (!wcid->sta)
		return NULL;

	sta = container_of(wcid, struct mt7603_sta, wcid);
	if (!sta->vif)
		return NULL;

	return &sta->vif->sta.wcid;
}

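/*
 * Parse the RX descriptor and RX vector of a received frame and fill in
 * struct mt76_rx_status (band/frequency, crypto flags, A-MPDU reference,
 * rate, RSSI). Returns -EINVAL for truncated or malformed descriptors.
 */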
int
mt7603_mac_fill_rx(struct mt7603_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_supported_band *sband;
	struct ieee80211_hdr *hdr;
	__le32 *rxd = (__le32 *)skb->data;
	u32 rxd0 = le32_to_cpu(rxd[0]);
	u32 rxd1 = le32_to_cpu(rxd[1]);
	u32 rxd2 = le32_to_cpu(rxd[2]);
	bool unicast = rxd1 & MT_RXD1_NORMAL_U2M;
	bool insert_ccmp_hdr = false;
	bool remove_pad;
	int idx;
	int i;

	memset(status, 0, sizeof(*status));

	i = FIELD_GET(MT_RXD1_NORMAL_CH_FREQ, rxd1);
	sband = (i & 1) ? &dev->mphy.sband_5g.sband : &dev->mphy.sband_2g.sband;
	i >>= 1;

	idx = FIELD_GET(MT_RXD2_NORMAL_WLAN_IDX, rxd2);
	status->wcid = mt7603_rx_get_wcid(dev, idx, unicast);

	status->band = sband->band;
	if (i < sband->n_channels)
		status->freq = sband->channels[i].center_freq;

	if (rxd2 & MT_RXD2_NORMAL_FCS_ERR)
		status->flag |= RX_FLAG_FAILED_FCS_CRC;

	if (rxd2 & MT_RXD2_NORMAL_TKIP_MIC_ERR)
		status->flag |= RX_FLAG_MMIC_ERROR;

	if (FIELD_GET(MT_RXD2_NORMAL_SEC_MODE, rxd2) != 0 &&
	    !(rxd2 & (MT_RXD2_NORMAL_CLM | MT_RXD2_NORMAL_CM))) {
		status->flag |= RX_FLAG_DECRYPTED;
		status->flag |= RX_FLAG_IV_STRIPPED;
		status->flag |= RX_FLAG_MMIC_STRIPPED | RX_FLAG_MIC_STRIPPED;
	}

	if (!(rxd2 & (MT_RXD2_NORMAL_NON_AMPDU_SUB |
		      MT_RXD2_NORMAL_NON_AMPDU))) {
		status->flag |= RX_FLAG_AMPDU_DETAILS;

		/* all subframes of an A-MPDU have the same timestamp */
		if (dev->rx_ampdu_ts != rxd[12]) {
			if (!++dev->ampdu_ref)
				dev->ampdu_ref++;
		}
		dev->rx_ampdu_ts = rxd[12];

		status->ampdu_ref = dev->ampdu_ref;
	}

	remove_pad = rxd1 & MT_RXD1_NORMAL_HDR_OFFSET;

	if (rxd2 & MT_RXD2_NORMAL_MAX_LEN_ERROR)
		return -EINVAL;

	if (!sband->channels)
		return -EINVAL;

	rxd += 4;
	if (rxd0 & MT_RXD0_NORMAL_GROUP_4) {
		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}
	if (rxd0 & MT_RXD0_NORMAL_GROUP_1) {
		u8 *data = (u8 *)rxd;

		if (status->flag & RX_FLAG_DECRYPTED) {
			status->iv[0] = data[5];
			status->iv[1] = data[4];
			status->iv[2] = data[3];
			status->iv[3] = data[2];
			status->iv[4] = data[1];
			status->iv[5] = data[0];

			insert_ccmp_hdr = FIELD_GET(MT_RXD2_NORMAL_FRAG, rxd2);
		}

		rxd += 4;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}
	if (rxd0 & MT_RXD0_NORMAL_GROUP_2) {
		rxd += 2;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	}
	if (rxd0 & MT_RXD0_NORMAL_GROUP_3) {
		u32 rxdg0 = le32_to_cpu(rxd[0]);
		u32 rxdg3 = le32_to_cpu(rxd[3]);
		bool cck = false;

		i = FIELD_GET(MT_RXV1_TX_RATE, rxdg0);
		switch (FIELD_GET(MT_RXV1_TX_MODE, rxdg0)) {
		case MT_PHY_TYPE_CCK:
			cck = true;
			fallthrough;
		case MT_PHY_TYPE_OFDM:
			i = mt76_get_rate(&dev->mt76, sband, i, cck);
			break;
		case MT_PHY_TYPE_HT_GF:
		case MT_PHY_TYPE_HT:
			status->encoding = RX_ENC_HT;
			if (i > 15)
				return -EINVAL;
			break;
		default:
			return -EINVAL;
		}

		if (rxdg0 & MT_RXV1_HT_SHORT_GI)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (rxdg0 & MT_RXV1_HT_AD_CODE)
			status->enc_flags |= RX_ENC_FLAG_LDPC;

		status->enc_flags |= RX_ENC_FLAG_STBC_MASK *
				    FIELD_GET(MT_RXV1_HT_STBC, rxdg0);

		status->rate_idx = i;

		status->chains = dev->mphy.antenna_mask;
		status->chain_signal[0] = FIELD_GET(MT_RXV4_IB_RSSI0, rxdg3) +
					  dev->rssi_offset[0];
		status->chain_signal[1] = FIELD_GET(MT_RXV4_IB_RSSI1, rxdg3) +
					  dev->rssi_offset[1];

		status->signal = status->chain_signal[0];
		if (status->chains & BIT(1))
			status->signal = max(status->signal,
					     status->chain_signal[1]);

		if (FIELD_GET(MT_RXV1_FRAME_MODE, rxdg0) == 1)
			status->bw = RATE_INFO_BW_40;

		rxd += 6;
		if ((u8 *)rxd - skb->data >= skb->len)
			return -EINVAL;
	} else {
		return -EINVAL;
	}

	skb_pull(skb, (u8 *)rxd - skb->data + 2 * remove_pad);

	if (insert_ccmp_hdr) {
		u8 key_id = FIELD_GET(MT_RXD1_NORMAL_KEY_ID, rxd1);

		mt76_insert_ccmp_hdr(skb, key_id);
	}

	hdr = (struct ieee80211_hdr *)skb->data;
	if (!status->wcid || !ieee80211_is_data_qos(hdr->frame_control))
		return 0;

	status->aggr = unicast &&
		       !ieee80211_is_qos_nullfunc(hdr->frame_control);
	status->tid = *ieee80211_get_qos_ctl(hdr) & IEEE80211_QOS_CTL_TID_MASK;
	status->seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(hdr->seq_ctrl));

	return 0;
}

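/*
 * Convert a mac80211 tx rate into the hardware rate value (rate index plus
 * PHY mode, with the STBC flag for single-stream rates) and report the
 * selected bandwidth through *bw.
 */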
static u16
mt7603_mac_tx_rate_val(struct mt7603_dev *dev,
		       const struct ieee80211_tx_rate *rate, bool stbc, u8 *bw)
{
	u8 phy, nss, rate_idx;
	u16 rateval;

	*bw = 0;
	if (rate->flags & IEEE80211_TX_RC_MCS) {
		rate_idx = rate->idx;
		nss = 1 + (rate->idx >> 3);
		phy = MT_PHY_TYPE_HT;
		if (rate->flags & IEEE80211_TX_RC_GREEN_FIELD)
			phy = MT_PHY_TYPE_HT_GF;
		if (rate->flags & IEEE80211_TX_RC_40_MHZ_WIDTH)
			*bw = 1;
	} else {
		const struct ieee80211_rate *r;
		int band = dev->mphy.chandef.chan->band;
		u16 val;

		nss = 1;
		r = &mt76_hw(dev)->wiphy->bands[band]->bitrates[rate->idx];
		if (rate->flags & IEEE80211_TX_RC_USE_SHORT_PREAMBLE)
			val = r->hw_value_short;
		else
			val = r->hw_value;

		phy = val >> 8;
		rate_idx = val & 0xff;
	}

	rateval = (FIELD_PREP(MT_TX_RATE_IDX, rate_idx) |
		   FIELD_PREP(MT_TX_RATE_MODE, phy));

	if (stbc && nss == 1)
		rateval |= MT_TX_RATE_STBC;

	return rateval;
}

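/*
 * Write a new rate set (up to four rates plus an optional probe rate) for a
 * station into the WTBL rate update registers. Two rate sets are kept in
 * software and toggled via bit 0 of rate_set_tsf, so that tx status reports
 * can later be matched against the set that was active at transmit time.
 */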
void mt7603_wtbl_set_rates(struct mt7603_dev *dev, struct mt7603_sta *sta,
			   struct ieee80211_tx_rate *probe_rate,
			   struct ieee80211_tx_rate *rates)
{
	struct ieee80211_tx_rate *ref;
	int wcid = sta->wcid.idx;
	u32 addr = mt7603_wtbl2_addr(wcid);
	bool stbc = false;
	int n_rates = sta->n_rates;
	u8 bw, bw_prev, bw_idx = 0;
	u16 val[4];
	u16 probe_val;
	u32 w9 = mt76_rr(dev, addr + 9 * 4);
	bool rateset;
	int i, k;

	if (!mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY, 0, 5000))
		return;

	for (i = n_rates; i < 4; i++)
		rates[i] = rates[n_rates - 1];

	rateset = !(sta->rate_set_tsf & BIT(0));
	memcpy(sta->rateset[rateset].rates, rates,
	       sizeof(sta->rateset[rateset].rates));
	if (probe_rate) {
		sta->rateset[rateset].probe_rate = *probe_rate;
		ref = &sta->rateset[rateset].probe_rate;
	} else {
		sta->rateset[rateset].probe_rate.idx = -1;
		ref = &sta->rateset[rateset].rates[0];
	}

	rates = sta->rateset[rateset].rates;
	for (i = 0; i < ARRAY_SIZE(sta->rateset[rateset].rates); i++) {
		/*
		 * We don't support switching between short and long GI
		 * within the rate set. For accurate tx status reporting, we
		 * need to make sure that flags match.
		 * For improved performance, avoid duplicate entries by
		 * decrementing the MCS index if necessary
		 */
		if ((ref->flags ^ rates[i].flags) & IEEE80211_TX_RC_SHORT_GI)
			rates[i].flags ^= IEEE80211_TX_RC_SHORT_GI;

		for (k = 0; k < i; k++) {
			if (rates[i].idx != rates[k].idx)
				continue;
			if ((rates[i].flags ^ rates[k].flags) &
			    IEEE80211_TX_RC_40_MHZ_WIDTH)
				continue;

			if (!rates[i].idx)
				continue;

			rates[i].idx--;
		}
	}

	w9 &= MT_WTBL2_W9_SHORT_GI_20 | MT_WTBL2_W9_SHORT_GI_40 |
	      MT_WTBL2_W9_SHORT_GI_80;

	val[0] = mt7603_mac_tx_rate_val(dev, &rates[0], stbc, &bw);
	bw_prev = bw;

	if (probe_rate) {
		probe_val = mt7603_mac_tx_rate_val(dev, probe_rate, stbc, &bw);
		if (bw)
			bw_idx = 1;
		else
			bw_prev = 0;
	} else {
		probe_val = val[0];
	}

	w9 |= FIELD_PREP(MT_WTBL2_W9_CC_BW_SEL, bw);
	w9 |= FIELD_PREP(MT_WTBL2_W9_BW_CAP, bw);

	val[1] = mt7603_mac_tx_rate_val(dev, &rates[1], stbc, &bw);
	if (bw_prev) {
		bw_idx = 3;
		bw_prev = bw;
	}

	val[2] = mt7603_mac_tx_rate_val(dev, &rates[2], stbc, &bw);
	if (bw_prev) {
		bw_idx = 5;
		bw_prev = bw;
	}

	val[3] = mt7603_mac_tx_rate_val(dev, &rates[3], stbc, &bw);
	if (bw_prev)
		bw_idx = 7;

	w9 |= FIELD_PREP(MT_WTBL2_W9_CHANGE_BW_RATE,
		       bw_idx ? bw_idx - 1 : 7);

	mt76_wr(dev, MT_WTBL_RIUCR0, w9);

	mt76_wr(dev, MT_WTBL_RIUCR1,
		FIELD_PREP(MT_WTBL_RIUCR1_RATE0, probe_val) |
		FIELD_PREP(MT_WTBL_RIUCR1_RATE1, val[0]) |
		FIELD_PREP(MT_WTBL_RIUCR1_RATE2_LO, val[1]));

	mt76_wr(dev, MT_WTBL_RIUCR2,
		FIELD_PREP(MT_WTBL_RIUCR2_RATE2_HI, val[1] >> 8) |
		FIELD_PREP(MT_WTBL_RIUCR2_RATE3, val[1]) |
		FIELD_PREP(MT_WTBL_RIUCR2_RATE4, val[2]) |
		FIELD_PREP(MT_WTBL_RIUCR2_RATE5_LO, val[2]));

	mt76_wr(dev, MT_WTBL_RIUCR3,
		FIELD_PREP(MT_WTBL_RIUCR3_RATE5_HI, val[2] >> 4) |
		FIELD_PREP(MT_WTBL_RIUCR3_RATE6, val[3]) |
		FIELD_PREP(MT_WTBL_RIUCR3_RATE7, val[3]));

	mt76_set(dev, MT_LPON_T0CR, MT_LPON_T0CR_MODE); /* TSF read */
	sta->rate_set_tsf = (mt76_rr(dev, MT_LPON_UTTR0) & ~BIT(0)) | rateset;

	mt76_wr(dev, MT_WTBL_UPDATE,
		FIELD_PREP(MT_WTBL_UPDATE_WLAN_IDX, wcid) |
		MT_WTBL_UPDATE_RATE_UPDATE |
		MT_WTBL_UPDATE_TX_COUNT_CLEAR);

	if (!(sta->wcid.tx_info & MT_WCID_TX_INFO_SET))
		mt76_poll(dev, MT_WTBL_UPDATE, MT_WTBL_UPDATE_BUSY, 0, 5000);

	sta->rate_count = 2 * MT7603_RATE_RETRY * n_rates;
	sta->wcid.tx_info |= MT_WCID_TX_INFO_SET;
}

static enum mt7603_cipher_type
mt7603_mac_get_key_info(struct ieee80211_key_conf *key, u8 *key_data)
{
	memset(key_data, 0, 32);
	if (!key)
		return MT_CIPHER_NONE;

	if (key->keylen > 32)
		return MT_CIPHER_NONE;

	memcpy(key_data, key->key, key->keylen);

	switch (key->cipher) {
	case WLAN_CIPHER_SUITE_WEP40:
		return MT_CIPHER_WEP40;
	case WLAN_CIPHER_SUITE_WEP104:
		return MT_CIPHER_WEP104;
	case WLAN_CIPHER_SUITE_TKIP:
		/* Rx/Tx MIC keys are swapped */
		memcpy(key_data + 16, key->key + 24, 8);
		memcpy(key_data + 24, key->key + 16, 8);
		return MT_CIPHER_TKIP;
	case WLAN_CIPHER_SUITE_CCMP:
		return MT_CIPHER_AES_CCMP;
	default:
		return MT_CIPHER_NONE;
	}
}

int mt7603_wtbl_set_key(struct mt7603_dev *dev, int wcid,
			struct ieee80211_key_conf *key)
{
	enum mt7603_cipher_type cipher;
	u32 addr = mt7603_wtbl3_addr(wcid);
	u8 key_data[32];
	int key_len = sizeof(key_data);

	cipher = mt7603_mac_get_key_info(key, key_data);
	if (cipher == MT_CIPHER_NONE && key)
		return -EOPNOTSUPP;

	if (key && (cipher == MT_CIPHER_WEP40 || cipher == MT_CIPHER_WEP104)) {
		addr += key->keyidx * 16;
		key_len = 16;
	}

	mt76_wr_copy(dev, addr, key_data, key_len);

	addr = mt7603_wtbl1_addr(wcid);
	mt76_rmw_field(dev, addr + 2 * 4, MT_WTBL1_W2_KEY_TYPE, cipher);
	if (key)
		mt76_rmw_field(dev, addr, MT_WTBL1_W0_KEY_IDX, key->keyidx);
	mt76_rmw_field(dev, addr, MT_WTBL1_W0_RX_KEY_VALID, !!key);

	return 0;
}

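/*
 * Fill the TXWI (tx descriptor) for a frame: queue index, wlan/vif index,
 * header info, sequence number, optional fixed rate and the PN for
 * encrypted frames.
 */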
static int
mt7603_mac_write_txwi(struct mt7603_dev *dev, __le32 *txwi,
		      struct sk_buff *skb, enum mt76_txq_id qid,
		      struct mt76_wcid *wcid, struct ieee80211_sta *sta,
		      int pid, struct ieee80211_key_conf *key)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct ieee80211_tx_rate *rate = &info->control.rates[0];
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_bar *bar = (struct ieee80211_bar *)skb->data;
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_queue *q = dev->mt76.q_tx[qid];
	struct mt7603_vif *mvif;
	int wlan_idx;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	int tx_count = 8;
	u8 frame_type, frame_subtype;
	u16 fc = le16_to_cpu(hdr->frame_control);
	u16 seqno = 0;
	u8 vif_idx = 0;
	u32 val;
	u8 bw;

	if (vif) {
		mvif = (struct mt7603_vif *)vif->drv_priv;
		vif_idx = mvif->idx;
		if (vif_idx && qid >= MT_TXQ_BEACON)
			vif_idx += 0x10;
	}

	if (sta) {
		struct mt7603_sta *msta = (struct mt7603_sta *)sta->drv_priv;

		tx_count = msta->rate_count;
	}

	if (wcid)
		wlan_idx = wcid->idx;
	else
		wlan_idx = MT7603_WTBL_RESERVED;

	frame_type = (fc & IEEE80211_FCTL_FTYPE) >> 2;
	frame_subtype = (fc & IEEE80211_FCTL_STYPE) >> 4;

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + MT_TXD_SIZE) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q->hw_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_OWN_MAC, vif_idx) |
	      FIELD_PREP(MT_TXD1_TID,
			 skb->priority & IEEE80211_QOS_CTL_TID_MASK) |
	      FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO, hdr_len / 2) |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wlan_idx) |
	      FIELD_PREP(MT_TXD1_PROTECTED, !!key);
	txwi[1] = cpu_to_le32(val);

	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		txwi[1] |= cpu_to_le32(MT_TXD1_NO_ACK);

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, frame_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, frame_subtype) |
	      FIELD_PREP(MT_TXD2_MULTICAST,
			 is_multicast_ether_addr(hdr->addr1));
	txwi[2] = cpu_to_le32(val);

	if (!(info->flags & IEEE80211_TX_CTL_AMPDU))
		txwi[2] |= cpu_to_le32(MT_TXD2_BA_DISABLE);

	txwi[4] = 0;

	val = MT_TXD5_TX_STATUS_HOST | MT_TXD5_SW_POWER_MGMT |
	      FIELD_PREP(MT_TXD5_PID, pid);
	txwi[5] = cpu_to_le32(val);

	txwi[6] = 0;

	if (rate->idx >= 0 && rate->count &&
	    !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE)) {
		bool stbc = info->flags & IEEE80211_TX_CTL_STBC;
		u16 rateval = mt7603_mac_tx_rate_val(dev, rate, stbc, &bw);

		txwi[2] |= cpu_to_le32(MT_TXD2_FIX_RATE);

		val = MT_TXD6_FIXED_BW |
		      FIELD_PREP(MT_TXD6_BW, bw) |
		      FIELD_PREP(MT_TXD6_TX_RATE, rateval);
		txwi[6] |= cpu_to_le32(val);

		if (rate->flags & IEEE80211_TX_RC_SHORT_GI)
			txwi[6] |= cpu_to_le32(MT_TXD6_SGI);

		if (!(rate->flags & IEEE80211_TX_RC_MCS))
			txwi[2] |= cpu_to_le32(MT_TXD2_BA_DISABLE);

		tx_count = rate->count;
	}

	/* use maximum tx count for beacons and buffered multicast */
	if (qid >= MT_TXQ_BEACON)
		tx_count = 0x1f;

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, tx_count) |
		  MT_TXD3_SN_VALID;

	if (ieee80211_is_data_qos(hdr->frame_control))
		seqno = le16_to_cpu(hdr->seq_ctrl);
	else if (ieee80211_is_back_req(hdr->frame_control))
		seqno = le16_to_cpu(bar->start_seq_num);
	else
		val &= ~MT_TXD3_SN_VALID;

	val |= FIELD_PREP(MT_TXD3_SEQ, seqno >> 4);

	txwi[3] = cpu_to_le32(val);

	if (key) {
		u64 pn = atomic64_inc_return(&key->tx_pn);

		txwi[3] |= cpu_to_le32(MT_TXD3_PN_VALID);
		txwi[4] = cpu_to_le32(pn & GENMASK(31, 0));
		txwi[5] |= cpu_to_le32(FIELD_PREP(MT_TXD5_PN_HIGH, pn >> 32));
	}

	txwi[7] = 0;

	return 0;
}

int mt7603_tx_prepare_skb(struct mt76_dev *mdev, void *txwi_ptr,
			  enum mt76_txq_id qid, struct mt76_wcid *wcid,
			  struct ieee80211_sta *sta,
			  struct mt76_tx_info *tx_info)
{
	struct mt7603_dev *dev = container_of(mdev, struct mt7603_dev, mt76);
	struct mt7603_sta *msta = container_of(wcid, struct mt7603_sta, wcid);
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx_info->skb);
	struct ieee80211_key_conf *key = info->control.hw_key;
	int pid;

	if (!wcid)
		wcid = &dev->global_sta.wcid;

	if (sta) {
		msta = (struct mt7603_sta *)sta->drv_priv;

		if ((info->flags & (IEEE80211_TX_CTL_NO_PS_BUFFER |
				    IEEE80211_TX_CTL_CLEAR_PS_FILT)) ||
		    (info->control.flags & IEEE80211_TX_CTRL_PS_RESPONSE))
			mt7603_wtbl_set_ps(dev, msta, false);

		mt76_tx_check_agg_ssn(sta, tx_info->skb);
	}

	pid = mt76_tx_status_skb_add(mdev, wcid, tx_info->skb);

	if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) {
		spin_lock_bh(&dev->mt76.lock);
		mt7603_wtbl_set_rates(dev, msta, &info->control.rates[0],
				      msta->rates);
		msta->rate_probe = true;
		spin_unlock_bh(&dev->mt76.lock);
	}

	mt7603_mac_write_txwi(dev, txwi_ptr, tx_info->skb, qid, wcid,
			      sta, pid, key);

	return 0;
}

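/*
 * Translate a hardware tx status report into mac80211 tx info: pick the rate
 * set that was active when the frame was sent, then reconstruct the per-rate
 * retry counts and the final tx rate. Returns false if the report cannot be
 * used (RTS/queue timeout or an invalid final rate).
 */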
1059*4882a593Smuzhiyun static bool
mt7603_fill_txs(struct mt7603_dev * dev,struct mt7603_sta * sta,struct ieee80211_tx_info * info,__le32 * txs_data)1060*4882a593Smuzhiyun mt7603_fill_txs(struct mt7603_dev *dev, struct mt7603_sta *sta,
1061*4882a593Smuzhiyun 		struct ieee80211_tx_info *info, __le32 *txs_data)
1062*4882a593Smuzhiyun {
1063*4882a593Smuzhiyun 	struct ieee80211_supported_band *sband;
1064*4882a593Smuzhiyun 	struct mt7603_rate_set *rs;
1065*4882a593Smuzhiyun 	int first_idx = 0, last_idx;
1066*4882a593Smuzhiyun 	u32 rate_set_tsf;
1067*4882a593Smuzhiyun 	u32 final_rate;
1068*4882a593Smuzhiyun 	u32 final_rate_flags;
1069*4882a593Smuzhiyun 	bool rs_idx;
1070*4882a593Smuzhiyun 	bool ack_timeout;
1071*4882a593Smuzhiyun 	bool fixed_rate;
1072*4882a593Smuzhiyun 	bool probe;
1073*4882a593Smuzhiyun 	bool ampdu;
1074*4882a593Smuzhiyun 	bool cck = false;
1075*4882a593Smuzhiyun 	int count;
1076*4882a593Smuzhiyun 	u32 txs;
1077*4882a593Smuzhiyun 	int idx;
1078*4882a593Smuzhiyun 	int i;
1079*4882a593Smuzhiyun 
1080*4882a593Smuzhiyun 	fixed_rate = info->status.rates[0].count;
1081*4882a593Smuzhiyun 	probe = !!(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE);
1082*4882a593Smuzhiyun 
1083*4882a593Smuzhiyun 	txs = le32_to_cpu(txs_data[4]);
1084*4882a593Smuzhiyun 	ampdu = !fixed_rate && (txs & MT_TXS4_AMPDU);
1085*4882a593Smuzhiyun 	count = FIELD_GET(MT_TXS4_TX_COUNT, txs);
1086*4882a593Smuzhiyun 	last_idx = FIELD_GET(MT_TXS4_LAST_TX_RATE, txs);
1087*4882a593Smuzhiyun 
1088*4882a593Smuzhiyun 	txs = le32_to_cpu(txs_data[0]);
1089*4882a593Smuzhiyun 	final_rate = FIELD_GET(MT_TXS0_TX_RATE, txs);
1090*4882a593Smuzhiyun 	ack_timeout = txs & MT_TXS0_ACK_TIMEOUT;
1091*4882a593Smuzhiyun 
1092*4882a593Smuzhiyun 	if (!ampdu && (txs & MT_TXS0_RTS_TIMEOUT))
1093*4882a593Smuzhiyun 		return false;
1094*4882a593Smuzhiyun 
1095*4882a593Smuzhiyun 	if (txs & MT_TXS0_QUEUE_TIMEOUT)
1096*4882a593Smuzhiyun 		return false;
1097*4882a593Smuzhiyun 
1098*4882a593Smuzhiyun 	if (!ack_timeout)
1099*4882a593Smuzhiyun 		info->flags |= IEEE80211_TX_STAT_ACK;
1100*4882a593Smuzhiyun 
1101*4882a593Smuzhiyun 	info->status.ampdu_len = 1;
1102*4882a593Smuzhiyun 	info->status.ampdu_ack_len = !!(info->flags &
1103*4882a593Smuzhiyun 					IEEE80211_TX_STAT_ACK);
1104*4882a593Smuzhiyun 
1105*4882a593Smuzhiyun 	if (ampdu || (info->flags & IEEE80211_TX_CTL_AMPDU))
1106*4882a593Smuzhiyun 		info->flags |= IEEE80211_TX_STAT_AMPDU | IEEE80211_TX_CTL_AMPDU;
1107*4882a593Smuzhiyun 
1108*4882a593Smuzhiyun 	first_idx = max_t(int, 0, last_idx - (count - 1) / MT7603_RATE_RETRY);
1109*4882a593Smuzhiyun 
1110*4882a593Smuzhiyun 	if (fixed_rate && !probe) {
1111*4882a593Smuzhiyun 		info->status.rates[0].count = count;
1112*4882a593Smuzhiyun 		i = 0;
1113*4882a593Smuzhiyun 		goto out;
1114*4882a593Smuzhiyun 	}
1115*4882a593Smuzhiyun 
1116*4882a593Smuzhiyun 	rate_set_tsf = READ_ONCE(sta->rate_set_tsf);
1117*4882a593Smuzhiyun 	rs_idx = !((u32)(FIELD_GET(MT_TXS1_F0_TIMESTAMP, le32_to_cpu(txs_data[1])) -
1118*4882a593Smuzhiyun 			 rate_set_tsf) < 1000000);
1119*4882a593Smuzhiyun 	rs_idx ^= rate_set_tsf & BIT(0);
1120*4882a593Smuzhiyun 	rs = &sta->rateset[rs_idx];
1121*4882a593Smuzhiyun 
1122*4882a593Smuzhiyun 	if (!first_idx && rs->probe_rate.idx >= 0) {
1123*4882a593Smuzhiyun 		info->status.rates[0] = rs->probe_rate;
1124*4882a593Smuzhiyun 
1125*4882a593Smuzhiyun 		spin_lock_bh(&dev->mt76.lock);
1126*4882a593Smuzhiyun 		if (sta->rate_probe) {
1127*4882a593Smuzhiyun 			mt7603_wtbl_set_rates(dev, sta, NULL,
1128*4882a593Smuzhiyun 					      sta->rates);
1129*4882a593Smuzhiyun 			sta->rate_probe = false;
1130*4882a593Smuzhiyun 		}
1131*4882a593Smuzhiyun 		spin_unlock_bh(&dev->mt76.lock);
1132*4882a593Smuzhiyun 	} else {
1133*4882a593Smuzhiyun 		info->status.rates[0] = rs->rates[first_idx / 2];
1134*4882a593Smuzhiyun 	}
1135*4882a593Smuzhiyun 	info->status.rates[0].count = 0;
1136*4882a593Smuzhiyun 
1137*4882a593Smuzhiyun 	for (i = 0, idx = first_idx; count && idx <= last_idx; idx++) {
1138*4882a593Smuzhiyun 		struct ieee80211_tx_rate *cur_rate;
1139*4882a593Smuzhiyun 		int cur_count;
1140*4882a593Smuzhiyun 
1141*4882a593Smuzhiyun 		cur_rate = &rs->rates[idx / 2];
1142*4882a593Smuzhiyun 		cur_count = min_t(int, MT7603_RATE_RETRY, count);
1143*4882a593Smuzhiyun 		count -= cur_count;
1144*4882a593Smuzhiyun 
1145*4882a593Smuzhiyun 		if (idx && (cur_rate->idx != info->status.rates[i].idx ||
1146*4882a593Smuzhiyun 			    cur_rate->flags != info->status.rates[i].flags)) {
1147*4882a593Smuzhiyun 			i++;
1148*4882a593Smuzhiyun 			if (i == ARRAY_SIZE(info->status.rates)) {
1149*4882a593Smuzhiyun 				i--;
1150*4882a593Smuzhiyun 				break;
1151*4882a593Smuzhiyun 			}
1152*4882a593Smuzhiyun 
1153*4882a593Smuzhiyun 			info->status.rates[i] = *cur_rate;
1154*4882a593Smuzhiyun 			info->status.rates[i].count = 0;
1155*4882a593Smuzhiyun 		}
1156*4882a593Smuzhiyun 
1157*4882a593Smuzhiyun 		info->status.rates[i].count += cur_count;
1158*4882a593Smuzhiyun 	}
1159*4882a593Smuzhiyun 
1160*4882a593Smuzhiyun out:
1161*4882a593Smuzhiyun 	final_rate_flags = info->status.rates[i].flags;
1162*4882a593Smuzhiyun 
1163*4882a593Smuzhiyun 	switch (FIELD_GET(MT_TX_RATE_MODE, final_rate)) {
1164*4882a593Smuzhiyun 	case MT_PHY_TYPE_CCK:
1165*4882a593Smuzhiyun 		cck = true;
1166*4882a593Smuzhiyun 		fallthrough;
1167*4882a593Smuzhiyun 	case MT_PHY_TYPE_OFDM:
1168*4882a593Smuzhiyun 		if (dev->mphy.chandef.chan->band == NL80211_BAND_5GHZ)
1169*4882a593Smuzhiyun 			sband = &dev->mphy.sband_5g.sband;
1170*4882a593Smuzhiyun 		else
1171*4882a593Smuzhiyun 			sband = &dev->mphy.sband_2g.sband;
1172*4882a593Smuzhiyun 		final_rate &= GENMASK(5, 0);
1173*4882a593Smuzhiyun 		final_rate = mt76_get_rate(&dev->mt76, sband, final_rate,
1174*4882a593Smuzhiyun 					   cck);
1175*4882a593Smuzhiyun 		final_rate_flags = 0;
1176*4882a593Smuzhiyun 		break;
1177*4882a593Smuzhiyun 	case MT_PHY_TYPE_HT_GF:
1178*4882a593Smuzhiyun 	case MT_PHY_TYPE_HT:
1179*4882a593Smuzhiyun 		final_rate_flags |= IEEE80211_TX_RC_MCS;
1180*4882a593Smuzhiyun 		final_rate &= GENMASK(5, 0);
1181*4882a593Smuzhiyun 		if (final_rate > 15)
1182*4882a593Smuzhiyun 			return false;
1183*4882a593Smuzhiyun 		break;
1184*4882a593Smuzhiyun 	default:
1185*4882a593Smuzhiyun 		return false;
1186*4882a593Smuzhiyun 	}
1187*4882a593Smuzhiyun 
1188*4882a593Smuzhiyun 	info->status.rates[i].idx = final_rate;
1189*4882a593Smuzhiyun 	info->status.rates[i].flags = final_rate_flags;
1190*4882a593Smuzhiyun 
1191*4882a593Smuzhiyun 	return true;
1192*4882a593Smuzhiyun }
1193*4882a593Smuzhiyun 
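/* Match a TX status report to a queued skb by packet ID and fill in its
 * rate/ACK information; returns true if a matching skb was found.
 */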
1194*4882a593Smuzhiyun static bool
1195*4882a593Smuzhiyun mt7603_mac_add_txs_skb(struct mt7603_dev *dev, struct mt7603_sta *sta, int pid,
1196*4882a593Smuzhiyun 		       __le32 *txs_data)
1197*4882a593Smuzhiyun {
1198*4882a593Smuzhiyun 	struct mt76_dev *mdev = &dev->mt76;
1199*4882a593Smuzhiyun 	struct sk_buff_head list;
1200*4882a593Smuzhiyun 	struct sk_buff *skb;
1201*4882a593Smuzhiyun 
1202*4882a593Smuzhiyun 	if (pid < MT_PACKET_ID_FIRST)
1203*4882a593Smuzhiyun 		return false;
1204*4882a593Smuzhiyun 
1205*4882a593Smuzhiyun 	trace_mac_txdone(mdev, sta->wcid.idx, pid);
1206*4882a593Smuzhiyun 
1207*4882a593Smuzhiyun 	mt76_tx_status_lock(mdev, &list);
1208*4882a593Smuzhiyun 	skb = mt76_tx_status_skb_get(mdev, &sta->wcid, pid, &list);
1209*4882a593Smuzhiyun 	if (skb) {
1210*4882a593Smuzhiyun 		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
1211*4882a593Smuzhiyun 
1212*4882a593Smuzhiyun 		if (!mt7603_fill_txs(dev, sta, info, txs_data)) {
1213*4882a593Smuzhiyun 			ieee80211_tx_info_clear_status(info);
1214*4882a593Smuzhiyun 			info->status.rates[0].idx = -1;
1215*4882a593Smuzhiyun 		}
1216*4882a593Smuzhiyun 
1217*4882a593Smuzhiyun 		mt76_tx_status_skb_done(mdev, skb, &list);
1218*4882a593Smuzhiyun 	}
1219*4882a593Smuzhiyun 	mt76_tx_status_unlock(mdev, &list);
1220*4882a593Smuzhiyun 
1221*4882a593Smuzhiyun 	return !!skb;
1222*4882a593Smuzhiyun }
1223*4882a593Smuzhiyun 
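/* Handle a TX status event: ignore no-ACK packet IDs, look up the reporting
 * station by WCID, queue it on the station poll list, and report the status
 * either through the matched skb or via ieee80211_tx_status_noskb().
 */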
1224*4882a593Smuzhiyun void mt7603_mac_add_txs(struct mt7603_dev *dev, void *data)
1225*4882a593Smuzhiyun {
1226*4882a593Smuzhiyun 	struct ieee80211_tx_info info = {};
1227*4882a593Smuzhiyun 	struct ieee80211_sta *sta = NULL;
1228*4882a593Smuzhiyun 	struct mt7603_sta *msta = NULL;
1229*4882a593Smuzhiyun 	struct mt76_wcid *wcid;
1230*4882a593Smuzhiyun 	__le32 *txs_data = data;
1231*4882a593Smuzhiyun 	u32 txs;
1232*4882a593Smuzhiyun 	u8 wcidx;
1233*4882a593Smuzhiyun 	u8 pid;
1234*4882a593Smuzhiyun 
1235*4882a593Smuzhiyun 	txs = le32_to_cpu(txs_data[4]);
1236*4882a593Smuzhiyun 	pid = FIELD_GET(MT_TXS4_PID, txs);
1237*4882a593Smuzhiyun 	txs = le32_to_cpu(txs_data[3]);
1238*4882a593Smuzhiyun 	wcidx = FIELD_GET(MT_TXS3_WCID, txs);
1239*4882a593Smuzhiyun 
1240*4882a593Smuzhiyun 	if (pid == MT_PACKET_ID_NO_ACK)
1241*4882a593Smuzhiyun 		return;
1242*4882a593Smuzhiyun 
1243*4882a593Smuzhiyun 	if (wcidx >= MT7603_WTBL_SIZE)
1244*4882a593Smuzhiyun 		return;
1245*4882a593Smuzhiyun 
1246*4882a593Smuzhiyun 	rcu_read_lock();
1247*4882a593Smuzhiyun 
1248*4882a593Smuzhiyun 	wcid = rcu_dereference(dev->mt76.wcid[wcidx]);
1249*4882a593Smuzhiyun 	if (!wcid)
1250*4882a593Smuzhiyun 		goto out;
1251*4882a593Smuzhiyun 
1252*4882a593Smuzhiyun 	msta = container_of(wcid, struct mt7603_sta, wcid);
1253*4882a593Smuzhiyun 	sta = wcid_to_sta(wcid);
1254*4882a593Smuzhiyun 
1255*4882a593Smuzhiyun 	if (list_empty(&msta->poll_list)) {
1256*4882a593Smuzhiyun 		spin_lock_bh(&dev->sta_poll_lock);
1257*4882a593Smuzhiyun 		list_add_tail(&msta->poll_list, &dev->sta_poll_list);
1258*4882a593Smuzhiyun 		spin_unlock_bh(&dev->sta_poll_lock);
1259*4882a593Smuzhiyun 	}
1260*4882a593Smuzhiyun 
1261*4882a593Smuzhiyun 	if (mt7603_mac_add_txs_skb(dev, msta, pid, txs_data))
1262*4882a593Smuzhiyun 		goto out;
1263*4882a593Smuzhiyun 
1264*4882a593Smuzhiyun 	if (wcidx >= MT7603_WTBL_STA || !sta)
1265*4882a593Smuzhiyun 		goto out;
1266*4882a593Smuzhiyun 
1267*4882a593Smuzhiyun 	if (mt7603_fill_txs(dev, msta, &info, txs_data))
1268*4882a593Smuzhiyun 		ieee80211_tx_status_noskb(mt76_hw(dev), sta, &info);
1269*4882a593Smuzhiyun 
1270*4882a593Smuzhiyun out:
1271*4882a593Smuzhiyun 	rcu_read_unlock();
1272*4882a593Smuzhiyun }
1273*4882a593Smuzhiyun 
1274*4882a593Smuzhiyun void mt7603_tx_complete_skb(struct mt76_dev *mdev, struct mt76_queue_entry *e)
1275*4882a593Smuzhiyun {
1276*4882a593Smuzhiyun 	struct mt7603_dev *dev = container_of(mdev, struct mt7603_dev, mt76);
1277*4882a593Smuzhiyun 	struct sk_buff *skb = e->skb;
1278*4882a593Smuzhiyun 
1279*4882a593Smuzhiyun 	if (!e->txwi) {
1280*4882a593Smuzhiyun 		dev_kfree_skb_any(skb);
1281*4882a593Smuzhiyun 		return;
1282*4882a593Smuzhiyun 	}
1283*4882a593Smuzhiyun 
1284*4882a593Smuzhiyun 	dev->tx_hang_check = 0;
1285*4882a593Smuzhiyun 	mt76_tx_complete_skb(mdev, e->wcid, skb);
1286*4882a593Smuzhiyun }
1287*4882a593Smuzhiyun 
1288*4882a593Smuzhiyun static bool
1289*4882a593Smuzhiyun wait_for_wpdma(struct mt7603_dev *dev)
1290*4882a593Smuzhiyun {
1291*4882a593Smuzhiyun 	return mt76_poll(dev, MT_WPDMA_GLO_CFG,
1292*4882a593Smuzhiyun 			 MT_WPDMA_GLO_CFG_TX_DMA_BUSY |
1293*4882a593Smuzhiyun 			 MT_WPDMA_GLO_CFG_RX_DMA_BUSY,
1294*4882a593Smuzhiyun 			 0, 1000);
1295*4882a593Smuzhiyun }
1296*4882a593Smuzhiyun 
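/* Reset the PSE (packet switch engine) through the MCU debug register and
 * record whether the reset completed; repeated failures make the watchdog
 * skip the DMA restart path on the next recovery attempt.
 */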
1297*4882a593Smuzhiyun static void mt7603_pse_reset(struct mt7603_dev *dev)
1298*4882a593Smuzhiyun {
1299*4882a593Smuzhiyun 	/* Clear previous reset result */
1300*4882a593Smuzhiyun 	if (!dev->reset_cause[RESET_CAUSE_RESET_FAILED])
1301*4882a593Smuzhiyun 		mt76_clear(dev, MT_MCU_DEBUG_RESET, MT_MCU_DEBUG_RESET_PSE_S);
1302*4882a593Smuzhiyun 
1303*4882a593Smuzhiyun 	/* Reset PSE */
1304*4882a593Smuzhiyun 	mt76_set(dev, MT_MCU_DEBUG_RESET, MT_MCU_DEBUG_RESET_PSE);
1305*4882a593Smuzhiyun 
1306*4882a593Smuzhiyun 	if (!mt76_poll_msec(dev, MT_MCU_DEBUG_RESET,
1307*4882a593Smuzhiyun 			    MT_MCU_DEBUG_RESET_PSE_S,
1308*4882a593Smuzhiyun 			    MT_MCU_DEBUG_RESET_PSE_S, 500)) {
1309*4882a593Smuzhiyun 		dev->reset_cause[RESET_CAUSE_RESET_FAILED]++;
1310*4882a593Smuzhiyun 		mt76_clear(dev, MT_MCU_DEBUG_RESET, MT_MCU_DEBUG_RESET_PSE);
1311*4882a593Smuzhiyun 	} else {
1312*4882a593Smuzhiyun 		dev->reset_cause[RESET_CAUSE_RESET_FAILED] = 0;
1313*4882a593Smuzhiyun 		mt76_clear(dev, MT_MCU_DEBUG_RESET, MT_MCU_DEBUG_RESET_QUEUES);
1314*4882a593Smuzhiyun 	}
1315*4882a593Smuzhiyun 
1316*4882a593Smuzhiyun 	if (dev->reset_cause[RESET_CAUSE_RESET_FAILED] >= 3)
1317*4882a593Smuzhiyun 		dev->reset_cause[RESET_CAUSE_RESET_FAILED] = 0;
1318*4882a593Smuzhiyun }
1319*4882a593Smuzhiyun 
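/* Start the MAC, wait for WPDMA to go idle, then enable the TX/RX DMA
 * engines and the TX/RX done interrupts.
 */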
1320*4882a593Smuzhiyun void mt7603_mac_dma_start(struct mt7603_dev *dev)
1321*4882a593Smuzhiyun {
1322*4882a593Smuzhiyun 	mt7603_mac_start(dev);
1323*4882a593Smuzhiyun 
1324*4882a593Smuzhiyun 	wait_for_wpdma(dev);
1325*4882a593Smuzhiyun 	usleep_range(50, 100);
1326*4882a593Smuzhiyun 
1327*4882a593Smuzhiyun 	mt76_set(dev, MT_WPDMA_GLO_CFG,
1328*4882a593Smuzhiyun 		 (MT_WPDMA_GLO_CFG_TX_DMA_EN |
1329*4882a593Smuzhiyun 		  MT_WPDMA_GLO_CFG_RX_DMA_EN |
1330*4882a593Smuzhiyun 		  FIELD_PREP(MT_WPDMA_GLO_CFG_DMA_BURST_SIZE, 3) |
1331*4882a593Smuzhiyun 		  MT_WPDMA_GLO_CFG_TX_WRITEBACK_DONE));
1332*4882a593Smuzhiyun 
1333*4882a593Smuzhiyun 	mt7603_irq_enable(dev, MT_INT_RX_DONE_ALL | MT_INT_TX_DONE_ALL);
1334*4882a593Smuzhiyun }
1335*4882a593Smuzhiyun 
1336*4882a593Smuzhiyun void mt7603_mac_start(struct mt7603_dev *dev)
1337*4882a593Smuzhiyun {
1338*4882a593Smuzhiyun 	mt76_clear(dev, MT_ARB_SCR,
1339*4882a593Smuzhiyun 		   MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
1340*4882a593Smuzhiyun 	mt76_wr(dev, MT_WF_ARB_TX_START_0, ~0);
1341*4882a593Smuzhiyun 	mt76_set(dev, MT_WF_ARB_RQCR, MT_WF_ARB_RQCR_RX_START);
1342*4882a593Smuzhiyun }
1343*4882a593Smuzhiyun 
1344*4882a593Smuzhiyun void mt7603_mac_stop(struct mt7603_dev *dev)
1345*4882a593Smuzhiyun {
1346*4882a593Smuzhiyun 	mt76_set(dev, MT_ARB_SCR,
1347*4882a593Smuzhiyun 		 MT_ARB_SCR_TX_DISABLE | MT_ARB_SCR_RX_DISABLE);
1348*4882a593Smuzhiyun 	mt76_wr(dev, MT_WF_ARB_TX_START_0, 0);
1349*4882a593Smuzhiyun 	mt76_clear(dev, MT_WF_ARB_RQCR, MT_WF_ARB_RQCR_RX_START);
1350*4882a593Smuzhiyun }
1351*4882a593Smuzhiyun 
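/* Abort the PSE client TX path and flush its FIFO; used during watchdog
 * recovery before DMA is restarted.
 */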
1352*4882a593Smuzhiyun void mt7603_pse_client_reset(struct mt7603_dev *dev)
1353*4882a593Smuzhiyun {
1354*4882a593Smuzhiyun 	u32 addr;
1355*4882a593Smuzhiyun 
1356*4882a593Smuzhiyun 	addr = mt7603_reg_map(dev, MT_CLIENT_BASE_PHYS_ADDR +
1357*4882a593Smuzhiyun 				   MT_CLIENT_RESET_TX);
1358*4882a593Smuzhiyun 
1359*4882a593Smuzhiyun 	/* Clear previous reset state */
1360*4882a593Smuzhiyun 	mt76_clear(dev, addr,
1361*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_1 |
1362*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_2 |
1363*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_1_S |
1364*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_2_S);
1365*4882a593Smuzhiyun 
1366*4882a593Smuzhiyun 	/* Start PSE client TX abort */
1367*4882a593Smuzhiyun 	mt76_set(dev, addr, MT_CLIENT_RESET_TX_R_E_1);
1368*4882a593Smuzhiyun 	mt76_poll_msec(dev, addr, MT_CLIENT_RESET_TX_R_E_1_S,
1369*4882a593Smuzhiyun 		       MT_CLIENT_RESET_TX_R_E_1_S, 500);
1370*4882a593Smuzhiyun 
1371*4882a593Smuzhiyun 	mt76_set(dev, addr, MT_CLIENT_RESET_TX_R_E_2);
1372*4882a593Smuzhiyun 	mt76_set(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_SW_RESET);
1373*4882a593Smuzhiyun 
1374*4882a593Smuzhiyun 	/* Wait for PSE client to clear TX FIFO */
1375*4882a593Smuzhiyun 	mt76_poll_msec(dev, addr, MT_CLIENT_RESET_TX_R_E_2_S,
1376*4882a593Smuzhiyun 		       MT_CLIENT_RESET_TX_R_E_2_S, 500);
1377*4882a593Smuzhiyun 
1378*4882a593Smuzhiyun 	/* Clear PSE client TX abort state */
1379*4882a593Smuzhiyun 	mt76_clear(dev, addr,
1380*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_1 |
1381*4882a593Smuzhiyun 		   MT_CLIENT_RESET_TX_R_E_2);
1382*4882a593Smuzhiyun }
1383*4882a593Smuzhiyun 
1384*4882a593Smuzhiyun static void mt7603_dma_sched_reset(struct mt7603_dev *dev)
1385*4882a593Smuzhiyun {
1386*4882a593Smuzhiyun 	if (!is_mt7628(dev))
1387*4882a593Smuzhiyun 		return;
1388*4882a593Smuzhiyun 
1389*4882a593Smuzhiyun 	mt76_set(dev, MT_SCH_4, MT_SCH_4_RESET);
1390*4882a593Smuzhiyun 	mt76_clear(dev, MT_SCH_4, MT_SCH_4_RESET);
1391*4882a593Smuzhiyun }
1392*4882a593Smuzhiyun 
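/* Full watchdog recovery: quiesce mac80211 queues, NAPI and the beacon
 * timer, optionally reset the PSE, restart WPDMA and all TX/RX queues, then
 * bring interrupts, beacons and queues back up.
 */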
1393*4882a593Smuzhiyun static void mt7603_mac_watchdog_reset(struct mt7603_dev *dev)
1394*4882a593Smuzhiyun {
1395*4882a593Smuzhiyun 	int beacon_int = dev->mt76.beacon_int;
1396*4882a593Smuzhiyun 	u32 mask = dev->mt76.mmio.irqmask;
1397*4882a593Smuzhiyun 	int i;
1398*4882a593Smuzhiyun 
1399*4882a593Smuzhiyun 	ieee80211_stop_queues(dev->mt76.hw);
1400*4882a593Smuzhiyun 	set_bit(MT76_RESET, &dev->mphy.state);
1401*4882a593Smuzhiyun 
1402*4882a593Smuzhiyun 	/* lock/unlock all queues to ensure that no tx is pending */
1403*4882a593Smuzhiyun 	mt76_txq_schedule_all(&dev->mphy);
1404*4882a593Smuzhiyun 
1405*4882a593Smuzhiyun 	mt76_worker_disable(&dev->mt76.tx_worker);
1406*4882a593Smuzhiyun 	tasklet_disable(&dev->mt76.pre_tbtt_tasklet);
1407*4882a593Smuzhiyun 	napi_disable(&dev->mt76.napi[0]);
1408*4882a593Smuzhiyun 	napi_disable(&dev->mt76.napi[1]);
1409*4882a593Smuzhiyun 	napi_disable(&dev->mt76.tx_napi);
1410*4882a593Smuzhiyun 
1411*4882a593Smuzhiyun 	mutex_lock(&dev->mt76.mutex);
1412*4882a593Smuzhiyun 
1413*4882a593Smuzhiyun 	mt7603_beacon_set_timer(dev, -1, 0);
1414*4882a593Smuzhiyun 
1415*4882a593Smuzhiyun 	if (dev->reset_cause[RESET_CAUSE_RESET_FAILED] ||
1416*4882a593Smuzhiyun 	    dev->cur_reset_cause == RESET_CAUSE_RX_PSE_BUSY ||
1417*4882a593Smuzhiyun 	    dev->cur_reset_cause == RESET_CAUSE_BEACON_STUCK ||
1418*4882a593Smuzhiyun 	    dev->cur_reset_cause == RESET_CAUSE_TX_HANG)
1419*4882a593Smuzhiyun 		mt7603_pse_reset(dev);
1420*4882a593Smuzhiyun 
1421*4882a593Smuzhiyun 	if (dev->reset_cause[RESET_CAUSE_RESET_FAILED])
1422*4882a593Smuzhiyun 		goto skip_dma_reset;
1423*4882a593Smuzhiyun 
1424*4882a593Smuzhiyun 	mt7603_mac_stop(dev);
1425*4882a593Smuzhiyun 
1426*4882a593Smuzhiyun 	mt76_clear(dev, MT_WPDMA_GLO_CFG,
1427*4882a593Smuzhiyun 		   MT_WPDMA_GLO_CFG_RX_DMA_EN | MT_WPDMA_GLO_CFG_TX_DMA_EN |
1428*4882a593Smuzhiyun 		   MT_WPDMA_GLO_CFG_TX_WRITEBACK_DONE);
1429*4882a593Smuzhiyun 	usleep_range(1000, 2000);
1430*4882a593Smuzhiyun 
1431*4882a593Smuzhiyun 	mt7603_irq_disable(dev, mask);
1432*4882a593Smuzhiyun 
1433*4882a593Smuzhiyun 	mt76_set(dev, MT_WPDMA_GLO_CFG, MT_WPDMA_GLO_CFG_FORCE_TX_EOF);
1434*4882a593Smuzhiyun 
1435*4882a593Smuzhiyun 	mt7603_pse_client_reset(dev);
1436*4882a593Smuzhiyun 
1437*4882a593Smuzhiyun 	for (i = 0; i < __MT_TXQ_MAX; i++)
1438*4882a593Smuzhiyun 		mt76_queue_tx_cleanup(dev, i, true);
1439*4882a593Smuzhiyun 
1440*4882a593Smuzhiyun 	mt76_for_each_q_rx(&dev->mt76, i) {
1441*4882a593Smuzhiyun 		mt76_queue_rx_reset(dev, i);
1442*4882a593Smuzhiyun 	}
1443*4882a593Smuzhiyun 
1444*4882a593Smuzhiyun 	mt7603_dma_sched_reset(dev);
1445*4882a593Smuzhiyun 
1446*4882a593Smuzhiyun 	mt7603_mac_dma_start(dev);
1447*4882a593Smuzhiyun 
1448*4882a593Smuzhiyun 	mt7603_irq_enable(dev, mask);
1449*4882a593Smuzhiyun 
1450*4882a593Smuzhiyun skip_dma_reset:
1451*4882a593Smuzhiyun 	clear_bit(MT76_RESET, &dev->mphy.state);
1452*4882a593Smuzhiyun 	mutex_unlock(&dev->mt76.mutex);
1453*4882a593Smuzhiyun 
1454*4882a593Smuzhiyun 	mt76_worker_enable(&dev->mt76.tx_worker);
1455*4882a593Smuzhiyun 	napi_enable(&dev->mt76.tx_napi);
1456*4882a593Smuzhiyun 	napi_schedule(&dev->mt76.tx_napi);
1457*4882a593Smuzhiyun 
1458*4882a593Smuzhiyun 	tasklet_enable(&dev->mt76.pre_tbtt_tasklet);
1459*4882a593Smuzhiyun 	mt7603_beacon_set_timer(dev, -1, beacon_int);
1460*4882a593Smuzhiyun 
1461*4882a593Smuzhiyun 	napi_enable(&dev->mt76.napi[0]);
1462*4882a593Smuzhiyun 	napi_schedule(&dev->mt76.napi[0]);
1463*4882a593Smuzhiyun 
1464*4882a593Smuzhiyun 	napi_enable(&dev->mt76.napi[1]);
1465*4882a593Smuzhiyun 	napi_schedule(&dev->mt76.napi[1]);
1466*4882a593Smuzhiyun 
1467*4882a593Smuzhiyun 	ieee80211_wake_queues(dev->mt76.hw);
1468*4882a593Smuzhiyun 	mt76_txq_schedule_all(&dev->mphy);
1469*4882a593Smuzhiyun }
1470*4882a593Smuzhiyun 
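/* Select the given index in the WPDMA debug monitor and read back the
 * corresponding debug value.
 */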
1471*4882a593Smuzhiyun static u32 mt7603_dma_debug(struct mt7603_dev *dev, u8 index)
1472*4882a593Smuzhiyun {
1473*4882a593Smuzhiyun 	u32 val;
1474*4882a593Smuzhiyun 
1475*4882a593Smuzhiyun 	mt76_wr(dev, MT_WPDMA_DEBUG,
1476*4882a593Smuzhiyun 		FIELD_PREP(MT_WPDMA_DEBUG_IDX, index) |
1477*4882a593Smuzhiyun 		MT_WPDMA_DEBUG_SEL);
1478*4882a593Smuzhiyun 
1479*4882a593Smuzhiyun 	val = mt76_rr(dev, MT_WPDMA_DEBUG);
1480*4882a593Smuzhiyun 	return FIELD_GET(MT_WPDMA_DEBUG_VALUE, val);
1481*4882a593Smuzhiyun }
1482*4882a593Smuzhiyun 
1483*4882a593Smuzhiyun static bool mt7603_rx_fifo_busy(struct mt7603_dev *dev)
1484*4882a593Smuzhiyun {
1485*4882a593Smuzhiyun 	if (is_mt7628(dev))
1486*4882a593Smuzhiyun 		return mt7603_dma_debug(dev, 9) & BIT(9);
1487*4882a593Smuzhiyun 
1488*4882a593Smuzhiyun 	return mt7603_dma_debug(dev, 2) & BIT(8);
1489*4882a593Smuzhiyun }
1490*4882a593Smuzhiyun 
1491*4882a593Smuzhiyun static bool mt7603_rx_dma_busy(struct mt7603_dev *dev)
1492*4882a593Smuzhiyun {
1493*4882a593Smuzhiyun 	if (!(mt76_rr(dev, MT_WPDMA_GLO_CFG) & MT_WPDMA_GLO_CFG_RX_DMA_BUSY))
1494*4882a593Smuzhiyun 		return false;
1495*4882a593Smuzhiyun 
1496*4882a593Smuzhiyun 	return mt7603_rx_fifo_busy(dev);
1497*4882a593Smuzhiyun }
1498*4882a593Smuzhiyun 
1499*4882a593Smuzhiyun static bool mt7603_tx_dma_busy(struct mt7603_dev *dev)
1500*4882a593Smuzhiyun {
1501*4882a593Smuzhiyun 	u32 val;
1502*4882a593Smuzhiyun 
1503*4882a593Smuzhiyun 	if (!(mt76_rr(dev, MT_WPDMA_GLO_CFG) & MT_WPDMA_GLO_CFG_TX_DMA_BUSY))
1504*4882a593Smuzhiyun 		return false;
1505*4882a593Smuzhiyun 
1506*4882a593Smuzhiyun 	val = mt7603_dma_debug(dev, 9);
1507*4882a593Smuzhiyun 	return (val & BIT(8)) && (val & 0xf) != 0xf;
1508*4882a593Smuzhiyun }
1509*4882a593Smuzhiyun 
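/* Treat a TX queue as hung when its DMA index has not advanced since the
 * previous check while the CPU index still shows queued descriptors; the
 * four AC queues are inspected.
 */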
1510*4882a593Smuzhiyun static bool mt7603_tx_hang(struct mt7603_dev *dev)
1511*4882a593Smuzhiyun {
1512*4882a593Smuzhiyun 	struct mt76_queue *q;
1513*4882a593Smuzhiyun 	u32 dma_idx, prev_dma_idx;
1514*4882a593Smuzhiyun 	int i;
1515*4882a593Smuzhiyun 
1516*4882a593Smuzhiyun 	for (i = 0; i < 4; i++) {
1517*4882a593Smuzhiyun 		q = dev->mt76.q_tx[i];
1518*4882a593Smuzhiyun 
1519*4882a593Smuzhiyun 		if (!q->queued)
1520*4882a593Smuzhiyun 			continue;
1521*4882a593Smuzhiyun 
1522*4882a593Smuzhiyun 		prev_dma_idx = dev->tx_dma_idx[i];
1523*4882a593Smuzhiyun 		dma_idx = readl(&q->regs->dma_idx);
1524*4882a593Smuzhiyun 		dev->tx_dma_idx[i] = dma_idx;
1525*4882a593Smuzhiyun 
1526*4882a593Smuzhiyun 		if (dma_idx == prev_dma_idx &&
1527*4882a593Smuzhiyun 		    dma_idx != readl(&q->regs->cpu_idx))
1528*4882a593Smuzhiyun 			break;
1529*4882a593Smuzhiyun 	}
1530*4882a593Smuzhiyun 
1531*4882a593Smuzhiyun 	return i < 4;
1532*4882a593Smuzhiyun }
1533*4882a593Smuzhiyun 
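/* Detect a stuck RX path inside the PSE by reading the PSE client status
 * words; the magic bit patterns below presumably correspond to queue-full
 * conditions taken from the vendor reference code.
 */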
1534*4882a593Smuzhiyun static bool mt7603_rx_pse_busy(struct mt7603_dev *dev)
1535*4882a593Smuzhiyun {
1536*4882a593Smuzhiyun 	u32 addr, val;
1537*4882a593Smuzhiyun 
1538*4882a593Smuzhiyun 	if (mt76_rr(dev, MT_MCU_DEBUG_RESET) & MT_MCU_DEBUG_RESET_QUEUES)
1539*4882a593Smuzhiyun 		return true;
1540*4882a593Smuzhiyun 
1541*4882a593Smuzhiyun 	if (mt7603_rx_fifo_busy(dev))
1542*4882a593Smuzhiyun 		return false;
1543*4882a593Smuzhiyun 
1544*4882a593Smuzhiyun 	addr = mt7603_reg_map(dev, MT_CLIENT_BASE_PHYS_ADDR + MT_CLIENT_STATUS);
1545*4882a593Smuzhiyun 	mt76_wr(dev, addr, 3);
1546*4882a593Smuzhiyun 	val = mt76_rr(dev, addr) >> 16;
1547*4882a593Smuzhiyun 
1548*4882a593Smuzhiyun 	if (is_mt7628(dev) && (val & 0x4001) == 0x4001)
1549*4882a593Smuzhiyun 		return true;
1550*4882a593Smuzhiyun 
1551*4882a593Smuzhiyun 	return (val & 0x8001) == 0x8001 || (val & 0xe001) == 0xe001;
1552*4882a593Smuzhiyun }
1553*4882a593Smuzhiyun 
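/* Shared watchdog helper: advance a per-cause counter while the check
 * callback keeps reporting a problem (callers maintain the counter
 * themselves when check is NULL) and request a reset once it reaches
 * MT7603_WATCHDOG_TIMEOUT. dev->reset_test can force a given cause.
 */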
1554*4882a593Smuzhiyun static bool
1555*4882a593Smuzhiyun mt7603_watchdog_check(struct mt7603_dev *dev, u8 *counter,
1556*4882a593Smuzhiyun 		      enum mt7603_reset_cause cause,
1557*4882a593Smuzhiyun 		      bool (*check)(struct mt7603_dev *dev))
1558*4882a593Smuzhiyun {
1559*4882a593Smuzhiyun 	if (dev->reset_test == cause + 1) {
1560*4882a593Smuzhiyun 		dev->reset_test = 0;
1561*4882a593Smuzhiyun 		goto trigger;
1562*4882a593Smuzhiyun 	}
1563*4882a593Smuzhiyun 
1564*4882a593Smuzhiyun 	if (check) {
1565*4882a593Smuzhiyun 		if (!check(dev) && *counter < MT7603_WATCHDOG_TIMEOUT) {
1566*4882a593Smuzhiyun 			*counter = 0;
1567*4882a593Smuzhiyun 			return false;
1568*4882a593Smuzhiyun 		}
1569*4882a593Smuzhiyun 
1570*4882a593Smuzhiyun 		(*counter)++;
1571*4882a593Smuzhiyun 	}
1572*4882a593Smuzhiyun 
1573*4882a593Smuzhiyun 	if (*counter < MT7603_WATCHDOG_TIMEOUT)
1574*4882a593Smuzhiyun 		return false;
1575*4882a593Smuzhiyun trigger:
1576*4882a593Smuzhiyun 	dev->cur_reset_cause = cause;
1577*4882a593Smuzhiyun 	dev->reset_cause[cause]++;
1578*4882a593Smuzhiyun 	return true;
1579*4882a593Smuzhiyun }
1580*4882a593Smuzhiyun 
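/* Accumulate the hardware CCA-busy time counter into the survey state for
 * the current channel.
 */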
1581*4882a593Smuzhiyun void mt7603_update_channel(struct mt76_dev *mdev)
1582*4882a593Smuzhiyun {
1583*4882a593Smuzhiyun 	struct mt7603_dev *dev = container_of(mdev, struct mt7603_dev, mt76);
1584*4882a593Smuzhiyun 	struct mt76_channel_state *state;
1585*4882a593Smuzhiyun 
1586*4882a593Smuzhiyun 	state = mdev->phy.chan_state;
1587*4882a593Smuzhiyun 	state->cc_busy += mt76_rr(dev, MT_MIB_STAT_CCA);
1588*4882a593Smuzhiyun }
1589*4882a593Smuzhiyun 
1590*4882a593Smuzhiyun void
1591*4882a593Smuzhiyun mt7603_edcca_set_strict(struct mt7603_dev *dev, bool val)
1592*4882a593Smuzhiyun {
1593*4882a593Smuzhiyun 	u32 rxtd_6 = 0xd7c80000;
1594*4882a593Smuzhiyun 
1595*4882a593Smuzhiyun 	if (val == dev->ed_strict_mode)
1596*4882a593Smuzhiyun 		return;
1597*4882a593Smuzhiyun 
1598*4882a593Smuzhiyun 	dev->ed_strict_mode = val;
1599*4882a593Smuzhiyun 
1600*4882a593Smuzhiyun 	/* Ensure that ED/CCA does not trigger if disabled */
1601*4882a593Smuzhiyun 	if (!dev->ed_monitor)
1602*4882a593Smuzhiyun 		rxtd_6 |= FIELD_PREP(MT_RXTD_6_CCAED_TH, 0x34);
1603*4882a593Smuzhiyun 	else
1604*4882a593Smuzhiyun 		rxtd_6 |= FIELD_PREP(MT_RXTD_6_CCAED_TH, 0x7d);
1605*4882a593Smuzhiyun 
1606*4882a593Smuzhiyun 	if (dev->ed_monitor && !dev->ed_strict_mode)
1607*4882a593Smuzhiyun 		rxtd_6 |= FIELD_PREP(MT_RXTD_6_ACI_TH, 0x0f);
1608*4882a593Smuzhiyun 	else
1609*4882a593Smuzhiyun 		rxtd_6 |= FIELD_PREP(MT_RXTD_6_ACI_TH, 0x10);
1610*4882a593Smuzhiyun 
1611*4882a593Smuzhiyun 	mt76_wr(dev, MT_RXTD(6), rxtd_6);
1612*4882a593Smuzhiyun 
1613*4882a593Smuzhiyun 	mt76_rmw_field(dev, MT_RXTD(13), MT_RXTD_13_ACI_TH_EN,
1614*4882a593Smuzhiyun 		       dev->ed_monitor && !dev->ed_strict_mode);
1615*4882a593Smuzhiyun }
1616*4882a593Smuzhiyun 
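/* Energy-detect CCA monitoring: measure how much of the last interval the
 * ED counter reports as busy and how often strong signals are present, then
 * toggle between strict and relaxed ED/CCA thresholds.
 */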
1617*4882a593Smuzhiyun static void
1618*4882a593Smuzhiyun mt7603_edcca_check(struct mt7603_dev *dev)
1619*4882a593Smuzhiyun {
1620*4882a593Smuzhiyun 	u32 val = mt76_rr(dev, MT_AGC(41));
1621*4882a593Smuzhiyun 	ktime_t cur_time;
1622*4882a593Smuzhiyun 	int rssi0, rssi1;
1623*4882a593Smuzhiyun 	u32 active;
1624*4882a593Smuzhiyun 	u32 ed_busy;
1625*4882a593Smuzhiyun 
1626*4882a593Smuzhiyun 	if (!dev->ed_monitor)
1627*4882a593Smuzhiyun 		return;
1628*4882a593Smuzhiyun 
1629*4882a593Smuzhiyun 	rssi0 = FIELD_GET(MT_AGC_41_RSSI_0, val);
1630*4882a593Smuzhiyun 	if (rssi0 > 128)
1631*4882a593Smuzhiyun 		rssi0 -= 256;
1632*4882a593Smuzhiyun 
1633*4882a593Smuzhiyun 	rssi1 = FIELD_GET(MT_AGC_41_RSSI_1, val);
1634*4882a593Smuzhiyun 	if (rssi1 > 128)
1635*4882a593Smuzhiyun 		rssi1 -= 256;
1636*4882a593Smuzhiyun 
1637*4882a593Smuzhiyun 	if (max(rssi0, rssi1) >= -40 &&
1638*4882a593Smuzhiyun 	    dev->ed_strong_signal < MT7603_EDCCA_BLOCK_TH)
1639*4882a593Smuzhiyun 		dev->ed_strong_signal++;
1640*4882a593Smuzhiyun 	else if (dev->ed_strong_signal > 0)
1641*4882a593Smuzhiyun 		dev->ed_strong_signal--;
1642*4882a593Smuzhiyun 
1643*4882a593Smuzhiyun 	cur_time = ktime_get_boottime();
1644*4882a593Smuzhiyun 	ed_busy = mt76_rr(dev, MT_MIB_STAT_ED) & MT_MIB_STAT_ED_MASK;
1645*4882a593Smuzhiyun 
1646*4882a593Smuzhiyun 	active = ktime_to_us(ktime_sub(cur_time, dev->ed_time));
1647*4882a593Smuzhiyun 	dev->ed_time = cur_time;
1648*4882a593Smuzhiyun 
1649*4882a593Smuzhiyun 	if (!active)
1650*4882a593Smuzhiyun 		return;
1651*4882a593Smuzhiyun 
1652*4882a593Smuzhiyun 	if (100 * ed_busy / active > 90) {
1653*4882a593Smuzhiyun 		if (dev->ed_trigger < 0)
1654*4882a593Smuzhiyun 			dev->ed_trigger = 0;
1655*4882a593Smuzhiyun 		dev->ed_trigger++;
1656*4882a593Smuzhiyun 	} else {
1657*4882a593Smuzhiyun 		if (dev->ed_trigger > 0)
1658*4882a593Smuzhiyun 			dev->ed_trigger = 0;
1659*4882a593Smuzhiyun 		dev->ed_trigger--;
1660*4882a593Smuzhiyun 	}
1661*4882a593Smuzhiyun 
1662*4882a593Smuzhiyun 	if (dev->ed_trigger > MT7603_EDCCA_BLOCK_TH ||
1663*4882a593Smuzhiyun 	    dev->ed_strong_signal < MT7603_EDCCA_BLOCK_TH / 2) {
1664*4882a593Smuzhiyun 		mt7603_edcca_set_strict(dev, true);
1665*4882a593Smuzhiyun 	} else if (dev->ed_trigger < -MT7603_EDCCA_BLOCK_TH) {
1666*4882a593Smuzhiyun 		mt7603_edcca_set_strict(dev, false);
1667*4882a593Smuzhiyun 	}
1668*4882a593Smuzhiyun 
1669*4882a593Smuzhiyun 	if (dev->ed_trigger > MT7603_EDCCA_BLOCK_TH)
1670*4882a593Smuzhiyun 		dev->ed_trigger = MT7603_EDCCA_BLOCK_TH;
1671*4882a593Smuzhiyun 	else if (dev->ed_trigger < -MT7603_EDCCA_BLOCK_TH)
1672*4882a593Smuzhiyun 		dev->ed_trigger = -MT7603_EDCCA_BLOCK_TH;
1673*4882a593Smuzhiyun }
1674*4882a593Smuzhiyun 
1675*4882a593Smuzhiyun void mt7603_cca_stats_reset(struct mt7603_dev *dev)
1676*4882a593Smuzhiyun {
1677*4882a593Smuzhiyun 	mt76_set(dev, MT_PHYCTRL(2), MT_PHYCTRL_2_STATUS_RESET);
1678*4882a593Smuzhiyun 	mt76_clear(dev, MT_PHYCTRL(2), MT_PHYCTRL_2_STATUS_RESET);
1679*4882a593Smuzhiyun 	mt76_set(dev, MT_PHYCTRL(2), MT_PHYCTRL_2_STATUS_EN);
1680*4882a593Smuzhiyun }
1681*4882a593Smuzhiyun 
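/* Translate dev->sensitivity into AGC register values: each sensitivity
 * range uses a fixed baseline with the computed adjustment OR'd into several
 * gain fields, while a sensitivity of 0 restores the saved default AGC
 * values.
 */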
1682*4882a593Smuzhiyun static void
1683*4882a593Smuzhiyun mt7603_adjust_sensitivity(struct mt7603_dev *dev)
1684*4882a593Smuzhiyun {
1685*4882a593Smuzhiyun 	u32 agc0 = dev->agc0, agc3 = dev->agc3;
1686*4882a593Smuzhiyun 	u32 adj;
1687*4882a593Smuzhiyun 
1688*4882a593Smuzhiyun 	if (!dev->sensitivity || dev->sensitivity < -100) {
1689*4882a593Smuzhiyun 		dev->sensitivity = 0;
1690*4882a593Smuzhiyun 	} else if (dev->sensitivity <= -84) {
1691*4882a593Smuzhiyun 		adj = 7 + (dev->sensitivity + 92) / 2;
1692*4882a593Smuzhiyun 
1693*4882a593Smuzhiyun 		agc0 = 0x56f0076f;
1694*4882a593Smuzhiyun 		agc0 |= adj << 12;
1695*4882a593Smuzhiyun 		agc0 |= adj << 16;
1696*4882a593Smuzhiyun 		agc3 = 0x81d0d5e3;
1697*4882a593Smuzhiyun 	} else if (dev->sensitivity <= -72) {
1698*4882a593Smuzhiyun 		adj = 7 + (dev->sensitivity + 80) / 2;
1699*4882a593Smuzhiyun 
1700*4882a593Smuzhiyun 		agc0 = 0x6af0006f;
1701*4882a593Smuzhiyun 		agc0 |= adj << 8;
1702*4882a593Smuzhiyun 		agc0 |= adj << 12;
1703*4882a593Smuzhiyun 		agc0 |= adj << 16;
1704*4882a593Smuzhiyun 
1705*4882a593Smuzhiyun 		agc3 = 0x8181d5e3;
1706*4882a593Smuzhiyun 	} else {
1707*4882a593Smuzhiyun 		if (dev->sensitivity > -54)
1708*4882a593Smuzhiyun 			dev->sensitivity = -54;
1709*4882a593Smuzhiyun 
1710*4882a593Smuzhiyun 		adj = 7 + (dev->sensitivity + 80) / 2;
1711*4882a593Smuzhiyun 
1712*4882a593Smuzhiyun 		agc0 = 0x7ff0000f;
1713*4882a593Smuzhiyun 		agc0 |= adj << 4;
1714*4882a593Smuzhiyun 		agc0 |= adj << 8;
1715*4882a593Smuzhiyun 		agc0 |= adj << 12;
1716*4882a593Smuzhiyun 		agc0 |= adj << 16;
1717*4882a593Smuzhiyun 
1718*4882a593Smuzhiyun 		agc3 = 0x818181e3;
1719*4882a593Smuzhiyun 	}
1720*4882a593Smuzhiyun 
1721*4882a593Smuzhiyun 	mt76_wr(dev, MT_AGC(0), agc0);
1722*4882a593Smuzhiyun 	mt76_wr(dev, MT_AGC1(0), agc0);
1723*4882a593Smuzhiyun 
1724*4882a593Smuzhiyun 	mt76_wr(dev, MT_AGC(3), agc3);
1725*4882a593Smuzhiyun 	mt76_wr(dev, MT_AGC1(3), agc3);
1726*4882a593Smuzhiyun }
1727*4882a593Smuzhiyun 
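/* Dynamic sensitivity: count false CCA events (PD without a matching MDRDY),
 * raise the CCA threshold when they are frequent and back off when they are
 * rare or after 10 seconds, clamped to 15 dB below the weakest station's
 * average RSSI.
 */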
1728*4882a593Smuzhiyun static void
1729*4882a593Smuzhiyun mt7603_false_cca_check(struct mt7603_dev *dev)
1730*4882a593Smuzhiyun {
1731*4882a593Smuzhiyun 	int pd_cck, pd_ofdm, mdrdy_cck, mdrdy_ofdm;
1732*4882a593Smuzhiyun 	int false_cca;
1733*4882a593Smuzhiyun 	int min_signal;
1734*4882a593Smuzhiyun 	u32 val;
1735*4882a593Smuzhiyun 
1736*4882a593Smuzhiyun 	if (!dev->dynamic_sensitivity)
1737*4882a593Smuzhiyun 		return;
1738*4882a593Smuzhiyun 
1739*4882a593Smuzhiyun 	val = mt76_rr(dev, MT_PHYCTRL_STAT_PD);
1740*4882a593Smuzhiyun 	pd_cck = FIELD_GET(MT_PHYCTRL_STAT_PD_CCK, val);
1741*4882a593Smuzhiyun 	pd_ofdm = FIELD_GET(MT_PHYCTRL_STAT_PD_OFDM, val);
1742*4882a593Smuzhiyun 
1743*4882a593Smuzhiyun 	val = mt76_rr(dev, MT_PHYCTRL_STAT_MDRDY);
1744*4882a593Smuzhiyun 	mdrdy_cck = FIELD_GET(MT_PHYCTRL_STAT_MDRDY_CCK, val);
1745*4882a593Smuzhiyun 	mdrdy_ofdm = FIELD_GET(MT_PHYCTRL_STAT_MDRDY_OFDM, val);
1746*4882a593Smuzhiyun 
1747*4882a593Smuzhiyun 	dev->false_cca_ofdm = pd_ofdm - mdrdy_ofdm;
1748*4882a593Smuzhiyun 	dev->false_cca_cck = pd_cck - mdrdy_cck;
1749*4882a593Smuzhiyun 
1750*4882a593Smuzhiyun 	mt7603_cca_stats_reset(dev);
1751*4882a593Smuzhiyun 
1752*4882a593Smuzhiyun 	min_signal = mt76_get_min_avg_rssi(&dev->mt76, false);
1753*4882a593Smuzhiyun 	if (!min_signal) {
1754*4882a593Smuzhiyun 		dev->sensitivity = 0;
1755*4882a593Smuzhiyun 		dev->last_cca_adj = jiffies;
1756*4882a593Smuzhiyun 		goto out;
1757*4882a593Smuzhiyun 	}
1758*4882a593Smuzhiyun 
1759*4882a593Smuzhiyun 	min_signal -= 15;
1760*4882a593Smuzhiyun 
1761*4882a593Smuzhiyun 	false_cca = dev->false_cca_ofdm + dev->false_cca_cck;
1762*4882a593Smuzhiyun 	if (false_cca > 600 &&
1763*4882a593Smuzhiyun 	    dev->sensitivity < -100 + dev->sensitivity_limit) {
1764*4882a593Smuzhiyun 		if (!dev->sensitivity)
1765*4882a593Smuzhiyun 			dev->sensitivity = -92;
1766*4882a593Smuzhiyun 		else
1767*4882a593Smuzhiyun 			dev->sensitivity += 2;
1768*4882a593Smuzhiyun 		dev->last_cca_adj = jiffies;
1769*4882a593Smuzhiyun 	} else if (false_cca < 100 ||
1770*4882a593Smuzhiyun 		   time_after(jiffies, dev->last_cca_adj + 10 * HZ)) {
1771*4882a593Smuzhiyun 		dev->last_cca_adj = jiffies;
1772*4882a593Smuzhiyun 		if (!dev->sensitivity)
1773*4882a593Smuzhiyun 			goto out;
1774*4882a593Smuzhiyun 
1775*4882a593Smuzhiyun 		dev->sensitivity -= 2;
1776*4882a593Smuzhiyun 	}
1777*4882a593Smuzhiyun 
1778*4882a593Smuzhiyun 	if (dev->sensitivity && dev->sensitivity > min_signal) {
1779*4882a593Smuzhiyun 		dev->sensitivity = min_signal;
1780*4882a593Smuzhiyun 		dev->last_cca_adj = jiffies;
1781*4882a593Smuzhiyun 	}
1782*4882a593Smuzhiyun 
1783*4882a593Smuzhiyun out:
1784*4882a593Smuzhiyun 	mt7603_adjust_sensitivity(dev);
1785*4882a593Smuzhiyun }
1786*4882a593Smuzhiyun 
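/* Periodic MAC housekeeping: flush stale TX status entries, update survey
 * and EDCCA state, accumulate aggregation statistics, run the false-CCA
 * check every ten intervals and evaluate all watchdog conditions, triggering
 * a full watchdog reset if any of them fire.
 */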
1787*4882a593Smuzhiyun void mt7603_mac_work(struct work_struct *work)
1788*4882a593Smuzhiyun {
1789*4882a593Smuzhiyun 	struct mt7603_dev *dev = container_of(work, struct mt7603_dev,
1790*4882a593Smuzhiyun 					      mt76.mac_work.work);
1791*4882a593Smuzhiyun 	bool reset = false;
1792*4882a593Smuzhiyun 	int i, idx;
1793*4882a593Smuzhiyun 
1794*4882a593Smuzhiyun 	mt76_tx_status_check(&dev->mt76, NULL, false);
1795*4882a593Smuzhiyun 
1796*4882a593Smuzhiyun 	mutex_lock(&dev->mt76.mutex);
1797*4882a593Smuzhiyun 
1798*4882a593Smuzhiyun 	dev->mac_work_count++;
1799*4882a593Smuzhiyun 	mt76_update_survey(&dev->mt76);
1800*4882a593Smuzhiyun 	mt7603_edcca_check(dev);
1801*4882a593Smuzhiyun 
1802*4882a593Smuzhiyun 	for (i = 0, idx = 0; i < 2; i++) {
1803*4882a593Smuzhiyun 		u32 val = mt76_rr(dev, MT_TX_AGG_CNT(i));
1804*4882a593Smuzhiyun 
1805*4882a593Smuzhiyun 		dev->mt76.aggr_stats[idx++] += val & 0xffff;
1806*4882a593Smuzhiyun 		dev->mt76.aggr_stats[idx++] += val >> 16;
1807*4882a593Smuzhiyun 	}
1808*4882a593Smuzhiyun 
1809*4882a593Smuzhiyun 	if (dev->mac_work_count == 10)
1810*4882a593Smuzhiyun 		mt7603_false_cca_check(dev);
1811*4882a593Smuzhiyun 
1812*4882a593Smuzhiyun 	if (mt7603_watchdog_check(dev, &dev->rx_pse_check,
1813*4882a593Smuzhiyun 				  RESET_CAUSE_RX_PSE_BUSY,
1814*4882a593Smuzhiyun 				  mt7603_rx_pse_busy) ||
1815*4882a593Smuzhiyun 	    mt7603_watchdog_check(dev, &dev->beacon_check,
1816*4882a593Smuzhiyun 				  RESET_CAUSE_BEACON_STUCK,
1817*4882a593Smuzhiyun 				  NULL) ||
1818*4882a593Smuzhiyun 	    mt7603_watchdog_check(dev, &dev->tx_hang_check,
1819*4882a593Smuzhiyun 				  RESET_CAUSE_TX_HANG,
1820*4882a593Smuzhiyun 				  mt7603_tx_hang) ||
1821*4882a593Smuzhiyun 	    mt7603_watchdog_check(dev, &dev->tx_dma_check,
1822*4882a593Smuzhiyun 				  RESET_CAUSE_TX_BUSY,
1823*4882a593Smuzhiyun 				  mt7603_tx_dma_busy) ||
1824*4882a593Smuzhiyun 	    mt7603_watchdog_check(dev, &dev->rx_dma_check,
1825*4882a593Smuzhiyun 				  RESET_CAUSE_RX_BUSY,
1826*4882a593Smuzhiyun 				  mt7603_rx_dma_busy) ||
1827*4882a593Smuzhiyun 	    mt7603_watchdog_check(dev, &dev->mcu_hang,
1828*4882a593Smuzhiyun 				  RESET_CAUSE_MCU_HANG,
1829*4882a593Smuzhiyun 				  NULL) ||
1830*4882a593Smuzhiyun 	    dev->reset_cause[RESET_CAUSE_RESET_FAILED]) {
1831*4882a593Smuzhiyun 		dev->beacon_check = 0;
1832*4882a593Smuzhiyun 		dev->tx_dma_check = 0;
1833*4882a593Smuzhiyun 		dev->tx_hang_check = 0;
1834*4882a593Smuzhiyun 		dev->rx_dma_check = 0;
1835*4882a593Smuzhiyun 		dev->rx_pse_check = 0;
1836*4882a593Smuzhiyun 		dev->mcu_hang = 0;
1837*4882a593Smuzhiyun 		dev->rx_dma_idx = ~0;
1838*4882a593Smuzhiyun 		memset(dev->tx_dma_idx, 0xff, sizeof(dev->tx_dma_idx));
1839*4882a593Smuzhiyun 		reset = true;
1840*4882a593Smuzhiyun 		dev->mac_work_count = 0;
1841*4882a593Smuzhiyun 	}
1842*4882a593Smuzhiyun 
1843*4882a593Smuzhiyun 	if (dev->mac_work_count >= 10)
1844*4882a593Smuzhiyun 		dev->mac_work_count = 0;
1845*4882a593Smuzhiyun 
1846*4882a593Smuzhiyun 	mutex_unlock(&dev->mt76.mutex);
1847*4882a593Smuzhiyun 
1848*4882a593Smuzhiyun 	if (reset)
1849*4882a593Smuzhiyun 		mt7603_mac_watchdog_reset(dev);
1850*4882a593Smuzhiyun 
1851*4882a593Smuzhiyun 	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->mt76.mac_work,
1852*4882a593Smuzhiyun 				     msecs_to_jiffies(MT7603_WATCHDOG_TIME));
1853*4882a593Smuzhiyun }
1854