// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

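/*
 * Feed queued testmode TX frames to the hardware. Runs from the TX worker:
 * clones the prepared template skb and queues copies until the configured
 * count is reached, at most 1000 frames are in flight, or the hardware
 * queue is half full.
 */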
void mt76_testmode_tx_pending(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = dev->q_tx[qid];

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 && td->tx_queued - td->tx_done < 1000 &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, qid, skb_get(skb), wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

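/*
 * Build the template frame used for TX testing: a zero-filled data frame
 * addressed from/to the device MAC address, with rate, bandwidth, guard
 * interval and coding flags taken from the current testmode settings.
 * Rate indexes are validated against the selected mode and channel.
 */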
static int
mt76_testmode_tx_init(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb;
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(dev->phy.antenna_mask);

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
	if (!skb)
		return -ENOMEM;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = skb;
	hdr = __skb_put_zero(skb, td->tx_msdu_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr2, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr3, dev->macaddr, sizeof(dev->macaddr));

	info = IEEE80211_SKB_CB(skb);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      dev->phy.chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (dev->phy.chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}

	skb_set_queue_mapping(skb, IEEE80211_AC_BE);

	return 0;
}

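/* Reset the TX counters and kick the worker to start sending frames. */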
static void
mt76_testmode_tx_start(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

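/*
 * Stop a running TX test: clear the pending count with the worker disabled,
 * wait (up to 10 seconds) for already queued frames to complete, then free
 * the template skb.
 */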
static void
mt76_testmode_tx_stop(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued, 10 * HZ);

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

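/* Track which testmode attributes have been explicitly set by the user. */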
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

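/* Apply default TX parameters on first use (tx_msdu_len still zero). */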
static void
mt76_testmode_init_defaults(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	if (td->tx_msdu_len > 0)
		return;

	td->tx_msdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

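/*
 * Perform a single state transition: stop any running TX test, prepare the
 * TX template when entering TX_FRAMES, hand the new state to the driver,
 * then start TX or reset RX statistics as needed.
 */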
static int
__mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = dev->test.state;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(dev);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(dev);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(dev, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(dev);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(dev);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		memset(&dev->test.rx_stats, 0, sizeof(dev->test.rx_stats));
	}

	dev->test.state = state;

	return 0;
}

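/*
 * Public state setter. Any state other than OFF requires the PHY to be
 * running with monitor mode enabled; switching directly between two active
 * states goes through IDLE first.
 */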
int mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_hw *hw = dev->phy.hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &dev->phy.state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(dev, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(dev, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

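/*
 * Read an optional u8 attribute and range-check it; a missing attribute
 * leaves the destination unchanged.
 */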
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

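/*
 * NL80211 testmode command handler: parse the mt76 testmode attributes,
 * update the per-device test configuration under dev->mutex, forward
 * driver-specific parameters via test_ops->set_params, and apply a state
 * change if one was requested.
 */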
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(dev, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(dev);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > IEEE80211_MAX_FRAME_LEN ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_msdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask, 1,
			   phy->antenna_mask) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(dev, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(dev, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

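/*
 * Emit testmode statistics into a netlink message: per-device TX counters
 * plus RX packet and FCS error totals summed over all rx_stats entries,
 * followed by any driver-specific statistics.
 */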
static int
mt76_testmode_dump_stats(struct mt76_dev *dev, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &dev->test;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(dev, msg);

	return 0;
}

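/*
 * NL80211 testmode dump handler: returns either the statistics nest (when
 * MT76_TM_ATTR_STATS is requested) or the current testmode configuration.
 * Only a single dump iteration is produced (cb->args[2] guards reruns).
 */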
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(dev, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(dev);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (td->mtd_name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, td->mtd_name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, td->mtd_offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);