Lines Matching refs:wcid
(cross-reference hits for the wcid symbol in the mt76 driver's TX path, drivers/net/wireless/mediatek/mt76/tx.c; the leading number on each hit is that file's line number)

100 mt76_tx_status_skb_add(struct mt76_dev *dev, struct mt76_wcid *wcid,  in mt76_tx_status_skb_add()  argument
107 if (!wcid) in mt76_tx_status_skb_add()
120 wcid->packet_id = (wcid->packet_id + 1) & MT_PACKET_ID_MASK; in mt76_tx_status_skb_add()
121 if (wcid->packet_id == MT_PACKET_ID_NO_ACK || in mt76_tx_status_skb_add()
122 wcid->packet_id == MT_PACKET_ID_NO_SKB) in mt76_tx_status_skb_add()
123 wcid->packet_id = MT_PACKET_ID_FIRST; in mt76_tx_status_skb_add()
125 pid = wcid->packet_id; in mt76_tx_status_skb_add()
126 cb->wcid = wcid->idx; in mt76_tx_status_skb_add()
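The hits above show how mt76_tx_status_skb_add() allocates a per-station packet ID for TX-status tracking: the counter wraps within MT_PACKET_ID_MASK, and the two reserved values (NO_ACK, NO_SKB) are skipped by jumping back to MT_PACKET_ID_FIRST. A minimal standalone sketch of that pattern follows; the constant values here are assumptions for illustration, the real definitions live in the mt76 headers.

#include <stdint.h>
#include <stdio.h>

#define MT_PACKET_ID_MASK   0xff /* assumed width */
#define MT_PACKET_ID_NO_ACK 0    /* reserved: frame needs no ack */
#define MT_PACKET_ID_NO_SKB 1    /* reserved: status without an skb */
#define MT_PACKET_ID_FIRST  2    /* first usable id */

static uint8_t next_packet_id(uint8_t *counter)
{
	/* Wrap within the mask, then skip the reserved values. */
	*counter = (*counter + 1) & MT_PACKET_ID_MASK;
	if (*counter == MT_PACKET_ID_NO_ACK ||
	    *counter == MT_PACKET_ID_NO_SKB)
		*counter = MT_PACKET_ID_FIRST;
	return *counter;
}

int main(void)
{
	uint8_t pid = MT_PACKET_ID_MASK; /* force a wrap on the first call */

	for (int i = 0; i < 4; i++)
		printf("pid=%u\n", (unsigned int)next_packet_id(&pid));
	return 0;
}

Skipping the reserved values matters because a status report carrying MT_PACKET_ID_NO_SKB must never collide with a live tracked frame.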
138 mt76_tx_status_skb_get(struct mt76_dev *dev, struct mt76_wcid *wcid, int pktid, in mt76_tx_status_skb_get() argument
146 if (wcid && cb->wcid != wcid->idx) in mt76_tx_status_skb_get()
165 mt76_tx_status_check(struct mt76_dev *dev, struct mt76_wcid *wcid, bool flush) in mt76_tx_status_check() argument
170 mt76_tx_status_skb_get(dev, wcid, flush ? -1 : 0, &list); in mt76_tx_status_check()
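mt76_tx_status_check() passes pktid -1 when flushing, and mt76_tx_status_skb_get() filters candidates by owner via the cb->wcid != wcid->idx test at line 146. A sketch of the matching rule this implies, with stand-in types; the flush semantics are inferred from the call site above, and the real walk also reaps entries that have timed out.

#include <stdbool.h>
#include <stdint.h>

struct status_entry {
	uint16_t wcid_idx; /* owner station, mirrors cb->wcid */
	uint8_t pktid;     /* mirrors cb->pktid */
};

static bool status_entry_matches(const struct status_entry *e,
				 int wanted_wcid, int pktid)
{
	/* Filter by station first, as the cb->wcid check does. */
	if (wanted_wcid >= 0 && e->wcid_idx != (uint16_t)wanted_wcid)
		return false;
	/* pktid < 0 means "flush": every remaining entry matches. */
	return pktid < 0 || e->pktid == (uint8_t)pktid;
}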
179 struct mt76_wcid *wcid; in mt76_tx_check_non_aql() local
185 if (wcid_idx >= ARRAY_SIZE(dev->wcid)) in mt76_tx_check_non_aql()
190 wcid = rcu_dereference(dev->wcid[wcid_idx]); in mt76_tx_check_non_aql()
191 if (wcid) { in mt76_tx_check_non_aql()
192 pending = atomic_dec_return(&wcid->non_aql_packets); in mt76_tx_check_non_aql()
194 atomic_cmpxchg(&wcid->non_aql_packets, pending, 0); in mt76_tx_check_non_aql()
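mt76_tx_check_non_aql() bounds-checks wcid_idx against the dev->wcid table, looks the entry up under RCU, then decrements the per-station non-AQL counter; the atomic_cmpxchg() at line 194 clamps an underflowed counter back to zero (the pending < 0 test presumably sits on a line not shown in this listing, since it does not mention wcid). The same pattern in portable C11 atomics, as a sketch rather than the kernel's atomic_t API:

#include <stdatomic.h>

static void non_aql_dec(atomic_int *counter)
{
	int pending = atomic_fetch_sub(counter, 1) - 1;

	if (pending < 0) {
		/* Best-effort clamp: only reset to zero if nobody moved
		 * the counter between our decrement and this exchange. */
		atomic_compare_exchange_strong(counter, &pending, 0);
	}
}

An underflow (more completions than recorded submissions) is thus raced back to zero instead of leaving the counter negative.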
229 struct mt76_wcid *wcid, struct ieee80211_sta *sta, in __mt76_tx_queue_skb() argument
239 idx = dev->queue_ops->tx_queue_skb(dev, qid, skb, wcid, sta); in __mt76_tx_queue_skb()
243 wcid = (struct mt76_wcid *)sta->drv_priv; in __mt76_tx_queue_skb()
245 q->entry[idx].wcid = wcid->idx; in __mt76_tx_queue_skb()
246 pending = atomic_inc_return(&wcid->non_aql_packets); in __mt76_tx_queue_skb()
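On the submission side, __mt76_tx_queue_skb() recovers the station's mt76_wcid from sta->drv_priv, records wcid->idx in the queue entry so completion can find the station again, and bumps non_aql_packets. Paired with the clamp above, this keeps a rough in-flight count for traffic that bypasses mac80211's AQL. A sketch of the increment side; the cap value is an assumption for illustration.

#include <stdatomic.h>
#include <stdbool.h>

#define MT_MAX_NON_AQL_PKT 16 /* assumed cap */

/* Returns true when the caller should stop pulling more frames
 * for this station. */
static bool non_aql_inc(atomic_int *counter)
{
	int pending = atomic_fetch_add(counter, 1) + 1;

	return pending >= MT_MAX_NON_AQL_PKT;
}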
255 struct mt76_wcid *wcid, struct sk_buff *skb) in mt76_tx() argument
281 if (wcid && !(wcid->tx_info & MT_WCID_TX_INFO_SET)) in mt76_tx()
291 __mt76_tx_queue_skb(dev, qid, skb, wcid, sta, NULL); in mt76_tx()
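The MT_WCID_TX_INFO_SET test in mt76_tx() at line 281 decides whether TX rate info still has to be fetched from mac80211's rate control for this frame, or whether fixed per-wcid info is already programmed. A stand-in sketch of that lazy lookup; the flag bit and helper below are hypothetical, not the driver's API.

#include <stdbool.h>
#include <stdint.h>

#define TX_INFO_SET (1u << 0) /* stands in for MT_WCID_TX_INFO_SET */

struct rates { uint8_t mcs; };

static struct rates rate_control_lookup(void) /* hypothetical query */
{
	return (struct rates){ .mcs = 7 };
}

static struct rates rates_for_frame(uint32_t tx_info, struct rates fixed)
{
	/* Prefer pre-programmed per-station rates; otherwise ask rate
	 * control per frame, as mt76_tx() does via mac80211. */
	return (tx_info & TX_INFO_SET) ? fixed : rate_control_lookup();
}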
326 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv; in mt76_queue_ps_skb() local
335 __mt76_tx_queue_skb(dev, MT_TXQ_PSD, skb, wcid, sta, NULL); in mt76_queue_ps_skb()
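Both __mt76_tx_queue_skb() and mt76_queue_ps_skb() cast sta->drv_priv straight to struct mt76_wcid *. That works because mac80211 allocates the driver's per-station struct inline at the tail of struct ieee80211_sta, sized by the driver at registration. A simplified stand-in showing the layout trick; these structs exist only for illustration.

#include <stdio.h>
#include <stdlib.h>

struct sta_stub {
	unsigned char addr[6];
	/* driver-private area, sized by the driver */
	unsigned char drv_priv[] __attribute__((aligned(8)));
};

struct wcid_stub { int idx; };

int main(void)
{
	struct sta_stub *sta =
		calloc(1, sizeof(*sta) + sizeof(struct wcid_stub));
	struct wcid_stub *wcid;

	if (!sta)
		return 1;
	/* The cast recovers the driver-private memory in place. */
	wcid = (struct wcid_stub *)sta->drv_priv;
	wcid->idx = 42;
	printf("wcid idx=%d\n", wcid->idx);
	free(sta);
	return 0;
}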
390 struct mt76_wcid *wcid = mtxq->wcid; in mt76_txq_send_burst() local
397 if (test_bit(MT_WCID_FLAG_PS, &wcid->flags)) in mt76_txq_send_burst()
400 if (atomic_read(&wcid->non_aql_packets) >= MT_MAX_NON_AQL_PKT) in mt76_txq_send_burst()
408 if (!(wcid->tx_info & MT_WCID_TX_INFO_SET)) in mt76_txq_send_burst()
412 idx = __mt76_tx_queue_skb(dev, qid, skb, wcid, txq->sta, &stop); in mt76_txq_send_burst()
432 if (!(wcid->tx_info & MT_WCID_TX_INFO_SET)) in mt76_txq_send_burst()
436 idx = __mt76_tx_queue_skb(dev, qid, skb, wcid, txq->sta, &stop); in mt76_txq_send_burst()
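mt76_txq_send_burst() stops pulling frames for a station under two conditions visible above: the station is in power save (MT_WCID_FLAG_PS, line 397) or the non-AQL in-flight count has reached MT_MAX_NON_AQL_PKT (line 400). A sketch of that gate, with stand-in fields and the cap value assumed as before:

#include <stdatomic.h>
#include <stdbool.h>

#define MT_MAX_NON_AQL_PKT 16 /* assumed cap */

struct station_stub {
	bool ps;                    /* stands in for MT_WCID_FLAG_PS */
	atomic_int non_aql_packets;
};

static bool may_pull_frame(struct station_stub *sta)
{
	if (sta->ps)
		return false; /* asleep: leave frames on the queue */
	/* Backpressure for traffic outside mac80211's AQL. */
	return atomic_load(&sta->non_aql_packets) < MT_MAX_NON_AQL_PKT;
}

The same PS test reappears in mt76_txq_schedule_list() below, so sleeping stations are skipped at both the scheduler and the burst level.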
455 struct mt76_wcid *wcid; in mt76_txq_schedule_list() local
474 wcid = mtxq->wcid; in mt76_txq_schedule_list()
475 if (wcid && test_bit(MT_WCID_FLAG_PS, &wcid->flags)) in mt76_txq_schedule_list()