xref: /OK3568_Linux_fs/kernel/drivers/net/wireless/ath/ath10k/htc.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: ISC
/*
 * Copyright (c) 2005-2011 Atheros Communications Inc.
 * Copyright (c) 2011-2017 Qualcomm Atheros, Inc.
 */
6*4882a593Smuzhiyun 
7*4882a593Smuzhiyun #include "core.h"
8*4882a593Smuzhiyun #include "hif.h"
9*4882a593Smuzhiyun #include "debug.h"
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun /********/
12*4882a593Smuzhiyun /* Send */
13*4882a593Smuzhiyun /********/
14*4882a593Smuzhiyun 
/* TX-complete callback for the HTC control endpoint: control frames
 * need no post-processing, so simply release the buffer.
 */
static void ath10k_htc_control_tx_complete(struct ath10k *ar,
					   struct sk_buff *skb)
{
	kfree_skb(skb);
}
20*4882a593Smuzhiyun 
ath10k_htc_build_tx_ctrl_skb(void * ar)21*4882a593Smuzhiyun static struct sk_buff *ath10k_htc_build_tx_ctrl_skb(void *ar)
22*4882a593Smuzhiyun {
23*4882a593Smuzhiyun 	struct sk_buff *skb;
24*4882a593Smuzhiyun 	struct ath10k_skb_cb *skb_cb;
25*4882a593Smuzhiyun 
26*4882a593Smuzhiyun 	skb = dev_alloc_skb(ATH10K_HTC_CONTROL_BUFFER_SIZE);
27*4882a593Smuzhiyun 	if (!skb)
28*4882a593Smuzhiyun 		return NULL;
29*4882a593Smuzhiyun 
30*4882a593Smuzhiyun 	skb_reserve(skb, 20); /* FIXME: why 20 bytes? */
31*4882a593Smuzhiyun 	WARN_ONCE((unsigned long)skb->data & 3, "unaligned skb");
32*4882a593Smuzhiyun 
33*4882a593Smuzhiyun 	skb_cb = ATH10K_SKB_CB(skb);
34*4882a593Smuzhiyun 	memset(skb_cb, 0, sizeof(*skb_cb));
35*4882a593Smuzhiyun 
36*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC, "%s: skb %pK\n", __func__, skb);
37*4882a593Smuzhiyun 	return skb;
38*4882a593Smuzhiyun }
39*4882a593Smuzhiyun 
ath10k_htc_restore_tx_skb(struct ath10k_htc * htc,struct sk_buff * skb)40*4882a593Smuzhiyun static inline void ath10k_htc_restore_tx_skb(struct ath10k_htc *htc,
41*4882a593Smuzhiyun 					     struct sk_buff *skb)
42*4882a593Smuzhiyun {
43*4882a593Smuzhiyun 	struct ath10k_skb_cb *skb_cb = ATH10K_SKB_CB(skb);
44*4882a593Smuzhiyun 
45*4882a593Smuzhiyun 	if (htc->ar->bus_param.dev_type != ATH10K_DEV_TYPE_HL)
46*4882a593Smuzhiyun 		dma_unmap_single(htc->ar->dev, skb_cb->paddr, skb->len, DMA_TO_DEVICE);
47*4882a593Smuzhiyun 	skb_pull(skb, sizeof(struct ath10k_htc_hdr));
48*4882a593Smuzhiyun }
49*4882a593Smuzhiyun 
ath10k_htc_notify_tx_completion(struct ath10k_htc_ep * ep,struct sk_buff * skb)50*4882a593Smuzhiyun void ath10k_htc_notify_tx_completion(struct ath10k_htc_ep *ep,
51*4882a593Smuzhiyun 				     struct sk_buff *skb)
52*4882a593Smuzhiyun {
53*4882a593Smuzhiyun 	struct ath10k *ar = ep->htc->ar;
54*4882a593Smuzhiyun 	struct ath10k_htc_hdr *hdr;
55*4882a593Smuzhiyun 
56*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC, "%s: ep %d skb %pK\n", __func__,
57*4882a593Smuzhiyun 		   ep->eid, skb);
58*4882a593Smuzhiyun 
59*4882a593Smuzhiyun 	hdr = (struct ath10k_htc_hdr *)skb->data;
60*4882a593Smuzhiyun 	ath10k_htc_restore_tx_skb(ep->htc, skb);
61*4882a593Smuzhiyun 
62*4882a593Smuzhiyun 	if (!ep->ep_ops.ep_tx_complete) {
63*4882a593Smuzhiyun 		ath10k_warn(ar, "no tx handler for eid %d\n", ep->eid);
64*4882a593Smuzhiyun 		dev_kfree_skb_any(skb);
65*4882a593Smuzhiyun 		return;
66*4882a593Smuzhiyun 	}
67*4882a593Smuzhiyun 
68*4882a593Smuzhiyun 	if (hdr->flags & ATH10K_HTC_FLAG_SEND_BUNDLE) {
69*4882a593Smuzhiyun 		dev_kfree_skb_any(skb);
70*4882a593Smuzhiyun 		return;
71*4882a593Smuzhiyun 	}
72*4882a593Smuzhiyun 
73*4882a593Smuzhiyun 	ep->ep_ops.ep_tx_complete(ep->htc->ar, skb);
74*4882a593Smuzhiyun }
75*4882a593Smuzhiyun EXPORT_SYMBOL(ath10k_htc_notify_tx_completion);
76*4882a593Smuzhiyun 
ath10k_htc_prepare_tx_skb(struct ath10k_htc_ep * ep,struct sk_buff * skb)77*4882a593Smuzhiyun static void ath10k_htc_prepare_tx_skb(struct ath10k_htc_ep *ep,
78*4882a593Smuzhiyun 				      struct sk_buff *skb)
79*4882a593Smuzhiyun {
80*4882a593Smuzhiyun 	struct ath10k_htc_hdr *hdr;
81*4882a593Smuzhiyun 
82*4882a593Smuzhiyun 	hdr = (struct ath10k_htc_hdr *)skb->data;
83*4882a593Smuzhiyun 	memset(hdr, 0, sizeof(struct ath10k_htc_hdr));
84*4882a593Smuzhiyun 
85*4882a593Smuzhiyun 	hdr->eid = ep->eid;
86*4882a593Smuzhiyun 	hdr->len = __cpu_to_le16(skb->len - sizeof(*hdr));
87*4882a593Smuzhiyun 	hdr->flags = 0;
88*4882a593Smuzhiyun 	if (ep->tx_credit_flow_enabled && !ep->bundle_tx)
89*4882a593Smuzhiyun 		hdr->flags |= ATH10K_HTC_FLAG_NEED_CREDIT_UPDATE;
90*4882a593Smuzhiyun 
91*4882a593Smuzhiyun 	spin_lock_bh(&ep->htc->tx_lock);
92*4882a593Smuzhiyun 	hdr->seq_no = ep->seq_no++;
93*4882a593Smuzhiyun 	spin_unlock_bh(&ep->htc->tx_lock);
94*4882a593Smuzhiyun }
95*4882a593Smuzhiyun 
/* Check (and, when @consume is true, deduct) the TX credits needed to
 * send @len bytes on @ep.  Returns 0 when enough credits are available
 * (or flow control is disabled for the endpoint), -EAGAIN otherwise.
 */
static int ath10k_htc_consume_credit(struct ath10k_htc_ep *ep,
				     unsigned int len,
				     bool consume)
{
	struct ath10k_htc *htc = ep->htc;
	struct ath10k *ar = htc->ar;
	int needed;
	int ret = 0;

	if (!ep->tx_credit_flow_enabled)
		return 0;

	/* tx_credit_size is only meaningful with flow control enabled,
	 * so compute the requirement after the check above
	 */
	needed = DIV_ROUND_UP(len, ep->tx_credit_size);

	spin_lock_bh(&htc->tx_lock);
	if (ep->tx_credits < needed) {
		ath10k_dbg(ar, ATH10K_DBG_HTC,
			   "htc insufficient credits ep %d required %d available %d consume %d\n",
			   ep->eid, needed, ep->tx_credits, consume);
		ret = -EAGAIN;
	} else if (consume) {
		ep->tx_credits -= needed;
		ath10k_dbg(ar, ATH10K_DBG_HTC,
			   "htc ep %d consumed %d credits total %d\n",
			   ep->eid, needed, ep->tx_credits);
	}
	spin_unlock_bh(&htc->tx_lock);

	return ret;
}
130*4882a593Smuzhiyun 
ath10k_htc_release_credit(struct ath10k_htc_ep * ep,unsigned int len)131*4882a593Smuzhiyun static void ath10k_htc_release_credit(struct ath10k_htc_ep *ep, unsigned int len)
132*4882a593Smuzhiyun {
133*4882a593Smuzhiyun 	struct ath10k_htc *htc = ep->htc;
134*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
135*4882a593Smuzhiyun 	enum ath10k_htc_ep_id eid = ep->eid;
136*4882a593Smuzhiyun 	int credits;
137*4882a593Smuzhiyun 
138*4882a593Smuzhiyun 	if (!ep->tx_credit_flow_enabled)
139*4882a593Smuzhiyun 		return;
140*4882a593Smuzhiyun 
141*4882a593Smuzhiyun 	credits = DIV_ROUND_UP(len, ep->tx_credit_size);
142*4882a593Smuzhiyun 	spin_lock_bh(&htc->tx_lock);
143*4882a593Smuzhiyun 	ep->tx_credits += credits;
144*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC,
145*4882a593Smuzhiyun 		   "htc ep %d reverted %d credits back total %d\n",
146*4882a593Smuzhiyun 		   eid, credits, ep->tx_credits);
147*4882a593Smuzhiyun 	spin_unlock_bh(&htc->tx_lock);
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	if (ep->ep_ops.ep_tx_credits)
150*4882a593Smuzhiyun 		ep->ep_ops.ep_tx_credits(htc->ar);
151*4882a593Smuzhiyun }
152*4882a593Smuzhiyun 
ath10k_htc_send(struct ath10k_htc * htc,enum ath10k_htc_ep_id eid,struct sk_buff * skb)153*4882a593Smuzhiyun int ath10k_htc_send(struct ath10k_htc *htc,
154*4882a593Smuzhiyun 		    enum ath10k_htc_ep_id eid,
155*4882a593Smuzhiyun 		    struct sk_buff *skb)
156*4882a593Smuzhiyun {
157*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
158*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep = &htc->endpoint[eid];
159*4882a593Smuzhiyun 	struct ath10k_skb_cb *skb_cb = ATH10K_SKB_CB(skb);
160*4882a593Smuzhiyun 	struct ath10k_hif_sg_item sg_item;
161*4882a593Smuzhiyun 	struct device *dev = htc->ar->dev;
162*4882a593Smuzhiyun 	int ret;
163*4882a593Smuzhiyun 	unsigned int skb_len;
164*4882a593Smuzhiyun 
165*4882a593Smuzhiyun 	if (htc->ar->state == ATH10K_STATE_WEDGED)
166*4882a593Smuzhiyun 		return -ECOMM;
167*4882a593Smuzhiyun 
168*4882a593Smuzhiyun 	if (eid >= ATH10K_HTC_EP_COUNT) {
169*4882a593Smuzhiyun 		ath10k_warn(ar, "Invalid endpoint id: %d\n", eid);
170*4882a593Smuzhiyun 		return -ENOENT;
171*4882a593Smuzhiyun 	}
172*4882a593Smuzhiyun 
173*4882a593Smuzhiyun 	skb_push(skb, sizeof(struct ath10k_htc_hdr));
174*4882a593Smuzhiyun 
175*4882a593Smuzhiyun 	skb_len = skb->len;
176*4882a593Smuzhiyun 	ret = ath10k_htc_consume_credit(ep, skb_len, true);
177*4882a593Smuzhiyun 	if (ret)
178*4882a593Smuzhiyun 		goto err_pull;
179*4882a593Smuzhiyun 
180*4882a593Smuzhiyun 	ath10k_htc_prepare_tx_skb(ep, skb);
181*4882a593Smuzhiyun 
182*4882a593Smuzhiyun 	skb_cb->eid = eid;
183*4882a593Smuzhiyun 	if (ar->bus_param.dev_type != ATH10K_DEV_TYPE_HL) {
184*4882a593Smuzhiyun 		skb_cb->paddr = dma_map_single(dev, skb->data, skb->len,
185*4882a593Smuzhiyun 					       DMA_TO_DEVICE);
186*4882a593Smuzhiyun 		ret = dma_mapping_error(dev, skb_cb->paddr);
187*4882a593Smuzhiyun 		if (ret) {
188*4882a593Smuzhiyun 			ret = -EIO;
189*4882a593Smuzhiyun 			goto err_credits;
190*4882a593Smuzhiyun 		}
191*4882a593Smuzhiyun 	}
192*4882a593Smuzhiyun 
193*4882a593Smuzhiyun 	sg_item.transfer_id = ep->eid;
194*4882a593Smuzhiyun 	sg_item.transfer_context = skb;
195*4882a593Smuzhiyun 	sg_item.vaddr = skb->data;
196*4882a593Smuzhiyun 	sg_item.paddr = skb_cb->paddr;
197*4882a593Smuzhiyun 	sg_item.len = skb->len;
198*4882a593Smuzhiyun 
199*4882a593Smuzhiyun 	ret = ath10k_hif_tx_sg(htc->ar, ep->ul_pipe_id, &sg_item, 1);
200*4882a593Smuzhiyun 	if (ret)
201*4882a593Smuzhiyun 		goto err_unmap;
202*4882a593Smuzhiyun 
203*4882a593Smuzhiyun 	return 0;
204*4882a593Smuzhiyun 
205*4882a593Smuzhiyun err_unmap:
206*4882a593Smuzhiyun 	if (ar->bus_param.dev_type != ATH10K_DEV_TYPE_HL)
207*4882a593Smuzhiyun 		dma_unmap_single(dev, skb_cb->paddr, skb->len, DMA_TO_DEVICE);
208*4882a593Smuzhiyun err_credits:
209*4882a593Smuzhiyun 	ath10k_htc_release_credit(ep, skb_len);
210*4882a593Smuzhiyun err_pull:
211*4882a593Smuzhiyun 	skb_pull(skb, sizeof(struct ath10k_htc_hdr));
212*4882a593Smuzhiyun 	return ret;
213*4882a593Smuzhiyun }
214*4882a593Smuzhiyun 
ath10k_htc_tx_completion_handler(struct ath10k * ar,struct sk_buff * skb)215*4882a593Smuzhiyun void ath10k_htc_tx_completion_handler(struct ath10k *ar, struct sk_buff *skb)
216*4882a593Smuzhiyun {
217*4882a593Smuzhiyun 	struct ath10k_htc *htc = &ar->htc;
218*4882a593Smuzhiyun 	struct ath10k_skb_cb *skb_cb;
219*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep;
220*4882a593Smuzhiyun 
221*4882a593Smuzhiyun 	if (WARN_ON_ONCE(!skb))
222*4882a593Smuzhiyun 		return;
223*4882a593Smuzhiyun 
224*4882a593Smuzhiyun 	skb_cb = ATH10K_SKB_CB(skb);
225*4882a593Smuzhiyun 	ep = &htc->endpoint[skb_cb->eid];
226*4882a593Smuzhiyun 
227*4882a593Smuzhiyun 	ath10k_htc_notify_tx_completion(ep, skb);
228*4882a593Smuzhiyun 	/* the skb now belongs to the completion handler */
229*4882a593Smuzhiyun }
230*4882a593Smuzhiyun EXPORT_SYMBOL(ath10k_htc_tx_completion_handler);
231*4882a593Smuzhiyun 
232*4882a593Smuzhiyun /***********/
233*4882a593Smuzhiyun /* Receive */
234*4882a593Smuzhiyun /***********/
235*4882a593Smuzhiyun 
/* Apply a credit report received in an RX trailer.  Each entry grants
 * additional TX credits to one endpoint; processing stops at the first
 * entry with an out-of-range endpoint id.  @len is the raw report
 * length in bytes; @eid is the endpoint the trailer arrived on.
 */
static void
ath10k_htc_process_credit_report(struct ath10k_htc *htc,
				 const struct ath10k_htc_credit_report *report,
				 int len,
				 enum ath10k_htc_ep_id eid)
{
	struct ath10k *ar = htc->ar;
	struct ath10k_htc_ep *ep;
	int i, n_reports;

	/* trailing partial entry is ignored, but worth flagging */
	if (len % sizeof(*report))
		ath10k_warn(ar, "Uneven credit report len %d", len);

	n_reports = len / sizeof(*report);

	spin_lock_bh(&htc->tx_lock);
	for (i = 0; i < n_reports; i++, report++) {
		if (report->eid >= ATH10K_HTC_EP_COUNT)
			break;

		ep = &htc->endpoint[report->eid];
		ep->tx_credits += report->credits;

		ath10k_dbg(ar, ATH10K_DBG_HTC, "htc ep %d got %d credits (total %d)\n",
			   report->eid, report->credits, ep->tx_credits);

		/* the credit callback may transmit and take tx_lock
		 * itself, so drop the lock around the call
		 */
		if (ep->ep_ops.ep_tx_credits) {
			spin_unlock_bh(&htc->tx_lock);
			ep->ep_ops.ep_tx_credits(htc->ar);
			spin_lock_bh(&htc->tx_lock);
		}
	}
	spin_unlock_bh(&htc->tx_lock);
}
270*4882a593Smuzhiyun 
271*4882a593Smuzhiyun static int
ath10k_htc_process_lookahead(struct ath10k_htc * htc,const struct ath10k_htc_lookahead_report * report,int len,enum ath10k_htc_ep_id eid,void * next_lookaheads,int * next_lookaheads_len)272*4882a593Smuzhiyun ath10k_htc_process_lookahead(struct ath10k_htc *htc,
273*4882a593Smuzhiyun 			     const struct ath10k_htc_lookahead_report *report,
274*4882a593Smuzhiyun 			     int len,
275*4882a593Smuzhiyun 			     enum ath10k_htc_ep_id eid,
276*4882a593Smuzhiyun 			     void *next_lookaheads,
277*4882a593Smuzhiyun 			     int *next_lookaheads_len)
278*4882a593Smuzhiyun {
279*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
280*4882a593Smuzhiyun 
281*4882a593Smuzhiyun 	/* Invalid lookahead flags are actually transmitted by
282*4882a593Smuzhiyun 	 * the target in the HTC control message.
283*4882a593Smuzhiyun 	 * Since this will happen at every boot we silently ignore
284*4882a593Smuzhiyun 	 * the lookahead in this case
285*4882a593Smuzhiyun 	 */
286*4882a593Smuzhiyun 	if (report->pre_valid != ((~report->post_valid) & 0xFF))
287*4882a593Smuzhiyun 		return 0;
288*4882a593Smuzhiyun 
289*4882a593Smuzhiyun 	if (next_lookaheads && next_lookaheads_len) {
290*4882a593Smuzhiyun 		ath10k_dbg(ar, ATH10K_DBG_HTC,
291*4882a593Smuzhiyun 			   "htc rx lookahead found pre_valid 0x%x post_valid 0x%x\n",
292*4882a593Smuzhiyun 			   report->pre_valid, report->post_valid);
293*4882a593Smuzhiyun 
294*4882a593Smuzhiyun 		/* look ahead bytes are valid, copy them over */
295*4882a593Smuzhiyun 		memcpy((u8 *)next_lookaheads, report->lookahead, 4);
296*4882a593Smuzhiyun 
297*4882a593Smuzhiyun 		*next_lookaheads_len = 1;
298*4882a593Smuzhiyun 	}
299*4882a593Smuzhiyun 
300*4882a593Smuzhiyun 	return 0;
301*4882a593Smuzhiyun }
302*4882a593Smuzhiyun 
303*4882a593Smuzhiyun static int
ath10k_htc_process_lookahead_bundle(struct ath10k_htc * htc,const struct ath10k_htc_lookahead_bundle * report,int len,enum ath10k_htc_ep_id eid,void * next_lookaheads,int * next_lookaheads_len)304*4882a593Smuzhiyun ath10k_htc_process_lookahead_bundle(struct ath10k_htc *htc,
305*4882a593Smuzhiyun 				    const struct ath10k_htc_lookahead_bundle *report,
306*4882a593Smuzhiyun 				    int len,
307*4882a593Smuzhiyun 				    enum ath10k_htc_ep_id eid,
308*4882a593Smuzhiyun 				    void *next_lookaheads,
309*4882a593Smuzhiyun 				    int *next_lookaheads_len)
310*4882a593Smuzhiyun {
311*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
312*4882a593Smuzhiyun 	int bundle_cnt = len / sizeof(*report);
313*4882a593Smuzhiyun 
314*4882a593Smuzhiyun 	if (!bundle_cnt || (bundle_cnt > htc->max_msgs_per_htc_bundle)) {
315*4882a593Smuzhiyun 		ath10k_warn(ar, "Invalid lookahead bundle count: %d\n",
316*4882a593Smuzhiyun 			    bundle_cnt);
317*4882a593Smuzhiyun 		return -EINVAL;
318*4882a593Smuzhiyun 	}
319*4882a593Smuzhiyun 
320*4882a593Smuzhiyun 	if (next_lookaheads && next_lookaheads_len) {
321*4882a593Smuzhiyun 		int i;
322*4882a593Smuzhiyun 
323*4882a593Smuzhiyun 		for (i = 0; i < bundle_cnt; i++) {
324*4882a593Smuzhiyun 			memcpy(((u8 *)next_lookaheads) + 4 * i,
325*4882a593Smuzhiyun 			       report->lookahead, 4);
326*4882a593Smuzhiyun 			report++;
327*4882a593Smuzhiyun 		}
328*4882a593Smuzhiyun 
329*4882a593Smuzhiyun 		*next_lookaheads_len = bundle_cnt;
330*4882a593Smuzhiyun 	}
331*4882a593Smuzhiyun 
332*4882a593Smuzhiyun 	return 0;
333*4882a593Smuzhiyun }
334*4882a593Smuzhiyun 
/* Walk the trailer records appended to an RX frame and dispatch each
 * one (credit reports, lookaheads, lookahead bundles).  @buffer/@length
 * delimit the trailer; @src_eid is the endpoint it arrived on; the
 * optional @next_lookaheads/@next_lookaheads_len receive any lookahead
 * data found.  Returns 0 on success or -EINVAL on a malformed trailer
 * (the whole trailer is then hex-dumped at HTC debug level).
 */
int ath10k_htc_process_trailer(struct ath10k_htc *htc,
			       u8 *buffer,
			       int length,
			       enum ath10k_htc_ep_id src_eid,
			       void *next_lookaheads,
			       int *next_lookaheads_len)
{
	struct ath10k_htc_lookahead_bundle *bundle;
	struct ath10k *ar = htc->ar;
	int status = 0;
	struct ath10k_htc_record *record;
	u8 *orig_buffer;
	int orig_length;
	size_t len;

	/* keep the original span for the error dump at the bottom */
	orig_buffer = buffer;
	orig_length = length;

	while (length > 0) {
		record = (struct ath10k_htc_record *)buffer;

		/* must have at least a complete record header */
		if (length < sizeof(record->hdr)) {
			status = -EINVAL;
			break;
		}

		if (record->hdr.len > length) {
			/* no room left in buffer for record */
			ath10k_warn(ar, "Invalid record length: %d\n",
				    record->hdr.len);
			status = -EINVAL;
			break;
		}

		switch (record->hdr.id) {
		case ATH10K_HTC_RECORD_CREDITS:
			len = sizeof(struct ath10k_htc_credit_report);
			/* record must hold at least one full report */
			if (record->hdr.len < len) {
				ath10k_warn(ar, "Credit report too long\n");
				status = -EINVAL;
				break;
			}
			ath10k_htc_process_credit_report(htc,
							 record->credit_report,
							 record->hdr.len,
							 src_eid);
			break;
		case ATH10K_HTC_RECORD_LOOKAHEAD:
			len = sizeof(struct ath10k_htc_lookahead_report);
			if (record->hdr.len < len) {
				ath10k_warn(ar, "Lookahead report too long\n");
				status = -EINVAL;
				break;
			}
			status = ath10k_htc_process_lookahead(htc,
							      record->lookahead_report,
							      record->hdr.len,
							      src_eid,
							      next_lookaheads,
							      next_lookaheads_len);
			break;
		case ATH10K_HTC_RECORD_LOOKAHEAD_BUNDLE:
			bundle = record->lookahead_bundle;
			status = ath10k_htc_process_lookahead_bundle(htc,
								     bundle,
								     record->hdr.len,
								     src_eid,
								     next_lookaheads,
								     next_lookaheads_len);
			break;
		default:
			/* unknown record ids are skipped, not fatal */
			ath10k_warn(ar, "Unhandled record: id:%d length:%d\n",
				    record->hdr.id, record->hdr.len);
			break;
		}

		if (status)
			break;

		/* multiple records may be present in a trailer */
		buffer += sizeof(record->hdr) + record->hdr.len;
		length -= sizeof(record->hdr) + record->hdr.len;
	}

	if (status)
		ath10k_dbg_dump(ar, ATH10K_DBG_HTC, "htc rx bad trailer", "",
				orig_buffer, orig_length);

	return status;
}
EXPORT_SYMBOL(ath10k_htc_process_trailer);
426*4882a593Smuzhiyun 
/* Main HTC RX entry point, called by the bus layer with a complete
 * frame.  Validates the HTC header (endpoint id, lengths), processes
 * any trailer records, trims the trailer off, and hands the remaining
 * payload to the endpoint's ep_rx_complete callback, which then owns
 * the skb.  On any validation failure the skb is dropped here.
 */
void ath10k_htc_rx_completion_handler(struct ath10k *ar, struct sk_buff *skb)
{
	int status = 0;
	struct ath10k_htc *htc = &ar->htc;
	struct ath10k_htc_hdr *hdr;
	struct ath10k_htc_ep *ep;
	u16 payload_len;
	u32 trailer_len = 0;
	size_t min_len;
	u8 eid;
	bool trailer_present;

	/* hdr keeps pointing at valid skb memory after the pull */
	hdr = (struct ath10k_htc_hdr *)skb->data;
	skb_pull(skb, sizeof(*hdr));

	eid = hdr->eid;

	if (eid >= ATH10K_HTC_EP_COUNT) {
		ath10k_warn(ar, "HTC Rx: invalid eid %d\n", eid);
		ath10k_dbg_dump(ar, ATH10K_DBG_HTC, "htc bad header", "",
				hdr, sizeof(*hdr));
		goto out;
	}

	ep = &htc->endpoint[eid];

	payload_len = __le16_to_cpu(hdr->len);

	/* header-claimed total must fit the maximum HTC frame size */
	if (payload_len + sizeof(*hdr) > ATH10K_HTC_MAX_LEN) {
		ath10k_warn(ar, "HTC rx frame too long, len: %zu\n",
			    payload_len + sizeof(*hdr));
		ath10k_dbg_dump(ar, ATH10K_DBG_HTC, "htc bad rx pkt len", "",
				hdr, sizeof(*hdr));
		goto out;
	}

	/* ...and the skb must actually contain that many bytes */
	if (skb->len < payload_len) {
		ath10k_dbg(ar, ATH10K_DBG_HTC,
			   "HTC Rx: insufficient length, got %d, expected %d\n",
			   skb->len, payload_len);
		ath10k_dbg_dump(ar, ATH10K_DBG_HTC, "htc bad rx pkt len",
				"", hdr, sizeof(*hdr));
		goto out;
	}

	/* get flags to check for trailer */
	trailer_present = hdr->flags & ATH10K_HTC_FLAG_TRAILER_PRESENT;
	if (trailer_present) {
		u8 *trailer;

		trailer_len = hdr->trailer_len;
		min_len = sizeof(struct ath10k_ath10k_htc_record_hdr);

		/* trailer must hold at least one record header and
		 * cannot exceed the payload it sits at the end of
		 */
		if ((trailer_len < min_len) ||
		    (trailer_len > payload_len)) {
			ath10k_warn(ar, "Invalid trailer length: %d\n",
				    trailer_len);
			goto out;
		}

		/* trailer occupies the last trailer_len payload bytes */
		trailer = (u8 *)hdr;
		trailer += sizeof(*hdr);
		trailer += payload_len;
		trailer -= trailer_len;
		status = ath10k_htc_process_trailer(htc, trailer,
						    trailer_len, hdr->eid,
						    NULL, NULL);
		if (status)
			goto out;

		skb_trim(skb, skb->len - trailer_len);
	}

	if (((int)payload_len - (int)trailer_len) <= 0)
		/* zero length packet with trailer data, just drop these */
		goto out;

	ath10k_dbg(ar, ATH10K_DBG_HTC, "htc rx completion ep %d skb %pK\n",
		   eid, skb);
	ep->ep_ops.ep_rx_complete(ar, skb);

	/* skb is now owned by the rx completion handler */
	skb = NULL;
out:
	kfree_skb(skb);
}
EXPORT_SYMBOL(ath10k_htc_rx_completion_handler);
514*4882a593Smuzhiyun 
ath10k_htc_control_rx_complete(struct ath10k * ar,struct sk_buff * skb)515*4882a593Smuzhiyun static void ath10k_htc_control_rx_complete(struct ath10k *ar,
516*4882a593Smuzhiyun 					   struct sk_buff *skb)
517*4882a593Smuzhiyun {
518*4882a593Smuzhiyun 	struct ath10k_htc *htc = &ar->htc;
519*4882a593Smuzhiyun 	struct ath10k_htc_msg *msg = (struct ath10k_htc_msg *)skb->data;
520*4882a593Smuzhiyun 
521*4882a593Smuzhiyun 	switch (__le16_to_cpu(msg->hdr.message_id)) {
522*4882a593Smuzhiyun 	case ATH10K_HTC_MSG_READY_ID:
523*4882a593Smuzhiyun 	case ATH10K_HTC_MSG_CONNECT_SERVICE_RESP_ID:
524*4882a593Smuzhiyun 		/* handle HTC control message */
525*4882a593Smuzhiyun 		if (completion_done(&htc->ctl_resp)) {
526*4882a593Smuzhiyun 			/* this is a fatal error, target should not be
527*4882a593Smuzhiyun 			 * sending unsolicited messages on the ep 0
528*4882a593Smuzhiyun 			 */
529*4882a593Smuzhiyun 			ath10k_warn(ar, "HTC rx ctrl still processing\n");
530*4882a593Smuzhiyun 			complete(&htc->ctl_resp);
531*4882a593Smuzhiyun 			goto out;
532*4882a593Smuzhiyun 		}
533*4882a593Smuzhiyun 
534*4882a593Smuzhiyun 		htc->control_resp_len =
535*4882a593Smuzhiyun 			min_t(int, skb->len,
536*4882a593Smuzhiyun 			      ATH10K_HTC_MAX_CTRL_MSG_LEN);
537*4882a593Smuzhiyun 
538*4882a593Smuzhiyun 		memcpy(htc->control_resp_buffer, skb->data,
539*4882a593Smuzhiyun 		       htc->control_resp_len);
540*4882a593Smuzhiyun 
541*4882a593Smuzhiyun 		complete(&htc->ctl_resp);
542*4882a593Smuzhiyun 		break;
543*4882a593Smuzhiyun 	case ATH10K_HTC_MSG_SEND_SUSPEND_COMPLETE:
544*4882a593Smuzhiyun 		htc->htc_ops.target_send_suspend_complete(ar);
545*4882a593Smuzhiyun 		break;
546*4882a593Smuzhiyun 	default:
547*4882a593Smuzhiyun 		ath10k_warn(ar, "ignoring unsolicited htc ep0 event\n");
548*4882a593Smuzhiyun 		break;
549*4882a593Smuzhiyun 	}
550*4882a593Smuzhiyun 
551*4882a593Smuzhiyun out:
552*4882a593Smuzhiyun 	kfree_skb(skb);
553*4882a593Smuzhiyun }
554*4882a593Smuzhiyun 
555*4882a593Smuzhiyun /***************/
556*4882a593Smuzhiyun /* Init/Deinit */
557*4882a593Smuzhiyun /***************/
558*4882a593Smuzhiyun 
htc_service_name(enum ath10k_htc_svc_id id)559*4882a593Smuzhiyun static const char *htc_service_name(enum ath10k_htc_svc_id id)
560*4882a593Smuzhiyun {
561*4882a593Smuzhiyun 	switch (id) {
562*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_RESERVED:
563*4882a593Smuzhiyun 		return "Reserved";
564*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_RSVD_CTRL:
565*4882a593Smuzhiyun 		return "Control";
566*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_WMI_CONTROL:
567*4882a593Smuzhiyun 		return "WMI";
568*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_WMI_DATA_BE:
569*4882a593Smuzhiyun 		return "DATA BE";
570*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_WMI_DATA_BK:
571*4882a593Smuzhiyun 		return "DATA BK";
572*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_WMI_DATA_VI:
573*4882a593Smuzhiyun 		return "DATA VI";
574*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_WMI_DATA_VO:
575*4882a593Smuzhiyun 		return "DATA VO";
576*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_NMI_CONTROL:
577*4882a593Smuzhiyun 		return "NMI Control";
578*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_NMI_DATA:
579*4882a593Smuzhiyun 		return "NMI Data";
580*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_HTT_DATA_MSG:
581*4882a593Smuzhiyun 		return "HTT Data";
582*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_HTT_DATA2_MSG:
583*4882a593Smuzhiyun 		return "HTT Data";
584*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_HTT_DATA3_MSG:
585*4882a593Smuzhiyun 		return "HTT Data";
586*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_TEST_RAW_STREAMS:
587*4882a593Smuzhiyun 		return "RAW";
588*4882a593Smuzhiyun 	case ATH10K_HTC_SVC_ID_HTT_LOG_MSG:
589*4882a593Smuzhiyun 		return "PKTLOG";
590*4882a593Smuzhiyun 	}
591*4882a593Smuzhiyun 
592*4882a593Smuzhiyun 	return "Unknown";
593*4882a593Smuzhiyun }
594*4882a593Smuzhiyun 
ath10k_htc_reset_endpoint_states(struct ath10k_htc * htc)595*4882a593Smuzhiyun static void ath10k_htc_reset_endpoint_states(struct ath10k_htc *htc)
596*4882a593Smuzhiyun {
597*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep;
598*4882a593Smuzhiyun 	int i;
599*4882a593Smuzhiyun 
600*4882a593Smuzhiyun 	for (i = ATH10K_HTC_EP_0; i < ATH10K_HTC_EP_COUNT; i++) {
601*4882a593Smuzhiyun 		ep = &htc->endpoint[i];
602*4882a593Smuzhiyun 		ep->service_id = ATH10K_HTC_SVC_ID_UNUSED;
603*4882a593Smuzhiyun 		ep->max_ep_message_len = 0;
604*4882a593Smuzhiyun 		ep->max_tx_queue_depth = 0;
605*4882a593Smuzhiyun 		ep->eid = i;
606*4882a593Smuzhiyun 		ep->htc = htc;
607*4882a593Smuzhiyun 		ep->tx_credit_flow_enabled = true;
608*4882a593Smuzhiyun 	}
609*4882a593Smuzhiyun }
610*4882a593Smuzhiyun 
ath10k_htc_get_credit_allocation(struct ath10k_htc * htc,u16 service_id)611*4882a593Smuzhiyun static u8 ath10k_htc_get_credit_allocation(struct ath10k_htc *htc,
612*4882a593Smuzhiyun 					   u16 service_id)
613*4882a593Smuzhiyun {
614*4882a593Smuzhiyun 	u8 allocation = 0;
615*4882a593Smuzhiyun 
616*4882a593Smuzhiyun 	/* The WMI control service is the only service with flow control.
617*4882a593Smuzhiyun 	 * Let it have all transmit credits.
618*4882a593Smuzhiyun 	 */
619*4882a593Smuzhiyun 	if (service_id == ATH10K_HTC_SVC_ID_WMI_CONTROL)
620*4882a593Smuzhiyun 		allocation = htc->total_transmit_credits;
621*4882a593Smuzhiyun 
622*4882a593Smuzhiyun 	return allocation;
623*4882a593Smuzhiyun }
624*4882a593Smuzhiyun 
/* Transmit an assembled bundle skb on @ep.  @tx_save_head holds the
 * individual skbs that were packed into the bundle: on failure they
 * are restored (HTC header stripped) and pushed back to the request
 * queue for retry; on success they are moved to the completion queue
 * and the TX-complete worker is kicked.  The bundle skb itself is
 * freed on failure and owned by HIF on success.  Returns 0 or a
 * negative errno from credit consumption / HIF submission.
 */
static int ath10k_htc_send_bundle(struct ath10k_htc_ep *ep,
				  struct sk_buff *bundle_skb,
				  struct sk_buff_head *tx_save_head)
{
	struct ath10k_hif_sg_item sg_item;
	struct ath10k_htc *htc = ep->htc;
	struct ath10k *ar = htc->ar;
	struct sk_buff *skb;
	int ret, cn = 0;
	unsigned int skb_len;

	ath10k_dbg(ar, ATH10K_DBG_HTC, "bundle skb len %d\n", bundle_skb->len);
	skb_len = bundle_skb->len;
	ret = ath10k_htc_consume_credit(ep, skb_len, true);

	if (!ret) {
		sg_item.transfer_id = ep->eid;
		sg_item.transfer_context = bundle_skb;
		sg_item.vaddr = bundle_skb->data;
		sg_item.len = bundle_skb->len;

		ret = ath10k_hif_tx_sg(htc->ar, ep->ul_pipe_id, &sg_item, 1);
		if (ret)
			/* submission failed - give the credits back */
			ath10k_htc_release_credit(ep, skb_len);
	}

	if (ret)
		dev_kfree_skb_any(bundle_skb);

	/* dequeue from the tail so requeued skbs keep original order */
	for (cn = 0; (skb = skb_dequeue_tail(tx_save_head)); cn++) {
		if (ret) {
			skb_pull(skb, sizeof(struct ath10k_htc_hdr));
			skb_queue_head(&ep->tx_req_head, skb);
		} else {
			skb_queue_tail(&ep->tx_complete_head, skb);
		}
	}

	if (!ret)
		queue_work(ar->workqueue_tx_complete, &ar->tx_complete_work);

	ath10k_dbg(ar, ATH10K_DBG_HTC,
		   "bundle tx status %d eid %d req count %d count %d len %d\n",
		   ret, ep->eid, skb_queue_len(&ep->tx_req_head), cn, skb_len);
	return ret;
}
671*4882a593Smuzhiyun 
ath10k_htc_send_one_skb(struct ath10k_htc_ep * ep,struct sk_buff * skb)672*4882a593Smuzhiyun static void ath10k_htc_send_one_skb(struct ath10k_htc_ep *ep, struct sk_buff *skb)
673*4882a593Smuzhiyun {
674*4882a593Smuzhiyun 	struct ath10k_htc *htc = ep->htc;
675*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
676*4882a593Smuzhiyun 	int ret;
677*4882a593Smuzhiyun 
678*4882a593Smuzhiyun 	ret = ath10k_htc_send(htc, ep->eid, skb);
679*4882a593Smuzhiyun 
680*4882a593Smuzhiyun 	if (ret)
681*4882a593Smuzhiyun 		skb_queue_head(&ep->tx_req_head, skb);
682*4882a593Smuzhiyun 
683*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC, "tx one status %d eid %d len %d pending count %d\n",
684*4882a593Smuzhiyun 		   ret, ep->eid, skb->len, skb_queue_len(&ep->tx_req_head));
685*4882a593Smuzhiyun }
686*4882a593Smuzhiyun 
/* Coalesce queued TX frames into credit-aligned bundle buffers and hand
 * each full buffer to ath10k_htc_send_bundle().
 */
static int ath10k_htc_send_bundle_skbs(struct ath10k_htc_ep *ep)
{
	struct ath10k_htc *htc = ep->htc;
	struct sk_buff *bundle_skb, *skb;
	struct sk_buff_head tx_save_head;
	struct ath10k_htc_hdr *hdr;
	u8 *bundle_buf;
	int ret = 0, credit_pad, credit_remainder, trans_len, bundles_left = 0;

	if (htc->ar->state == ATH10K_STATE_WEDGED)
		return -ECOMM;

	/* With flow control on, do not start bundling until a minimum
	 * number of credits is available.
	 */
	if (ep->tx_credit_flow_enabled &&
	    ep->tx_credits < ATH10K_MIN_CREDIT_PER_HTC_TX_BUNDLE)
		return 0;

	/* bundles_left tracks the remaining byte capacity of bundle_skb */
	bundles_left = ATH10K_MAX_MSG_PER_HTC_TX_BUNDLE * ep->tx_credit_size;
	bundle_skb = dev_alloc_skb(bundles_left);

	if (!bundle_skb)
		return -ENOMEM;

	bundle_buf = bundle_skb->data;
	skb_queue_head_init(&tx_save_head);

	while (true) {
		skb = skb_dequeue(&ep->tx_req_head);
		if (!skb)
			break;

		credit_pad = 0;
		trans_len = skb->len + sizeof(*hdr);
		credit_remainder = trans_len % ep->tx_credit_size;

		/* Each transfer is padded to a multiple of the credit size */
		if (credit_remainder != 0) {
			credit_pad = ep->tx_credit_size - credit_remainder;
			trans_len += credit_pad;
		}

		/* Check that credits cover the bytes accumulated so far
		 * plus this transfer (third arg false: presumably a
		 * check-only call, actual consumption happens in
		 * ath10k_htc_send_bundle() — confirm against the helper).
		 */
		ret = ath10k_htc_consume_credit(ep,
						bundle_buf + trans_len - bundle_skb->data,
						false);
		if (ret) {
			skb_queue_head(&ep->tx_req_head, skb);
			break;
		}

		if (bundles_left < trans_len) {
			/* Current bundle is full: send it and start a
			 * fresh buffer for the remaining frames.
			 */
			bundle_skb->len = bundle_buf - bundle_skb->data;
			ret = ath10k_htc_send_bundle(ep, bundle_skb, &tx_save_head);

			if (ret) {
				skb_queue_head(&ep->tx_req_head, skb);
				return ret;
			}

			/* Only this frame left: send it on its own rather
			 * than allocating another bundle buffer.
			 */
			if (skb_queue_len(&ep->tx_req_head) == 0) {
				ath10k_htc_send_one_skb(ep, skb);
				return ret;
			}

			if (ep->tx_credit_flow_enabled &&
			    ep->tx_credits < ATH10K_MIN_CREDIT_PER_HTC_TX_BUNDLE) {
				skb_queue_head(&ep->tx_req_head, skb);
				return 0;
			}

			bundles_left =
				ATH10K_MAX_MSG_PER_HTC_TX_BUNDLE * ep->tx_credit_size;
			bundle_skb = dev_alloc_skb(bundles_left);

			if (!bundle_skb) {
				skb_queue_head(&ep->tx_req_head, skb);
				return -ENOMEM;
			}
			bundle_buf = bundle_skb->data;
			skb_queue_head_init(&tx_save_head);
		}

		/* Prepend the HTC header, then copy the frame into the
		 * bundle buffer at its padded position.
		 */
		skb_push(skb, sizeof(struct ath10k_htc_hdr));
		ath10k_htc_prepare_tx_skb(ep, skb);

		memcpy(bundle_buf, skb->data, skb->len);
		hdr = (struct ath10k_htc_hdr *)bundle_buf;
		hdr->flags |= ATH10K_HTC_FLAG_SEND_BUNDLE;
		hdr->pad_len = __cpu_to_le16(credit_pad);
		bundle_buf += trans_len;
		bundles_left -= trans_len;
		/* Keep the original skb for completion/rollback handling */
		skb_queue_tail(&tx_save_head, skb);
	}

	/* Flush whatever accumulated in the final (partial) bundle */
	if (bundle_buf != bundle_skb->data) {
		bundle_skb->len = bundle_buf - bundle_skb->data;
		ret = ath10k_htc_send_bundle(ep, bundle_skb, &tx_save_head);
	} else {
		dev_kfree_skb_any(bundle_skb);
	}

	return ret;
}
787*4882a593Smuzhiyun 
ath10k_htc_bundle_tx_work(struct work_struct * work)788*4882a593Smuzhiyun static void ath10k_htc_bundle_tx_work(struct work_struct *work)
789*4882a593Smuzhiyun {
790*4882a593Smuzhiyun 	struct ath10k *ar = container_of(work, struct ath10k, bundle_tx_work);
791*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep;
792*4882a593Smuzhiyun 	struct sk_buff *skb;
793*4882a593Smuzhiyun 	int i;
794*4882a593Smuzhiyun 
795*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(ar->htc.endpoint); i++) {
796*4882a593Smuzhiyun 		ep = &ar->htc.endpoint[i];
797*4882a593Smuzhiyun 
798*4882a593Smuzhiyun 		if (!ep->bundle_tx)
799*4882a593Smuzhiyun 			continue;
800*4882a593Smuzhiyun 
801*4882a593Smuzhiyun 		ath10k_dbg(ar, ATH10K_DBG_HTC, "bundle tx work eid %d count %d\n",
802*4882a593Smuzhiyun 			   ep->eid, skb_queue_len(&ep->tx_req_head));
803*4882a593Smuzhiyun 
804*4882a593Smuzhiyun 		if (skb_queue_len(&ep->tx_req_head) >=
805*4882a593Smuzhiyun 		    ATH10K_MIN_MSG_PER_HTC_TX_BUNDLE) {
806*4882a593Smuzhiyun 			ath10k_htc_send_bundle_skbs(ep);
807*4882a593Smuzhiyun 		} else {
808*4882a593Smuzhiyun 			skb = skb_dequeue(&ep->tx_req_head);
809*4882a593Smuzhiyun 
810*4882a593Smuzhiyun 			if (!skb)
811*4882a593Smuzhiyun 				continue;
812*4882a593Smuzhiyun 			ath10k_htc_send_one_skb(ep, skb);
813*4882a593Smuzhiyun 		}
814*4882a593Smuzhiyun 	}
815*4882a593Smuzhiyun }
816*4882a593Smuzhiyun 
ath10k_htc_tx_complete_work(struct work_struct * work)817*4882a593Smuzhiyun static void ath10k_htc_tx_complete_work(struct work_struct *work)
818*4882a593Smuzhiyun {
819*4882a593Smuzhiyun 	struct ath10k *ar = container_of(work, struct ath10k, tx_complete_work);
820*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep;
821*4882a593Smuzhiyun 	enum ath10k_htc_ep_id eid;
822*4882a593Smuzhiyun 	struct sk_buff *skb;
823*4882a593Smuzhiyun 	int i;
824*4882a593Smuzhiyun 
825*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(ar->htc.endpoint); i++) {
826*4882a593Smuzhiyun 		ep = &ar->htc.endpoint[i];
827*4882a593Smuzhiyun 		eid = ep->eid;
828*4882a593Smuzhiyun 		if (ep->bundle_tx && eid == ar->htt.eid) {
829*4882a593Smuzhiyun 			ath10k_dbg(ar, ATH10K_DBG_HTC, "bundle tx complete eid %d pending complete count%d\n",
830*4882a593Smuzhiyun 				   ep->eid, skb_queue_len(&ep->tx_complete_head));
831*4882a593Smuzhiyun 
832*4882a593Smuzhiyun 			while (true) {
833*4882a593Smuzhiyun 				skb = skb_dequeue(&ep->tx_complete_head);
834*4882a593Smuzhiyun 				if (!skb)
835*4882a593Smuzhiyun 					break;
836*4882a593Smuzhiyun 				ath10k_htc_notify_tx_completion(ep, skb);
837*4882a593Smuzhiyun 			}
838*4882a593Smuzhiyun 		}
839*4882a593Smuzhiyun 	}
840*4882a593Smuzhiyun }
841*4882a593Smuzhiyun 
ath10k_htc_send_hl(struct ath10k_htc * htc,enum ath10k_htc_ep_id eid,struct sk_buff * skb)842*4882a593Smuzhiyun int ath10k_htc_send_hl(struct ath10k_htc *htc,
843*4882a593Smuzhiyun 		       enum ath10k_htc_ep_id eid,
844*4882a593Smuzhiyun 		       struct sk_buff *skb)
845*4882a593Smuzhiyun {
846*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep = &htc->endpoint[eid];
847*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
848*4882a593Smuzhiyun 
849*4882a593Smuzhiyun 	if (sizeof(struct ath10k_htc_hdr) + skb->len > ep->tx_credit_size) {
850*4882a593Smuzhiyun 		ath10k_dbg(ar, ATH10K_DBG_HTC, "tx exceed max len %d\n", skb->len);
851*4882a593Smuzhiyun 		return -ENOMEM;
852*4882a593Smuzhiyun 	}
853*4882a593Smuzhiyun 
854*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC, "htc send hl eid %d bundle %d tx count %d len %d\n",
855*4882a593Smuzhiyun 		   eid, ep->bundle_tx, skb_queue_len(&ep->tx_req_head), skb->len);
856*4882a593Smuzhiyun 
857*4882a593Smuzhiyun 	if (ep->bundle_tx) {
858*4882a593Smuzhiyun 		skb_queue_tail(&ep->tx_req_head, skb);
859*4882a593Smuzhiyun 		queue_work(ar->workqueue, &ar->bundle_tx_work);
860*4882a593Smuzhiyun 		return 0;
861*4882a593Smuzhiyun 	} else {
862*4882a593Smuzhiyun 		return ath10k_htc_send(htc, eid, skb);
863*4882a593Smuzhiyun 	}
864*4882a593Smuzhiyun }
865*4882a593Smuzhiyun 
ath10k_htc_setup_tx_req(struct ath10k_htc_ep * ep)866*4882a593Smuzhiyun void ath10k_htc_setup_tx_req(struct ath10k_htc_ep *ep)
867*4882a593Smuzhiyun {
868*4882a593Smuzhiyun 	if (ep->htc->max_msgs_per_htc_bundle >= ATH10K_MIN_MSG_PER_HTC_TX_BUNDLE &&
869*4882a593Smuzhiyun 	    !ep->bundle_tx) {
870*4882a593Smuzhiyun 		ep->bundle_tx = true;
871*4882a593Smuzhiyun 		skb_queue_head_init(&ep->tx_req_head);
872*4882a593Smuzhiyun 		skb_queue_head_init(&ep->tx_complete_head);
873*4882a593Smuzhiyun 	}
874*4882a593Smuzhiyun }
875*4882a593Smuzhiyun 
ath10k_htc_stop_hl(struct ath10k * ar)876*4882a593Smuzhiyun void ath10k_htc_stop_hl(struct ath10k *ar)
877*4882a593Smuzhiyun {
878*4882a593Smuzhiyun 	struct ath10k_htc_ep *ep;
879*4882a593Smuzhiyun 	int i;
880*4882a593Smuzhiyun 
881*4882a593Smuzhiyun 	cancel_work_sync(&ar->bundle_tx_work);
882*4882a593Smuzhiyun 	cancel_work_sync(&ar->tx_complete_work);
883*4882a593Smuzhiyun 
884*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(ar->htc.endpoint); i++) {
885*4882a593Smuzhiyun 		ep = &ar->htc.endpoint[i];
886*4882a593Smuzhiyun 
887*4882a593Smuzhiyun 		if (!ep->bundle_tx)
888*4882a593Smuzhiyun 			continue;
889*4882a593Smuzhiyun 
890*4882a593Smuzhiyun 		ath10k_dbg(ar, ATH10K_DBG_HTC, "stop tx work eid %d count %d\n",
891*4882a593Smuzhiyun 			   ep->eid, skb_queue_len(&ep->tx_req_head));
892*4882a593Smuzhiyun 
893*4882a593Smuzhiyun 		skb_queue_purge(&ep->tx_req_head);
894*4882a593Smuzhiyun 	}
895*4882a593Smuzhiyun }
896*4882a593Smuzhiyun 
/* Wait for the target's HTC READY message (posted into
 * htc->control_resp_buffer by the control RX path), parse the credit
 * pool configuration out of it, and set up the bundle TX workers.
 */
int ath10k_htc_wait_target(struct ath10k_htc *htc)
{
	struct ath10k *ar = htc->ar;
	int i, status = 0;
	unsigned long time_left;
	struct ath10k_htc_msg *msg;
	u16 message_id;

	time_left = wait_for_completion_timeout(&htc->ctl_resp,
						ATH10K_HTC_WAIT_TIMEOUT_HZ);
	if (!time_left) {
		/* Workaround: In some cases the PCI HIF doesn't
		 * receive interrupt for the control response message
		 * even if the buffer was completed. It is suspected
		 * iomap writes unmasking PCI CE irqs aren't propagated
		 * properly in KVM PCI-passthrough sometimes.
		 */
		ath10k_warn(ar, "failed to receive control response completion, polling..\n");

		/* Poll every copy engine for completed sends, then retry */
		for (i = 0; i < CE_COUNT; i++)
			ath10k_hif_send_complete_check(htc->ar, i, 1);

		time_left =
		wait_for_completion_timeout(&htc->ctl_resp,
					    ATH10K_HTC_WAIT_TIMEOUT_HZ);

		if (!time_left)
			status = -ETIMEDOUT;
	}

	if (status < 0) {
		ath10k_err(ar, "ctl_resp never came in (%d)\n", status);
		return status;
	}

	/* Response must hold at least the header plus the ready body */
	if (htc->control_resp_len < sizeof(msg->hdr) + sizeof(msg->ready)) {
		ath10k_err(ar, "Invalid HTC ready msg len:%d\n",
			   htc->control_resp_len);
		return -ECOMM;
	}

	msg = (struct ath10k_htc_msg *)htc->control_resp_buffer;
	message_id   = __le16_to_cpu(msg->hdr.message_id);

	if (message_id != ATH10K_HTC_MSG_READY_ID) {
		ath10k_err(ar, "Invalid HTC ready msg: 0x%x\n", message_id);
		return -ECOMM;
	}

	htc->total_transmit_credits = __le16_to_cpu(msg->ready.credit_count);
	htc->target_credit_size = __le16_to_cpu(msg->ready.credit_size);

	ath10k_dbg(ar, ATH10K_DBG_HTC,
		   "Target ready! transmit resources: %d size:%d\n",
		   htc->total_transmit_credits,
		   htc->target_credit_size);

	/* A zero credit count or size would make TX impossible */
	if ((htc->total_transmit_credits == 0) ||
	    (htc->target_credit_size == 0)) {
		ath10k_err(ar, "Invalid credit size received\n");
		return -ECOMM;
	}

	/* The only way to determine if the ready message is an extended
	 * message is from the size.
	 */
	if (htc->control_resp_len >=
	    sizeof(msg->hdr) + sizeof(msg->ready_ext)) {
		htc->alt_data_credit_size =
			__le16_to_cpu(msg->ready_ext.reserved) &
			ATH10K_HTC_MSG_READY_EXT_ALT_DATA_MASK;
		htc->max_msgs_per_htc_bundle =
			min_t(u8, msg->ready_ext.max_msgs_per_htc_bundle,
			      HTC_HOST_MAX_MSG_PER_RX_BUNDLE);
		ath10k_dbg(ar, ATH10K_DBG_HTC,
			   "Extended ready message RX bundle size %d alt size %d\n",
			   htc->max_msgs_per_htc_bundle,
			   htc->alt_data_credit_size);
	}

	INIT_WORK(&ar->bundle_tx_work, ath10k_htc_bundle_tx_work);
	INIT_WORK(&ar->tx_complete_work, ath10k_htc_tx_complete_work);

	return 0;
}
982*4882a593Smuzhiyun 
/* Toggle TX credit flow control on the given endpoint. */
void ath10k_htc_change_tx_credit_flow(struct ath10k_htc *htc,
				      enum ath10k_htc_ep_id eid,
				      bool enable)
{
	struct ath10k *ar = htc->ar;

	ar->htc.endpoint[eid].tx_credit_flow_enabled = enable;
}
992*4882a593Smuzhiyun 
/* Connect a service to an HTC endpoint. For real services this sends a
 * CONNECT_SERVICE message and parses the target's response; the pseudo
 * control service is wired to EP 0 locally without any exchange. On
 * success the endpoint is configured and its id returned in conn_resp.
 */
int ath10k_htc_connect_service(struct ath10k_htc *htc,
			       struct ath10k_htc_svc_conn_req *conn_req,
			       struct ath10k_htc_svc_conn_resp *conn_resp)
{
	struct ath10k *ar = htc->ar;
	struct ath10k_htc_msg *msg;
	struct ath10k_htc_conn_svc *req_msg;
	struct ath10k_htc_conn_svc_response resp_msg_dummy;
	struct ath10k_htc_conn_svc_response *resp_msg = &resp_msg_dummy;
	enum ath10k_htc_ep_id assigned_eid = ATH10K_HTC_EP_COUNT;
	struct ath10k_htc_ep *ep;
	struct sk_buff *skb;
	unsigned int max_msg_size = 0;
	int length, status;
	unsigned long time_left;
	bool disable_credit_flow_ctrl = false;
	u16 message_id, service_id, flags = 0;
	u8 tx_alloc = 0;

	/* special case for HTC pseudo control service */
	if (conn_req->service_id == ATH10K_HTC_SVC_ID_RSVD_CTRL) {
		/* No message exchange with the target: EP 0 is the
		 * implicit control endpoint.
		 */
		disable_credit_flow_ctrl = true;
		assigned_eid = ATH10K_HTC_EP_0;
		max_msg_size = ATH10K_HTC_MAX_CTRL_MSG_LEN;
		memset(&resp_msg_dummy, 0, sizeof(resp_msg_dummy));
		goto setup;
	}

	tx_alloc = ath10k_htc_get_credit_allocation(htc,
						    conn_req->service_id);
	if (!tx_alloc)
		ath10k_dbg(ar, ATH10K_DBG_BOOT,
			   "boot htc service %s does not allocate target credits\n",
			   htc_service_name(conn_req->service_id));

	skb = ath10k_htc_build_tx_ctrl_skb(htc->ar);
	if (!skb) {
		ath10k_err(ar, "Failed to allocate HTC packet\n");
		return -ENOMEM;
	}

	length = sizeof(msg->hdr) + sizeof(msg->connect_service);
	skb_put(skb, length);
	memset(skb->data, 0, length);

	msg = (struct ath10k_htc_msg *)skb->data;
	msg->hdr.message_id =
		__cpu_to_le16(ATH10K_HTC_MSG_CONNECT_SERVICE_ID);

	flags |= SM(tx_alloc, ATH10K_HTC_CONN_FLAGS_RECV_ALLOC);

	/* Only enable credit flow control for WMI ctrl service */
	if (conn_req->service_id != ATH10K_HTC_SVC_ID_WMI_CONTROL) {
		flags |= ATH10K_HTC_CONN_FLAGS_DISABLE_CREDIT_FLOW_CTRL;
		disable_credit_flow_ctrl = true;
	}

	req_msg = &msg->connect_service;
	req_msg->flags = __cpu_to_le16(flags);
	req_msg->service_id = __cpu_to_le16(conn_req->service_id);

	reinit_completion(&htc->ctl_resp);

	status = ath10k_htc_send(htc, ATH10K_HTC_EP_0, skb);
	if (status) {
		kfree_skb(skb);
		return status;
	}

	/* wait for response */
	time_left = wait_for_completion_timeout(&htc->ctl_resp,
						ATH10K_HTC_CONN_SVC_TIMEOUT_HZ);
	if (!time_left) {
		ath10k_err(ar, "Service connect timeout\n");
		return -ETIMEDOUT;
	}

	/* we controlled the buffer creation, it's aligned */
	msg = (struct ath10k_htc_msg *)htc->control_resp_buffer;
	resp_msg = &msg->connect_service_response;
	message_id = __le16_to_cpu(msg->hdr.message_id);
	service_id = __le16_to_cpu(resp_msg->service_id);

	/* Validate both message type and that the buffer is large enough
	 * to contain the response body.
	 */
	if ((message_id != ATH10K_HTC_MSG_CONNECT_SERVICE_RESP_ID) ||
	    (htc->control_resp_len < sizeof(msg->hdr) +
	     sizeof(msg->connect_service_response))) {
		ath10k_err(ar, "Invalid resp message ID 0x%x", message_id);
		return -EPROTO;
	}

	ath10k_dbg(ar, ATH10K_DBG_HTC,
		   "HTC Service %s connect response: status: 0x%x, assigned ep: 0x%x\n",
		   htc_service_name(service_id),
		   resp_msg->status, resp_msg->eid);

	conn_resp->connect_resp_code = resp_msg->status;

	/* check response status */
	if (resp_msg->status != ATH10K_HTC_CONN_SVC_STATUS_SUCCESS) {
		ath10k_err(ar, "HTC Service %s connect request failed: 0x%x)\n",
			   htc_service_name(service_id),
			   resp_msg->status);
		return -EPROTO;
	}

	assigned_eid = (enum ath10k_htc_ep_id)resp_msg->eid;
	max_msg_size = __le16_to_cpu(resp_msg->max_msg_size);

setup:

	if (assigned_eid >= ATH10K_HTC_EP_COUNT)
		return -EPROTO;

	if (max_msg_size == 0)
		return -EPROTO;

	ep = &htc->endpoint[assigned_eid];
	ep->eid = assigned_eid;

	/* Refuse to reuse an endpoint that is already bound */
	if (ep->service_id != ATH10K_HTC_SVC_ID_UNUSED)
		return -EPROTO;

	/* return assigned endpoint to caller */
	conn_resp->eid = assigned_eid;
	conn_resp->max_msg_len = __le16_to_cpu(resp_msg->max_msg_size);

	/* setup the endpoint */
	ep->service_id = conn_req->service_id;
	ep->max_tx_queue_depth = conn_req->max_send_queue_depth;
	ep->max_ep_message_len = __le16_to_cpu(resp_msg->max_msg_size);
	ep->tx_credits = tx_alloc;
	ep->tx_credit_size = htc->target_credit_size;

	/* HTT data uses the alternate (smaller) credit size when the
	 * target advertised one in the extended ready message.
	 */
	if (conn_req->service_id == ATH10K_HTC_SVC_ID_HTT_DATA_MSG &&
	    htc->alt_data_credit_size != 0)
		ep->tx_credit_size = htc->alt_data_credit_size;

	/* copy all the callbacks */
	ep->ep_ops = conn_req->ep_ops;

	status = ath10k_hif_map_service_to_pipe(htc->ar,
						ep->service_id,
						&ep->ul_pipe_id,
						&ep->dl_pipe_id);
	if (status) {
		ath10k_dbg(ar, ATH10K_DBG_BOOT, "unsupported HTC service id: %d\n",
			   ep->service_id);
		return status;
	}

	ath10k_dbg(ar, ATH10K_DBG_BOOT,
		   "boot htc service '%s' ul pipe %d dl pipe %d eid %d ready\n",
		   htc_service_name(ep->service_id), ep->ul_pipe_id,
		   ep->dl_pipe_id, ep->eid);

	if (disable_credit_flow_ctrl && ep->tx_credit_flow_enabled) {
		ep->tx_credit_flow_enabled = false;
		ath10k_dbg(ar, ATH10K_DBG_BOOT,
			   "boot htc service '%s' eid %d TX flow control disabled\n",
			   htc_service_name(ep->service_id), assigned_eid);
	}

	return status;
}
1157*4882a593Smuzhiyun 
ath10k_htc_alloc_skb(struct ath10k * ar,int size)1158*4882a593Smuzhiyun struct sk_buff *ath10k_htc_alloc_skb(struct ath10k *ar, int size)
1159*4882a593Smuzhiyun {
1160*4882a593Smuzhiyun 	struct sk_buff *skb;
1161*4882a593Smuzhiyun 
1162*4882a593Smuzhiyun 	skb = dev_alloc_skb(size + sizeof(struct ath10k_htc_hdr));
1163*4882a593Smuzhiyun 	if (!skb)
1164*4882a593Smuzhiyun 		return NULL;
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun 	skb_reserve(skb, sizeof(struct ath10k_htc_hdr));
1167*4882a593Smuzhiyun 
1168*4882a593Smuzhiyun 	/* FW/HTC requires 4-byte aligned streams */
1169*4882a593Smuzhiyun 	if (!IS_ALIGNED((unsigned long)skb->data, 4))
1170*4882a593Smuzhiyun 		ath10k_warn(ar, "Unaligned HTC tx skb\n");
1171*4882a593Smuzhiyun 
1172*4882a593Smuzhiyun 	return skb;
1173*4882a593Smuzhiyun }
1174*4882a593Smuzhiyun 
/* RX completion handler for the HTT pktlog service: feed the payload to
 * the ath10k_htt_pktlog tracepoint and free the skb.
 */
static void ath10k_htc_pktlog_process_rx(struct ath10k *ar, struct sk_buff *skb)
{
	trace_ath10k_htt_pktlog(ar, skb->data, skb->len);
	dev_kfree_skb_any(skb);
}
1180*4882a593Smuzhiyun 
ath10k_htc_pktlog_connect(struct ath10k * ar)1181*4882a593Smuzhiyun static int ath10k_htc_pktlog_connect(struct ath10k *ar)
1182*4882a593Smuzhiyun {
1183*4882a593Smuzhiyun 	struct ath10k_htc_svc_conn_resp conn_resp;
1184*4882a593Smuzhiyun 	struct ath10k_htc_svc_conn_req conn_req;
1185*4882a593Smuzhiyun 	int status;
1186*4882a593Smuzhiyun 
1187*4882a593Smuzhiyun 	memset(&conn_req, 0, sizeof(conn_req));
1188*4882a593Smuzhiyun 	memset(&conn_resp, 0, sizeof(conn_resp));
1189*4882a593Smuzhiyun 
1190*4882a593Smuzhiyun 	conn_req.ep_ops.ep_tx_complete = NULL;
1191*4882a593Smuzhiyun 	conn_req.ep_ops.ep_rx_complete = ath10k_htc_pktlog_process_rx;
1192*4882a593Smuzhiyun 	conn_req.ep_ops.ep_tx_credits = NULL;
1193*4882a593Smuzhiyun 
1194*4882a593Smuzhiyun 	/* connect to control service */
1195*4882a593Smuzhiyun 	conn_req.service_id = ATH10K_HTC_SVC_ID_HTT_LOG_MSG;
1196*4882a593Smuzhiyun 	status = ath10k_htc_connect_service(&ar->htc, &conn_req, &conn_resp);
1197*4882a593Smuzhiyun 	if (status) {
1198*4882a593Smuzhiyun 		ath10k_warn(ar, "failed to connect to PKTLOG service: %d\n",
1199*4882a593Smuzhiyun 			    status);
1200*4882a593Smuzhiyun 		return status;
1201*4882a593Smuzhiyun 	}
1202*4882a593Smuzhiyun 
1203*4882a593Smuzhiyun 	return 0;
1204*4882a593Smuzhiyun }
1205*4882a593Smuzhiyun 
ath10k_htc_pktlog_svc_supported(struct ath10k * ar)1206*4882a593Smuzhiyun static bool ath10k_htc_pktlog_svc_supported(struct ath10k *ar)
1207*4882a593Smuzhiyun {
1208*4882a593Smuzhiyun 	u8 ul_pipe_id;
1209*4882a593Smuzhiyun 	u8 dl_pipe_id;
1210*4882a593Smuzhiyun 	int status;
1211*4882a593Smuzhiyun 
1212*4882a593Smuzhiyun 	status = ath10k_hif_map_service_to_pipe(ar, ATH10K_HTC_SVC_ID_HTT_LOG_MSG,
1213*4882a593Smuzhiyun 						&ul_pipe_id,
1214*4882a593Smuzhiyun 						&dl_pipe_id);
1215*4882a593Smuzhiyun 	if (status) {
1216*4882a593Smuzhiyun 		ath10k_dbg(ar, ATH10K_DBG_BOOT, "unsupported HTC pktlog service id: %d\n",
1217*4882a593Smuzhiyun 			   ATH10K_HTC_SVC_ID_HTT_LOG_MSG);
1218*4882a593Smuzhiyun 
1219*4882a593Smuzhiyun 		return false;
1220*4882a593Smuzhiyun 	}
1221*4882a593Smuzhiyun 
1222*4882a593Smuzhiyun 	return true;
1223*4882a593Smuzhiyun }
1224*4882a593Smuzhiyun 
ath10k_htc_start(struct ath10k_htc * htc)1225*4882a593Smuzhiyun int ath10k_htc_start(struct ath10k_htc *htc)
1226*4882a593Smuzhiyun {
1227*4882a593Smuzhiyun 	struct ath10k *ar = htc->ar;
1228*4882a593Smuzhiyun 	struct sk_buff *skb;
1229*4882a593Smuzhiyun 	int status = 0;
1230*4882a593Smuzhiyun 	struct ath10k_htc_msg *msg;
1231*4882a593Smuzhiyun 
1232*4882a593Smuzhiyun 	skb = ath10k_htc_build_tx_ctrl_skb(htc->ar);
1233*4882a593Smuzhiyun 	if (!skb)
1234*4882a593Smuzhiyun 		return -ENOMEM;
1235*4882a593Smuzhiyun 
1236*4882a593Smuzhiyun 	skb_put(skb, sizeof(msg->hdr) + sizeof(msg->setup_complete_ext));
1237*4882a593Smuzhiyun 	memset(skb->data, 0, skb->len);
1238*4882a593Smuzhiyun 
1239*4882a593Smuzhiyun 	msg = (struct ath10k_htc_msg *)skb->data;
1240*4882a593Smuzhiyun 	msg->hdr.message_id =
1241*4882a593Smuzhiyun 		__cpu_to_le16(ATH10K_HTC_MSG_SETUP_COMPLETE_EX_ID);
1242*4882a593Smuzhiyun 
1243*4882a593Smuzhiyun 	if (ar->hif.bus == ATH10K_BUS_SDIO) {
1244*4882a593Smuzhiyun 		/* Extra setup params used by SDIO */
1245*4882a593Smuzhiyun 		msg->setup_complete_ext.flags =
1246*4882a593Smuzhiyun 			__cpu_to_le32(ATH10K_HTC_SETUP_COMPLETE_FLAGS_RX_BNDL_EN);
1247*4882a593Smuzhiyun 		msg->setup_complete_ext.max_msgs_per_bundled_recv =
1248*4882a593Smuzhiyun 			htc->max_msgs_per_htc_bundle;
1249*4882a593Smuzhiyun 	}
1250*4882a593Smuzhiyun 	ath10k_dbg(ar, ATH10K_DBG_HTC, "HTC is using TX credit flow control\n");
1251*4882a593Smuzhiyun 
1252*4882a593Smuzhiyun 	status = ath10k_htc_send(htc, ATH10K_HTC_EP_0, skb);
1253*4882a593Smuzhiyun 	if (status) {
1254*4882a593Smuzhiyun 		kfree_skb(skb);
1255*4882a593Smuzhiyun 		return status;
1256*4882a593Smuzhiyun 	}
1257*4882a593Smuzhiyun 
1258*4882a593Smuzhiyun 	if (ath10k_htc_pktlog_svc_supported(ar)) {
1259*4882a593Smuzhiyun 		status = ath10k_htc_pktlog_connect(ar);
1260*4882a593Smuzhiyun 		if (status) {
1261*4882a593Smuzhiyun 			ath10k_err(ar, "failed to connect to pktlog: %d\n", status);
1262*4882a593Smuzhiyun 			return status;
1263*4882a593Smuzhiyun 		}
1264*4882a593Smuzhiyun 	}
1265*4882a593Smuzhiyun 
1266*4882a593Smuzhiyun 	return 0;
1267*4882a593Smuzhiyun }
1268*4882a593Smuzhiyun 
1269*4882a593Smuzhiyun /* registered target arrival callback from the HIF layer */
ath10k_htc_init(struct ath10k * ar)1270*4882a593Smuzhiyun int ath10k_htc_init(struct ath10k *ar)
1271*4882a593Smuzhiyun {
1272*4882a593Smuzhiyun 	int status;
1273*4882a593Smuzhiyun 	struct ath10k_htc *htc = &ar->htc;
1274*4882a593Smuzhiyun 	struct ath10k_htc_svc_conn_req conn_req;
1275*4882a593Smuzhiyun 	struct ath10k_htc_svc_conn_resp conn_resp;
1276*4882a593Smuzhiyun 
1277*4882a593Smuzhiyun 	spin_lock_init(&htc->tx_lock);
1278*4882a593Smuzhiyun 
1279*4882a593Smuzhiyun 	ath10k_htc_reset_endpoint_states(htc);
1280*4882a593Smuzhiyun 
1281*4882a593Smuzhiyun 	htc->ar = ar;
1282*4882a593Smuzhiyun 
1283*4882a593Smuzhiyun 	/* setup our pseudo HTC control endpoint connection */
1284*4882a593Smuzhiyun 	memset(&conn_req, 0, sizeof(conn_req));
1285*4882a593Smuzhiyun 	memset(&conn_resp, 0, sizeof(conn_resp));
1286*4882a593Smuzhiyun 	conn_req.ep_ops.ep_tx_complete = ath10k_htc_control_tx_complete;
1287*4882a593Smuzhiyun 	conn_req.ep_ops.ep_rx_complete = ath10k_htc_control_rx_complete;
1288*4882a593Smuzhiyun 	conn_req.max_send_queue_depth = ATH10K_NUM_CONTROL_TX_BUFFERS;
1289*4882a593Smuzhiyun 	conn_req.service_id = ATH10K_HTC_SVC_ID_RSVD_CTRL;
1290*4882a593Smuzhiyun 
1291*4882a593Smuzhiyun 	/* connect fake service */
1292*4882a593Smuzhiyun 	status = ath10k_htc_connect_service(htc, &conn_req, &conn_resp);
1293*4882a593Smuzhiyun 	if (status) {
1294*4882a593Smuzhiyun 		ath10k_err(ar, "could not connect to htc service (%d)\n",
1295*4882a593Smuzhiyun 			   status);
1296*4882a593Smuzhiyun 		return status;
1297*4882a593Smuzhiyun 	}
1298*4882a593Smuzhiyun 
1299*4882a593Smuzhiyun 	init_completion(&htc->ctl_resp);
1300*4882a593Smuzhiyun 
1301*4882a593Smuzhiyun 	return 0;
1302*4882a593Smuzhiyun }
1303