Lines matching refs: ep_priv

Identifier cross-reference hits for ep_priv, apparently in the i.MX21 USB host controller driver (imx21-hcd). Each entry gives the source line number, the matching line of code, the enclosing function, and whether the reference is a function argument or a local variable.

469 static void ep_idle(struct imx21 *imx21, struct ep_priv *ep_priv)  in ep_idle()  argument
474 int etd_num = ep_priv->etd[i]; in ep_idle()
480 ep_priv->etd[i] = -1; in ep_idle()
491 ep_priv = list_first_entry(&imx21->queue_for_etd, in ep_idle()
492 struct ep_priv, queue); in ep_idle()
493 list_del(&ep_priv->queue); in ep_idle()
495 ep_priv->waiting_etd = 0; in ep_idle()
496 ep_priv->etd[i] = etd_num; in ep_idle()
498 if (list_empty(&ep_priv->ep->urb_list)) { in ep_idle()
503 &ep_priv->ep->urb_list, struct urb, urb_list)); in ep_idle()
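Read together, the ep_idle() hits above (469-503) show the ETD-recycling path: when an endpoint goes idle it gives up its ETD slots, and if another endpoint is parked on imx21->queue_for_etd the freed ETD is handed straight to it instead of being returned to the pool. A minimal sketch of that shape, assuming the slot count macro NUM_ISO_ETDS and the free_etd() helper seen elsewhere in the listing; the real function goes on to schedule the waiter's first queued URB (lines 498-503), which is omitted here:

static void ep_idle_sketch(struct imx21 *imx21, struct ep_priv *ep_priv)
{
	struct ep_priv *waiter;
	int i;

	for (i = 0; i < NUM_ISO_ETDS; i++) {
		int etd_num = ep_priv->etd[i];

		if (etd_num < 0)
			continue;		/* slot already empty */

		ep_priv->etd[i] = -1;		/* endpoint gives up the ETD */

		if (list_empty(&imx21->queue_for_etd)) {
			/* nobody is waiting for an ETD: return it to the pool */
			free_etd(imx21, etd_num);
			continue;
		}

		/* otherwise hand it straight to the first parked endpoint
		 * (the listing reuses the ep_priv variable for this) */
		waiter = list_first_entry(&imx21->queue_for_etd,
					  struct ep_priv, queue);
		list_del(&waiter->queue);
		waiter->waiting_etd = 0;
		waiter->etd[i] = etd_num;
	}
}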
512 struct ep_priv *ep_priv = urb->ep->hcpriv; in urb_done() local
525 if (list_empty(&ep_priv->ep->urb_list)) in urb_done()
526 ep_idle(imx21, ep_priv); in urb_done()
555 struct ep_priv *ep_priv = ep->hcpriv; in schedule_isoc_etds() local
566 if (list_empty(&ep_priv->td_list)) in schedule_isoc_etds()
569 etd_num = ep_priv->etd[i]; in schedule_isoc_etds()
577 td = list_entry(ep_priv->td_list.next, struct td, list); in schedule_isoc_etds()
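The schedule_isoc_etds() hits (555-577) suggest a per-slot loop that keeps feeding the endpoint's pending TDs to its hardware ETDs in FIFO order (td_list.next is the oldest entry). The skeleton below only shows that selection logic; the etd_num < 0 skip is an assumption, and how a TD is actually programmed into imx21->etd[etd_num] is not visible in the listing and is left as a placeholder:

	for (i = 0; i < NUM_ISO_ETDS; i++) {
		struct td *td;
		int etd_num;

		if (list_empty(&ep_priv->td_list))
			break;			/* nothing left to schedule */

		etd_num = ep_priv->etd[i];
		if (etd_num < 0)
			continue;		/* slot holds no hardware ETD */

		/* oldest pending TD is scheduled first */
		td = list_entry(ep_priv->td_list.next, struct td, list);

		/* ... set up and activate imx21->etd[etd_num] from td
		 *     (not shown in the listing) ... */
	}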
680 static struct ep_priv *alloc_isoc_ep( in alloc_isoc_ep()
683 struct ep_priv *ep_priv; in alloc_isoc_ep() local
686 ep_priv = kzalloc(sizeof(struct ep_priv), GFP_ATOMIC); in alloc_isoc_ep()
687 if (!ep_priv) in alloc_isoc_ep()
691 ep_priv->etd[i] = -1; in alloc_isoc_ep()
693 INIT_LIST_HEAD(&ep_priv->td_list); in alloc_isoc_ep()
694 ep_priv->ep = ep; in alloc_isoc_ep()
695 ep->hcpriv = ep_priv; in alloc_isoc_ep()
696 return ep_priv; in alloc_isoc_ep()
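The alloc_isoc_ep() hits (683-696) cover almost the whole function, so it can be reconstructed nearly verbatim. The pieces not shown, and therefore assumed here, are the second parameter's type, the loop bound (taken to be NUM_ISO_ETDS, the size of the etd[] slot table) and the NULL return on allocation failure:

static struct ep_priv *alloc_isoc_ep(struct imx21 *imx21,
				     struct usb_host_endpoint *ep)
{
	struct ep_priv *ep_priv;
	int i;

	ep_priv = kzalloc(sizeof(struct ep_priv), GFP_ATOMIC);
	if (!ep_priv)
		return NULL;

	/* no hardware ETDs are claimed yet: mark every slot empty */
	for (i = 0; i < NUM_ISO_ETDS; i++)
		ep_priv->etd[i] = -1;

	INIT_LIST_HEAD(&ep_priv->td_list);

	/* link the private state to the USB core's endpoint */
	ep_priv->ep = ep;
	ep->hcpriv = ep_priv;
	return ep_priv;
}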
699 static int alloc_isoc_etds(struct imx21 *imx21, struct ep_priv *ep_priv) in alloc_isoc_etds() argument
706 if (ep_priv->etd[i] < 0) { in alloc_isoc_etds()
711 ep_priv->etd[i] = etd_num; in alloc_isoc_etds()
712 imx21->etd[etd_num].ep = ep_priv->ep; in alloc_isoc_etds()
720 free_etd(imx21, ep_priv->etd[j]); in alloc_isoc_etds()
721 ep_priv->etd[j] = -1; in alloc_isoc_etds()
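Similarly, the alloc_isoc_etds() hits (699-721) outline an all-or-nothing claim of one hardware ETD per empty slot, with a rollback path that releases what was claimed if the pool runs dry. The alloc_etd() helper exists per line 1218 below; the error label and the -ENOMEM return value are assumptions filled in around the listed lines:

static int alloc_isoc_etds_sketch(struct imx21 *imx21, struct ep_priv *ep_priv)
{
	int i, j;
	int etd_num;

	/* claim one hardware ETD for every slot that does not have one yet */
	for (i = 0; i < NUM_ISO_ETDS; i++) {
		if (ep_priv->etd[i] < 0) {
			etd_num = alloc_etd(imx21);
			if (etd_num < 0)
				goto alloc_etd_failed;

			ep_priv->etd[i] = etd_num;
			imx21->etd[etd_num].ep = ep_priv->ep;
		}
	}
	return 0;

alloc_etd_failed:
	/* pool exhausted: release the slots filled before the failure */
	for (j = 0; j < i; j++) {
		free_etd(imx21, ep_priv->etd[j]);
		ep_priv->etd[j] = -1;
	}
	return -ENOMEM;
}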
733 struct ep_priv *ep_priv; in imx21_hc_urb_enqueue_isoc() local
754 ep_priv = alloc_isoc_ep(imx21, ep); in imx21_hc_urb_enqueue_isoc()
755 if (ep_priv == NULL) { in imx21_hc_urb_enqueue_isoc()
760 ep_priv = ep->hcpriv; in imx21_hc_urb_enqueue_isoc()
763 ret = alloc_isoc_etds(imx21, ep_priv); in imx21_hc_urb_enqueue_isoc()
780 struct etd_priv *etd = &imx21->etd[ep_priv->etd[i]]; in imx21_hc_urb_enqueue_isoc()
804 if (list_empty(&ep_priv->td_list)) { in imx21_hc_urb_enqueue_isoc()
807 urb->start_frame = wrap_frame(list_entry(ep_priv->td_list.prev, in imx21_hc_urb_enqueue_isoc()
841 list_add_tail(&td->list, &ep_priv->td_list); in imx21_hc_urb_enqueue_isoc()
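Within imx21_hc_urb_enqueue_isoc(), the hits at 804-807 hint at how the URB's start_frame is chosen: if the endpoint has no TDs queued the transfer starts fresh relative to the current frame number (not shown in the listing), otherwise it continues right after the last TD already on td_list. A hypothetical helper isolating that decision; wrap_frame() is taken from line 807, while td->frame, the cur_frame parameter, the "+ 5" slack and the use of urb->interval as the step are assumptions:

static int isoc_start_frame_sketch(struct ep_priv *ep_priv, struct urb *urb,
				   int cur_frame)
{
	if (list_empty(&ep_priv->td_list))
		/* nothing queued yet: start a few frames ahead of "now" */
		return wrap_frame(cur_frame + 5);

	/* otherwise continue directly after the last TD already queued */
	return wrap_frame(list_entry(ep_priv->td_list.prev,
				     struct td, list)->frame + urb->interval);
}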
868 struct urb *urb, struct ep_priv *ep_priv) in dequeue_isoc_urb() argument
876 int etd_num = ep_priv->etd[i]; in dequeue_isoc_urb()
886 list_for_each_entry_safe(td, tmp, &ep_priv->td_list, list) { in dequeue_isoc_urb()
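The dequeue_isoc_urb() hits (868-886) show the unlink path walking both the endpoint's ETD slots (876) and its TD list with the _safe iterator, which is what allows entries to be removed while iterating. A sketch of the TD side only; the td->urb back-pointer used to pick out the URB being unlinked is an assumption, and whatever is done to disable in-flight ETDs is omitted:

static void dequeue_isoc_tds_sketch(struct ep_priv *ep_priv, struct urb *urb)
{
	struct td *td, *tmp;

	/* drop every queued TD that belongs to the URB being unlinked;
	 * list_for_each_entry_safe() tolerates the list_del() below */
	list_for_each_entry_safe(td, tmp, &ep_priv->td_list, list) {
		if (td->urb == urb)
			list_del(&td->list);
	}
}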
902 struct ep_priv *ep_priv = urb_priv->ep->hcpriv; in schedule_nonisoc_etd() local
904 int etd_num = ep_priv->etd[0]; in schedule_nonisoc_etd()
1136 static struct ep_priv *alloc_ep(void) in alloc_ep()
1139 struct ep_priv *ep_priv; in alloc_ep() local
1141 ep_priv = kzalloc(sizeof(struct ep_priv), GFP_ATOMIC); in alloc_ep()
1142 if (!ep_priv) in alloc_ep()
1146 ep_priv->etd[i] = -1; in alloc_ep()
1148 return ep_priv; in alloc_ep()
1157 struct ep_priv *ep_priv; in imx21_hc_urb_enqueue() local
1179 ep_priv = ep->hcpriv; in imx21_hc_urb_enqueue()
1180 if (ep_priv == NULL) { in imx21_hc_urb_enqueue()
1181 ep_priv = alloc_ep(); in imx21_hc_urb_enqueue()
1182 if (!ep_priv) { in imx21_hc_urb_enqueue()
1186 ep->hcpriv = ep_priv; in imx21_hc_urb_enqueue()
1187 ep_priv->ep = ep; in imx21_hc_urb_enqueue()
1210 if (ep_priv->etd[0] < 0) { in imx21_hc_urb_enqueue()
1211 if (ep_priv->waiting_etd) { in imx21_hc_urb_enqueue()
1214 ep_priv); in imx21_hc_urb_enqueue()
1218 ep_priv->etd[0] = alloc_etd(imx21); in imx21_hc_urb_enqueue()
1219 if (ep_priv->etd[0] < 0) { in imx21_hc_urb_enqueue()
1221 "no ETD available queueing %p\n", ep_priv); in imx21_hc_urb_enqueue()
1223 list_add_tail(&ep_priv->queue, &imx21->queue_for_etd); in imx21_hc_urb_enqueue()
1224 ep_priv->waiting_etd = 1; in imx21_hc_urb_enqueue()
1230 etd = &imx21->etd[ep_priv->etd[0]]; in imx21_hc_urb_enqueue()
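In imx21_hc_urb_enqueue(), the hits at 1210-1230 show the single-ETD policy for non-isochronous endpoints: slot 0 either already holds an ETD, or one is allocated now, or the endpoint is parked on imx21->queue_for_etd (with waiting_etd set) until ep_idle() donates one. A sketch of that decision as a hypothetical helper; the return convention and the dev_dbg() level are this sketch's own, not the driver's:

static int acquire_nonisoc_etd_sketch(struct imx21 *imx21,
				      struct ep_priv *ep_priv)
{
	if (ep_priv->etd[0] >= 0)
		return 0;		/* already holds its ETD */

	if (ep_priv->waiting_etd)
		return -EBUSY;		/* already parked, waiting for one */

	ep_priv->etd[0] = alloc_etd(imx21);
	if (ep_priv->etd[0] < 0) {
		dev_dbg(imx21->dev,
			"no ETD available queueing %p\n", ep_priv);
		/* park the endpoint; ep_idle() will hand it an ETD later */
		list_add_tail(&ep_priv->queue, &imx21->queue_for_etd);
		ep_priv->waiting_etd = 1;
		return -EBUSY;
	}

	return 0;
}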
1253 struct ep_priv *ep_priv; in imx21_hc_urb_dequeue() local
1266 ep_priv = ep->hcpriv; in imx21_hc_urb_dequeue()
1271 dequeue_isoc_urb(imx21, urb, ep_priv); in imx21_hc_urb_dequeue()
1274 int etd_num = ep_priv->etd[0]; in imx21_hc_urb_dequeue()
1420 struct ep_priv *ep_priv; in imx21_hc_endpoint_disable() local
1427 ep_priv = ep->hcpriv; in imx21_hc_endpoint_disable()
1428 dev_vdbg(imx21->dev, "disable ep=%p, ep->hcpriv=%p\n", ep, ep_priv); in imx21_hc_endpoint_disable()
1433 if (ep_priv != NULL) { in imx21_hc_endpoint_disable()
1435 if (ep_priv->etd[i] > -1) in imx21_hc_endpoint_disable()
1437 ep_priv->etd[i]); in imx21_hc_endpoint_disable()
1439 free_etd(imx21, ep_priv->etd[i]); in imx21_hc_endpoint_disable()
1441 kfree(ep_priv); in imx21_hc_endpoint_disable()
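Finally, the imx21_hc_endpoint_disable() hits (1420-1441) show the teardown of the per-endpoint state: any ETD the endpoint still owns is reported and released, then the ep_priv itself is freed. In the listing free_etd() appears to be called for every slot, so it presumably tolerates empty (-1) entries; the sketch below guards it with the same check used for the debug message, and the ep->hcpriv reset and the message text are assumptions:

static void release_ep_priv_sketch(struct imx21 *imx21,
				   struct usb_host_endpoint *ep,
				   struct ep_priv *ep_priv)
{
	int i;

	if (ep_priv == NULL)
		return;

	for (i = 0; i < NUM_ISO_ETDS; i++) {
		if (ep_priv->etd[i] > -1) {
			dev_dbg(imx21->dev,
				"endpoint %p still owns ETD %d at disable\n",
				ep, ep_priv->etd[i]);
			free_etd(imx21, ep_priv->etd[i]);
		}
	}

	kfree(ep_priv);
	ep->hcpriv = NULL;	/* assumed: detach from the USB core endpoint */
}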