Lines Matching refs:ep
93 static int setdma_rx(struct dwc2_ep *ep, struct dwc2_request *req) in setdma_rx() argument
97 u32 ep_num = ep_index(ep); in setdma_rx()
101 ep_num ? DOEPT_SIZ_XFER_SIZE_MAX_EP : ep->ep.maxpacket); in setdma_rx()
103 ep->len = length; in setdma_rx()
104 ep->dma_buf = buf; in setdma_rx()
109 pktcnt = (length - 1)/(ep->ep.maxpacket) + 1; in setdma_rx()
113 invalidate_dcache_range((unsigned long) ep->dma_buf, in setdma_rx()
114 (unsigned long) ep->dma_buf + in setdma_rx()
115 ROUND(ep->len, CONFIG_SYS_CACHELINE_SIZE)); in setdma_rx()
117 writel((unsigned long) ep->dma_buf, &reg->out_endp[ep_num].doepdma); in setdma_rx()
135 static int setdma_tx(struct dwc2_ep *ep, struct dwc2_request *req) in setdma_tx() argument
139 u32 ep_num = ep_index(ep); in setdma_tx()
145 length = min(length, (u32)ep_maxpacket(ep)); in setdma_tx()
147 ep->len = length; in setdma_tx()
148 ep->dma_buf = buf; in setdma_tx()
150 flush_dcache_range((unsigned long) ep->dma_buf, in setdma_tx()
151 (unsigned long) ep->dma_buf + in setdma_tx()
152 ROUND(ep->len, CONFIG_SYS_CACHELINE_SIZE)); in setdma_tx()
157 pktcnt = (length - 1)/(ep->ep.maxpacket) + 1; in setdma_tx()
160 writel(TX_FIFO_NUMBER(ep->fifo_num), &reg->grstctl); in setdma_tx()
161 writel(TX_FIFO_NUMBER(ep->fifo_num) | TX_FIFO_FLUSH, &reg->grstctl); in setdma_tx()
165 writel((unsigned long) ep->dma_buf, &reg->in_endp[ep_num].diepdma); in setdma_tx()
173 ctrl |= DIEPCTL_TX_FIFO_NUM(ep->fifo_num); in setdma_tx()
195 struct dwc2_ep *ep = &dev->ep[ep_num]; in complete_rx() local
199 if (list_empty(&ep->queue)) { in complete_rx()
207 req = list_entry(ep->queue.next, struct dwc2_request, queue); in complete_rx()
215 xfer_size = ep->len - xfer_size; in complete_rx()
231 invalidate_dcache_range((unsigned long) ep->dma_buf, in complete_rx()
232 (unsigned long) ep->dma_buf + in complete_rx()
236 is_short = !!(xfer_size % ep->ep.maxpacket); in complete_rx()
251 done(ep, req, 0); in complete_rx()
253 if (!list_empty(&ep->queue)) { in complete_rx()
254 req = list_entry(ep->queue.next, in complete_rx()
259 setdma_rx(ep, req); in complete_rx()
263 setdma_rx(ep, req); in complete_rx()
268 struct dwc2_ep *ep = &dev->ep[ep_num]; in complete_tx() local
279 if (list_empty(&ep->queue)) { in complete_tx()
287 req = list_entry(ep->queue.next, struct dwc2_request, queue); in complete_tx()
291 xfer_size = ep->len; in complete_tx()
292 is_short = (xfer_size < ep->ep.maxpacket); in complete_tx()
307 last = write_fifo_ep0(ep, req); in complete_tx()
314 done(ep, req, 0); in complete_tx()
320 done(ep, req, 0); in complete_tx()
332 done(ep, req, 0); in complete_tx()
334 if (!list_empty(&ep->queue)) { in complete_tx()
335 req = list_entry(ep->queue.next, struct dwc2_request, queue); in complete_tx()
338 setdma_tx(ep, req); in complete_tx()
344 struct dwc2_ep *ep = &dev->ep[ep_num]; in dwc2_udc_check_tx_queue() local
350 if (!list_empty(&ep->queue)) { in dwc2_udc_check_tx_queue()
351 req = list_entry(ep->queue.next, struct dwc2_request, queue); in dwc2_udc_check_tx_queue()
356 if (ep_is_in(ep)) in dwc2_udc_check_tx_queue()
357 setdma_tx(ep, req); in dwc2_udc_check_tx_queue()
359 setdma_rx(ep, req); in dwc2_udc_check_tx_queue()
612 struct dwc2_ep *ep; in dwc2_queue() local
625 ep = container_of(_ep, struct dwc2_ep, ep); in dwc2_queue()
627 if (unlikely(!_ep || (!ep->desc && ep->ep.name != ep0name))) { in dwc2_queue()
630 ep->ep.name, !ep->desc, _ep); in dwc2_queue()
634 ep_num = ep_index(ep); in dwc2_queue()
635 dev = ep->dev; in dwc2_queue()
650 __func__, _ep->name, ep_is_in(ep) ? "in" : "out", in dwc2_queue()
652 list_empty(&ep->queue), ep->stopped); in dwc2_queue()
670 if (list_empty(&ep->queue) && !ep->stopped) { in dwc2_queue()
674 list_add_tail(&req->queue, &ep->queue); in dwc2_queue()
675 dwc2_ep0_kick(dev, ep); in dwc2_queue()
678 } else if (ep_is_in(ep)) { in dwc2_queue()
684 setdma_tx(ep, req); in dwc2_queue()
691 setdma_rx(ep, req); in dwc2_queue()
697 list_add_tail(&req->queue, &ep->queue); in dwc2_queue()
709 static int write_fifo_ep0(struct dwc2_ep *ep, struct dwc2_request *req) in write_fifo_ep0() argument
715 max = ep_maxpacket(ep); in write_fifo_ep0()
719 count = setdma_tx(ep, req); in write_fifo_ep0()
734 ep->ep.name, count, in write_fifo_ep0()
740 ep->dev->ep0state = WAIT_FOR_SETUP; in write_fifo_ep0()
747 static int dwc2_fifo_read(struct dwc2_ep *ep, void *cp, int max) in dwc2_fifo_read() argument
754 max, ep_index(ep), cp); in dwc2_fifo_read()
780 static inline void dwc2_udc_ep0_set_stall(struct dwc2_ep *ep) in dwc2_udc_ep0_set_stall() argument
785 dev = ep->dev; in dwc2_udc_ep0_set_stall()
798 __func__, ep_index(ep), &reg->in_endp[EP0_CON].diepctl); in dwc2_udc_ep0_set_stall()
811 struct dwc2_ep *ep = &dev->ep[0]; in dwc2_ep0_read() local
813 if (!list_empty(&ep->queue)) { in dwc2_ep0_read()
814 req = list_entry(ep->queue.next, struct dwc2_request, queue); in dwc2_ep0_read()
830 ep->len = 0; in dwc2_ep0_read()
839 setdma_rx(ep, req); in dwc2_ep0_read()
848 struct dwc2_ep *ep = &dev->ep[0]; in dwc2_ep0_write() local
851 if (list_empty(&ep->queue)) in dwc2_ep0_write()
854 req = list_entry(ep->queue.next, struct dwc2_request, queue); in dwc2_ep0_write()
871 ret = write_fifo_ep0(ep, req); in dwc2_ep0_write()
920 g_status = dev->ep[ep_num].stopped; in dwc2_udc_get_status()
949 static void dwc2_udc_set_nak(struct dwc2_ep *ep) in dwc2_udc_set_nak() argument
954 ep_num = ep_index(ep); in dwc2_udc_set_nak()
955 debug("%s: ep_num = %d, ep_type = %d\n", __func__, ep_num, ep->ep_type); in dwc2_udc_set_nak()
957 if (ep_is_in(ep)) { in dwc2_udc_set_nak()
975 static void dwc2_udc_ep_set_stall(struct dwc2_ep *ep) in dwc2_udc_ep_set_stall() argument
980 ep_num = ep_index(ep); in dwc2_udc_ep_set_stall()
981 debug("%s: ep_num = %d, ep_type = %d\n", __func__, ep_num, ep->ep_type); in dwc2_udc_ep_set_stall()
983 if (ep_is_in(ep)) { in dwc2_udc_ep_set_stall()
1010 static void dwc2_udc_ep_clear_stall(struct dwc2_ep *ep) in dwc2_udc_ep_clear_stall() argument
1015 ep_num = ep_index(ep); in dwc2_udc_ep_clear_stall()
1016 debug("%s: ep_num = %d, ep_type = %d\n", __func__, ep_num, ep->ep_type); in dwc2_udc_ep_clear_stall()
1018 if (ep_is_in(ep)) { in dwc2_udc_ep_clear_stall()
1030 if (ep->bmAttributes == USB_ENDPOINT_XFER_INT in dwc2_udc_ep_clear_stall()
1031 || ep->bmAttributes == USB_ENDPOINT_XFER_BULK) { in dwc2_udc_ep_clear_stall()
1045 if (ep->bmAttributes == USB_ENDPOINT_XFER_INT in dwc2_udc_ep_clear_stall()
1046 || ep->bmAttributes == USB_ENDPOINT_XFER_BULK) { in dwc2_udc_ep_clear_stall()
1060 struct dwc2_ep *ep; in dwc2_udc_set_halt() local
1065 ep = container_of(_ep, struct dwc2_ep, ep); in dwc2_udc_set_halt()
1066 ep_num = ep_index(ep); in dwc2_udc_set_halt()
1068 if (unlikely(!_ep || !ep->desc || ep_num == EP0_CON || in dwc2_udc_set_halt()
1069 ep->desc->bmAttributes == USB_ENDPOINT_XFER_ISOC)) { in dwc2_udc_set_halt()
1070 debug("%s: %s bad ep or descriptor\n", __func__, ep->ep.name); in dwc2_udc_set_halt()
1076 if (value && ep_is_in(ep) && !list_empty(&ep->queue)) { in dwc2_udc_set_halt()
1078 __func__, ep->ep.name, in dwc2_udc_set_halt()
1079 list_entry(ep->queue.next, struct dwc2_request, queue)); in dwc2_udc_set_halt()
1084 dev = ep->dev; in dwc2_udc_set_halt()
1090 ep->stopped = 0; in dwc2_udc_set_halt()
1091 dwc2_udc_ep_clear_stall(ep); in dwc2_udc_set_halt()
1096 ep->stopped = 1; in dwc2_udc_set_halt()
1097 dwc2_udc_ep_set_stall(ep); in dwc2_udc_set_halt()
1105 static void dwc2_udc_ep_activate(struct dwc2_ep *ep) in dwc2_udc_ep_activate() argument
1110 ep_num = ep_index(ep); in dwc2_udc_ep_activate()
1113 if (ep_is_in(ep)) { in dwc2_udc_ep_activate()
1122 __func__, ep_num, ep_ctrl, ep_is_in(ep)); in dwc2_udc_ep_activate()
1128 (ep->bmAttributes << DEPCTL_TYPE_BIT); in dwc2_udc_ep_activate()
1130 (ep->ep.maxpacket << DEPCTL_MPS_BIT); in dwc2_udc_ep_activate()
1133 if (ep_is_in(ep)) { in dwc2_udc_ep_activate()
1155 struct dwc2_ep *ep; in dwc2_udc_clear_feature() local
1158 ep = container_of(_ep, struct dwc2_ep, ep); in dwc2_udc_clear_feature()
1159 ep_num = ep_index(ep); in dwc2_udc_clear_feature()
1161 dev = ep->dev; in dwc2_udc_clear_feature()
1164 __func__, ep_num, ep_is_in(ep), clear_feature_flag); in dwc2_udc_clear_feature()
1197 dwc2_udc_ep0_set_stall(ep); in dwc2_udc_clear_feature()
1203 dwc2_udc_ep_clear_stall(ep); in dwc2_udc_clear_feature()
1204 dwc2_udc_ep_activate(ep); in dwc2_udc_clear_feature()
1205 ep->stopped = 0; in dwc2_udc_clear_feature()
1219 struct dwc2_ep *ep; in dwc2_udc_set_feature() local
1222 ep = container_of(_ep, struct dwc2_ep, ep); in dwc2_udc_set_feature()
1223 ep_num = ep_index(ep); in dwc2_udc_set_feature()
1224 dev = ep->dev; in dwc2_udc_set_feature()
1274 dwc2_udc_ep0_set_stall(ep); in dwc2_udc_set_feature()
1277 ep->stopped = 1; in dwc2_udc_set_feature()
1278 dwc2_udc_ep_set_stall(ep); in dwc2_udc_set_feature()
1293 struct dwc2_ep *ep = &dev->ep[0]; in dwc2_ep0_setup() local
1298 nuke(ep, -EPROTO); in dwc2_ep0_setup()
1301 dwc2_fifo_read(ep, usb_ctrl, 8); in dwc2_ep0_setup()
1335 dwc2_udc_ep0_set_stall(ep); in dwc2_ep0_setup()
1346 dwc2_udc_ep0_set_stall(ep); in dwc2_ep0_setup()
1354 ep->bEndpointAddress |= USB_DIR_IN; in dwc2_ep0_setup()
1356 ep->bEndpointAddress &= ~USB_DIR_IN; in dwc2_ep0_setup()
1421 if (!dwc2_udc_clear_feature(&dev->ep[ep_num].ep)) in dwc2_ep0_setup()
1429 if (!dwc2_udc_set_feature(&dev->ep[ep_num].ep)) in dwc2_ep0_setup()
1456 dwc2_udc_ep0_set_stall(ep); in dwc2_ep0_setup()
1494 static void dwc2_ep0_kick(struct dwc2_udc *dev, struct dwc2_ep *ep) in dwc2_ep0_kick() argument
1497 "%s: ep_is_in = %d\n", __func__, ep_is_in(ep)); in dwc2_ep0_kick()
1498 if (ep_is_in(ep)) { in dwc2_ep0_kick()