Lines matching refs: ep_ctx (each hit: source line number, matched line, containing function; declaration sites are tagged "local" or "argument")
554 struct xhci_ep_ctx *ep_ctx; in xhci_get_hw_deq() local
564 ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index); in xhci_get_hw_deq()
565 return le64_to_cpu(ep_ctx->deq); in xhci_get_hw_deq()
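
The xhci_get_hw_deq() hits above read the controller's current dequeue pointer straight out of the output (hardware-owned) endpoint context. A minimal sketch of that pattern, assuming the usual xhci.h helpers and ignoring the stream-context case the real function also handles; the function name here is illustrative only:

    static u64 ep_ctx_read_hw_deq(struct xhci_hcd *xhci,
                                  struct xhci_virt_device *vdev,
                                  unsigned int ep_index)
    {
            struct xhci_ep_ctx *ep_ctx;

            /* out_ctx is the context the controller writes back into */
            ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index);

            /* deq is little-endian; bit 0 carries the dequeue cycle state
             * (EP_CTX_CYCLE_MASK) rather than an address bit */
            return le64_to_cpu(ep_ctx->deq);
    }
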
1076 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_stop_ep() local
1094 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep_index); in xhci_handle_cmd_stop_ep()
1096 trace_xhci_handle_cmd_stop_ep(ep_ctx); in xhci_handle_cmd_stop_ep()
1113 switch (GET_EP_CTX_STATE(ep_ctx)) { in xhci_handle_cmd_stop_ep()
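
Several of the command-completion handlers above (the stop-endpoint switch at 1113, the set-dequeue state read at 1393, and later checks in the event path) branch on GET_EP_CTX_STATE(ep_ctx), which masks the endpoint-state field out of the little-endian ep_info dword. A hedged illustration of the states that can come back, using the xhci.h names; the helper itself is not part of the driver:

    /* Illustrative only: name the state GET_EP_CTX_STATE() returns. */
    static const char *ep_ctx_state_name(struct xhci_ep_ctx *ep_ctx)
    {
            switch (GET_EP_CTX_STATE(ep_ctx)) {
            case EP_STATE_DISABLED: return "disabled";
            case EP_STATE_RUNNING:  return "running";
            case EP_STATE_HALTED:   return "halted";
            case EP_STATE_STOPPED:  return "stopped";
            case EP_STATE_ERROR:    return "error";
            default:                return "reserved";
            }
    }
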
1360 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_set_deq() local
1378 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep_index); in xhci_handle_cmd_set_deq()
1381 trace_xhci_handle_cmd_set_deq_ep(ep_ctx); in xhci_handle_cmd_set_deq()
1393 ep_state = GET_EP_CTX_STATE(ep_ctx); in xhci_handle_cmd_set_deq()
1423 deq = le64_to_cpu(ep_ctx->deq) & ~EP_CTX_CYCLE_MASK; in xhci_handle_cmd_set_deq()
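
Line 1423 strips EP_CTX_CYCLE_MASK before using the dequeue pointer, because bit 0 of the deq field holds the dequeue cycle state rather than part of the address. A one-line sketch of that, assuming only the xhci.h definitions; the helper name is illustrative:

    static u64 ep_ctx_deq_dma(struct xhci_ep_ctx *ep_ctx)
    {
            /* bit 0 of deq is the dequeue cycle state, not an address bit */
            return le64_to_cpu(ep_ctx->deq) & ~EP_CTX_CYCLE_MASK;
    }
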
1466 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_reset_ep() local
1474 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep_index); in xhci_handle_cmd_reset_ep()
1475 trace_xhci_handle_cmd_reset_ep(ep_ctx); in xhci_handle_cmd_reset_ep()
1529 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_config_ep() local
1556 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->out_ctx, ep_index); in xhci_handle_cmd_config_ep()
1557 trace_xhci_handle_cmd_config_ep(ep_ctx); in xhci_handle_cmd_config_ep()
2163 struct xhci_ep_ctx *ep_ctx, in xhci_requires_manual_halt_cleanup() argument
2176 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_HALTED) in xhci_requires_manual_halt_cleanup()
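
xhci_requires_manual_halt_cleanup() receives the endpoint context as an argument (line 2163), and the check visible at 2176 is whether the hardware left the endpoint Halted. A condensed sketch of that shape; the completion-code screening the real function also does is not shown in the listing and is left out here:

    static bool ep_needs_manual_halt_cleanup(struct xhci_ep_ctx *ep_ctx)
    {
            /* only a halted endpoint needs the reset-endpoint cleanup */
            return GET_EP_CTX_STATE(ep_ctx) == EP_STATE_HALTED;
    }
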
2200 struct xhci_ep_ctx *ep_ctx; in finish_td() local
2202 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep->ep_index); in finish_td()
2228 if (GET_EP_CTX_STATE(ep_ctx) != EP_STATE_HALTED) { in finish_td()
2302 struct xhci_ep_ctx *ep_ctx; in process_ctrl_td() local
2308 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep->ep_index); in process_ctrl_td()
2353 ep_ctx, trb_comp_code)) in process_ctrl_td()
2594 struct xhci_ep_ctx *ep_ctx; in handle_tx_event() local
2612 ep_ctx = xhci_get_ep_ctx(xhci, ep->vdev->out_ctx, ep_index); in handle_tx_event()
2614 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_DISABLED) { in handle_tx_event()
2809 xhci_requires_manual_halt_cleanup(xhci, ep_ctx, in handle_tx_event()
2904 xhci_requires_manual_halt_cleanup(xhci, ep_ctx, in handle_tx_event()
3308 struct xhci_ep_ctx *ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in prepare_transfer() local
3318 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in prepare_transfer()
3419 struct xhci_ep_ctx *ep_ctx) in check_interval() argument
3424 xhci_interval = EP_INTERVAL_TO_UFRAMES(le32_to_cpu(ep_ctx->ep_info)); in check_interval()
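
check_interval() (3419/3424) reads the programmed interval back out of ep_info; the field is stored as a power-of-two exponent, which EP_INTERVAL_TO_UFRAMES() expands into 125 us microframes. A minimal sketch, assuming that xhci.h macro; the helper name is mine:

    static unsigned int ep_ctx_interval_uframes(struct xhci_ep_ctx *ep_ctx)
    {
            /* ep_info stores the interval as an exponent; the macro
             * converts it to a count of 125 us microframes */
            return EP_INTERVAL_TO_UFRAMES(le32_to_cpu(ep_ctx->ep_info));
    }
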
3457 struct xhci_ep_ctx *ep_ctx; in xhci_queue_intr_tx() local
3459 ep_ctx = xhci_get_ep_ctx(xhci, xhci->devs[slot_id]->out_ctx, ep_index); in xhci_queue_intr_tx()
3460 check_interval(xhci, urb, ep_ctx); in xhci_queue_intr_tx()
4238 struct xhci_ep_ctx *ep_ctx; in xhci_queue_isoc_tx_prepare() local
4248 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in xhci_queue_isoc_tx_prepare()
4258 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in xhci_queue_isoc_tx_prepare()
4267 check_interval(xhci, urb, ep_ctx); in xhci_queue_isoc_tx_prepare()
4271 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_RUNNING) { in xhci_queue_isoc_tx_prepare()
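
The isochronous submit path reads the endpoint state twice: once so prepare_ring() can refuse to queue on an endpoint the controller cannot service (4258), and once to decide how the start frame is handled for a Running endpoint (4271), with check_interval() in between (4267). A hedged outline of that ordering; the trailing prepare_ring() arguments (num_trbs, mem_flags) are assumed rather than shown in the listing:

    static int isoc_prepare_outline(struct xhci_hcd *xhci, struct xhci_ring *ep_ring,
                                    struct xhci_ep_ctx *ep_ctx, struct urb *urb,
                                    unsigned int num_trbs, gfp_t mem_flags)
    {
            int ret;

            /* the live endpoint state gates whether TRBs may be queued at all;
             * num_trbs/mem_flags are assumed arguments, not from the listing */
            ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx),
                               num_trbs, mem_flags);
            if (ret)
                    return ret;

            /* compare the URB's interval with the one programmed in ep_info */
            check_interval(xhci, urb, ep_ctx);

            if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_RUNNING) {
                    /* start-frame selection differs for an already-running
                     * endpoint; details elided */
            }
            return 0;
    }
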