Lines matching refs:ctrl — cross-reference listing over the xHCI ring-management code (U-Boot's drivers/usb/host/xhci-ring.c). Each entry gives the file's own line number, the matching source line, the containing function, and whether ctrl is a function argument or a local variable there.
36 static int last_trb(struct xhci_ctrl *ctrl, struct xhci_ring *ring, in last_trb() argument
39 if (ring == ctrl->event_ring) in last_trb()
55 static bool last_trb_on_last_seg(struct xhci_ctrl *ctrl, in last_trb_on_last_seg() argument
60 if (ring == ctrl->event_ring) in last_trb_on_last_seg()
89 static void inc_enq(struct xhci_ctrl *ctrl, struct xhci_ring *ring, in inc_enq() argument
102 while (last_trb(ctrl, ring, ring->enq_seg, next)) { in inc_enq()
103 if (ring != ctrl->event_ring) { in inc_enq()
129 if (last_trb_on_last_seg(ctrl, ring, in inc_enq()
147 static void inc_deq(struct xhci_ctrl *ctrl, struct xhci_ring *ring) in inc_deq() argument
155 if (last_trb(ctrl, ring, ring->deq_seg, ring->dequeue)) { in inc_deq()
156 if (ring == ctrl->event_ring && in inc_deq()
157 last_trb_on_last_seg(ctrl, ring, in inc_deq()
166 } while (last_trb(ctrl, ring, ring->deq_seg, ring->dequeue)); in inc_deq()
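The references above (last_trb(), last_trb_on_last_seg(), inc_enq(), inc_deq()) implement the xHCI ring-wrap rule: when an enqueue or dequeue pointer reaches the Link TRB at the end of a segment it jumps back to the start and, on the last segment, toggles the ring's cycle state; the event ring is treated specially because it has no Link TRB. The following is a minimal, self-contained sketch of that producer-side rule, using stand-in types rather than U-Boot's structures, with a single small segment for illustration.

/*
 * Hedged model: single-segment transfer ring with a Link TRB in the last
 * slot.  Only the wrap/cycle-toggle behaviour mirrors inc_enq(); names and
 * sizes here are illustrative.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TRBS_PER_SEG	8	/* tiny segment, just for illustration */
#define TRB_TYPE_LINK	6	/* xHCI Link TRB type code */

struct trb { uint32_t field[4]; };

struct ring {
	struct trb trbs[TRBS_PER_SEG];
	unsigned int enq;		/* enqueue index within the segment */
	unsigned int cycle_state;	/* producer cycle bit, starts at 1 */
};

/* Link TRBs carry their type in bits 15:10 of the control dword. */
static bool is_link_trb(const struct trb *t)
{
	return ((t->field[3] >> 10) & 0x3f) == TRB_TYPE_LINK;
}

/* Mirrors the transfer-ring branch of inc_enq(): step forward and, on
 * reaching the Link TRB, wrap to the first TRB and toggle the cycle bit. */
static void inc_enq_model(struct ring *r)
{
	r->enq++;
	if (is_link_trb(&r->trbs[r->enq])) {
		r->enq = 0;
		r->cycle_state ^= 1;
	}
}

int main(void)
{
	struct ring r = { .cycle_state = 1 };

	/* The last TRB of the segment is the Link TRB back to the start. */
	r.trbs[TRBS_PER_SEG - 1].field[3] = TRB_TYPE_LINK << 10;

	for (int i = 0; i < 2 * TRBS_PER_SEG; i++) {
		printf("enq=%u cycle=%u\n", r.enq, r.cycle_state);
		inc_enq_model(&r);
	}
	return 0;
}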
181 static struct xhci_generic_trb *queue_trb(struct xhci_ctrl *ctrl, in queue_trb() argument
196 inc_enq(ctrl, ring, more_trbs_coming); in queue_trb()
210 static int prepare_ring(struct xhci_ctrl *ctrl, struct xhci_ring *ep_ring, in prepare_ring() argument
238 while (last_trb(ctrl, ep_ring, ep_ring->enq_seg, next)) { in prepare_ring()
250 if (last_trb_on_last_seg(ctrl, ep_ring, in prepare_ring()
272 void xhci_queue_command(struct xhci_ctrl *ctrl, u8 *ptr, u32 slot_id, in xhci_queue_command() argument
278 BUG_ON(prepare_ring(ctrl, ctrl->cmd_ring, EP_STATE_RUNNING)); in xhci_queue_command()
284 ctrl->cmd_ring->cycle_state; in xhci_queue_command()
293 queue_trb(ctrl, ctrl->cmd_ring, false, fields); in xhci_queue_command()
296 xhci_writel(&ctrl->dba->doorbell[0], DB_VALUE_HOST); in xhci_queue_command()
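xhci_queue_command() above assembles the four dwords of a command TRB, takes the cycle bit from ctrl->cmd_ring->cycle_state, places the TRB with queue_trb(), and then rings the host controller doorbell (doorbell[0]). The sketch below shows one plausible control-dword layout for a Reset Endpoint command, following the xHCI specification; the function name and types are stand-ins, and U-Boot's actual field macros may set additional bits.

/* Hedged sketch: control dword (field[3]) of a Reset Endpoint command TRB. */
#include <stdint.h>
#include <stdio.h>

#define TRB_TYPE_RESET_EP	14U	/* Reset Endpoint Command TRB type */

static uint32_t reset_ep_control_dword(uint32_t slot_id, uint32_t ep_id,
					uint32_t cycle_state)
{
	return (slot_id << 24) |		/* bits 31:24: Slot ID          */
	       (ep_id << 16) |			/* bits 20:16: Endpoint ID (DCI)*/
	       (TRB_TYPE_RESET_EP << 10) |	/* bits 15:10: TRB type         */
	       (cycle_state & 1);		/* bit 0: producer cycle bit    */
}

int main(void)
{
	uint32_t fields[4] = { 0, 0, 0, 0 };	/* pointer/status dwords unused here */

	fields[3] = reset_ep_control_dword(1 /* slot */, 3 /* ep_id */, 1);
	printf("command TRB control dword: 0x%08x\n", (unsigned int)fields[3]);
	return 0;
}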
363 struct xhci_ctrl *ctrl = xhci_get_ctrl(udev); in giveback_first_trb() local
377 xhci_writel(&ctrl->dba->doorbell[udev->slot_id], in giveback_first_trb()
393 void xhci_acknowledge_event(struct xhci_ctrl *ctrl) in xhci_acknowledge_event() argument
396 inc_deq(ctrl, ctrl->event_ring); in xhci_acknowledge_event()
399 xhci_writeq(&ctrl->ir_set->erst_dequeue, in xhci_acknowledge_event()
400 (uintptr_t)ctrl->event_ring->dequeue | ERST_EHB); in xhci_acknowledge_event()
409 static int event_ready(struct xhci_ctrl *ctrl) in event_ready() argument
413 xhci_inval_cache((uintptr_t)ctrl->event_ring->dequeue, in event_ready()
416 event = ctrl->event_ring->dequeue; in event_ready()
420 ctrl->event_ring->cycle_state) in event_ready()
435 union xhci_trb *xhci_wait_for_event(struct xhci_ctrl *ctrl, trb_type expected) in xhci_wait_for_event() argument
441 union xhci_trb *event = ctrl->event_ring->dequeue; in xhci_wait_for_event()
443 if (!event_ready(ctrl)) in xhci_wait_for_event()
467 xhci_acknowledge_event(ctrl); in xhci_wait_for_event()
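event_ready() above decides whether the controller has produced a new event by comparing the cycle bit of the TRB at the event ring's dequeue pointer with the ring's consumer cycle state; xhci_wait_for_event() polls that test, and xhci_acknowledge_event() advances the dequeue pointer and reports the new position to the interrupter's ERDP register (with ERST_EHB). A hedged, self-contained model of the consumer side, with simplified stand-in types and without the cache invalidation or register write:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TRB_CYCLE	0x1u	/* cycle bit: bit 0 of the control dword */
#define EVENT_TRBS	4

struct trb { uint32_t field[4]; };

struct event_ring {
	struct trb trbs[EVENT_TRBS];
	unsigned int deq;		/* consumer dequeue index */
	unsigned int cycle_state;	/* consumer cycle state, starts at 1 */
};

/* Mirrors event_ready(): a new event exists when the TRB's cycle bit
 * matches the ring's cycle state. */
static bool event_ready_model(const struct event_ring *er)
{
	return (er->trbs[er->deq].field[3] & TRB_CYCLE) == er->cycle_state;
}

/* Mirrors the ring-advance part of xhci_acknowledge_event(): step the
 * dequeue pointer and toggle the cycle state when the ring wraps. */
static void acknowledge_model(struct event_ring *er)
{
	if (++er->deq == EVENT_TRBS) {
		er->deq = 0;
		er->cycle_state ^= 1;
	}
}

int main(void)
{
	struct event_ring er = { .cycle_state = 1 };

	printf("ready before HC writes:  %d\n", event_ready_model(&er));

	/* Pretend the controller produced one event with cycle bit 1. */
	er.trbs[0].field[3] = TRB_CYCLE;

	printf("ready after HC writes:   %d\n", event_ready_model(&er));
	acknowledge_model(&er);
	printf("ready after acknowledge: %d\n", event_ready_model(&er));
	return 0;
}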
483 struct xhci_ctrl *ctrl = xhci_get_ctrl(udev); in reset_ep() local
484 struct xhci_ring *ring = ctrl->devs[udev->slot_id]->eps[ep_index].ring; in reset_ep()
489 xhci_queue_command(ctrl, NULL, udev->slot_id, ep_index, TRB_RESET_EP); in reset_ep()
490 event = xhci_wait_for_event(ctrl, TRB_COMPLETION); in reset_ep()
496 xhci_acknowledge_event(ctrl); in reset_ep()
498 xhci_queue_command(ctrl, (void *)((uintptr_t)ring->enqueue | in reset_ep()
500 event = xhci_wait_for_event(ctrl, TRB_COMPLETION); in reset_ep()
507 xhci_acknowledge_event(ctrl); in reset_ep()
520 struct xhci_ctrl *ctrl = xhci_get_ctrl(udev); in abort_td() local
521 struct xhci_ring *ring = ctrl->devs[udev->slot_id]->eps[ep_index].ring; in abort_td()
525 xhci_queue_command(ctrl, NULL, udev->slot_id, ep_index, TRB_STOP_RING); in abort_td()
527 event = xhci_wait_for_event(ctrl, TRB_TRANSFER); in abort_td()
536 xhci_acknowledge_event(ctrl); in abort_td()
538 event = xhci_wait_for_event(ctrl, TRB_COMPLETION); in abort_td()
545 xhci_acknowledge_event(ctrl); in abort_td()
547 xhci_queue_command(ctrl, (void *)((uintptr_t)ring->enqueue | in abort_td()
549 event = xhci_wait_for_event(ctrl, TRB_COMPLETION); in abort_td()
556 xhci_acknowledge_event(ctrl); in abort_td()
606 struct xhci_ctrl *ctrl = xhci_get_ctrl(udev); in xhci_bulk_tx() local
626 virt_dev = ctrl->devs[slot_id]; in xhci_bulk_tx()
631 ep_ctx = xhci_get_ep_ctx(ctrl, virt_dev->out_ctx, ep_index); in xhci_bulk_tx()
663 ret = prepare_ring(ctrl, ring, in xhci_bulk_tx()
725 if (HC_VERSION(xhci_readl(&ctrl->hccr->cr_capbase)) < 0x100) in xhci_bulk_tx()
744 queue_trb(ctrl, ring, (num_trbs > 1), trb_fields); in xhci_bulk_tx()
757 event = xhci_wait_for_event(ctrl, TRB_TRANSFER); in xhci_bulk_tx()
773 xhci_acknowledge_event(ctrl); in xhci_bulk_tx()
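xhci_bulk_tx() above splits the transfer buffer into Normal TRBs, passing (num_trbs > 1) as more_trbs_coming so that the chain handling stays correct when a multi-TRB TD crosses the Link TRB. Per the xHCI rule, a single TRB carries at most 64 KiB and must not cross a 64 KiB boundary, so a buffer needs one TRB per 64 KiB region it touches. A hedged sketch of that counting rule (helper name and constant are illustrative, not U-Boot's):

#include <stdint.h>
#include <stdio.h>

#define TRB_MAX_BUFF_SHIFT	16	/* 64 KiB per TRB */

static unsigned int num_trbs_for_buffer(uint64_t addr, uint64_t length)
{
	if (length == 0)
		return 1;	/* a zero-length TD still needs one TRB */

	/* Number of 64 KiB regions covered by [addr, addr + length). */
	return (unsigned int)(((addr + length - 1) >> TRB_MAX_BUFF_SHIFT) -
			      (addr >> TRB_MAX_BUFF_SHIFT)) + 1;
}

int main(void)
{
	/* 100 KiB starting 4 KiB before a 64 KiB boundary touches 3 regions. */
	printf("%u\n", num_trbs_for_buffer(0xF000, 100 * 1024));
	/* Exactly one aligned 64 KiB buffer fits in a single TRB. */
	printf("%u\n", num_trbs_for_buffer(0x10000, 64 * 1024));
	return 0;
}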
800 struct xhci_ctrl *ctrl = xhci_get_ctrl(udev); in xhci_ctrl_tx() local
804 struct xhci_virt_device *virt_dev = ctrl->devs[slot_id]; in xhci_ctrl_tx()
832 ep_ctx = xhci_get_ep_ctx(ctrl, virt_dev->out_ctx, ep_index); in xhci_ctrl_tx()
849 ret = prepare_ring(ctrl, ep_ring, in xhci_ctrl_tx()
873 if (HC_VERSION(xhci_readl(&ctrl->hccr->cr_capbase)) >= 0x100) { in xhci_ctrl_tx()
898 queue_trb(ctrl, ep_ring, true, trb_fields); in xhci_ctrl_tx()
927 queue_trb(ctrl, ep_ring, true, trb_fields); in xhci_ctrl_tx()
950 queue_trb(ctrl, ep_ring, false, trb_fields); in xhci_ctrl_tx()
954 event = xhci_wait_for_event(ctrl, TRB_TRANSFER); in xhci_ctrl_tx()
963 xhci_acknowledge_event(ctrl); in xhci_ctrl_tx()
976 event = xhci_wait_for_event(ctrl, TRB_TRANSFER); in xhci_ctrl_tx()
981 xhci_acknowledge_event(ctrl); in xhci_ctrl_tx()
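The three queue_trb() calls in xhci_ctrl_tx() above correspond to the stages of a control transfer: a Setup Stage TRB (the 8-byte setup packet is carried immediately in the TRB), an optional Data Stage TRB, and a Status Stage TRB whose direction is the opposite of the data stage (IN when there is no data stage). A hedged, self-contained sketch of that stage sequence; the TRB type codes follow the xHCI specification, while the struct and helper are stand-ins:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TRB_SETUP	2U	/* Setup Stage TRB type  */
#define TRB_DATA	3U	/* Data Stage TRB type   */
#define TRB_STATUS	4U	/* Status Stage TRB type */

struct stage { uint32_t trb_type; bool dir_in; };

/* Fill 'stages' with the TRB sequence for one control transfer and return
 * how many stages were produced (2 or 3). */
static int control_stages(struct stage *stages, bool has_data, bool data_in)
{
	int n = 0;

	stages[n++] = (struct stage){ TRB_SETUP, false };	/* setup is always OUT */
	if (has_data)
		stages[n++] = (struct stage){ TRB_DATA, data_in };
	/* Status direction: IN if there is no data stage or the data was OUT. */
	stages[n++] = (struct stage){ TRB_STATUS, !has_data || !data_in };
	return n;
}

int main(void)
{
	struct stage s[3];
	int n = control_stages(s, true, true);	/* e.g. a descriptor read */

	for (int i = 0; i < n; i++)
		printf("stage %d: type %u, %s\n", i, (unsigned int)s[i].trb_type,
		       s[i].dir_in ? "IN" : "OUT");
	return 0;
}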