Lines matching refs: ep_ring (drivers/usb/host/xhci-ring.c)
594 struct xhci_ring *ep_ring; in xhci_move_dequeue_past_td() local
606 ep_ring = xhci_triad_to_transfer_ring(xhci, slot_id, in xhci_move_dequeue_past_td()
608 if (!ep_ring) { in xhci_move_dequeue_past_td()
621 if (list_empty(&ep_ring->td_list)) { in xhci_move_dequeue_past_td()
622 new_seg = ep_ring->enq_seg; in xhci_move_dequeue_past_td()
623 new_deq = ep_ring->enqueue; in xhci_move_dequeue_past_td()
624 new_cycle = ep_ring->cycle_state; in xhci_move_dequeue_past_td()
634 new_seg = ep_ring->deq_seg; in xhci_move_dequeue_past_td()
635 new_deq = ep_ring->dequeue; in xhci_move_dequeue_past_td()
658 next_trb(xhci, ep_ring, &new_seg, &new_deq); in xhci_move_dequeue_past_td()
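
xhci_move_dequeue_past_td() walks from the current dequeue position to the first TRB after a cancelled TD, flipping its cycle-state tracking whenever it crosses a toggle-cycle link TRB; note the fast path at lines 621-624, where an empty td_list means the ring is drained and the new dequeue is simply the current enqueue. A minimal sketch of the per-TRB step, folding the caller's cycle toggle into the advance; types and names here are simplified stand-ins, not the driver's:

    struct trb { unsigned is_link : 1; unsigned toggles_cycle : 1; };
    struct seg { struct trb trbs[256]; struct seg *next; };

    /* Advance (seg, trb) one TRB; follow a link TRB to the next segment
     * and flip the tracked cycle state when the link wraps the ring. */
    static void step_trb(struct seg **seg, struct trb **trb, unsigned int *cycle)
    {
            if ((*trb)->is_link) {
                    if ((*trb)->toggles_cycle)
                            *cycle ^= 1;    /* ring wrapped around */
                    *seg = (*seg)->next;
                    *trb = (*seg)->trbs;
            } else {
                    (*trb)++;
            }
    }
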
722 static void td_to_noop(struct xhci_hcd *xhci, struct xhci_ring *ep_ring, in td_to_noop() argument
738 next_trb(xhci, ep_ring, &seg, &trb); in td_to_noop()
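
td_to_noop() cancels a TD in place by rewriting each of its TRBs as a transfer No-Op, stepping from first_trb to last_trb with next_trb() as seen at line 738. A sketch of the per-TRB rewrite using the control-word layout from the xHCI spec (struct and helper names are stand-ins):

    #define TRB_TYPE_BITMASK (0x3fu << 10)  /* TRB Type: control bits 15:10 */
    #define TRB_TYPE(t)      ((t) << 10)
    #define TRB_TR_NOOP      8u             /* transfer-ring No-Op TRB type */
    #define TRB_CHAIN        (1u << 4)

    struct generic_trb { unsigned int field[4]; };

    static void trb_to_noop(struct generic_trb *trb, int break_chain)
    {
            unsigned int ctrl = trb->field[3];

            ctrl &= ~TRB_TYPE_BITMASK;       /* erase the old TRB type   */
            ctrl |= TRB_TYPE(TRB_TR_NOOP);   /* mark the slot as a No-Op */
            if (break_chain)
                    ctrl &= ~TRB_CHAIN;      /* detach it from the next TD */
            trb->field[3] = ctrl;
    }
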
809 struct xhci_ring *ep_ring, int status) in xhci_td_cleanup() argument
817 xhci_unmap_td_bounce_buffer(xhci, ep_ring, td); in xhci_td_cleanup()
1310 struct xhci_ring *ep_ring, in update_ring_for_set_deq_completion() argument
1317 num_trbs_free_temp = ep_ring->num_trbs_free; in update_ring_for_set_deq_completion()
1318 dequeue_temp = ep_ring->dequeue; in update_ring_for_set_deq_completion()
1326 if (trb_is_link(ep_ring->dequeue)) { in update_ring_for_set_deq_completion()
1327 ep_ring->deq_seg = ep_ring->deq_seg->next; in update_ring_for_set_deq_completion()
1328 ep_ring->dequeue = ep_ring->deq_seg->trbs; in update_ring_for_set_deq_completion()
1331 while (ep_ring->dequeue != dev->eps[ep_index].queued_deq_ptr) { in update_ring_for_set_deq_completion()
1333 ep_ring->num_trbs_free++; in update_ring_for_set_deq_completion()
1334 ep_ring->dequeue++; in update_ring_for_set_deq_completion()
1335 if (trb_is_link(ep_ring->dequeue)) { in update_ring_for_set_deq_completion()
1336 if (ep_ring->dequeue == in update_ring_for_set_deq_completion()
1339 ep_ring->deq_seg = ep_ring->deq_seg->next; in update_ring_for_set_deq_completion()
1340 ep_ring->dequeue = ep_ring->deq_seg->trbs; in update_ring_for_set_deq_completion()
1342 if (ep_ring->dequeue == dequeue_temp) { in update_ring_for_set_deq_completion()
1350 ep_ring->num_trbs_free = num_trbs_free_temp; in update_ring_for_set_deq_completion()
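
update_ring_for_set_deq_completion() snapshots num_trbs_free and the dequeue pointer (lines 1317-1318), then walks the software dequeue forward until it matches the pointer the hardware was handed, crediting one free TRB per step and reverting if it laps the whole ring. A sketch of that catch-up loop, assuming the queued pointer is never itself a link TRB; types are simplified stand-ins:

    struct trb { unsigned is_link : 1; };
    struct seg { struct trb trbs[256]; struct seg *next; };
    struct ring {
            struct seg *deq_seg;
            struct trb *dequeue;
            unsigned int num_trbs_free;
    };

    static int catch_up_dequeue(struct ring *ring, struct trb *queued_deq_ptr)
    {
            struct trb *start = ring->dequeue;
            unsigned int saved_free = ring->num_trbs_free;

            if (ring->dequeue->is_link) {           /* start past a link TRB */
                    ring->deq_seg = ring->deq_seg->next;
                    ring->dequeue = ring->deq_seg->trbs;
            }
            while (ring->dequeue != queued_deq_ptr) {
                    ring->num_trbs_free++;
                    ring->dequeue++;
                    if (ring->dequeue->is_link) {   /* hop over the link TRB */
                            ring->deq_seg = ring->deq_seg->next;
                            ring->dequeue = ring->deq_seg->trbs;
                    }
                    if (ring->dequeue == start) {   /* full lap, not found */
                            ring->num_trbs_free = saved_free;
                            return -1;
                    }
            }
            return 0;
    }
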
1366 struct xhci_ring *ep_ring; in xhci_handle_cmd_set_deq() local
1378 ep_ring = xhci_virt_ep_to_ring(xhci, ep, stream_id); in xhci_handle_cmd_set_deq()
1379 if (!ep_ring) { in xhci_handle_cmd_set_deq()
1441 ep_ring, ep_index); in xhci_handle_cmd_set_deq()
1451 ep_ring = xhci_urb_to_transfer_ring(ep->xhci, td->urb); in xhci_handle_cmd_set_deq()
1456 xhci_td_cleanup(ep->xhci, td, ep_ring, td->status); in xhci_handle_cmd_set_deq()
2205 struct xhci_ring *ep_ring, struct xhci_td *td, in finish_td() argument
2255 xhci_handle_halted_endpoint(xhci, ep, ep_ring->stream_id, td, in finish_td()
2272 xhci_handle_halted_endpoint(xhci, ep, ep_ring->stream_id, td, in finish_td()
2281 trbs_freed = xhci_num_trbs_to(ep_ring->deq_seg, ep_ring->dequeue, in finish_td()
2283 ep_ring->num_segs); in finish_td()
2287 ep_ring->num_trbs_free += trbs_freed; in finish_td()
2288 ep_ring->dequeue = td->last_trb; in finish_td()
2289 ep_ring->deq_seg = td->last_trb_seg; in finish_td()
2290 inc_deq(xhci, ep_ring); in finish_td()
2292 return xhci_td_cleanup(xhci, td, ep_ring, td->status); in finish_td()
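
finish_td() credits the ring with the TRBs the controller consumed, via xhci_num_trbs_to() at lines 2281-2283, before pointing the software dequeue at td->last_trb and stepping past it with inc_deq() (lines 2288-2290). A sketch of the cross-segment count that feeds num_trbs_free; names here are hypothetical:

    #define TRBS_PER_SEG 256

    struct trb { int pad; };
    struct seg { struct trb trbs[TRBS_PER_SEG]; struct seg *next; };

    /* Count TRBs from (seg, start) up to stop, hopping segment links;
     * give up after num_segs hops if stop is not on this ring. */
    static unsigned int trbs_between(struct seg *seg, struct trb *start,
                                     struct trb *stop, unsigned int num_segs)
    {
            unsigned int count = 0, hops = 0;
            struct trb *trb = start;

            while (trb != stop) {
                    count++;
                    trb++;
                    if (trb == &seg->trbs[TRBS_PER_SEG]) {
                            seg = seg->next;        /* follow the link */
                            trb = seg->trbs;
                            if (++hops > num_segs)
                                    return 0;       /* stop not found */
                    }
            }
            return count;
    }
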
2314 struct xhci_ring *ep_ring, struct xhci_td *td, in process_ctrl_td() argument
2403 return finish_td(xhci, ep, ep_ring, td, trb_comp_code); in process_ctrl_td()
2410 struct xhci_ring *ep_ring, struct xhci_td *td, in process_isoc_td() argument
2488 return finish_td(xhci, ep, ep_ring, td, trb_comp_code); in process_isoc_td()
2521 struct xhci_ring *ep_ring, struct xhci_td *td, in process_bulk_intr_td() argument
2568 xhci_handle_halted_endpoint(xhci, ep, ep_ring->stream_id, td, in process_bulk_intr_td()
2580 sum_trb_lengths(xhci, ep_ring, ep_trb) + in process_bulk_intr_td()
2589 return finish_td(xhci, ep, ep_ring, td, trb_comp_code); in process_bulk_intr_td()
2601 struct xhci_ring *ep_ring; in handle_tx_event() local
2626 ep_ring = xhci_dma_to_transfer_ring(ep, ep_trb_dma); in handle_tx_event()
2637 if (!ep_ring) { in handle_tx_event()
2665 list_for_each(tmp, &ep_ring->td_list) in handle_tx_event()
2678 ep_ring->last_td_was_short) in handle_tx_event()
2754 if (!list_empty(&ep_ring->td_list)) in handle_tx_event()
2762 if (!list_empty(&ep_ring->td_list)) in handle_tx_event()
2809 if (list_empty(&ep_ring->td_list)) { in handle_tx_event()
2820 ep_ring->last_td_was_short)) { in handle_tx_event()
2834 ep_ring->stream_id, in handle_tx_event()
2849 td = list_first_entry(&ep_ring->td_list, struct xhci_td, in handle_tx_event()
2855 ep_seg = trb_in_td(xhci, ep_ring->deq_seg, ep_ring->dequeue, in handle_tx_event()
2879 ep_ring->last_td_was_short) { in handle_tx_event()
2880 ep_ring->last_td_was_short = false; in handle_tx_event()
2889 trb_in_td(xhci, ep_ring->deq_seg, in handle_tx_event()
2890 ep_ring->dequeue, td->last_trb, in handle_tx_event()
2899 ep_ring->last_td_was_short = true; in handle_tx_event()
2901 ep_ring->last_td_was_short = false; in handle_tx_event()
2913 trace_xhci_handle_transfer(ep_ring, in handle_tx_event()
2929 ep_ring->stream_id, in handle_tx_event()
2938 process_ctrl_td(xhci, ep, ep_ring, td, ep_trb, event); in handle_tx_event()
2940 process_isoc_td(xhci, ep, ep_ring, td, ep_trb, event); in handle_tx_event()
2942 process_bulk_intr_td(xhci, ep, ep_ring, td, ep_trb, event); in handle_tx_event()
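
handle_tx_event() maps a transfer event's TRB DMA address back to a queued TD with trb_in_td() (lines 2855 and 2889-2890), then dispatches to the ctrl/isoc/bulk handlers (lines 2938-2942). A sketch of the trb_in_td() idea, reduced to DMA range checks segment by segment; the 16-byte TRB size is per the xHCI spec, everything else is a stand-in:

    #include <stddef.h>
    #include <stdint.h>

    #define TRBS_PER_SEG 256
    #define TRB_SIZE     16                  /* bytes per TRB */

    struct seg { uint64_t dma; struct seg *next; };  /* DMA of trbs[0] */

    /* Walk from the TD's first TRB to its last; return the segment that
     * contains event_dma, or NULL if the event points outside the TD. */
    static struct seg *dma_in_td(struct seg *start_seg, uint64_t start_dma,
                                 uint64_t end_dma, uint64_t event_dma)
    {
            struct seg *seg = start_seg;
            uint64_t lo = start_dma;

            do {
                    uint64_t hi = seg->dma + (TRBS_PER_SEG - 1) * TRB_SIZE;

                    if (lo <= end_dma && end_dma <= hi)
                            hi = end_dma;    /* the TD ends in this segment */
                    if (lo <= event_dma && event_dma <= hi)
                            return seg;      /* event points into the TD */
                    if (hi == end_dma)
                            break;           /* covered the whole TD: miss */
                    seg = seg->next;
                    lo = seg->dma;
            } while (seg != start_seg);

            return NULL;
    }
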
3214 static int prepare_ring(struct xhci_hcd *xhci, struct xhci_ring *ep_ring, in prepare_ring() argument
3250 if (room_on_ring(xhci, ep_ring, num_trbs)) in prepare_ring()
3253 if (ep_ring == xhci->cmd_ring) { in prepare_ring()
3260 num_trbs_needed = num_trbs - ep_ring->num_trbs_free; in prepare_ring()
3261 if (xhci_ring_expansion(xhci, ep_ring, num_trbs_needed, in prepare_ring()
3268 while (trb_is_link(ep_ring->enqueue)) { in prepare_ring()
3273 !(ep_ring->type == TYPE_ISOC && in prepare_ring()
3275 ep_ring->enqueue->link.control &= in prepare_ring()
3278 ep_ring->enqueue->link.control |= in prepare_ring()
3282 ep_ring->enqueue->link.control ^= cpu_to_le32(TRB_CYCLE); in prepare_ring()
3285 if (link_trb_toggles_cycle(ep_ring->enqueue)) in prepare_ring()
3286 ep_ring->cycle_state ^= 1; in prepare_ring()
3288 ep_ring->enq_seg = ep_ring->enq_seg->next; in prepare_ring()
3289 ep_ring->enqueue = ep_ring->enq_seg->trbs; in prepare_ring()
3292 if (link_trb_count++ > ep_ring->num_segs) { in prepare_ring()
3298 if (last_trb_on_seg(ep_ring->enq_seg, ep_ring->enqueue)) { in prepare_ring()
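
prepare_ring() first checks room_on_ring() and expands the ring if needed (lines 3250-3262), then nudges the enqueue pointer past any link TRBs: it fixes up the chain bit (cleared in some isoc/quirk cases, lines 3273-3278), hands the link TRB's cycle bit to the controller (line 3282), and flips cycle_state when the link wraps the ring (lines 3285-3286), bailing out if it sees more link TRBs than segments (line 3292). A sketch with spec-derived bit values and stand-in types:

    #define TRB_CYCLE   (1u << 0)
    #define TRB_CHAIN   (1u << 4)
    #define LINK_TOGGLE (1u << 1)            /* Toggle Cycle bit in link TRBs */

    struct trb { unsigned int control; unsigned is_link : 1; };
    struct seg { struct trb trbs[256]; struct seg *next; };
    struct ring {
            struct seg *enq_seg;
            struct trb *enqueue;
            unsigned int cycle_state;
            unsigned int num_segs;
    };

    static int skip_link_trbs(struct ring *ring, int chain_across_link)
    {
            unsigned int hops = 0;

            while (ring->enqueue->is_link) {
                    struct trb *link = ring->enqueue;

                    if (chain_across_link)       /* keep the TD chained over the link */
                            link->control |= TRB_CHAIN;
                    else                         /* e.g. some isoc quirk cases */
                            link->control &= ~TRB_CHAIN;

                    link->control ^= TRB_CYCLE;  /* hand the link TRB to the HC */

                    if (link->control & LINK_TOGGLE)
                            ring->cycle_state ^= 1;  /* ring wrapped: flip cycle */

                    ring->enq_seg = ring->enq_seg->next;
                    ring->enqueue = ring->enq_seg->trbs;

                    if (++hops > ring->num_segs) /* only link TRBs: malformed ring */
                            return -1;
            }
            return 0;
    }
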
3318 struct xhci_ring *ep_ring; in prepare_transfer() local
3321 ep_ring = xhci_triad_to_transfer_ring(xhci, xdev->slot_id, ep_index, in prepare_transfer()
3323 if (!ep_ring) { in prepare_transfer()
3329 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in prepare_transfer()
3348 list_add_tail(&td->td_list, &ep_ring->td_list); in prepare_transfer()
3349 td->start_seg = ep_ring->enq_seg; in prepare_transfer()
3350 td->first_trb = ep_ring->enqueue; in prepare_transfer()
3757 struct xhci_ring *ep_ring; in xhci_queue_ctrl_tx() local
3767 ep_ring = xhci_urb_to_transfer_ring(xhci, urb); in xhci_queue_ctrl_tx()
3768 if (!ep_ring) in xhci_queue_ctrl_tx()
3802 start_trb = &ep_ring->enqueue->generic; in xhci_queue_ctrl_tx()
3803 start_cycle = ep_ring->cycle_state; in xhci_queue_ctrl_tx()
3823 queue_trb(xhci, ep_ring, true, in xhci_queue_ctrl_tx()
3859 queue_trb(xhci, ep_ring, true, in xhci_queue_ctrl_tx()
3863 field | ep_ring->cycle_state); in xhci_queue_ctrl_tx()
3867 td->last_trb = ep_ring->enqueue; in xhci_queue_ctrl_tx()
3868 td->last_trb_seg = ep_ring->enq_seg; in xhci_queue_ctrl_tx()
3876 queue_trb(xhci, ep_ring, false, in xhci_queue_ctrl_tx()
3881 field | TRB_IOC | TRB_TYPE(TRB_STATUS) | ep_ring->cycle_state); in xhci_queue_ctrl_tx()
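
xhci_queue_ctrl_tx() records start_trb and start_cycle before queuing (lines 3802-3803): the first TRB of the submission is written with the cycle bit inverted so the controller treats the TD as not yet valid, and only after the data and status stages are in place is the bit flipped, publishing the whole TD at once. The same trick appears in the isoc path at line 4130. A sketch of that final handover, in the style of the driver's giveback helper (names are stand-ins):

    #define TRB_CYCLE (1u << 0)

    struct generic_trb { unsigned int field[4]; };

    /* Flip the first TRB's cycle bit to match the ring's producer cycle;
     * everything queued after it is already valid, so the controller
     * never sees a half-written TD. */
    static void give_first_trb_to_hc(struct generic_trb *start_trb,
                                     unsigned int start_cycle)
    {
            /* In the driver a write barrier (wmb()) precedes this store so
             * the TRB payloads are visible to the controller first. */
            if (start_cycle)
                    start_trb->field[3] |= TRB_CYCLE;
            else
                    start_trb->field[3] &= ~TRB_CYCLE;
            /* ...then the endpoint doorbell is rung. */
    }
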
4054 struct xhci_ring *ep_ring; in xhci_queue_isoc_tx() local
4070 ep_ring = xhci->devs[slot_id]->eps[ep_index].ring; in xhci_queue_isoc_tx()
4078 start_trb = &ep_ring->enqueue->generic; in xhci_queue_isoc_tx()
4079 start_cycle = ep_ring->cycle_state; in xhci_queue_isoc_tx()
4130 (i ? ep_ring->cycle_state : !start_cycle); in xhci_queue_isoc_tx()
4143 ep_ring->cycle_state; in xhci_queue_isoc_tx()
4155 td->last_trb = ep_ring->enqueue; in xhci_queue_isoc_tx()
4156 td->last_trb_seg = ep_ring->enq_seg; in xhci_queue_isoc_tx()
4181 queue_trb(xhci, ep_ring, more_trbs_coming, in xhci_queue_isoc_tx()
4224 urb_priv->td[0].last_trb = ep_ring->enqueue; in xhci_queue_isoc_tx()
4226 td_to_noop(xhci, ep_ring, &urb_priv->td[0], true); in xhci_queue_isoc_tx()
4229 ep_ring->enqueue = urb_priv->td[0].first_trb; in xhci_queue_isoc_tx()
4230 ep_ring->enq_seg = urb_priv->td[0].start_seg; in xhci_queue_isoc_tx()
4231 ep_ring->cycle_state = start_cycle; in xhci_queue_isoc_tx()
4232 ep_ring->num_trbs_free = ep_ring->num_trbs_free_temp; in xhci_queue_isoc_tx()
4248 struct xhci_ring *ep_ring; in xhci_queue_isoc_tx_prepare() local
4258 ep_ring = xdev->eps[ep_index].ring; in xhci_queue_isoc_tx_prepare()
4269 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in xhci_queue_isoc_tx_prepare()
4281 if (HCC_CFC(xhci->hcc_params) && !list_empty(&ep_ring->td_list)) { in xhci_queue_isoc_tx_prepare()
4314 ep_ring->num_trbs_free_temp = ep_ring->num_trbs_free; in xhci_queue_isoc_tx_prepare()
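
The isoc path snapshots the free-TRB count before queuing (num_trbs_free_temp, line 4314) so that a failure partway through xhci_queue_isoc_tx() can be unwound (lines 4224-4232): the TRBs already written are turned into No-Ops via td_to_noop(), and the enqueue position, cycle state, and free count are restored. A sketch of the restore step, with stand-in types:

    struct trb { int pad; };
    struct seg { struct trb trbs[256]; struct seg *next; };
    struct ring {
            struct seg *enq_seg;
            struct trb *enqueue;
            unsigned int cycle_state;
            unsigned int num_trbs_free, num_trbs_free_temp;
    };

    struct saved_pos {              /* captured before the first TRB is queued */
            struct seg  *seg;
            struct trb  *trb;
            unsigned int cycle;
    };

    /* Forget everything queued since the snapshot; the no-op'ed TRBs will
     * simply be overwritten by the next submission. */
    static void ring_rollback(struct ring *ring, const struct saved_pos *save)
    {
            ring->enqueue       = save->trb;
            ring->enq_seg       = save->seg;
            ring->cycle_state   = save->cycle;
            ring->num_trbs_free = ring->num_trbs_free_temp;
    }
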