Lines Matching refs:ep_ctx

531 struct xhci_ep_ctx *ep_ctx; in xhci_get_hw_deq() local
541 ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index); in xhci_get_hw_deq()
542 return le64_to_cpu(ep_ctx->deq); in xhci_get_hw_deq()
794 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_stop_ep() local
815 ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index); in xhci_handle_cmd_stop_ep()
816 trace_xhci_handle_cmd_stop_ep(ep_ctx); in xhci_handle_cmd_stop_ep()
1138 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_set_deq() local
1156 ep_ctx = xhci_get_ep_ctx(xhci, dev->out_ctx, ep_index); in xhci_handle_cmd_set_deq()
1159 trace_xhci_handle_cmd_set_deq_ep(ep_ctx); in xhci_handle_cmd_set_deq()
1171 ep_state = GET_EP_CTX_STATE(ep_ctx); in xhci_handle_cmd_set_deq()
1201 deq = le64_to_cpu(ep_ctx->deq) & ~EP_CTX_CYCLE_MASK; in xhci_handle_cmd_set_deq()
1232 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_reset_ep() local
1241 ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index); in xhci_handle_cmd_reset_ep()
1242 trace_xhci_handle_cmd_reset_ep(ep_ctx); in xhci_handle_cmd_reset_ep()
1308 struct xhci_ep_ctx *ep_ctx; in xhci_handle_cmd_config_ep() local
1333 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->out_ctx, ep_index); in xhci_handle_cmd_config_ep()
1334 trace_xhci_handle_cmd_config_ep(ep_ctx); in xhci_handle_cmd_config_ep()
1965 struct xhci_ep_ctx *ep_ctx, in xhci_requires_manual_halt_cleanup() argument
1978 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_HALTED) in xhci_requires_manual_halt_cleanup()
2049 struct xhci_ep_ctx *ep_ctx; in finish_td() local
2059 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in finish_td()
2072 xhci_requires_manual_halt_cleanup(xhci, ep_ctx, in finish_td()
2123 struct xhci_ep_ctx *ep_ctx; in process_ctrl_td() local
2132 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in process_ctrl_td()
2177 ep_ctx, trb_comp_code)) in process_ctrl_td()
2426 struct xhci_ep_ctx *ep_ctx; in handle_tx_event() local
2445 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in handle_tx_event()
2447 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_DISABLED) { in handle_tx_event()
2643 xhci_requires_manual_halt_cleanup(xhci, ep_ctx, in handle_tx_event()
2738 xhci_requires_manual_halt_cleanup(xhci, ep_ctx, in handle_tx_event()
3112 struct xhci_ep_ctx *ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in prepare_transfer() local
3121 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in prepare_transfer()
3222 struct xhci_ep_ctx *ep_ctx) in check_interval() argument
3227 xhci_interval = EP_INTERVAL_TO_UFRAMES(le32_to_cpu(ep_ctx->ep_info)); in check_interval()
3260 struct xhci_ep_ctx *ep_ctx; in xhci_queue_intr_tx() local
3262 ep_ctx = xhci_get_ep_ctx(xhci, xhci->devs[slot_id]->out_ctx, ep_index); in xhci_queue_intr_tx()
3263 check_interval(xhci, urb, ep_ctx); in xhci_queue_intr_tx()
4036 struct xhci_ep_ctx *ep_ctx; in xhci_queue_isoc_tx_prepare() local
4046 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in xhci_queue_isoc_tx_prepare()
4056 ret = prepare_ring(xhci, ep_ring, GET_EP_CTX_STATE(ep_ctx), in xhci_queue_isoc_tx_prepare()
4065 check_interval(xhci, urb, ep_ctx); in xhci_queue_isoc_tx_prepare()
4069 if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_RUNNING) { in xhci_queue_isoc_tx_prepare()
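
Taken together, the matches above repeat one access pattern: resolve the endpoint context from the device's output context with xhci_get_ep_ctx(), then read the controller-owned fields through the little-endian helpers (GET_EP_CTX_STATE() on ep_info, le64_to_cpu() on ep_ctx->deq). A minimal sketch of that pattern, assuming the usual xHCI driver types from "xhci.h", is shown below; the example_read_hw_deq() helper and its exact argument list are illustrative only and do not appear in the file.

    /*
     * Hypothetical helper sketching the shared ep_ctx access pattern.
     * Only the calls that the matches above already use are assumed.
     */
    #include "xhci.h"

    static u64 example_read_hw_deq(struct xhci_hcd *xhci,
                                   struct xhci_virt_device *vdev,
                                   unsigned int ep_index)
    {
            struct xhci_ep_ctx *ep_ctx;

            /* The output context holds the state the xHC last reported. */
            ep_ctx = xhci_get_ep_ctx(xhci, vdev->out_ctx, ep_index);

            /* Endpoint state is encoded in the ep_info field. */
            if (GET_EP_CTX_STATE(ep_ctx) == EP_STATE_DISABLED)
                    return 0;

            /* deq is little-endian and carries the cycle bit in bit 0. */
            return le64_to_cpu(ep_ctx->deq) & ~EP_CTX_CYCLE_MASK;
    }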