Lines Matching refs:endp

207 struct u132_endp *endp[MAX_U132_ENDPS]; member
335 struct u132_endp *endp = kref_to_u132_endp(kref); in u132_endp_delete() local
336 struct u132 *u132 = endp->u132; in u132_endp_delete()
337 u8 usb_addr = endp->usb_addr; in u132_endp_delete()
338 u8 usb_endp = endp->usb_endp; in u132_endp_delete()
341 u8 endp_number = endp->endp_number; in u132_endp_delete()
342 struct usb_host_endpoint *hep = endp->hep; in u132_endp_delete()
343 struct u132_ring *ring = endp->ring; in u132_endp_delete()
344 struct list_head *head = &endp->endp_ring; in u132_endp_delete()
346 if (endp == ring->curr_endp) { in u132_endp_delete()
358 if (endp->input) { in u132_endp_delete()
362 if (endp->output) { in u132_endp_delete()
366 u132->endp[endp_number - 1] = NULL; in u132_endp_delete()
368 kfree(endp); in u132_endp_delete()
372 static inline void u132_endp_put_kref(struct u132 *u132, struct u132_endp *endp) in u132_endp_put_kref() argument
374 kref_put(&endp->kref, u132_endp_delete); in u132_endp_put_kref()
377 static inline void u132_endp_get_kref(struct u132 *u132, struct u132_endp *endp) in u132_endp_get_kref() argument
379 kref_get(&endp->kref); in u132_endp_get_kref()
383 struct u132_endp *endp) in u132_endp_init_kref() argument
385 kref_init(&endp->kref); in u132_endp_init_kref()
389 static void u132_endp_queue_work(struct u132 *u132, struct u132_endp *endp, in u132_endp_queue_work() argument
392 if (queue_delayed_work(workqueue, &endp->scheduler, delta)) in u132_endp_queue_work()
393 kref_get(&endp->kref); in u132_endp_queue_work()
396 static void u132_endp_cancel_work(struct u132 *u132, struct u132_endp *endp) in u132_endp_cancel_work() argument
398 if (cancel_delayed_work(&endp->scheduler)) in u132_endp_cancel_work()
399 kref_put(&endp->kref, u132_endp_delete); in u132_endp_cancel_work()
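
The matches above (335-399) cover the endpoint's reference-counted lifetime. u132_endp_delete() is the kref release routine: it unlinks the endpoint from its ring, clears the per-address input/output bookkeeping, drops the entry in u132->endp[], and frees the structure. The work helpers keep the count honest by taking a reference only when queue_delayed_work() actually queues an item and dropping one only when cancel_delayed_work() actually removes a pending one. A minimal sketch of the pattern, with placeholder names (my_endp, my_endp_delete, workqueue) rather than the driver's own:

    #include <linux/kref.h>
    #include <linux/slab.h>
    #include <linux/workqueue.h>

    static struct workqueue_struct *workqueue;  /* created at module init */

    struct my_endp {
            struct kref kref;               /* lifetime of this endpoint */
            struct delayed_work scheduler;  /* per-endpoint worker */
            /* ... queue and addressing state ... */
    };

    /* kref release routine: runs once, when the last reference drops. */
    static void my_endp_delete(struct kref *kref)
    {
            struct my_endp *endp = container_of(kref, struct my_endp, kref);

            /* unlink from rings and lookup tables here, as 344-366 do */
            kfree(endp);
    }

    /* Take a reference only if the work item was actually queued... */
    static void my_endp_queue_work(struct my_endp *endp, unsigned long delta)
    {
            if (queue_delayed_work(workqueue, &endp->scheduler, delta))
                    kref_get(&endp->kref);
    }

    /* ...and drop one only if a pending item was actually cancelled. */
    static void my_endp_cancel_work(struct my_endp *endp)
    {
            if (cancel_delayed_work(&endp->scheduler))
                    kref_put(&endp->kref, my_endp_delete);
    }

The kref_to_u132_endp() seen at 335 is presumably the same container_of() conversion.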
511 static void u132_hcd_giveback_urb(struct u132 *u132, struct u132_endp *endp, in u132_hcd_giveback_urb() argument
518 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
520 endp->queue_next += 1; in u132_hcd_giveback_urb()
521 if (ENDP_QUEUE_SIZE > --endp->queue_size) { in u132_hcd_giveback_urb()
522 endp->active = 0; in u132_hcd_giveback_urb()
523 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
525 struct list_head *next = endp->urb_more.next; in u132_hcd_giveback_urb()
529 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = in u132_hcd_giveback_urb()
531 endp->active = 0; in u132_hcd_giveback_urb()
532 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
536 ring = endp->ring; in u132_hcd_giveback_urb()
541 u132_endp_put_kref(u132, endp); in u132_hcd_giveback_urb()
545 static void u132_hcd_forget_urb(struct u132 *u132, struct u132_endp *endp, in u132_hcd_forget_urb() argument
548 u132_endp_put_kref(u132, endp); in u132_hcd_forget_urb()
551 static void u132_hcd_abandon_urb(struct u132 *u132, struct u132_endp *endp, in u132_hcd_abandon_urb() argument
557 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
559 endp->queue_next += 1; in u132_hcd_abandon_urb()
560 if (ENDP_QUEUE_SIZE > --endp->queue_size) { in u132_hcd_abandon_urb()
561 endp->active = 0; in u132_hcd_abandon_urb()
562 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
564 struct list_head *next = endp->urb_more.next; in u132_hcd_abandon_urb()
568 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = in u132_hcd_abandon_urb()
570 endp->active = 0; in u132_hcd_abandon_urb()
571 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
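
u132_hcd_giveback_urb() and u132_hcd_abandon_urb() (511-571) share one queue-consumption step: under the queue lock they advance queue_next and decrement queue_size, and when the fixed ring had overflowed they promote the oldest URB from the urb_more list back into urb_list before marking the endpoint inactive (the actual hand-off to usbcore is elided here). A condensed sketch of that step, assuming ENDP_QUEUE_SIZE is a power of two, ENDP_QUEUE_MASK is ENDP_QUEUE_SIZE - 1, and my_endp from the sketch above also carries the queue fields named in the comment:

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>
    #include <linux/usb.h>

    struct my_urbq {                        /* one overflowed URB */
            struct list_head urb_more;
            struct urb *urb;
    };

    /* Assumed my_endp queue fields: spinlock_t queue_lock; u16 queue_size,
     * queue_next, queue_last; struct urb *urb_list[ENDP_QUEUE_SIZE];
     * struct list_head urb_more; int active; */
    static void my_endp_consume_head(struct my_endp *endp)
    {
            unsigned long irqs;

            spin_lock_irqsave(&endp->queue_lock, irqs);
            endp->queue_next += 1;
            if (ENDP_QUEUE_SIZE > --endp->queue_size) {
                    endp->active = 0;       /* ring has room again */
            } else {
                    /* ring was full: pull the oldest overflow URB back in */
                    struct my_urbq *urbq = list_first_entry(&endp->urb_more,
                            struct my_urbq, urb_more);

                    list_del(&urbq->urb_more);
                    endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] =
                            urbq->urb;
                    kfree(urbq);
                    endp->active = 0;
            }
            spin_unlock_irqrestore(&endp->queue_lock, irqs);
    }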
578 struct u132_endp *endp, struct urb *urb, u8 address, u8 toggle_bits, in edset_input() argument
579 void (*callback) (void *endp, struct urb *urb, u8 *buf, int len, in edset_input()
583 return usb_ftdi_elan_edset_input(u132->platform_dev, ring->number, endp, in edset_input()
584 urb, address, endp->usb_endp, toggle_bits, callback); in edset_input()
588 struct u132_endp *endp, struct urb *urb, u8 address, u8 toggle_bits, in edset_setup() argument
589 void (*callback) (void *endp, struct urb *urb, u8 *buf, int len, in edset_setup()
593 return usb_ftdi_elan_edset_setup(u132->platform_dev, ring->number, endp, in edset_setup()
594 urb, address, endp->usb_endp, toggle_bits, callback); in edset_setup()
598 struct u132_endp *endp, struct urb *urb, u8 address, u8 toggle_bits, in edset_single() argument
599 void (*callback) (void *endp, struct urb *urb, u8 *buf, int len, in edset_single()
604 endp, urb, address, endp->usb_endp, toggle_bits, callback); in edset_single()
608 struct u132_endp *endp, struct urb *urb, u8 address, u8 toggle_bits, in edset_output() argument
609 void (*callback) (void *endp, struct urb *urb, u8 *buf, int len, in edset_output()
614 endp, urb, address, endp->usb_endp, toggle_bits, callback); in edset_output()
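
The four edset_* helpers (578-614) are thin wrappers over the usb_ftdi_elan_edset_{input,setup,single,output} primitives exported by the companion ftdi-elan module: each one fills in the invariants (platform device, ring number, endpoint number on the device) so callers supply only what varies per transfer. The matches truncate the callback's parameter list; the sketch below reconstructs the wrapper shape, and both the callback typedef and the extern prototype are assumptions to check against ftdi-elan's actual export:

    #include <linux/platform_device.h>
    #include <linux/usb.h>

    /* Assumed reconstruction of the completion-callback type. */
    typedef void (*edset_callback_t)(void *endp, struct urb *urb, u8 *buf,
            int len, int toggle_bits, int error_count, int condition_code,
            int repeat_number, int halted, int skipped, int actual,
            int non_null);

    /* Assumed prototype of the ftdi-elan export used by edset_input(). */
    extern int usb_ftdi_elan_edset_input(struct platform_device *pdev,
            u8 ed_number, void *endp, struct urb *urb, u8 address,
            u8 ep_number, u8 toggle_bits, edset_callback_t callback);

    static inline int my_edset_input(struct u132 *u132,
            struct u132_ring *ring, struct u132_endp *endp, struct urb *urb,
            u8 address, u8 toggle_bits, edset_callback_t callback)
    {
            /* the wrapper contributes the invariants; the caller picks
             * the URB, address, toggle and completion callback */
            return usb_ftdi_elan_edset_input(u132->platform_dev,
                    ring->number, endp, urb, address, endp->usb_endp,
                    toggle_bits, callback);
    }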
626 struct u132_endp *endp = data; in u132_hcd_interrupt_recv() local
627 struct u132 *u132 = endp->u132; in u132_hcd_interrupt_recv()
628 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_interrupt_recv()
635 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_interrupt_recv()
637 } else if (endp->dequeueing) { in u132_hcd_interrupt_recv()
638 endp->dequeueing = 0; in u132_hcd_interrupt_recv()
640 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_interrupt_recv()
646 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_interrupt_recv()
649 struct u132_ring *ring = endp->ring; in u132_hcd_interrupt_recv()
660 endp->toggle_bits = toggle_bits; in u132_hcd_interrupt_recv()
661 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_interrupt_recv()
666 retval = edset_single(u132, ring, endp, urb, in u132_hcd_interrupt_recv()
667 address, endp->toggle_bits, in u132_hcd_interrupt_recv()
670 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_interrupt_recv()
674 endp->active = 0; in u132_hcd_interrupt_recv()
675 endp->jiffies = jiffies + in u132_hcd_interrupt_recv()
680 u132_endp_put_kref(u132, endp); in u132_hcd_interrupt_recv()
685 endp->toggle_bits = toggle_bits; in u132_hcd_interrupt_recv()
686 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_interrupt_recv()
689 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_interrupt_recv()
693 endp->toggle_bits = toggle_bits; in u132_hcd_interrupt_recv()
694 usb_settoggle(udev->usb_device, endp->usb_endp, in u132_hcd_interrupt_recv()
697 endp->toggle_bits = 0x2; in u132_hcd_interrupt_recv()
698 usb_settoggle(udev->usb_device, endp->usb_endp, in u132_hcd_interrupt_recv()
701 endp->toggle_bits = 0x2; in u132_hcd_interrupt_recv()
702 usb_settoggle(udev->usb_device, endp->usb_endp, in u132_hcd_interrupt_recv()
709 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_interrupt_recv()
717 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_interrupt_recv()
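
u132_hcd_interrupt_recv() (626-717) is the first of a family of completion callbacks; the bulk, configure, enumeration and initial variants that follow all open the same way: recover the endpoint from the opaque data pointer, forget the URB with -ENODEV if the device has been removed, give it back with -EINTR if a dequeue was in progress, and give it back with -ENODEV if the URB was unlinked. Only then is the condition code examined, the data toggle updated via usb_settoggle(), and the transfer either resubmitted through an edset_* call or completed. A condensed skeleton, with the two guard tests left as named placeholders:

    /* Shared skeleton of the completion callbacks (condensed sketch);
     * device_is_gone() and urb_is_unlinked() are placeholders for the
     * driver's actual checks. */
    static void my_transfer_complete(void *data, struct urb *urb, u8 *buf,
            int len, int toggle_bits, int error_count, int condition_code,
            int repeat_number, int halted, int skipped, int actual,
            int non_null)
    {
            struct u132_endp *endp = data;
            struct u132 *u132 = endp->u132;

            if (device_is_gone(u132)) {
                    u132_hcd_forget_urb(u132, endp, urb, -ENODEV);
            } else if (endp->dequeueing) {
                    endp->dequeueing = 0;
                    u132_hcd_giveback_urb(u132, endp, urb, -EINTR);
            } else if (urb_is_unlinked(urb)) {
                    u132_hcd_giveback_urb(u132, endp, urb, -ENODEV);
            } else {
                    /* per-transfer body: inspect condition_code, call
                     * usb_settoggle() with the new toggle, then either
                     * resubmit via edset_*() or complete with status 0 */
            }
    }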
726 struct u132_endp *endp = data; in u132_hcd_bulk_output_sent() local
727 struct u132 *u132 = endp->u132; in u132_hcd_bulk_output_sent()
728 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_bulk_output_sent()
734 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_bulk_output_sent()
736 } else if (endp->dequeueing) { in u132_hcd_bulk_output_sent()
737 endp->dequeueing = 0; in u132_hcd_bulk_output_sent()
739 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_bulk_output_sent()
745 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_bulk_output_sent()
748 struct u132_ring *ring = endp->ring; in u132_hcd_bulk_output_sent()
750 endp->toggle_bits = toggle_bits; in u132_hcd_bulk_output_sent()
754 retval = edset_output(u132, ring, endp, urb, address, in u132_hcd_bulk_output_sent()
755 endp->toggle_bits, u132_hcd_bulk_output_sent); in u132_hcd_bulk_output_sent()
757 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_bulk_output_sent()
761 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_bulk_output_sent()
768 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_bulk_output_sent()
777 struct u132_endp *endp = data; in u132_hcd_bulk_input_recv() local
778 struct u132 *u132 = endp->u132; in u132_hcd_bulk_input_recv()
779 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_bulk_input_recv()
786 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_bulk_input_recv()
788 } else if (endp->dequeueing) { in u132_hcd_bulk_input_recv()
789 endp->dequeueing = 0; in u132_hcd_bulk_input_recv()
791 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_bulk_input_recv()
797 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_bulk_input_recv()
800 struct u132_ring *ring = endp->ring; in u132_hcd_bulk_input_recv()
812 endp->toggle_bits = toggle_bits; in u132_hcd_bulk_input_recv()
813 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_bulk_input_recv()
817 ring->number, endp, urb, address, in u132_hcd_bulk_input_recv()
818 endp->usb_endp, endp->toggle_bits, in u132_hcd_bulk_input_recv()
821 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_bulk_input_recv()
824 endp->toggle_bits = toggle_bits; in u132_hcd_bulk_input_recv()
825 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_bulk_input_recv()
828 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_bulk_input_recv()
833 endp->toggle_bits = toggle_bits; in u132_hcd_bulk_input_recv()
834 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_bulk_input_recv()
837 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_bulk_input_recv()
840 endp->toggle_bits = toggle_bits; in u132_hcd_bulk_input_recv()
841 usb_settoggle(udev->usb_device, endp->usb_endp, 0, in u132_hcd_bulk_input_recv()
847 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_bulk_input_recv()
850 endp->toggle_bits = 0x2; in u132_hcd_bulk_input_recv()
851 usb_settoggle(udev->usb_device, endp->usb_endp, 0, 0); in u132_hcd_bulk_input_recv()
853 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_bulk_input_recv()
857 endp->toggle_bits = 0x2; in u132_hcd_bulk_input_recv()
858 usb_settoggle(udev->usb_device, endp->usb_endp, 0, 0); in u132_hcd_bulk_input_recv()
863 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_bulk_input_recv()
871 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_bulk_input_recv()
880 struct u132_endp *endp = data; in u132_hcd_configure_empty_sent() local
881 struct u132 *u132 = endp->u132; in u132_hcd_configure_empty_sent()
887 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_empty_sent()
889 } else if (endp->dequeueing) { in u132_hcd_configure_empty_sent()
890 endp->dequeueing = 0; in u132_hcd_configure_empty_sent()
892 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_configure_empty_sent()
898 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_empty_sent()
902 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_empty_sent()
908 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_empty_sent()
917 struct u132_endp *endp = data; in u132_hcd_configure_input_recv() local
918 struct u132 *u132 = endp->u132; in u132_hcd_configure_input_recv()
919 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_configure_input_recv()
925 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_input_recv()
927 } else if (endp->dequeueing) { in u132_hcd_configure_input_recv()
928 endp->dequeueing = 0; in u132_hcd_configure_input_recv()
930 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_configure_input_recv()
936 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_input_recv()
939 struct u132_ring *ring = endp->ring; in u132_hcd_configure_input_recv()
954 ring->number, endp, urb, address, in u132_hcd_configure_input_recv()
955 endp->usb_endp, 0x3, in u132_hcd_configure_input_recv()
958 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_configure_input_recv()
964 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_configure_input_recv()
972 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_configure_input_recv()
980 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_input_recv()
989 struct u132_endp *endp = data; in u132_hcd_configure_empty_recv() local
990 struct u132 *u132 = endp->u132; in u132_hcd_configure_empty_recv()
996 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_empty_recv()
998 } else if (endp->dequeueing) { in u132_hcd_configure_empty_recv()
999 endp->dequeueing = 0; in u132_hcd_configure_empty_recv()
1001 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_configure_empty_recv()
1007 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_empty_recv()
1011 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_empty_recv()
1017 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_empty_recv()
1026 struct u132_endp *endp = data; in u132_hcd_configure_setup_sent() local
1027 struct u132 *u132 = endp->u132; in u132_hcd_configure_setup_sent()
1028 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_configure_setup_sent()
1034 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_setup_sent()
1036 } else if (endp->dequeueing) { in u132_hcd_configure_setup_sent()
1037 endp->dequeueing = 0; in u132_hcd_configure_setup_sent()
1039 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_configure_setup_sent()
1045 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_configure_setup_sent()
1050 struct u132_ring *ring = endp->ring; in u132_hcd_configure_setup_sent()
1053 ring->number, endp, urb, address, in u132_hcd_configure_setup_sent()
1054 endp->usb_endp, 0, in u132_hcd_configure_setup_sent()
1057 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_configure_setup_sent()
1061 struct u132_ring *ring = endp->ring; in u132_hcd_configure_setup_sent()
1064 ring->number, endp, urb, address, in u132_hcd_configure_setup_sent()
1065 endp->usb_endp, 0, in u132_hcd_configure_setup_sent()
1068 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_configure_setup_sent()
1075 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_configure_setup_sent()
1084 struct u132_endp *endp = data; in u132_hcd_enumeration_empty_recv() local
1085 struct u132 *u132 = endp->u132; in u132_hcd_enumeration_empty_recv()
1086 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_enumeration_empty_recv()
1093 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_enumeration_empty_recv()
1095 } else if (endp->dequeueing) { in u132_hcd_enumeration_empty_recv()
1096 endp->dequeueing = 0; in u132_hcd_enumeration_empty_recv()
1098 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_enumeration_empty_recv()
1104 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_enumeration_empty_recv()
1108 endp->usb_addr = udev->usb_addr; in u132_hcd_enumeration_empty_recv()
1110 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_enumeration_empty_recv()
1116 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_enumeration_empty_recv()
1125 struct u132_endp *endp = data; in u132_hcd_enumeration_address_sent() local
1126 struct u132 *u132 = endp->u132; in u132_hcd_enumeration_address_sent()
1132 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_enumeration_address_sent()
1134 } else if (endp->dequeueing) { in u132_hcd_enumeration_address_sent()
1135 endp->dequeueing = 0; in u132_hcd_enumeration_address_sent()
1137 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_enumeration_address_sent()
1143 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_enumeration_address_sent()
1147 struct u132_ring *ring = endp->ring; in u132_hcd_enumeration_address_sent()
1150 ring->number, endp, urb, 0, endp->usb_endp, 0, in u132_hcd_enumeration_address_sent()
1153 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_enumeration_address_sent()
1159 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_enumeration_address_sent()
1168 struct u132_endp *endp = data; in u132_hcd_initial_empty_sent() local
1169 struct u132 *u132 = endp->u132; in u132_hcd_initial_empty_sent()
1175 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_empty_sent()
1177 } else if (endp->dequeueing) { in u132_hcd_initial_empty_sent()
1178 endp->dequeueing = 0; in u132_hcd_initial_empty_sent()
1180 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_initial_empty_sent()
1186 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_empty_sent()
1190 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_initial_empty_sent()
1196 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_initial_empty_sent()
1205 struct u132_endp *endp = data; in u132_hcd_initial_input_recv() local
1206 struct u132 *u132 = endp->u132; in u132_hcd_initial_input_recv()
1207 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_initial_input_recv()
1213 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_input_recv()
1215 } else if (endp->dequeueing) { in u132_hcd_initial_input_recv()
1216 endp->dequeueing = 0; in u132_hcd_initial_input_recv()
1218 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_initial_input_recv()
1224 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_input_recv()
1228 struct u132_ring *ring = endp->ring; in u132_hcd_initial_input_recv()
1239 ring->number, endp, urb, address, endp->usb_endp, 0x3, in u132_hcd_initial_input_recv()
1242 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_initial_input_recv()
1248 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_initial_input_recv()
1257 struct u132_endp *endp = data; in u132_hcd_initial_setup_sent() local
1258 struct u132 *u132 = endp->u132; in u132_hcd_initial_setup_sent()
1259 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_initial_setup_sent()
1265 u132_hcd_forget_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_setup_sent()
1267 } else if (endp->dequeueing) { in u132_hcd_initial_setup_sent()
1268 endp->dequeueing = 0; in u132_hcd_initial_setup_sent()
1270 u132_hcd_giveback_urb(u132, endp, urb, -EINTR); in u132_hcd_initial_setup_sent()
1276 u132_hcd_giveback_urb(u132, endp, urb, -ENODEV); in u132_hcd_initial_setup_sent()
1280 struct u132_ring *ring = endp->ring; in u132_hcd_initial_setup_sent()
1283 ring->number, endp, urb, address, endp->usb_endp, 0, in u132_hcd_initial_setup_sent()
1286 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_initial_setup_sent()
1292 u132_hcd_giveback_urb(u132, endp, urb, 0); in u132_hcd_initial_setup_sent()
1317 struct u132_endp *endp = list_entry(scan, in u132_hcd_ring_work_scheduler() local
1319 if (endp->queue_next == endp->queue_last) { in u132_hcd_ring_work_scheduler()
1320 } else if ((endp->delayed == 0) in u132_hcd_ring_work_scheduler()
1321 || time_after_eq(jiffies, endp->jiffies)) { in u132_hcd_ring_work_scheduler()
1322 ring->curr_endp = endp; in u132_hcd_ring_work_scheduler()
1329 unsigned long delta = endp->jiffies - jiffies; in u132_hcd_ring_work_scheduler()
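
u132_hcd_ring_work_scheduler() (1317-1329) walks the endpoints chained on the ring: an endpoint with an empty queue (queue_next == queue_last) is skipped, the first one that is not delayed, or whose delay has expired per time_after_eq(), becomes ring->curr_endp, and otherwise the shortest remaining delay is remembered so the ring work can be requeued. A condensed sketch of that scan:

    #include <linux/jiffies.h>
    #include <linux/list.h>

    /* Return the next runnable endpoint on the ring, or NULL after
     * lowering *delta to the shortest remaining delay (condensed
     * sketch; the caller seeds *delta with its default requeue time). */
    static struct u132_endp *my_ring_pick(struct u132_ring *ring,
            unsigned long *delta)
    {
            struct list_head *scan;
            struct list_head *head = &ring->curr_endp->endp_ring;

            list_for_each(scan, head) {
                    struct u132_endp *endp = list_entry(scan,
                            struct u132_endp, endp_ring);

                    if (endp->queue_next == endp->queue_last)
                            continue;       /* nothing queued here */
                    if (endp->delayed == 0 ||
                                    time_after_eq(jiffies, endp->jiffies))
                            return endp;    /* runnable right now */
                    if (endp->jiffies - jiffies < *delta)
                            *delta = endp->jiffies - jiffies;
            }
            return NULL;
    }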
1366 struct u132_endp *endp = in u132_hcd_endp_work_scheduler() local
1368 struct u132 *u132 = endp->u132; in u132_hcd_endp_work_scheduler()
1370 ring = endp->ring; in u132_hcd_endp_work_scheduler()
1371 if (endp->edset_flush) { in u132_hcd_endp_work_scheduler()
1372 endp->edset_flush = 0; in u132_hcd_endp_work_scheduler()
1373 if (endp->dequeueing) in u132_hcd_endp_work_scheduler()
1375 ring->number, endp); in u132_hcd_endp_work_scheduler()
1377 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1379 } else if (endp->active) { in u132_hcd_endp_work_scheduler()
1381 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1385 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1387 } else if (endp->queue_next == endp->queue_last) { in u132_hcd_endp_work_scheduler()
1389 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1391 } else if (endp->pipetype == PIPE_INTERRUPT) { in u132_hcd_endp_work_scheduler()
1392 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_endp_work_scheduler()
1395 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1399 struct urb *urb = endp->urb_list[ENDP_QUEUE_MASK & in u132_hcd_endp_work_scheduler()
1400 endp->queue_next]; in u132_hcd_endp_work_scheduler()
1401 endp->active = 1; in u132_hcd_endp_work_scheduler()
1402 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1405 retval = edset_single(u132, ring, endp, urb, address, in u132_hcd_endp_work_scheduler()
1406 endp->toggle_bits, u132_hcd_interrupt_recv); in u132_hcd_endp_work_scheduler()
1408 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_endp_work_scheduler()
1411 } else if (endp->pipetype == PIPE_CONTROL) { in u132_hcd_endp_work_scheduler()
1412 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_endp_work_scheduler()
1415 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1419 struct urb *urb = endp->urb_list[ENDP_QUEUE_MASK & in u132_hcd_endp_work_scheduler()
1420 endp->queue_next]; in u132_hcd_endp_work_scheduler()
1421 endp->active = 1; in u132_hcd_endp_work_scheduler()
1422 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1425 retval = edset_setup(u132, ring, endp, urb, address, in u132_hcd_endp_work_scheduler()
1428 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_endp_work_scheduler()
1430 } else if (endp->usb_addr == 0) { in u132_hcd_endp_work_scheduler()
1432 struct urb *urb = endp->urb_list[ENDP_QUEUE_MASK & in u132_hcd_endp_work_scheduler()
1433 endp->queue_next]; in u132_hcd_endp_work_scheduler()
1434 endp->active = 1; in u132_hcd_endp_work_scheduler()
1435 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1438 retval = edset_setup(u132, ring, endp, urb, 0, 0x2, in u132_hcd_endp_work_scheduler()
1441 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_endp_work_scheduler()
1445 struct urb *urb = endp->urb_list[ENDP_QUEUE_MASK & in u132_hcd_endp_work_scheduler()
1446 endp->queue_next]; in u132_hcd_endp_work_scheduler()
1447 address = u132->addr[endp->usb_addr].address; in u132_hcd_endp_work_scheduler()
1448 endp->active = 1; in u132_hcd_endp_work_scheduler()
1449 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1452 retval = edset_setup(u132, ring, endp, urb, address, in u132_hcd_endp_work_scheduler()
1455 u132_hcd_giveback_urb(u132, endp, urb, retval); in u132_hcd_endp_work_scheduler()
1459 if (endp->input) { in u132_hcd_endp_work_scheduler()
1460 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_endp_work_scheduler()
1463 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1467 struct urb *urb = endp->urb_list[ in u132_hcd_endp_work_scheduler()
1468 ENDP_QUEUE_MASK & endp->queue_next]; in u132_hcd_endp_work_scheduler()
1469 endp->active = 1; in u132_hcd_endp_work_scheduler()
1470 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1473 retval = edset_input(u132, ring, endp, urb, in u132_hcd_endp_work_scheduler()
1474 address, endp->toggle_bits, in u132_hcd_endp_work_scheduler()
1478 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_endp_work_scheduler()
1483 u8 address = u132->addr[endp->usb_addr].address; in u132_hcd_endp_work_scheduler()
1486 u132_endp_put_kref(u132, endp); in u132_hcd_endp_work_scheduler()
1490 struct urb *urb = endp->urb_list[ in u132_hcd_endp_work_scheduler()
1491 ENDP_QUEUE_MASK & endp->queue_next]; in u132_hcd_endp_work_scheduler()
1492 endp->active = 1; in u132_hcd_endp_work_scheduler()
1493 ring->curr_endp = endp; in u132_hcd_endp_work_scheduler()
1496 retval = edset_output(u132, ring, endp, urb, in u132_hcd_endp_work_scheduler()
1497 address, endp->toggle_bits, in u132_hcd_endp_work_scheduler()
1501 u132_hcd_giveback_urb(u132, endp, urb, in u132_hcd_endp_work_scheduler()
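
u132_hcd_endp_work_scheduler() (1366-1501) is the per-endpoint worker. It first honours edset_flush (aborting any in-flight ED when a dequeue is pending), then drops its work reference and returns if the endpoint is already active or its queue is empty; otherwise it dispatches the head URB by pipe type: PIPE_INTERRUPT via edset_single(), PIPE_CONTROL via edset_setup() (with distinct handling for un-addressed devices and SET_ADDRESS), and bulk via edset_input() or edset_output() by direction. A condensed dispatch sketch:

    /* Condensed dispatch sketch; setup_sent_callback is a placeholder
     * for the phase-specific control callback the real worker selects,
     * and 0x2 mirrors the address-0 toggle shown at 1438. */
    static void my_endp_dispatch(struct u132 *u132, struct u132_ring *ring,
            struct u132_endp *endp, u8 address)
    {
            struct urb *urb = endp->urb_list[ENDP_QUEUE_MASK &
                    endp->queue_next];
            int retval;

            endp->active = 1;
            ring->curr_endp = endp;
            if (endp->pipetype == PIPE_INTERRUPT)
                    retval = edset_single(u132, ring, endp, urb, address,
                            endp->toggle_bits, u132_hcd_interrupt_recv);
            else if (endp->pipetype == PIPE_CONTROL)
                    retval = edset_setup(u132, ring, endp, urb, address,
                            0x2, setup_sent_callback);
            else if (endp->input)
                    retval = edset_input(u132, ring, endp, urb, address,
                            endp->toggle_bits, u132_hcd_bulk_input_recv);
            else
                    retval = edset_output(u132, ring, endp, urb, address,
                            endp->toggle_bits, u132_hcd_bulk_output_sent);
            if (retval != 0)
                    u132_hcd_giveback_urb(u132, endp, urb, retval);
    }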
1870 struct u132_endp *endp = kmalloc(sizeof(struct u132_endp), mem_flags); in create_endpoint_and_queue_int() local
1872 if (!endp) in create_endpoint_and_queue_int()
1875 spin_lock_init(&endp->queue_lock.slock); in create_endpoint_and_queue_int()
1876 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_int()
1879 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_int()
1880 kfree(endp); in create_endpoint_and_queue_int()
1885 urb->ep->hcpriv = u132->endp[endp_number - 1] = endp; in create_endpoint_and_queue_int()
1886 INIT_DELAYED_WORK(&endp->scheduler, u132_hcd_endp_work_scheduler); in create_endpoint_and_queue_int()
1887 INIT_LIST_HEAD(&endp->urb_more); in create_endpoint_and_queue_int()
1888 ring = endp->ring = &u132->ring[0]; in create_endpoint_and_queue_int()
1890 list_add_tail(&endp->endp_ring, &ring->curr_endp->endp_ring); in create_endpoint_and_queue_int()
1892 INIT_LIST_HEAD(&endp->endp_ring); in create_endpoint_and_queue_int()
1893 ring->curr_endp = endp; in create_endpoint_and_queue_int()
1896 endp->dequeueing = 0; in create_endpoint_and_queue_int()
1897 endp->edset_flush = 0; in create_endpoint_and_queue_int()
1898 endp->active = 0; in create_endpoint_and_queue_int()
1899 endp->delayed = 0; in create_endpoint_and_queue_int()
1900 endp->endp_number = endp_number; in create_endpoint_and_queue_int()
1901 endp->u132 = u132; in create_endpoint_and_queue_int()
1902 endp->hep = urb->ep; in create_endpoint_and_queue_int()
1903 endp->pipetype = usb_pipetype(urb->pipe); in create_endpoint_and_queue_int()
1904 u132_endp_init_kref(u132, endp); in create_endpoint_and_queue_int()
1906 endp->toggle_bits = 0x2; in create_endpoint_and_queue_int()
1908 endp->input = 1; in create_endpoint_and_queue_int()
1909 endp->output = 0; in create_endpoint_and_queue_int()
1913 endp->toggle_bits = 0x2; in create_endpoint_and_queue_int()
1915 endp->input = 0; in create_endpoint_and_queue_int()
1916 endp->output = 1; in create_endpoint_and_queue_int()
1921 endp->delayed = 1; in create_endpoint_and_queue_int()
1922 endp->jiffies = jiffies + msecs_to_jiffies(urb->interval); in create_endpoint_and_queue_int()
1923 endp->udev_number = address; in create_endpoint_and_queue_int()
1924 endp->usb_addr = usb_addr; in create_endpoint_and_queue_int()
1925 endp->usb_endp = usb_endp; in create_endpoint_and_queue_int()
1926 endp->queue_size = 1; in create_endpoint_and_queue_int()
1927 endp->queue_last = 0; in create_endpoint_and_queue_int()
1928 endp->queue_next = 0; in create_endpoint_and_queue_int()
1929 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in create_endpoint_and_queue_int()
1930 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_int()
1931 u132_endp_queue_work(u132, endp, msecs_to_jiffies(urb->interval)); in create_endpoint_and_queue_int()
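
create_endpoint_and_queue_int() (1870-1931) allocates the endpoint, initialises its lock, delayed work and overflow list, links it into ring 0, fills in toggle/direction/addressing state, seeds the queue with the first URB, and finally kicks the worker after the URB's polling interval. The matches omit the branch around 1889-1893; reconstructed, the ring insertion reads:

    /* Reconstructed ring-insertion branch: append behind the current
     * endpoint when the ring is non-empty, else become the ring head. */
    ring = endp->ring = &u132->ring[0];
    if (ring->curr_endp) {
            list_add_tail(&endp->endp_ring, &ring->curr_endp->endp_ring);
    } else {
            INIT_LIST_HEAD(&endp->endp_ring);
            ring->curr_endp = endp;
    }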
1937 struct usb_device *usb_dev, struct u132_endp *endp, u8 usb_addr, in queue_int_on_old_endpoint() argument
1941 endp->delayed = 1; in queue_int_on_old_endpoint()
1942 endp->jiffies = jiffies + msecs_to_jiffies(urb->interval); in queue_int_on_old_endpoint()
1943 if (endp->queue_size++ < ENDP_QUEUE_SIZE) { in queue_int_on_old_endpoint()
1944 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in queue_int_on_old_endpoint()
1949 endp->queue_size -= 1; in queue_int_on_old_endpoint()
1952 list_add_tail(&urbq->urb_more, &endp->urb_more); in queue_int_on_old_endpoint()
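
queue_int_on_old_endpoint() (1937-1952) re-arms the interrupt delay, then stores the URB in the fixed ring while queue_size is below ENDP_QUEUE_SIZE; past that, it chains an overflow node onto urb_more, rolling the optimistic queue_size increment back if the allocation fails. A reconstructed sketch of that store-or-overflow step (GFP_ATOMIC stands in for the caller-supplied mem_flags):

    /* Store in the fixed ring while there is room, else chain an
     * overflow node; undo the size increment on allocation failure. */
    if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
            endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb;
    } else {
            struct u132_urbq *urbq =
                    kmalloc(sizeof(struct u132_urbq), GFP_ATOMIC);

            if (urbq == NULL) {
                    endp->queue_size -= 1;
                    return -ENOMEM;
            }
            urbq->urb = urb;
            list_add_tail(&urbq->urb_more, &endp->urb_more);
    }
    return 0;

The bulk and control variants at 2036-2049 and 2147-2237 repeat the same step, the control one with extra handling around device addressing.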
1969 struct u132_endp *endp = kmalloc(sizeof(struct u132_endp), mem_flags); in create_endpoint_and_queue_bulk() local
1971 if (!endp) in create_endpoint_and_queue_bulk()
1974 spin_lock_init(&endp->queue_lock.slock); in create_endpoint_and_queue_bulk()
1975 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_bulk()
1978 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_bulk()
1979 kfree(endp); in create_endpoint_and_queue_bulk()
1984 urb->ep->hcpriv = u132->endp[endp_number - 1] = endp; in create_endpoint_and_queue_bulk()
1985 INIT_DELAYED_WORK(&endp->scheduler, u132_hcd_endp_work_scheduler); in create_endpoint_and_queue_bulk()
1986 INIT_LIST_HEAD(&endp->urb_more); in create_endpoint_and_queue_bulk()
1987 endp->dequeueing = 0; in create_endpoint_and_queue_bulk()
1988 endp->edset_flush = 0; in create_endpoint_and_queue_bulk()
1989 endp->active = 0; in create_endpoint_and_queue_bulk()
1990 endp->delayed = 0; in create_endpoint_and_queue_bulk()
1991 endp->endp_number = endp_number; in create_endpoint_and_queue_bulk()
1992 endp->u132 = u132; in create_endpoint_and_queue_bulk()
1993 endp->hep = urb->ep; in create_endpoint_and_queue_bulk()
1994 endp->pipetype = usb_pipetype(urb->pipe); in create_endpoint_and_queue_bulk()
1995 u132_endp_init_kref(u132, endp); in create_endpoint_and_queue_bulk()
1997 endp->toggle_bits = 0x2; in create_endpoint_and_queue_bulk()
2000 endp->input = 1; in create_endpoint_and_queue_bulk()
2001 endp->output = 0; in create_endpoint_and_queue_bulk()
2005 endp->toggle_bits = 0x2; in create_endpoint_and_queue_bulk()
2008 endp->input = 0; in create_endpoint_and_queue_bulk()
2009 endp->output = 1; in create_endpoint_and_queue_bulk()
2013 ring = endp->ring = &u132->ring[ring_number - 1]; in create_endpoint_and_queue_bulk()
2015 list_add_tail(&endp->endp_ring, &ring->curr_endp->endp_ring); in create_endpoint_and_queue_bulk()
2017 INIT_LIST_HEAD(&endp->endp_ring); in create_endpoint_and_queue_bulk()
2018 ring->curr_endp = endp; in create_endpoint_and_queue_bulk()
2022 endp->udev_number = address; in create_endpoint_and_queue_bulk()
2023 endp->usb_addr = usb_addr; in create_endpoint_and_queue_bulk()
2024 endp->usb_endp = usb_endp; in create_endpoint_and_queue_bulk()
2025 endp->queue_size = 1; in create_endpoint_and_queue_bulk()
2026 endp->queue_last = 0; in create_endpoint_and_queue_bulk()
2027 endp->queue_next = 0; in create_endpoint_and_queue_bulk()
2028 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in create_endpoint_and_queue_bulk()
2029 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_bulk()
2030 u132_endp_queue_work(u132, endp, 0); in create_endpoint_and_queue_bulk()
2036 struct usb_device *usb_dev, struct u132_endp *endp, u8 usb_addr, in queue_bulk_on_old_endpoint() argument
2040 if (endp->queue_size++ < ENDP_QUEUE_SIZE) { in queue_bulk_on_old_endpoint()
2041 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in queue_bulk_on_old_endpoint()
2046 endp->queue_size -= 1; in queue_bulk_on_old_endpoint()
2049 list_add_tail(&urbq->urb_more, &endp->urb_more); in queue_bulk_on_old_endpoint()
2065 struct u132_endp *endp = kmalloc(sizeof(struct u132_endp), mem_flags); in create_endpoint_and_queue_control() local
2067 if (!endp) in create_endpoint_and_queue_control()
2070 spin_lock_init(&endp->queue_lock.slock); in create_endpoint_and_queue_control()
2071 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_control()
2074 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_control()
2075 kfree(endp); in create_endpoint_and_queue_control()
2080 urb->ep->hcpriv = u132->endp[endp_number - 1] = endp; in create_endpoint_and_queue_control()
2081 INIT_DELAYED_WORK(&endp->scheduler, u132_hcd_endp_work_scheduler); in create_endpoint_and_queue_control()
2082 INIT_LIST_HEAD(&endp->urb_more); in create_endpoint_and_queue_control()
2083 ring = endp->ring = &u132->ring[0]; in create_endpoint_and_queue_control()
2085 list_add_tail(&endp->endp_ring, &ring->curr_endp->endp_ring); in create_endpoint_and_queue_control()
2087 INIT_LIST_HEAD(&endp->endp_ring); in create_endpoint_and_queue_control()
2088 ring->curr_endp = endp; in create_endpoint_and_queue_control()
2091 endp->dequeueing = 0; in create_endpoint_and_queue_control()
2092 endp->edset_flush = 0; in create_endpoint_and_queue_control()
2093 endp->active = 0; in create_endpoint_and_queue_control()
2094 endp->delayed = 0; in create_endpoint_and_queue_control()
2095 endp->endp_number = endp_number; in create_endpoint_and_queue_control()
2096 endp->u132 = u132; in create_endpoint_and_queue_control()
2097 endp->hep = urb->ep; in create_endpoint_and_queue_control()
2098 u132_endp_init_kref(u132, endp); in create_endpoint_and_queue_control()
2099 u132_endp_get_kref(u132, endp); in create_endpoint_and_queue_control()
2103 endp->udev_number = address; in create_endpoint_and_queue_control()
2104 endp->usb_addr = usb_addr; in create_endpoint_and_queue_control()
2105 endp->usb_endp = usb_endp; in create_endpoint_and_queue_control()
2106 endp->input = 1; in create_endpoint_and_queue_control()
2107 endp->output = 1; in create_endpoint_and_queue_control()
2108 endp->pipetype = usb_pipetype(urb->pipe); in create_endpoint_and_queue_control()
2114 endp->queue_size = 1; in create_endpoint_and_queue_control()
2115 endp->queue_last = 0; in create_endpoint_and_queue_control()
2116 endp->queue_next = 0; in create_endpoint_and_queue_control()
2117 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in create_endpoint_and_queue_control()
2118 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_control()
2119 u132_endp_queue_work(u132, endp, 0); in create_endpoint_and_queue_control()
2124 endp->udev_number = address; in create_endpoint_and_queue_control()
2125 endp->usb_addr = usb_addr; in create_endpoint_and_queue_control()
2126 endp->usb_endp = usb_endp; in create_endpoint_and_queue_control()
2127 endp->input = 1; in create_endpoint_and_queue_control()
2128 endp->output = 1; in create_endpoint_and_queue_control()
2129 endp->pipetype = usb_pipetype(urb->pipe); in create_endpoint_and_queue_control()
2135 endp->queue_size = 1; in create_endpoint_and_queue_control()
2136 endp->queue_last = 0; in create_endpoint_and_queue_control()
2137 endp->queue_next = 0; in create_endpoint_and_queue_control()
2138 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = urb; in create_endpoint_and_queue_control()
2139 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_control()
2140 u132_endp_queue_work(u132, endp, 0); in create_endpoint_and_queue_control()
2147 struct usb_device *usb_dev, struct u132_endp *endp, u8 usb_addr, in queue_control_on_old_endpoint() argument
2153 if (endp->queue_size++ < ENDP_QUEUE_SIZE) { in queue_control_on_old_endpoint()
2154 endp->urb_list[ENDP_QUEUE_MASK & in queue_control_on_old_endpoint()
2155 endp->queue_last++] = urb; in queue_control_on_old_endpoint()
2161 endp->queue_size -= 1; in queue_control_on_old_endpoint()
2165 &endp->urb_more); in queue_control_on_old_endpoint()
2181 endp->udev_number = i; in queue_control_on_old_endpoint()
2186 endp->endp_number; in queue_control_on_old_endpoint()
2189 endp->endp_number; in queue_control_on_old_endpoint()
2203 if (endp->queue_size++ < ENDP_QUEUE_SIZE) { in queue_control_on_old_endpoint()
2204 endp->urb_list[ENDP_QUEUE_MASK & in queue_control_on_old_endpoint()
2205 endp->queue_last++] = urb; in queue_control_on_old_endpoint()
2211 endp->queue_size -= 1; in queue_control_on_old_endpoint()
2215 &endp->urb_more); in queue_control_on_old_endpoint()
2227 if (endp->queue_size++ < ENDP_QUEUE_SIZE) { in queue_control_on_old_endpoint()
2228 endp->urb_list[ENDP_QUEUE_MASK & endp->queue_last++] = in queue_control_on_old_endpoint()
2234 endp->queue_size -= 1; in queue_control_on_old_endpoint()
2237 list_add_tail(&urbq->urb_more, &endp->urb_more); in queue_control_on_old_endpoint()
2271 struct u132_endp *endp = urb->ep->hcpriv; in u132_urb_enqueue() local
2273 if (endp) { in u132_urb_enqueue()
2276 spin_lock_irqsave(&endp->queue_lock.slock, in u132_urb_enqueue()
2282 usb_dev, endp, in u132_urb_enqueue()
2289 spin_unlock_irqrestore(&endp->queue_lock.slock, in u132_urb_enqueue()
2294 u132_endp_queue_work(u132, endp, in u132_urb_enqueue()
2313 struct u132_endp *endp = urb->ep->hcpriv; in u132_urb_enqueue() local
2315 if (endp) { in u132_urb_enqueue()
2318 spin_lock_irqsave(&endp->queue_lock.slock, in u132_urb_enqueue()
2324 usb_dev, endp, in u132_urb_enqueue()
2331 spin_unlock_irqrestore(&endp->queue_lock.slock, in u132_urb_enqueue()
2336 u132_endp_queue_work(u132, endp, 0); in u132_urb_enqueue()
2346 struct u132_endp *endp = urb->ep->hcpriv; in u132_urb_enqueue() local
2364 if (endp) { in u132_urb_enqueue()
2367 spin_lock_irqsave(&endp->queue_lock.slock, in u132_urb_enqueue()
2373 endp, usb_addr, in u132_urb_enqueue()
2379 spin_unlock_irqrestore(&endp->queue_lock.slock, in u132_urb_enqueue()
2384 u132_endp_queue_work(u132, endp, 0); in u132_urb_enqueue()
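
u132_urb_enqueue() (2271-2384) repeats one decision per pipe type: if urb->ep->hcpriv already names an endpoint, take its queue lock and append through the matching queue_*_on_old_endpoint() helper, then kick the worker; otherwise fall through to the corresponding create_endpoint_and_queue_*() constructor. A condensed sketch of the bulk branch, with helper arguments past those visible in the matches marked as assumed:

    /* Reuse-or-create decision (bulk branch, condensed sketch). */
    struct u132_endp *endp = urb->ep->hcpriv;

    if (endp) {
            unsigned long irqs;
            int retval;

            spin_lock_irqsave(&endp->queue_lock.slock, irqs);
            retval = queue_bulk_on_old_endpoint(u132, usb_dev, endp,
                    usb_addr, usb_endp, urb);   /* trailing args assumed */
            spin_unlock_irqrestore(&endp->queue_lock.slock, irqs);
            if (retval == 0)
                    u132_endp_queue_work(u132, endp, 0);
            return retval;
    }
    return create_endpoint_and_queue_bulk(u132, udev, usb_dev, usb_addr,
            usb_endp, address, urb, mem_flags); /* arg order assumed */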
2398 struct u132_endp *endp, struct urb *urb) in dequeue_from_overflow_chain() argument
2401 struct list_head *head = &endp->urb_more; in dequeue_from_overflow_chain()
2408 endp->queue_size -= 1; in dequeue_from_overflow_chain()
2417 "\n", urb, endp->endp_number, endp, endp->ring->number, in dequeue_from_overflow_chain()
2418 endp->input ? 'I' : ' ', endp->output ? 'O' : ' ', in dequeue_from_overflow_chain()
2419 endp->usb_endp, endp->usb_addr, endp->queue_size, in dequeue_from_overflow_chain()
2420 endp->queue_next, endp->queue_last); in dequeue_from_overflow_chain()
2424 static int u132_endp_urb_dequeue(struct u132 *u132, struct u132_endp *endp, in u132_endp_urb_dequeue() argument
2430 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2433 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2436 if (endp->queue_size == 0) { in u132_endp_urb_dequeue()
2439 endp->endp_number, endp, endp->ring->number, in u132_endp_urb_dequeue()
2440 endp->input ? 'I' : ' ', endp->output ? 'O' : ' ', in u132_endp_urb_dequeue()
2441 endp->usb_endp, endp->usb_addr); in u132_endp_urb_dequeue()
2442 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2445 if (urb == endp->urb_list[ENDP_QUEUE_MASK & endp->queue_next]) { in u132_endp_urb_dequeue()
2446 if (endp->active) { in u132_endp_urb_dequeue()
2447 endp->dequeueing = 1; in u132_endp_urb_dequeue()
2448 endp->edset_flush = 1; in u132_endp_urb_dequeue()
2449 u132_endp_queue_work(u132, endp, 0); in u132_endp_urb_dequeue()
2450 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2453 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2454 u132_hcd_abandon_urb(u132, endp, urb, status); in u132_endp_urb_dequeue()
2459 u16 queue_size = endp->queue_size; in u132_endp_urb_dequeue()
2460 u16 queue_scan = endp->queue_next; in u132_endp_urb_dequeue()
2463 if (urb == endp->urb_list[ENDP_QUEUE_MASK & in u132_endp_urb_dequeue()
2465 urb_slot = &endp->urb_list[ENDP_QUEUE_MASK & in u132_endp_urb_dequeue()
2472 *urb_slot = endp->urb_list[ENDP_QUEUE_MASK & in u132_endp_urb_dequeue()
2474 urb_slot = &endp->urb_list[ENDP_QUEUE_MASK & in u132_endp_urb_dequeue()
2481 endp->queue_size -= 1; in u132_endp_urb_dequeue()
2482 if (list_empty(&endp->urb_more)) { in u132_endp_urb_dequeue()
2483 spin_unlock_irqrestore(&endp->queue_lock.slock, in u132_endp_urb_dequeue()
2486 struct list_head *next = endp->urb_more.next; in u132_endp_urb_dequeue()
2491 spin_unlock_irqrestore(&endp->queue_lock.slock, in u132_endp_urb_dequeue()
2497 } else if (list_empty(&endp->urb_more)) { in u132_endp_urb_dequeue()
2501 endp->endp_number, endp, endp->ring->number, in u132_endp_urb_dequeue()
2502 endp->input ? 'I' : ' ', in u132_endp_urb_dequeue()
2503 endp->output ? 'O' : ' ', endp->usb_endp, in u132_endp_urb_dequeue()
2504 endp->usb_addr, endp->queue_size, in u132_endp_urb_dequeue()
2505 endp->queue_next, endp->queue_last); in u132_endp_urb_dequeue()
2506 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
2512 retval = dequeue_from_overflow_chain(u132, endp, in u132_endp_urb_dequeue()
2514 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_endp_urb_dequeue()
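
u132_endp_urb_dequeue() (2424-2514) handles three cases: if the target URB is the active head it sets dequeueing and edset_flush and lets the worker abort the in-flight ED; if the head is idle it abandons the URB directly; otherwise it searches the ring, falling back to dequeue_from_overflow_chain() for URBs that never left urb_more. For the mid-queue case the later entries are shifted down one slot so the ring stays dense; a reconstructed sketch:

    /* Reconstructed compaction sketch: locate the URB in the ring, then
     * pull each later entry down one slot and account for the removal. */
    u16 queue_size = endp->queue_size;
    u16 queue_scan = endp->queue_next;
    struct urb **urb_slot = NULL;

    while (queue_size-- > 0) {
            if (urb == endp->urb_list[ENDP_QUEUE_MASK & queue_scan]) {
                    urb_slot = &endp->urb_list[ENDP_QUEUE_MASK & queue_scan];
                    break;
            }
            queue_scan += 1;
    }
    while (urb_slot && queue_size-- > 0) {
            *urb_slot = endp->urb_list[ENDP_QUEUE_MASK & ++queue_scan];
            urb_slot = &endp->urb_list[ENDP_QUEUE_MASK & queue_scan];
    }
    if (urb_slot)
            endp->queue_size -= 1;  /* one fewer URB queued */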
2534 struct u132_endp *endp = u132->endp[endp_number - 1]; in u132_urb_dequeue() local
2535 return u132_endp_urb_dequeue(u132, endp, urb, status); in u132_urb_dequeue()
2538 struct u132_endp *endp = u132->endp[endp_number - 1]; in u132_urb_dequeue() local
2539 return u132_endp_urb_dequeue(u132, endp, urb, status); in u132_urb_dequeue()
2553 struct u132_endp *endp = hep->hcpriv; in u132_endpoint_disable() local
2554 if (endp) in u132_endpoint_disable()
2555 u132_endp_put_kref(u132, endp); in u132_endpoint_disable()
3014 struct u132_endp *endp = u132->endp[endps]; in u132_remove() local
3015 if (endp) in u132_remove()
3016 u132_endp_cancel_work(u132, endp); in u132_remove()
3082 u132->endp[endps] = NULL; in u132_initialise()
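
Teardown (3014-3016) cancels any pending per-endpoint work so the reference held by each queued work item is dropped, and initialisation (3082) starts every endp[] slot at NULL. The loop around the cancel, reconstructed:

    /* Reconstructed teardown loop: cancel pending per-endpoint work,
     * dropping the reference each queued work item held. */
    int endps;

    for (endps = 0; endps < MAX_U132_ENDPS; endps++) {
            struct u132_endp *endp = u132->endp[endps];

            if (endp)
                    u132_endp_cancel_work(u132, endp);
    }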