Lines Matching refs:mcs
132 static int mcs_set_reg(struct mcs_cb *mcs, __u16 reg, __u16 val) in mcs_set_reg() argument
134 struct usb_device *dev = mcs->usbdev; in mcs_set_reg()
141 static int mcs_get_reg(struct mcs_cb *mcs, __u16 reg, __u16 * val) in mcs_get_reg() argument
143 struct usb_device *dev = mcs->usbdev; in mcs_get_reg()
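The mcs_set_reg()/mcs_get_reg() hits above are the driver's only path to the MCS7780's registers. Below is a minimal sketch of one plausible implementation built on usb_control_msg(); struct mcs_cb and its usbdev member are assumed to come from mcs7780.h, and the request codes, request types and timeout (MCS_WRREQ, MCS_RDREQ, MCS_WR_RTYPE, MCS_RD_RTYPE, MCS_CTRL_TIMEOUT) are assumed values standing in for the real definitions there. The read path bounces through a kmalloc'd buffer because usb_control_msg() requires DMA-safe memory.

#include <linux/usb.h>
#include <linux/slab.h>

/* Assumed vendor-request constants; the real values live in mcs7780.h. */
#define MCS_WRREQ        0x0e	/* write register request */
#define MCS_RDREQ        0x0f	/* read register request */
#define MCS_WR_RTYPE     0x40	/* vendor, host-to-device */
#define MCS_RD_RTYPE     0xc0	/* vendor, device-to-host */
#define MCS_CTRL_TIMEOUT 500	/* milliseconds */

/* Write a 16-bit value to one of the chip's registers. */
static int mcs_set_reg(struct mcs_cb *mcs, __u16 reg, __u16 val)
{
	struct usb_device *dev = mcs->usbdev;

	return usb_control_msg(dev, usb_sndctrlpipe(dev, 0), MCS_WRREQ,
			       MCS_WR_RTYPE, val, reg, NULL, 0,
			       MCS_CTRL_TIMEOUT);
}

/* Read a 16-bit register into *val via a DMA-safe bounce buffer. */
static int mcs_get_reg(struct mcs_cb *mcs, __u16 reg, __u16 *val)
{
	struct usb_device *dev = mcs->usbdev;
	__u16 *dmabuf;
	int ret;

	dmabuf = kmalloc(sizeof(*dmabuf), GFP_KERNEL);
	if (!dmabuf)
		return -ENOMEM;

	ret = usb_control_msg(dev, usb_rcvctrlpipe(dev, 0), MCS_RDREQ,
			      MCS_RD_RTYPE, 0, reg, dmabuf, 2,
			      MCS_CTRL_TIMEOUT);
	*val = *dmabuf;
	kfree(dmabuf);

	return ret;
}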
168 static inline int mcs_setup_transceiver_vishay(struct mcs_cb *mcs) in mcs_setup_transceiver_vishay() argument
174 ret = mcs_get_reg(mcs, MCS_XCVR_REG, &rval); in mcs_setup_transceiver_vishay()
188 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
193 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
198 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
208 static inline int mcs_setup_transceiver_agilent(struct mcs_cb *mcs) in mcs_setup_transceiver_agilent() argument
215 static inline int mcs_setup_transceiver_sharp(struct mcs_cb *mcs) in mcs_setup_transceiver_sharp() argument
222 static inline int mcs_setup_transceiver(struct mcs_cb *mcs) in mcs_setup_transceiver() argument
233 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
237 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
242 ret = mcs_set_reg(mcs, MCS_MINRXPW_REG, rval); in mcs_setup_transceiver()
246 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
251 if(mcs->sir_tweak) in mcs_setup_transceiver()
261 mcs->speed = 9600; in mcs_setup_transceiver()
262 mcs->new_speed = 0; /* no speed change pending */ in mcs_setup_transceiver()
270 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
275 switch (mcs->transceiver_type) { in mcs_setup_transceiver()
277 ret = mcs_setup_transceiver_vishay(mcs); in mcs_setup_transceiver()
281 ret = mcs_setup_transceiver_sharp(mcs); in mcs_setup_transceiver()
285 ret = mcs_setup_transceiver_agilent(mcs); in mcs_setup_transceiver()
290 mcs->transceiver_type); in mcs_setup_transceiver()
299 if (mcs->transceiver_type != MCS_TSC_SHARP) { in mcs_setup_transceiver()
301 ret = mcs_get_reg(mcs, MCS_XCVR_REG, &rval); in mcs_setup_transceiver()
304 if (mcs->receive_mode) in mcs_setup_transceiver()
308 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver()
315 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
321 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
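mcs_setup_transceiver() repeats the same read-modify-write dance on MCS_MODE_REG and MCS_XCVR_REG. The helper below is hypothetical, not part of the driver, and only illustrates that pattern using the accessors sketched earlier.

/* Hypothetical helper: set and clear bits in a chip register using the
 * read-modify-write pattern seen throughout mcs_setup_transceiver(). */
static int mcs_update_reg(struct mcs_cb *mcs, __u16 reg, __u16 set, __u16 clear)
{
	__u16 rval;
	int ret;

	ret = mcs_get_reg(mcs, reg, &rval);
	if (ret < 0)
		return ret;

	rval |= set;
	rval &= ~clear;

	return mcs_set_reg(mcs, reg, rval);
}

In the driver itself every accessor call's return value is checked (the ret = assignments above), so a failed control transfer aborts the setup.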
400 static void mcs_unwrap_mir(struct mcs_cb *mcs, __u8 *buf, int len) in mcs_unwrap_mir() argument
413 mcs->netdev->name, new_len); in mcs_unwrap_mir()
414 ++mcs->netdev->stats.rx_errors; in mcs_unwrap_mir()
415 ++mcs->netdev->stats.rx_length_errors; in mcs_unwrap_mir()
424 mcs->netdev->stats.rx_errors++; in mcs_unwrap_mir()
425 mcs->netdev->stats.rx_crc_errors++; in mcs_unwrap_mir()
431 ++mcs->netdev->stats.rx_dropped; in mcs_unwrap_mir()
440 skb->dev = mcs->netdev; in mcs_unwrap_mir()
444 mcs->netdev->stats.rx_packets++; in mcs_unwrap_mir()
445 mcs->netdev->stats.rx_bytes += new_len; in mcs_unwrap_mir()
452 static void mcs_unwrap_fir(struct mcs_cb *mcs, __u8 *buf, int len) in mcs_unwrap_fir() argument
466 mcs->netdev->name, new_len); in mcs_unwrap_fir()
467 ++mcs->netdev->stats.rx_errors; in mcs_unwrap_fir()
468 ++mcs->netdev->stats.rx_length_errors; in mcs_unwrap_fir()
476 mcs->netdev->stats.rx_errors++; in mcs_unwrap_fir()
477 mcs->netdev->stats.rx_crc_errors++; in mcs_unwrap_fir()
483 ++mcs->netdev->stats.rx_dropped; in mcs_unwrap_fir()
492 skb->dev = mcs->netdev; in mcs_unwrap_fir()
496 mcs->netdev->stats.rx_packets++; in mcs_unwrap_fir()
497 mcs->netdev->stats.rx_bytes += new_len; in mcs_unwrap_fir()
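Once the length and CRC checks above pass, both unwrap routines end the same way: copy the de-wrapped payload into a fresh skb and hand it to the network stack. A sketch of that common tail follows, assuming buf and new_len hold the recovered frame; it relies on the standard skbuff/netdevice APIs (dev_alloc_skb, skb_put, netif_rx) plus ETH_P_IRDA from <linux/if_ether.h>, and the counters match the rx_packets/rx_bytes/rx_dropped updates visible in the listing.

	struct sk_buff *skb;

	skb = dev_alloc_skb(new_len + 1);
	if (unlikely(!skb)) {
		++mcs->netdev->stats.rx_dropped;
		return;
	}

	skb_reserve(skb, 1);			/* keep the IrDA payload aligned */
	skb_copy_to_linear_data(skb, buf, new_len);
	skb_put(skb, new_len);
	skb_reset_mac_header(skb);
	skb->protocol = htons(ETH_P_IRDA);	/* mark as a Linux-IrDA frame */
	skb->dev = mcs->netdev;

	netif_rx(skb);

	mcs->netdev->stats.rx_packets++;
	mcs->netdev->stats.rx_bytes += new_len;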
505 static inline int mcs_setup_urbs(struct mcs_cb *mcs) in mcs_setup_urbs() argument
507 mcs->rx_urb = NULL; in mcs_setup_urbs()
509 mcs->tx_urb = usb_alloc_urb(0, GFP_KERNEL); in mcs_setup_urbs()
510 if (!mcs->tx_urb) in mcs_setup_urbs()
513 mcs->rx_urb = usb_alloc_urb(0, GFP_KERNEL); in mcs_setup_urbs()
514 if (!mcs->rx_urb) { in mcs_setup_urbs()
515 usb_free_urb(mcs->tx_urb); in mcs_setup_urbs()
516 mcs->tx_urb = NULL; in mcs_setup_urbs()
527 static inline int mcs_receive_start(struct mcs_cb *mcs) in mcs_receive_start() argument
529 mcs->rx_buff.in_frame = FALSE; in mcs_receive_start()
530 mcs->rx_buff.state = OUTSIDE_FRAME; in mcs_receive_start()
532 usb_fill_bulk_urb(mcs->rx_urb, mcs->usbdev, in mcs_receive_start()
533 usb_rcvbulkpipe(mcs->usbdev, mcs->ep_in), in mcs_receive_start()
534 mcs->in_buf, 4096, mcs_receive_irq, mcs); in mcs_receive_start()
536 mcs->rx_urb->status = 0; in mcs_receive_start()
537 return usb_submit_urb(mcs->rx_urb, GFP_KERNEL); in mcs_receive_start()
541 static inline int mcs_find_endpoints(struct mcs_cb *mcs, in mcs_find_endpoints() argument
554 mcs->ep_in = ep[i].desc.bEndpointAddress; in mcs_find_endpoints()
556 mcs->ep_out = ep[i].desc.bEndpointAddress; in mcs_find_endpoints()
561 if ((mcs->ep_in != 0) && (mcs->ep_out != 0)) { in mcs_find_endpoints()
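mcs_find_endpoints() scans the interface's endpoint array and records one bulk-IN and one bulk-OUT address, as the ep_in/ep_out assignments above show. A minimal sketch using the standard ch9 endpoint helpers follows; the original appears to walk the array by hand, and the nonzero-on-success return mirrors how mcs_probe() tests the result.

#include <linux/usb.h>

/* Sketch: record the chip's bulk-IN and bulk-OUT endpoint addresses.
 * Returns nonzero once one endpoint per direction has been found. */
static int mcs_find_endpoints(struct mcs_cb *mcs,
			      struct usb_host_endpoint *ep, int epnum)
{
	int i;

	for (i = 0; i < epnum; i++) {
		if (!usb_endpoint_xfer_bulk(&ep[i].desc))
			continue;

		if (usb_endpoint_dir_in(&ep[i].desc))
			mcs->ep_in = ep[i].desc.bEndpointAddress;
		else
			mcs->ep_out = ep[i].desc.bEndpointAddress;

		/* stop as soon as both directions are known */
		if (mcs->ep_in != 0 && mcs->ep_out != 0)
			return 1;
	}

	return 0;
}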
572 struct mcs_cb *mcs = container_of(work, struct mcs_cb, work); in mcs_speed_work() local
573 struct net_device *netdev = mcs->netdev; in mcs_speed_work()
575 mcs_speed_change(mcs); in mcs_speed_work()
582 static int mcs_speed_change(struct mcs_cb *mcs) in mcs_speed_change() argument
590 nspeed = mcs_speed_set[(mcs->new_speed >> 8) & 0x0f]; in mcs_speed_change()
593 mcs_get_reg(mcs, MCS_RESV_REG, &rval); in mcs_speed_change()
602 mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_speed_change()
605 if (mcs->new_speed <= 115200) { in mcs_speed_change()
608 if ((rst = (mcs->speed > 115200))) in mcs_speed_change()
609 mcs_set_reg(mcs, MCS_MINRXPW_REG, 0); in mcs_speed_change()
611 } else if (mcs->new_speed <= 1152000) { in mcs_speed_change()
614 if ((rst = !(mcs->speed == 576000 || mcs->speed == 1152000))) in mcs_speed_change()
615 mcs_set_reg(mcs, MCS_MINRXPW_REG, 5); in mcs_speed_change()
620 if ((rst = (mcs->speed != 4000000))) in mcs_speed_change()
621 mcs_set_reg(mcs, MCS_MINRXPW_REG, 5); in mcs_speed_change()
628 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_speed_change()
633 switch (mcs->transceiver_type) { in mcs_speed_change()
635 ret = mcs_setup_transceiver_vishay(mcs); in mcs_speed_change()
639 ret = mcs_setup_transceiver_sharp(mcs); in mcs_speed_change()
643 ret = mcs_setup_transceiver_agilent(mcs); in mcs_speed_change()
649 mcs->transceiver_type); in mcs_speed_change()
654 mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_speed_change()
656 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_speed_change()
658 mcs->speed = mcs->new_speed; in mcs_speed_change()
660 mcs->new_speed = 0; in mcs_speed_change()
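The mcs_speed_change() fragments above lose their control flow in the listing. Reconstructed from those lines (with the surrounding MODE_REG bit manipulation omitted, since the bit layout is not shown here), the speed-class branch looks roughly like this; rst records whether the transceiver has to be reprogrammed, which is what the mcs_setup_transceiver_*() switch further down does.

	int rst = 0;

	if (mcs->new_speed <= 115200) {			/* SIR rates */
		rst = mcs->speed > 115200;
		if (rst)
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 0);
	} else if (mcs->new_speed <= 1152000) {		/* MIR: 576000/1152000 */
		rst = !(mcs->speed == 576000 || mcs->speed == 1152000);
		if (rst)
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 5);
	} else {					/* FIR: 4000000 */
		rst = mcs->speed != 4000000;
		if (rst)
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 5);
	}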
683 struct mcs_cb *mcs = netdev_priv(netdev); in mcs_net_close() local
688 kfree_skb(mcs->rx_buff.skb); in mcs_net_close()
691 usb_kill_urb(mcs->rx_urb); in mcs_net_close()
692 usb_free_urb(mcs->rx_urb); in mcs_net_close()
693 usb_kill_urb(mcs->tx_urb); in mcs_net_close()
694 usb_free_urb(mcs->tx_urb); in mcs_net_close()
697 if (mcs->irlap) in mcs_net_close()
698 irlap_close(mcs->irlap); in mcs_net_close()
700 mcs->irlap = NULL; in mcs_net_close()
707 struct mcs_cb *mcs = netdev_priv(netdev); in mcs_net_open() local
711 ret = usb_clear_halt(mcs->usbdev, in mcs_net_open()
712 usb_sndbulkpipe(mcs->usbdev, mcs->ep_in)); in mcs_net_open()
715 ret = usb_clear_halt(mcs->usbdev, in mcs_net_open()
716 usb_rcvbulkpipe(mcs->usbdev, mcs->ep_out)); in mcs_net_open()
720 ret = mcs_setup_transceiver(mcs); in mcs_net_open()
727 mcs->receiving = 0; in mcs_net_open()
728 mcs->rx_buff.truesize = IRDA_SKB_MAX_MTU; in mcs_net_open()
729 mcs->rx_buff.skb = dev_alloc_skb(IRDA_SKB_MAX_MTU); in mcs_net_open()
730 if (!mcs->rx_buff.skb) in mcs_net_open()
733 skb_reserve(mcs->rx_buff.skb, 1); in mcs_net_open()
734 mcs->rx_buff.head = mcs->rx_buff.skb->data; in mcs_net_open()
741 sprintf(hwname, "usb#%d", mcs->usbdev->devnum); in mcs_net_open()
742 mcs->irlap = irlap_open(netdev, &mcs->qos, hwname); in mcs_net_open()
743 if (!mcs->irlap) { in mcs_net_open()
748 if (!mcs_setup_urbs(mcs)) in mcs_net_open()
751 ret = mcs_receive_start(mcs); in mcs_net_open()
759 usb_free_urb(mcs->rx_urb); in mcs_net_open()
760 usb_free_urb(mcs->tx_urb); in mcs_net_open()
762 irlap_close(mcs->irlap); in mcs_net_open()
764 kfree_skb(mcs->rx_buff.skb); in mcs_net_open()
773 struct mcs_cb *mcs = urb->context; in mcs_receive_irq() local
777 if (!netif_running(mcs->netdev)) in mcs_receive_irq()
790 if(mcs->speed < 576000) { in mcs_receive_irq()
791 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
792 &mcs->rx_buff, 0xc0); in mcs_receive_irq()
795 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
796 &mcs->rx_buff, bytes[i]); in mcs_receive_irq()
798 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
799 &mcs->rx_buff, 0xc1); in mcs_receive_irq()
802 else if(mcs->speed == 576000 || mcs->speed == 1152000) { in mcs_receive_irq()
803 mcs_unwrap_mir(mcs, urb->transfer_buffer, in mcs_receive_irq()
808 mcs_unwrap_fir(mcs, urb->transfer_buffer, in mcs_receive_irq()
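The mcs_receive_irq() lines above show the RX completion dispatch: SIR traffic is framed with the fake 0xc0/0xc1 BOF/EOF markers and fed byte by byte through async_unwrap_char(), while MIR and FIR buffers go to the unwrap helpers. A sketch of the whole handler follows, assuming urb->actual_length bytes sit in urb->transfer_buffer and that the URB is re-armed at the end so reception continues.

static void mcs_receive_irq(struct urb *urb)
{
	struct mcs_cb *mcs = urb->context;
	__u8 *bytes = urb->transfer_buffer;
	int i;

	if (!netif_running(mcs->netdev))
		return;

	if (urb->status)
		return;

	if (urb->actual_length > 0) {
		if (mcs->speed < 576000) {
			/* SIR: fake BOF, payload bytes, fake EOF */
			async_unwrap_char(mcs->netdev, &mcs->netdev->stats,
					  &mcs->rx_buff, 0xc0);
			for (i = 0; i < urb->actual_length; i++)
				async_unwrap_char(mcs->netdev,
						  &mcs->netdev->stats,
						  &mcs->rx_buff, bytes[i]);
			async_unwrap_char(mcs->netdev, &mcs->netdev->stats,
					  &mcs->rx_buff, 0xc1);
		} else if (mcs->speed == 576000 || mcs->speed == 1152000) {
			mcs_unwrap_mir(mcs, urb->transfer_buffer,
				       urb->actual_length);
		} else {
			mcs_unwrap_fir(mcs, urb->transfer_buffer,
				       urb->actual_length);
		}
	}

	/* re-arm the bulk-IN URB for the next transfer */
	usb_submit_urb(urb, GFP_ATOMIC);
}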
819 struct mcs_cb *mcs = urb->context; in mcs_send_irq() local
820 struct net_device *ndev = mcs->netdev; in mcs_send_irq()
822 if (unlikely(mcs->new_speed)) in mcs_send_irq()
823 schedule_work(&mcs->work); in mcs_send_irq()
833 struct mcs_cb *mcs; in mcs_hard_xmit() local
838 mcs = netdev_priv(ndev); in mcs_hard_xmit()
840 spin_lock_irqsave(&mcs->lock, flags); in mcs_hard_xmit()
842 mcs->new_speed = irda_get_next_speed(skb); in mcs_hard_xmit()
843 if (likely(mcs->new_speed == mcs->speed)) in mcs_hard_xmit()
844 mcs->new_speed = 0; in mcs_hard_xmit()
847 if(mcs->speed < 576000) { in mcs_hard_xmit()
848 wraplen = mcs_wrap_sir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
851 else if(mcs->speed == 576000 || mcs->speed == 1152000) { in mcs_hard_xmit()
852 wraplen = mcs_wrap_mir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
856 wraplen = mcs_wrap_fir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
858 usb_fill_bulk_urb(mcs->tx_urb, mcs->usbdev, in mcs_hard_xmit()
859 usb_sndbulkpipe(mcs->usbdev, mcs->ep_out), in mcs_hard_xmit()
860 mcs->out_buf, wraplen, mcs_send_irq, mcs); in mcs_hard_xmit()
862 if ((ret = usb_submit_urb(mcs->tx_urb, GFP_ATOMIC))) { in mcs_hard_xmit()
869 mcs->netdev->stats.tx_errors++; in mcs_hard_xmit()
873 mcs->netdev->stats.tx_packets++; in mcs_hard_xmit()
874 mcs->netdev->stats.tx_bytes += skb->len; in mcs_hard_xmit()
878 spin_unlock_irqrestore(&mcs->lock, flags); in mcs_hard_xmit()
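mcs_hard_xmit() runs under the driver spinlock, records any pending speed change (applied later from mcs_send_irq() via the workqueue), wraps the frame for the current speed class and queues it on the bulk-OUT endpoint. A sketch of the body follows, assuming out_buf is the driver's pre-allocated transmit buffer and that the mcs_wrap_*_skb() helpers return the wrapped length.

	unsigned long flags;
	int wraplen;

	spin_lock_irqsave(&mcs->lock, flags);

	/* Defer a requested speed change until the current frame is out. */
	mcs->new_speed = irda_get_next_speed(skb);
	if (likely(mcs->new_speed == mcs->speed))
		mcs->new_speed = 0;

	/* Wrap the frame according to the current speed class. */
	if (mcs->speed < 576000)
		wraplen = mcs_wrap_sir_skb(skb, mcs->out_buf);
	else if (mcs->speed == 576000 || mcs->speed == 1152000)
		wraplen = mcs_wrap_mir_skb(skb, mcs->out_buf);
	else
		wraplen = mcs_wrap_fir_skb(skb, mcs->out_buf);

	usb_fill_bulk_urb(mcs->tx_urb, mcs->usbdev,
			  usb_sndbulkpipe(mcs->usbdev, mcs->ep_out),
			  mcs->out_buf, wraplen, mcs_send_irq, mcs);

	if (usb_submit_urb(mcs->tx_urb, GFP_ATOMIC)) {
		mcs->netdev->stats.tx_errors++;
	} else {
		mcs->netdev->stats.tx_packets++;
		mcs->netdev->stats.tx_bytes += skb->len;
	}

	dev_kfree_skb(skb);
	spin_unlock_irqrestore(&mcs->lock, flags);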
898 struct mcs_cb *mcs; in mcs_probe() local
901 ndev = alloc_irdadev(sizeof(*mcs)); in mcs_probe()
915 mcs = netdev_priv(ndev); in mcs_probe()
916 mcs->usbdev = udev; in mcs_probe()
917 mcs->netdev = ndev; in mcs_probe()
918 spin_lock_init(&mcs->lock); in mcs_probe()
921 irda_init_max_qos_capabilies(&mcs->qos); in mcs_probe()
924 mcs->qos.baud_rate.bits &= in mcs_probe()
929 mcs->qos.min_turn_time.bits &= qos_mtt_bits; in mcs_probe()
930 irda_qos_bits_to_value(&mcs->qos); in mcs_probe()
933 INIT_WORK(&mcs->work, mcs_speed_work); in mcs_probe()
942 ret = mcs_find_endpoints(mcs, intf->cur_altsetting->endpoint, in mcs_probe()
956 mcs->transceiver_type = transceiver_type; in mcs_probe()
957 mcs->sir_tweak = sir_tweak; in mcs_probe()
958 mcs->receive_mode = receive_mode; in mcs_probe()
960 usb_set_intfdata(intf, mcs); in mcs_probe()
973 struct mcs_cb *mcs = usb_get_intfdata(intf); in mcs_disconnect() local
975 if (!mcs) in mcs_disconnect()
978 cancel_work_sync(&mcs->work); in mcs_disconnect()
980 unregister_netdev(mcs->netdev); in mcs_disconnect()
981 free_netdev(mcs->netdev); in mcs_disconnect()