Lines matching refs:self in sh_irda.c (SuperH IrDA driver)
135 int (*xir_fre) (struct sh_irda_self *self);
136 int (*xir_trov) (struct sh_irda_self *self);
137 int (*xir_9) (struct sh_irda_self *self);
138 int (*xir_8) (struct sh_irda_self *self);
139 int (*xir_fte) (struct sh_irda_self *self);
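
These five members form the driver's per-mode ops table; the interrupt handler at line 494 below reaches them through struct sh_irda_xir_func. Reconstructed from the matched lines (the per-bit meanings in the comments are inferred from the SIR handler names, not confirmed by this listing):

    /* Ops table dispatching one callback per interrupt status bit;
     * reconstructed from the members matched above. */
    struct sh_irda_xir_func {
        int (*xir_fre)  (struct sh_irda_self *self);  /* frame receive end */
        int (*xir_trov) (struct sh_irda_self *self);  /* timer overflow */
        int (*xir_9)    (struct sh_irda_self *self);
        int (*xir_8)    (struct sh_irda_self *self);
        int (*xir_fte)  (struct sh_irda_self *self);  /* frame transmit end */
    };
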
168 static void sh_irda_write(struct sh_irda_self *self, u32 offset, u16 data) in sh_irda_write() argument
172 spin_lock_irqsave(&self->lock, flags); in sh_irda_write()
173 iowrite16(data, self->membase + offset); in sh_irda_write()
174 spin_unlock_irqrestore(&self->lock, flags); in sh_irda_write()
177 static u16 sh_irda_read(struct sh_irda_self *self, u32 offset) in sh_irda_read() argument
182 spin_lock_irqsave(&self->lock, flags); in sh_irda_read()
183 ret = ioread16(self->membase + offset); in sh_irda_read()
184 spin_unlock_irqrestore(&self->lock, flags); in sh_irda_read()
189 static void sh_irda_update_bits(struct sh_irda_self *self, u32 offset, in sh_irda_update_bits() argument
195 spin_lock_irqsave(&self->lock, flags); in sh_irda_update_bits()
196 old = ioread16(self->membase + offset); in sh_irda_update_bits()
199 iowrite16(data, self->membase + offset); in sh_irda_update_bits()
200 spin_unlock_irqrestore(&self->lock, flags); in sh_irda_update_bits()
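
Lines 197-198 fall outside the match, but since line 199 writes data back directly, they must first fold the untouched bits of old into it. A plausible reconstruction of the gap:

    /* Hypothetical body of the elided lines 197-198: keep the bits
     * outside mask, substitute the new ones. The spinlock held across
     * the read/modify/write keeps the update atomic with respect to
     * sh_irda_read() and sh_irda_write(). */
    data |= old & ~mask;
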
215 static void sh_irda_rcv_ctrl(struct sh_irda_self *self, int enable) in sh_irda_rcv_ctrl() argument
217 struct device *dev = &self->ndev->dev; in sh_irda_rcv_ctrl()
219 sh_irda_update_bits(self, IRRCTR, RE, enable ? RE : 0); in sh_irda_rcv_ctrl()
223 static int sh_irda_set_timeout(struct sh_irda_self *self, int interval) in sh_irda_set_timeout() argument
225 struct device *dev = &self->ndev->dev; in sh_irda_set_timeout()
227 if (SH_IRDA_SIR != self->mode) in sh_irda_set_timeout()
235 sh_irda_update_bits(self, IRCFR, RTO, interval << RTO_SHIFT); in sh_irda_set_timeout()
239 static int sh_irda_set_baudrate(struct sh_irda_self *self, int baudrate) in sh_irda_set_baudrate() argument
241 struct device *dev = &self->ndev->dev; in sh_irda_set_baudrate()
247 if (SH_IRDA_SIR != self->mode) { in sh_irda_set_baudrate()
259 sh_irda_update_bits(self, SIRBCR, BRC_MASK, val); in sh_irda_set_baudrate()
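
The value masked into SIRBCR at line 259 is a bit-rate divider computed from baudrate a few lines earlier. That arithmetic is not part of this match; the sketch below assumes the usual SuperH IrDA clocking (a 48 MHz source divided by 26, 16 clocks per bit), which is an assumption rather than anything visible in the listing:

    /* Hypothetical divider computation, assuming
     *   baud rate = (48 MHz / 26) / ((val + 1) * 16)
     * and solving for the counter value val. */
    u16 val = (48000000 / 26 / 16 / baudrate) - 1;
    sh_irda_update_bits(self, SIRBCR, BRC_MASK, val);
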
264 static int sh_irda_get_rcv_length(struct sh_irda_self *self) in sh_irda_get_rcv_length() argument
266 return RFL_MASK & sh_irda_read(self, IRRFLR); in sh_irda_get_rcv_length()
274 static int sh_irda_xir_fre(struct sh_irda_self *self) in sh_irda_xir_fre() argument
276 struct device *dev = &self->ndev->dev; in sh_irda_xir_fre()
281 static int sh_irda_xir_trov(struct sh_irda_self *self) in sh_irda_xir_trov() argument
283 struct device *dev = &self->ndev->dev; in sh_irda_xir_trov()
288 static int sh_irda_xir_9(struct sh_irda_self *self) in sh_irda_xir_9() argument
290 struct device *dev = &self->ndev->dev; in sh_irda_xir_9()
295 static int sh_irda_xir_8(struct sh_irda_self *self) in sh_irda_xir_8() argument
297 struct device *dev = &self->ndev->dev; in sh_irda_xir_8()
302 static int sh_irda_xir_fte(struct sh_irda_self *self) in sh_irda_xir_fte() argument
304 struct device *dev = &self->ndev->dev; in sh_irda_xir_fte()
336 static int sh_irda_sir_fre(struct sh_irda_self *self) in sh_irda_sir_fre() argument
338 struct device *dev = &self->ndev->dev; in sh_irda_sir_fre()
341 int len = sh_irda_get_rcv_length(self); in sh_irda_sir_fre()
352 data16 = sh_irda_read(self, IRDARAM + i); in sh_irda_sir_fre()
354 async_unwrap_char(self->ndev, &self->ndev->stats, in sh_irda_sir_fre()
355 &self->rx_buff, data[j]); in sh_irda_sir_fre()
357 self->ndev->last_rx = jiffies; in sh_irda_sir_fre()
359 sh_irda_rcv_ctrl(self, 1); in sh_irda_sir_fre()
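
IRDARAM is read 16 bits at a time (line 352) and each half of data16 is fed to the SIR unwrapper one byte at a time (lines 354-355). A hedged reconstruction of the receive loop; the clamp to IRDARAM_LEN and the byte order within data16 are assumptions:

    static int sh_irda_sir_fre_sketch(struct sh_irda_self *self)
    {
        u16 data16;
        u8 *data = (u8 *)&data16;
        int len = sh_irda_get_rcv_length(self);
        int i;

        if (len > IRDARAM_LEN)      /* assumed clamp to the frame RAM size */
            len = IRDARAM_LEN;

        for (i = 0; i < len; i++) {
            if (!(i % 2))           /* one 16-bit read covers two bytes */
                data16 = sh_irda_read(self, IRDARAM + i);
            async_unwrap_char(self->ndev, &self->ndev->stats,
                              &self->rx_buff, data[i % 2]);
        }
        self->ndev->last_rx = jiffies;
        sh_irda_rcv_ctrl(self, 1);  /* frame consumed, receiver back on */
        return 0;
    }
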
364 static int sh_irda_sir_trov(struct sh_irda_self *self) in sh_irda_sir_trov() argument
366 struct device *dev = &self->ndev->dev; in sh_irda_sir_trov()
369 sh_irda_rcv_ctrl(self, 1); in sh_irda_sir_trov()
373 static int sh_irda_sir_tot(struct sh_irda_self *self) in sh_irda_sir_tot() argument
375 struct device *dev = &self->ndev->dev; in sh_irda_sir_tot()
378 sh_irda_set_baudrate(self, 9600); in sh_irda_sir_tot()
379 sh_irda_rcv_ctrl(self, 1); in sh_irda_sir_tot()
383 static int sh_irda_sir_fer(struct sh_irda_self *self) in sh_irda_sir_fer() argument
385 struct device *dev = &self->ndev->dev; in sh_irda_sir_fer()
388 sh_irda_rcv_ctrl(self, 1); in sh_irda_sir_fer()
392 static int sh_irda_sir_fte(struct sh_irda_self *self) in sh_irda_sir_fte() argument
394 struct device *dev = &self->ndev->dev; in sh_irda_sir_fte()
397 netif_wake_queue(self->ndev); in sh_irda_sir_fte()
410 static void sh_irda_set_mode(struct sh_irda_self *self, enum sh_irda_mode mode) in sh_irda_set_mode() argument
412 struct device *dev = &self->ndev->dev; in sh_irda_set_mode()
440 self->mode = mode; in sh_irda_set_mode()
441 self->xir_func = func; in sh_irda_set_mode()
442 sh_irda_update_bits(self, IRTMR, TMD_MASK, data); in sh_irda_set_mode()
454 static void sh_irda_set_irq_mask(struct sh_irda_self *self) in sh_irda_set_irq_mask() argument
460 sh_irda_update_bits(self, IRTMR, xIM_MASK, xIM_MASK); in sh_irda_set_irq_mask()
461 sh_irda_update_bits(self, SIRIMR, xIR_MASK, xIR_MASK); in sh_irda_set_irq_mask()
462 sh_irda_update_bits(self, MFIRIMR, xIR_MASK, xIR_MASK); in sh_irda_set_irq_mask()
465 sh_irda_update_bits(self, SIRICR, xIR_MASK, xIR_MASK); in sh_irda_set_irq_mask()
466 sh_irda_update_bits(self, MFIRICR, xIR_MASK, xIR_MASK); in sh_irda_set_irq_mask()
468 switch (self->mode) { in sh_irda_set_irq_mask()
486 sh_irda_update_bits(self, IRTMR, tmr_hole, 0); in sh_irda_set_irq_mask()
487 sh_irda_update_bits(self, xir_reg, xIR_MASK, 0); in sh_irda_set_irq_mask()
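
The shape of sh_irda_set_irq_mask() is: mask every interrupt source (lines 460-462), ack anything pending (lines 465-466), then re-open only the bank belonging to the current mode (lines 486-487). The switch at line 468 is elided from the match; a sketch in which the constant names SIM and MIM are assumptions:

    /* Hypothetical mode dispatch for the switch at line 468. */
    switch (self->mode) {
    case SH_IRDA_SIR:
        tmr_hole = SIM;      /* keep the SIR timer interrupt open */
        xir_reg  = SIRIMR;
        break;
    case SH_IRDA_MIR:
    case SH_IRDA_FIR:
        tmr_hole = MIM;
        xir_reg  = MFIRIMR;
        break;
    default:
        tmr_hole = 0;
        xir_reg  = 0;        /* unknown mode: leave everything masked */
        break;
    }
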
493 struct sh_irda_self *self = dev_id; in sh_irda_irq() local
494 struct sh_irda_xir_func *func = self->xir_func; in sh_irda_irq()
495 u16 isr = sh_irda_read(self, SIRISR); in sh_irda_irq()
498 sh_irda_write(self, SIRICR, isr); in sh_irda_irq()
501 func->xir_fre(self); in sh_irda_irq()
503 func->xir_trov(self); in sh_irda_irq()
505 func->xir_9(self); in sh_irda_irq()
507 func->xir_8(self); in sh_irda_irq()
509 func->xir_fte(self); in sh_irda_irq()
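
Every status bit read from SIRISR is acked in one shot (line 498), then routed through the current mode's ops table. The bit tests between lines 498 and 509 are elided; they presumably look as follows, where the bit macro names are inferred from the callback names rather than confirmed by the match:

    if (isr & FRE)
        func->xir_fre(self);   /* frame received */
    if (isr & TROV)
        func->xir_trov(self);  /* timer overflow */
    if (isr & xIR_9)
        func->xir_9(self);
    if (isr & xIR_8)
        func->xir_8(self);
    if (isr & FTE)
        func->xir_fte(self);   /* frame sent */
    return IRQ_HANDLED;
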
521 static void sh_irda_crc_reset(struct sh_irda_self *self) in sh_irda_crc_reset() argument
523 sh_irda_write(self, CRCCTR, CRC_RST); in sh_irda_crc_reset()
526 static void sh_irda_crc_add(struct sh_irda_self *self, u16 data) in sh_irda_crc_add() argument
528 sh_irda_write(self, CRCIR, data & CRC_IN_MASK); in sh_irda_crc_add()
531 static u16 sh_irda_crc_cnt(struct sh_irda_self *self) in sh_irda_crc_cnt() argument
533 return CRC_CT_MASK & sh_irda_read(self, CRCCTR); in sh_irda_crc_cnt()
536 static u16 sh_irda_crc_out(struct sh_irda_self *self) in sh_irda_crc_out() argument
538 return sh_irda_read(self, CRCOR); in sh_irda_crc_out()
541 static int sh_irda_crc_init(struct sh_irda_self *self) in sh_irda_crc_init() argument
543 struct device *dev = &self->ndev->dev; in sh_irda_crc_init()
547 sh_irda_crc_reset(self); in sh_irda_crc_init()
549 sh_irda_crc_add(self, 0xCC); in sh_irda_crc_init()
550 sh_irda_crc_add(self, 0xF5); in sh_irda_crc_init()
551 sh_irda_crc_add(self, 0xF1); in sh_irda_crc_init()
552 sh_irda_crc_add(self, 0xA7); in sh_irda_crc_init()
554 val = sh_irda_crc_cnt(self); in sh_irda_crc_init()
560 val = sh_irda_crc_out(self); in sh_irda_crc_init()
570 sh_irda_crc_reset(self); in sh_irda_crc_init()
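
sh_irda_crc_init() is a self-test: it pushes the fixed pattern 0xCC 0xF5 0xF1 0xA7 through the CRC engine, verifies the byte counter and the result, and resets the engine again on the way out. The comparisons around lines 554-560 are outside the match; in the sketch below EXPECTED_CRC is a stand-in for whatever constant the hardware manual specifies for this pattern:

    val = sh_irda_crc_cnt(self);
    if (val != 4) {                /* four bytes were fed in */
        dev_err(dev, "CRC count error %x\n", val);
        goto err_out;
    }
    val = sh_irda_crc_out(self);
    if (val != EXPECTED_CRC) {     /* hypothetical expected value */
        dev_err(dev, "CRC result error %x\n", val);
        goto err_out;
    }
    ret = 0;
err_out:
    sh_irda_crc_reset(self);       /* leave the engine clean either way */
    return ret;
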
581 static void sh_irda_remove_iobuf(struct sh_irda_self *self) in sh_irda_remove_iobuf() argument
583 kfree(self->rx_buff.head); in sh_irda_remove_iobuf()
585 self->tx_buff.head = NULL; in sh_irda_remove_iobuf()
586 self->tx_buff.data = NULL; in sh_irda_remove_iobuf()
587 self->rx_buff.head = NULL; in sh_irda_remove_iobuf()
588 self->rx_buff.data = NULL; in sh_irda_remove_iobuf()
591 static int sh_irda_init_iobuf(struct sh_irda_self *self, int rxsize, int txsize) in sh_irda_init_iobuf() argument
593 if (self->rx_buff.head || in sh_irda_init_iobuf()
594 self->tx_buff.head) { in sh_irda_init_iobuf()
595 dev_err(&self->ndev->dev, "iobuff has already existed."); in sh_irda_init_iobuf()
600 self->rx_buff.head = kmalloc(rxsize, GFP_KERNEL); in sh_irda_init_iobuf()
601 if (!self->rx_buff.head) in sh_irda_init_iobuf()
604 self->rx_buff.truesize = rxsize; in sh_irda_init_iobuf()
605 self->rx_buff.in_frame = FALSE; in sh_irda_init_iobuf()
606 self->rx_buff.state = OUTSIDE_FRAME; in sh_irda_init_iobuf()
607 self->rx_buff.data = self->rx_buff.head; in sh_irda_init_iobuf()
610 self->tx_buff.head = self->membase + IRDARAM; in sh_irda_init_iobuf()
611 self->tx_buff.truesize = IRDARAM_LEN; in sh_irda_init_iobuf()
625 struct sh_irda_self *self = netdev_priv(ndev); in sh_irda_hard_xmit() local
626 struct device *dev = &self->ndev->dev; in sh_irda_hard_xmit()
633 sh_irda_rcv_ctrl(self, 0); in sh_irda_hard_xmit()
635 ret = sh_irda_set_baudrate(self, speed); in sh_irda_hard_xmit()
639 self->tx_buff.len = 0; in sh_irda_hard_xmit()
643 spin_lock_irqsave(&self->lock, flags); in sh_irda_hard_xmit()
644 self->tx_buff.len = async_wrap_skb(skb, in sh_irda_hard_xmit()
645 self->tx_buff.head, in sh_irda_hard_xmit()
646 self->tx_buff.truesize); in sh_irda_hard_xmit()
647 spin_unlock_irqrestore(&self->lock, flags); in sh_irda_hard_xmit()
649 if (self->tx_buff.len > self->tx_buff.truesize) in sh_irda_hard_xmit()
650 self->tx_buff.len = self->tx_buff.truesize; in sh_irda_hard_xmit()
652 sh_irda_write(self, IRTFLR, self->tx_buff.len); in sh_irda_hard_xmit()
653 sh_irda_write(self, IRTCTR, ARMOD | TE); in sh_irda_hard_xmit()
662 sh_irda_set_baudrate(self, 9600); in sh_irda_hard_xmit()
663 netif_wake_queue(self->ndev); in sh_irda_hard_xmit()
664 sh_irda_rcv_ctrl(self, 1); in sh_irda_hard_xmit()
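
Read top to bottom, the transmit path is: quiesce the receiver, retune if the caller requested a speed change, async-wrap the skb straight into the controller's frame RAM (tx_buff.head aliases membase + IRDARAM, see line 610), clamp to that RAM's size, then program the length and start the transmitter. Lines 662-664 are the bail-out path: drop back to 9600 baud, wake the queue, re-enable the receiver. Condensed from the matched lines, with error handling omitted:

    sh_irda_rcv_ctrl(self, 0);              /* no RX while transmitting */
    sh_irda_set_baudrate(self, speed);      /* only meaningful in SIR mode */
    spin_lock_irqsave(&self->lock, flags);
    self->tx_buff.len = async_wrap_skb(skb, self->tx_buff.head,
                                       self->tx_buff.truesize);
    spin_unlock_irqrestore(&self->lock, flags);
    if (self->tx_buff.len > self->tx_buff.truesize)
        self->tx_buff.len = self->tx_buff.truesize;  /* clamp to IRDARAM */
    sh_irda_write(self, IRTFLR, self->tx_buff.len);  /* frame length */
    sh_irda_write(self, IRTCTR, ARMOD | TE);         /* kick the transmitter */
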
684 struct sh_irda_self *self = netdev_priv(ndev); in sh_irda_stats() local
686 return &self->ndev->stats; in sh_irda_stats()
691 struct sh_irda_self *self = netdev_priv(ndev); in sh_irda_open() local
694 pm_runtime_get_sync(&self->pdev->dev); in sh_irda_open()
695 err = sh_irda_crc_init(self); in sh_irda_open()
699 sh_irda_set_mode(self, SH_IRDA_SIR); in sh_irda_open()
700 sh_irda_set_timeout(self, 2); in sh_irda_open()
701 sh_irda_set_baudrate(self, 9600); in sh_irda_open()
703 self->irlap = irlap_open(ndev, &self->qos, DRIVER_NAME); in sh_irda_open()
704 if (!self->irlap) { in sh_irda_open()
710 sh_irda_rcv_ctrl(self, 1); in sh_irda_open()
711 sh_irda_set_irq_mask(self); in sh_irda_open()
718 pm_runtime_put_sync(&self->pdev->dev); in sh_irda_open()
725 struct sh_irda_self *self = netdev_priv(ndev); in sh_irda_stop() local
728 if (self->irlap) { in sh_irda_stop()
729 irlap_close(self->irlap); in sh_irda_stop()
730 self->irlap = NULL; in sh_irda_stop()
734 pm_runtime_put_sync(&self->pdev->dev); in sh_irda_stop()
759 struct sh_irda_self *self; in sh_irda_probe() local
771 ndev = alloc_irdadev(sizeof(*self)); in sh_irda_probe()
775 self = netdev_priv(ndev); in sh_irda_probe()
776 self->membase = ioremap_nocache(res->start, resource_size(res)); in sh_irda_probe()
777 if (!self->membase) { in sh_irda_probe()
783 err = sh_irda_init_iobuf(self, IRDA_SKB_MAX_MTU, IRDA_SIR_MAX_FRAME); in sh_irda_probe()
787 self->pdev = pdev; in sh_irda_probe()
790 irda_init_max_qos_capabilies(&self->qos); in sh_irda_probe()
795 self->ndev = ndev; in sh_irda_probe()
796 self->qos.baud_rate.bits &= IR_9600; /* FIXME */ in sh_irda_probe()
797 self->qos.min_turn_time.bits = 1; /* 10 ms or more */ in sh_irda_probe()
798 spin_lock_init(&self->lock); in sh_irda_probe()
800 irda_qos_bits_to_value(&self->qos); in sh_irda_probe()
807 err = devm_request_irq(&pdev->dev, irq, sh_irda_irq, 0, "sh_irda", self); in sh_irda_probe()
819 sh_irda_remove_iobuf(self); in sh_irda_probe()
821 iounmap(self->membase); in sh_irda_probe()
831 struct sh_irda_self *self = netdev_priv(ndev); in sh_irda_remove() local
833 if (!self) in sh_irda_remove()
838 sh_irda_remove_iobuf(self); in sh_irda_remove()
839 iounmap(self->membase); in sh_irda_remove()