
Lines matching refs:dev — each entry gives the original source line number, the matching line, and the function it appears in ("argument"/"local" marks how dev is declared there).

35 static void ene_set_reg_addr(struct ene_device *dev, u16 reg)  in ene_set_reg_addr()  argument
37 outb(reg >> 8, dev->hw_io + ENE_ADDR_HI); in ene_set_reg_addr()
38 outb(reg & 0xFF, dev->hw_io + ENE_ADDR_LO); in ene_set_reg_addr()
42 static u8 ene_read_reg(struct ene_device *dev, u16 reg) in ene_read_reg() argument
45 ene_set_reg_addr(dev, reg); in ene_read_reg()
46 retval = inb(dev->hw_io + ENE_IO); in ene_read_reg()
52 static void ene_write_reg(struct ene_device *dev, u16 reg, u8 value) in ene_write_reg() argument
55 ene_set_reg_addr(dev, reg); in ene_write_reg()
56 outb(value, dev->hw_io + ENE_IO); in ene_write_reg()
60 static void ene_set_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_set_reg_mask() argument
63 ene_set_reg_addr(dev, reg); in ene_set_reg_mask()
64 outb(inb(dev->hw_io + ENE_IO) | mask, dev->hw_io + ENE_IO); in ene_set_reg_mask()
68 static void ene_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_clear_reg_mask() argument
71 ene_set_reg_addr(dev, reg); in ene_clear_reg_mask()
72 outb(inb(dev->hw_io + ENE_IO) & ~mask, dev->hw_io + ENE_IO); in ene_clear_reg_mask()
76 static void ene_set_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask, in ene_set_clear_reg_mask() argument
80 ene_set_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
82 ene_clear_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
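The six helpers above (lines 35-82) implement the chip's indirect register access: a 16-bit register address is latched through two byte-wide address ports, data then moves through a single I/O window, and the mask helpers layer read-modify-write on top of that. A minimal user-space sketch of the same pattern, with a flat array standing in for the real inb()/outb() ports (the register address in main() and the latching model are illustrative, not the driver's constants):

#include <stdint.h>
#include <stdio.h>

/* Model of the ENE indirect register window: the driver uses inb()/outb()
 * on dev->hw_io plus small port offsets; here a flat array stands in for
 * the EC address space so the sketch runs in user space. */
static uint8_t ec_space[0x10000];
static uint16_t latched_addr;

static void set_reg_addr(uint16_t reg)
{
	/* models outb(reg >> 8, hw_io + ENE_ADDR_HI); outb(reg & 0xFF, hw_io + ENE_ADDR_LO) */
	latched_addr = reg;
}

static uint8_t read_reg(uint16_t reg)
{
	set_reg_addr(reg);
	return ec_space[latched_addr];		/* models inb(hw_io + ENE_IO) */
}

static void write_reg(uint16_t reg, uint8_t value)
{
	set_reg_addr(reg);
	ec_space[latched_addr] = value;		/* models outb(value, hw_io + ENE_IO) */
}

/* The mask helpers are plain read-modify-write on top of the window. */
static void set_reg_mask(uint16_t reg, uint8_t mask)
{
	write_reg(reg, read_reg(reg) | mask);
}

static void clear_reg_mask(uint16_t reg, uint8_t mask)
{
	write_reg(reg, read_reg(reg) & ~mask);
}

int main(void)
{
	write_reg(0xF8F1, 0x40);	/* 0xF8F1 is an arbitrary illustrative address */
	set_reg_mask(0xF8F1, 0x01);
	clear_reg_mask(0xF8F1, 0x40);
	printf("0xF8F1 = 0x%02X\n", read_reg(0xF8F1));	/* prints 0x01 */
	return 0;
}

In the driver itself these accesses are serialized by dev->hw_lock, so the address latch cannot be clobbered between the address and data steps.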
86 static int ene_hw_detect(struct ene_device *dev) in ene_hw_detect() argument
92 ene_clear_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
93 chip_major = ene_read_reg(dev, ENE_ECVER_MAJOR); in ene_hw_detect()
94 chip_minor = ene_read_reg(dev, ENE_ECVER_MINOR); in ene_hw_detect()
95 ene_set_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
97 hw_revision = ene_read_reg(dev, ENE_ECHV); in ene_hw_detect()
98 old_ver = ene_read_reg(dev, ENE_HW_VER_OLD); in ene_hw_detect()
100 dev->pll_freq = (ene_read_reg(dev, ENE_PLLFRH) << 4) + in ene_hw_detect()
101 (ene_read_reg(dev, ENE_PLLFRL) >> 4); in ene_hw_detect()
104 dev->rx_period_adjust = in ene_hw_detect()
105 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 2 : 4; in ene_hw_detect()
117 pr_notice("PLL freq = %d\n", dev->pll_freq); in ene_hw_detect()
125 dev->hw_revision = ENE_HW_C; in ene_hw_detect()
128 dev->hw_revision = ENE_HW_B; in ene_hw_detect()
131 dev->hw_revision = ENE_HW_D; in ene_hw_detect()
136 if (dev->hw_revision < ENE_HW_C) in ene_hw_detect()
139 fw_reg1 = ene_read_reg(dev, ENE_FW1); in ene_hw_detect()
140 fw_reg2 = ene_read_reg(dev, ENE_FW2); in ene_hw_detect()
144 dev->hw_use_gpio_0a = !!(fw_reg2 & ENE_FW2_GP0A); in ene_hw_detect()
145 dev->hw_learning_and_tx_capable = !!(fw_reg2 & ENE_FW2_LEARNING); in ene_hw_detect()
146 dev->hw_extra_buffer = !!(fw_reg1 & ENE_FW1_HAS_EXTRA_BUF); in ene_hw_detect()
148 if (dev->hw_learning_and_tx_capable) in ene_hw_detect()
149 dev->hw_fan_input = !!(fw_reg2 & ENE_FW2_FAN_INPUT); in ene_hw_detect()
153 if (dev->hw_learning_and_tx_capable) { in ene_hw_detect()
162 dev->hw_use_gpio_0a ? "40" : "0A"); in ene_hw_detect()
164 if (dev->hw_fan_input) in ene_hw_detect()
168 if (!dev->hw_fan_input) in ene_hw_detect()
170 dev->hw_use_gpio_0a ? "0A" : "40"); in ene_hw_detect()
172 if (dev->hw_extra_buffer) in ene_hw_detect()
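Lines 100-105 reconstruct the PLL frequency from two byte-wide registers (the high byte shifted left by four plus the upper nibble of the low byte) and pick a 2% or 4% receive-period correction depending on whether the PLL runs at the default rate. A short sketch of just that arithmetic, with arbitrary register values for illustration:

#include <stdint.h>
#include <stdio.h>

/* pll_freq = (PLLFRH << 4) + (PLLFRL >> 4), as in ene_hw_detect().
 * The two register values passed below are arbitrary illustrative inputs. */
static unsigned int pll_freq_from_regs(uint8_t pllfrh, uint8_t pllfrl)
{
	return ((unsigned int)pllfrh << 4) + (pllfrl >> 4);
}

int main(void)
{
	/* e.g. PLLFRH = 0x2F, PLLFRL = 0x50 -> 0x2F0 + 0x5 = 757 */
	printf("PLL freq = %u\n", pll_freq_from_regs(0x2F, 0x50));
	return 0;
}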
178 static void ene_rx_setup_hw_buffer(struct ene_device *dev) in ene_rx_setup_hw_buffer() argument
182 ene_rx_read_hw_pointer(dev); in ene_rx_setup_hw_buffer()
183 dev->r_pointer = dev->w_pointer; in ene_rx_setup_hw_buffer()
185 if (!dev->hw_extra_buffer) { in ene_rx_setup_hw_buffer()
186 dev->buffer_len = ENE_FW_PACKET_SIZE * 2; in ene_rx_setup_hw_buffer()
190 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER); in ene_rx_setup_hw_buffer()
191 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER+1) << 8; in ene_rx_setup_hw_buffer()
192 dev->extra_buf1_address = tmp; in ene_rx_setup_hw_buffer()
194 dev->extra_buf1_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 2); in ene_rx_setup_hw_buffer()
196 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 3); in ene_rx_setup_hw_buffer()
197 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 4) << 8; in ene_rx_setup_hw_buffer()
198 dev->extra_buf2_address = tmp; in ene_rx_setup_hw_buffer()
200 dev->extra_buf2_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 5); in ene_rx_setup_hw_buffer()
202 dev->buffer_len = dev->extra_buf1_len + dev->extra_buf2_len + 8; in ene_rx_setup_hw_buffer()
206 dev->extra_buf1_address, dev->extra_buf1_len); in ene_rx_setup_hw_buffer()
208 dev->extra_buf2_address, dev->extra_buf2_len); in ene_rx_setup_hw_buffer()
210 pr_notice("Total buffer len = %d\n", dev->buffer_len); in ene_rx_setup_hw_buffer()
212 if (dev->buffer_len > 64 || dev->buffer_len < 16) in ene_rx_setup_hw_buffer()
215 if (dev->extra_buf1_address > 0xFBFC || in ene_rx_setup_hw_buffer()
216 dev->extra_buf1_address < 0xEC00) in ene_rx_setup_hw_buffer()
219 if (dev->extra_buf2_address > 0xFBFC || in ene_rx_setup_hw_buffer()
220 dev->extra_buf2_address < 0xEC00) in ene_rx_setup_hw_buffer()
223 if (dev->r_pointer > dev->buffer_len) in ene_rx_setup_hw_buffer()
226 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
230 dev->hw_extra_buffer = false; in ene_rx_setup_hw_buffer()
231 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
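ene_rx_setup_hw_buffer() reads a six-byte descriptor starting at ENE_FW_SAMPLE_BUFFER: a little-endian 16-bit address plus a length byte for each of the two extra buffers (lines 190-200), then derives the total ring size and sanity-checks it (lines 202-223). A sketch of that decoding over a plain byte array; the descriptor contents are invented, and the reading of the "+ 8" term as ENE_FW_PACKET_SIZE * 2 from the fallback path on line 186 is an assumption:

#include <stdint.h>
#include <stdio.h>

struct extra_buffers {
	uint16_t buf1_address, buf2_address;
	uint8_t  buf1_len, buf2_len;
	unsigned int total_len;
};

/* Decode the six-byte descriptor kept at ENE_FW_SAMPLE_BUFFER:
 * [0..1] buf1 address (LE), [2] buf1 len, [3..4] buf2 address (LE), [5] buf2 len. */
static struct extra_buffers decode_sample_buffer_desc(const uint8_t d[6])
{
	struct extra_buffers b;

	b.buf1_address = d[0] | (uint16_t)(d[1] << 8);
	b.buf1_len     = d[2];
	b.buf2_address = d[3] | (uint16_t)(d[4] << 8);
	b.buf2_len     = d[5];
	/* + 8 mirrors line 202; it appears to match ENE_FW_PACKET_SIZE * 2 from line 186 */
	b.total_len    = b.buf1_len + b.buf2_len + 8;
	return b;
}

int main(void)
{
	const uint8_t desc[6] = { 0x00, 0xFB, 24, 0x20, 0xFB, 24 };	/* illustrative */
	struct extra_buffers b = decode_sample_buffer_desc(desc);

	printf("buf1 @0x%04X len %u, buf2 @0x%04X len %u, total %u\n",
	       b.buf1_address, b.buf1_len, b.buf2_address, b.buf2_len, b.total_len);
	return 0;
}

The example values pass the driver's own checks: both addresses fall in 0xEC00..0xFBFC and the total length of 56 stays within the 16..64 window.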
236 static void ene_rx_restore_hw_buffer(struct ene_device *dev) in ene_rx_restore_hw_buffer() argument
238 if (!dev->hw_extra_buffer) in ene_rx_restore_hw_buffer()
241 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 0, in ene_rx_restore_hw_buffer()
242 dev->extra_buf1_address & 0xFF); in ene_rx_restore_hw_buffer()
243 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 1, in ene_rx_restore_hw_buffer()
244 dev->extra_buf1_address >> 8); in ene_rx_restore_hw_buffer()
245 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 2, dev->extra_buf1_len); in ene_rx_restore_hw_buffer()
247 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 3, in ene_rx_restore_hw_buffer()
248 dev->extra_buf2_address & 0xFF); in ene_rx_restore_hw_buffer()
249 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 4, in ene_rx_restore_hw_buffer()
250 dev->extra_buf2_address >> 8); in ene_rx_restore_hw_buffer()
251 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 5, in ene_rx_restore_hw_buffer()
252 dev->extra_buf2_len); in ene_rx_restore_hw_buffer()
253 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_restore_hw_buffer()
257 static void ene_rx_read_hw_pointer(struct ene_device *dev) in ene_rx_read_hw_pointer() argument
259 if (dev->hw_extra_buffer) in ene_rx_read_hw_pointer()
260 dev->w_pointer = ene_read_reg(dev, ENE_FW_RX_POINTER); in ene_rx_read_hw_pointer()
262 dev->w_pointer = ene_read_reg(dev, ENE_FW2) in ene_rx_read_hw_pointer()
266 dev->w_pointer, dev->r_pointer); in ene_rx_read_hw_pointer()
270 static int ene_rx_get_sample_reg(struct ene_device *dev) in ene_rx_get_sample_reg() argument
274 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
276 ene_rx_read_hw_pointer(dev); in ene_rx_get_sample_reg()
279 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
280 dbg_verbose("RB: end of data at %d", dev->r_pointer); in ene_rx_get_sample_reg()
284 dbg_verbose("RB: reading at offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
285 r_pointer = dev->r_pointer; in ene_rx_get_sample_reg()
287 dev->r_pointer++; in ene_rx_get_sample_reg()
288 if (dev->r_pointer == dev->buffer_len) in ene_rx_get_sample_reg()
289 dev->r_pointer = 0; in ene_rx_get_sample_reg()
291 dbg_verbose("RB: next read will be from offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
300 if (r_pointer < dev->extra_buf1_len) { in ene_rx_get_sample_reg()
302 return dev->extra_buf1_address + r_pointer; in ene_rx_get_sample_reg()
305 r_pointer -= dev->extra_buf1_len; in ene_rx_get_sample_reg()
307 if (r_pointer < dev->extra_buf2_len) { in ene_rx_get_sample_reg()
309 return dev->extra_buf2_address + r_pointer; in ene_rx_get_sample_reg()
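ene_rx_get_sample_reg() advances the software read pointer around the circular buffer and translates the consumed offset into an absolute EC register address: offsets past the primary firmware packet area fall into the first extra buffer, and the remainder into the second. The primary-area branch itself does not reference dev and so is not listed; the sketch below assumes it covers the first eight bytes, consistent with the "+ 8" on line 202:

#include <stdint.h>
#include <stdio.h>

#define FW_SAMPLE_BUFFER 0xF8F0	/* illustrative register address, not the real constant */
#define FW_PACKET_REGION 8	/* assumed size of the primary firmware packet area */

struct rx_ring {
	unsigned int r_pointer, w_pointer, buffer_len;
	uint16_t extra_buf1_address, extra_buf2_address;
	uint8_t  extra_buf1_len, extra_buf2_len;
};

/* Translate a ring-buffer offset into an absolute EC register address,
 * mirroring the branch structure of ene_rx_get_sample_reg(). Returns 0
 * when the ring is empty or the offset is out of range. */
static unsigned int get_sample_reg(struct rx_ring *rb)
{
	unsigned int off;

	if (rb->r_pointer == rb->w_pointer)
		return 0;				/* no new data */

	off = rb->r_pointer;
	rb->r_pointer = (rb->r_pointer + 1) % rb->buffer_len;	/* wrap like lines 287-289 */

	if (off < FW_PACKET_REGION)			/* primary firmware packets */
		return FW_SAMPLE_BUFFER + off;
	off -= FW_PACKET_REGION;

	if (off < rb->extra_buf1_len)			/* first extra buffer */
		return rb->extra_buf1_address + off;
	off -= rb->extra_buf1_len;

	if (off < rb->extra_buf2_len)			/* second extra buffer */
		return rb->extra_buf2_address + off;

	return 0;					/* beyond ring buffer end */
}

int main(void)
{
	struct rx_ring rb = {
		.r_pointer = 6, .w_pointer = 20, .buffer_len = 56,
		.extra_buf1_address = 0xFB00, .extra_buf1_len = 24,
		.extra_buf2_address = 0xFB20, .extra_buf2_len = 24,
	};

	for (int i = 0; i < 6; i++) {
		unsigned int off = rb.r_pointer;
		printf("offset %u -> reg 0x%04X\n", off, get_sample_reg(&rb));
	}
	return 0;
}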
317 static void ene_rx_sense_carrier(struct ene_device *dev) in ene_rx_sense_carrier() argument
320 int period = ene_read_reg(dev, ENE_CIRCAR_PRD); in ene_rx_sense_carrier()
321 int hperiod = ene_read_reg(dev, ENE_CIRCAR_HPRD); in ene_rx_sense_carrier()
338 if (dev->carrier_detect_enabled) { in ene_rx_sense_carrier()
344 ir_raw_event_store(dev->rdev, &ev); in ene_rx_sense_carrier()
349 static void ene_rx_enable_cir_engine(struct ene_device *dev, bool enable) in ene_rx_enable_cir_engine() argument
351 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_rx_enable_cir_engine()
356 static void ene_rx_select_input(struct ene_device *dev, bool gpio_0a) in ene_rx_select_input() argument
358 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_GPIO0A, gpio_0a); in ene_rx_select_input()
365 static void ene_rx_enable_fan_input(struct ene_device *dev, bool enable) in ene_rx_enable_fan_input() argument
367 if (!dev->hw_fan_input) in ene_rx_enable_fan_input()
371 ene_write_reg(dev, ENE_FAN_AS_IN1, 0); in ene_rx_enable_fan_input()
373 ene_write_reg(dev, ENE_FAN_AS_IN1, ENE_FAN_AS_IN1_EN); in ene_rx_enable_fan_input()
374 ene_write_reg(dev, ENE_FAN_AS_IN2, ENE_FAN_AS_IN2_EN); in ene_rx_enable_fan_input()
379 static void ene_rx_setup(struct ene_device *dev) in ene_rx_setup() argument
381 bool learning_mode = dev->learning_mode_enabled || in ene_rx_setup()
382 dev->carrier_detect_enabled; in ene_rx_setup()
389 ene_write_reg(dev, ENE_CIRCFG2, 0x00); in ene_rx_setup()
394 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 1 : 2; in ene_rx_setup()
396 ene_write_reg(dev, ENE_CIRRLC_CFG, in ene_rx_setup()
400 if (dev->hw_revision < ENE_HW_C) in ene_rx_setup()
405 WARN_ON(!dev->hw_learning_and_tx_capable); in ene_rx_setup()
412 ene_rx_select_input(dev, !dev->hw_use_gpio_0a); in ene_rx_setup()
413 dev->rx_fan_input_inuse = false; in ene_rx_setup()
416 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
419 ene_write_reg(dev, ENE_CIRCAR_PULS, 0x63); in ene_rx_setup()
420 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT, in ene_rx_setup()
421 dev->carrier_detect_enabled || debug); in ene_rx_setup()
423 if (dev->hw_fan_input) in ene_rx_setup()
424 dev->rx_fan_input_inuse = true; in ene_rx_setup()
426 ene_rx_select_input(dev, dev->hw_use_gpio_0a); in ene_rx_setup()
429 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
430 ene_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT); in ene_rx_setup()
434 if (dev->rx_fan_input_inuse) { in ene_rx_setup()
435 dev->rdev->rx_resolution = ENE_FW_SAMPLE_PERIOD_FAN; in ene_rx_setup()
439 dev->rdev->min_timeout = dev->rdev->max_timeout = in ene_rx_setup()
443 dev->rdev->rx_resolution = sample_period; in ene_rx_setup()
450 dev->rdev->min_timeout = 127 * sample_period; in ene_rx_setup()
451 dev->rdev->max_timeout = 200000; in ene_rx_setup()
454 if (dev->hw_learning_and_tx_capable) in ene_rx_setup()
455 dev->rdev->tx_resolution = sample_period; in ene_rx_setup()
457 if (dev->rdev->timeout > dev->rdev->max_timeout) in ene_rx_setup()
458 dev->rdev->timeout = dev->rdev->max_timeout; in ene_rx_setup()
459 if (dev->rdev->timeout < dev->rdev->min_timeout) in ene_rx_setup()
460 dev->rdev->timeout = dev->rdev->min_timeout; in ene_rx_setup()
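The tail of ene_rx_setup() (lines 450-460) derives the receiver timeout bounds: the minimum is 127 times the sample period, the maximum is capped at 200 ms, and the current timeout is clamped into that range (values are in microseconds, matching the MS_TO_US(150) default on line 821). A sketch of the clamp, assuming a 75 µs sample period purely for illustration:

#include <stdio.h>

#define SAMPLE_PERIOD 75	/* assumed CIR sample period in microseconds */

/* Clamp a requested receive timeout into the range ene_rx_setup() allows. */
static unsigned int clamp_timeout(unsigned int timeout_us)
{
	unsigned int min_timeout = 127 * SAMPLE_PERIOD;	/* line 450 */
	unsigned int max_timeout = 200000;		/* line 451: 200 ms cap */

	if (timeout_us > max_timeout)
		timeout_us = max_timeout;
	if (timeout_us < min_timeout)
		timeout_us = min_timeout;
	return timeout_us;
}

int main(void)
{
	printf("150000 -> %u\n", clamp_timeout(150000));	/* unchanged */
	printf("  5000 -> %u\n", clamp_timeout(5000));		/* raised to 9525 */
	return 0;
}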
464 static void ene_rx_enable_hw(struct ene_device *dev) in ene_rx_enable_hw() argument
469 if (dev->hw_revision < ENE_HW_C) { in ene_rx_enable_hw()
470 ene_write_reg(dev, ENEB_IRQ, dev->irq << 1); in ene_rx_enable_hw()
471 ene_write_reg(dev, ENEB_IRQ_UNK1, 0x01); in ene_rx_enable_hw()
473 reg_value = ene_read_reg(dev, ENE_IRQ) & 0xF0; in ene_rx_enable_hw()
476 reg_value |= (dev->irq & ENE_IRQ_MASK); in ene_rx_enable_hw()
477 ene_write_reg(dev, ENE_IRQ, reg_value); in ene_rx_enable_hw()
481 ene_rx_enable_fan_input(dev, dev->rx_fan_input_inuse); in ene_rx_enable_hw()
482 ene_rx_enable_cir_engine(dev, !dev->rx_fan_input_inuse); in ene_rx_enable_hw()
485 ene_irq_status(dev); in ene_rx_enable_hw()
488 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_enable_hw()
491 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_enable_hw()
495 static void ene_rx_enable(struct ene_device *dev) in ene_rx_enable() argument
497 ene_rx_enable_hw(dev); in ene_rx_enable()
498 dev->rx_enabled = true; in ene_rx_enable()
502 static void ene_rx_disable_hw(struct ene_device *dev) in ene_rx_disable_hw() argument
505 ene_rx_enable_cir_engine(dev, false); in ene_rx_disable_hw()
506 ene_rx_enable_fan_input(dev, false); in ene_rx_disable_hw()
509 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_disable_hw()
510 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_disable_hw()
514 static void ene_rx_disable(struct ene_device *dev) in ene_rx_disable() argument
516 ene_rx_disable_hw(dev); in ene_rx_disable()
517 dev->rx_enabled = false; in ene_rx_disable()
523 static void ene_rx_reset(struct ene_device *dev) in ene_rx_reset() argument
525 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
526 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
530 static void ene_tx_set_carrier(struct ene_device *dev) in ene_tx_set_carrier() argument
535 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_carrier()
537 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_tx_set_carrier()
538 ENE_CIRCFG_TX_CARR, dev->tx_period > 0); in ene_tx_set_carrier()
540 if (!dev->tx_period) in ene_tx_set_carrier()
543 BUG_ON(dev->tx_duty_cycle >= 100 || dev->tx_duty_cycle <= 0); in ene_tx_set_carrier()
545 tx_puls_width = dev->tx_period / (100 / dev->tx_duty_cycle); in ene_tx_set_carrier()
550 dbg("TX: pulse distance = %d * 500 ns", dev->tx_period); in ene_tx_set_carrier()
553 ene_write_reg(dev, ENE_CIRMOD_PRD, dev->tx_period | ENE_CIRMOD_PRD_POL); in ene_tx_set_carrier()
554 ene_write_reg(dev, ENE_CIRMOD_HPRD, tx_puls_width); in ene_tx_set_carrier()
556 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_carrier()
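ene_tx_set_carrier() programs the modulation period and high-period registers in 500 ns units (line 550) and derives the pulse width from the duty cycle as period / (100 / duty_cycle) (line 545). A sketch of the conversion from a carrier frequency and duty cycle to those two values; the 2000000 / carrier step and the floor of one unit are assumptions consistent with the 500 ns unit, not lines shown in this listing:

#include <stdio.h>

/* Convert a carrier frequency (Hz) and duty cycle (%) into the two values
 * written to the modulation period / high-period registers, both in 500 ns
 * units. The driver BUG_ON()s duty cycles outside (0, 100), so the division
 * below is safe for valid inputs. */
static void carrier_to_regs(unsigned int carrier_hz, unsigned int duty_cycle,
			    unsigned int *period, unsigned int *pulse_width)
{
	*period = 2000000 / carrier_hz;			/* one carrier cycle in 500 ns steps */
	*pulse_width = *period / (100 / duty_cycle);	/* same formula as line 545 */
	if (!*pulse_width)
		*pulse_width = 1;			/* never program a zero-length pulse */
}

int main(void)
{
	unsigned int period, width;

	carrier_to_regs(36000, 50, &period, &width);	/* 36 kHz carrier, 50% duty cycle */
	printf("period = %u * 500 ns, pulse width = %u * 500 ns\n", period, width);
	return 0;
}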
560 static void ene_tx_set_transmitters(struct ene_device *dev) in ene_tx_set_transmitters() argument
564 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_transmitters()
565 ene_set_clear_reg_mask(dev, ENE_GPIOFS8, ENE_GPIOFS8_GPIO41, in ene_tx_set_transmitters()
566 !!(dev->transmitter_mask & 0x01)); in ene_tx_set_transmitters()
567 ene_set_clear_reg_mask(dev, ENE_GPIOFS1, ENE_GPIOFS1_GPIO0D, in ene_tx_set_transmitters()
568 !!(dev->transmitter_mask & 0x02)); in ene_tx_set_transmitters()
569 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_transmitters()
573 static void ene_tx_enable(struct ene_device *dev) in ene_tx_enable() argument
575 u8 conf1 = ene_read_reg(dev, ENE_CIRCFG); in ene_tx_enable()
576 u8 fwreg2 = ene_read_reg(dev, ENE_FW2); in ene_tx_enable()
578 dev->saved_conf1 = conf1; in ene_tx_enable()
591 if (dev->hw_revision == ENE_HW_C) in ene_tx_enable()
596 ene_write_reg(dev, ENE_CIRCFG, conf1); in ene_tx_enable()
600 static void ene_tx_disable(struct ene_device *dev) in ene_tx_disable() argument
602 ene_write_reg(dev, ENE_CIRCFG, dev->saved_conf1); in ene_tx_disable()
603 dev->tx_buffer = NULL; in ene_tx_disable()
608 static void ene_tx_sample(struct ene_device *dev) in ene_tx_sample() argument
612 bool pulse = dev->tx_sample_pulse; in ene_tx_sample()
614 if (!dev->tx_buffer) { in ene_tx_sample()
620 if (!dev->tx_sample) { in ene_tx_sample()
622 if (dev->tx_pos == dev->tx_len) { in ene_tx_sample()
623 if (!dev->tx_done) { in ene_tx_sample()
625 dev->tx_done = true; in ene_tx_sample()
629 ene_tx_disable(dev); in ene_tx_sample()
630 complete(&dev->tx_complete); in ene_tx_sample()
635 sample = dev->tx_buffer[dev->tx_pos++]; in ene_tx_sample()
636 dev->tx_sample_pulse = !dev->tx_sample_pulse; in ene_tx_sample()
638 dev->tx_sample = DIV_ROUND_CLOSEST(sample, sample_period); in ene_tx_sample()
640 if (!dev->tx_sample) in ene_tx_sample()
641 dev->tx_sample = 1; in ene_tx_sample()
644 raw_tx = min(dev->tx_sample, (unsigned int)ENE_CIRRLC_OUT_MASK); in ene_tx_sample()
645 dev->tx_sample -= raw_tx; in ene_tx_sample()
652 ene_write_reg(dev, in ene_tx_sample()
653 dev->tx_reg ? ENE_CIRRLC_OUT1 : ENE_CIRRLC_OUT0, raw_tx); in ene_tx_sample()
655 dev->tx_reg = !dev->tx_reg; in ene_tx_sample()
659 mod_timer(&dev->tx_sim_timer, jiffies + HZ / 500); in ene_tx_sample()
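ene_tx_sample() pulls the next duration from tx_buffer, converts it from microseconds to hardware sample periods with DIV_ROUND_CLOSEST (line 638), and emits it in chunks no larger than ENE_CIRRLC_OUT_MASK, alternating between the two output registers (lines 644-655). The driver writes one chunk per TX interrupt and also sets a pulse/space flag in lines not shown here; the sketch below compresses the chunking arithmetic into a loop, with the mask width and sample period assumed for illustration:

#include <stdio.h>

#define OUT_MASK      0x7F	/* assumed maximum value of one output-register write */
#define SAMPLE_PERIOD 75	/* assumed hardware sample period in microseconds */

/* Round-to-nearest division for non-negative values, like the kernel's
 * DIV_ROUND_CLOSEST() in this use. */
#define DIV_ROUND_CLOSEST(x, d) (((x) + (d) / 2) / (d))

/* Split one pulse/space duration (µs) into register-sized chunks, alternating
 * between the OUT0 and OUT1 registers as ene_tx_sample() does. */
static void emit_sample(unsigned int duration_us, int *tx_reg)
{
	unsigned int remaining = DIV_ROUND_CLOSEST(duration_us, SAMPLE_PERIOD);

	if (!remaining)
		remaining = 1;		/* the driver never emits a zero-length sample */

	while (remaining) {
		unsigned int raw = remaining < OUT_MASK ? remaining : OUT_MASK;

		printf("write %3u to CIRRLC_OUT%d\n", raw, *tx_reg);
		*tx_reg = !*tx_reg;	/* alternate between the two output registers */
		remaining -= raw;
	}
}

int main(void)
{
	int tx_reg = 0;

	emit_sample(9000, &tx_reg);	/* e.g. a 9 ms NEC header pulse */
	emit_sample(4500, &tx_reg);	/* followed by a 4.5 ms space */
	return 0;
}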
665 struct ene_device *dev = from_timer(dev, t, tx_sim_timer); in ene_tx_irqsim() local
668 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_irqsim()
669 ene_tx_sample(dev); in ene_tx_irqsim()
670 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_irqsim()
675 static int ene_irq_status(struct ene_device *dev) in ene_irq_status() argument
681 fw_flags2 = ene_read_reg(dev, ENE_FW2); in ene_irq_status()
683 if (dev->hw_revision < ENE_HW_C) { in ene_irq_status()
684 irq_status = ene_read_reg(dev, ENEB_IRQ_STATUS); in ene_irq_status()
689 ene_clear_reg_mask(dev, ENEB_IRQ_STATUS, ENEB_IRQ_STATUS_IR); in ene_irq_status()
693 irq_status = ene_read_reg(dev, ENE_IRQ); in ene_irq_status()
698 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
699 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
704 ene_write_reg(dev, ENE_FW2, fw_flags2 & ~ENE_FW2_RXIRQ); in ene_irq_status()
708 fw_flags1 = ene_read_reg(dev, ENE_FW1); in ene_irq_status()
710 ene_write_reg(dev, ENE_FW1, fw_flags1 & ~ENE_FW1_TXIRQ); in ene_irq_status()
725 struct ene_device *dev = (struct ene_device *)data; in ene_isr() local
728 spin_lock_irqsave(&dev->hw_lock, flags); in ene_isr()
731 ene_rx_read_hw_pointer(dev); in ene_isr()
732 irq_status = ene_irq_status(dev); in ene_isr()
741 if (!dev->hw_learning_and_tx_capable) { in ene_isr()
745 ene_tx_sample(dev); in ene_isr()
753 if (dev->hw_learning_and_tx_capable) in ene_isr()
754 ene_rx_sense_carrier(dev); in ene_isr()
758 if (!dev->hw_extra_buffer) in ene_isr()
759 dev->r_pointer = dev->w_pointer == 0 ? ENE_FW_PACKET_SIZE : 0; in ene_isr()
763 reg = ene_rx_get_sample_reg(dev); in ene_isr()
769 hw_value = ene_read_reg(dev, reg); in ene_isr()
771 if (dev->rx_fan_input_inuse) { in ene_isr()
776 hw_value |= ene_read_reg(dev, reg + offset) << 8; in ene_isr()
788 if (dev->rx_period_adjust) { in ene_isr()
790 hw_sample /= (100 + dev->rx_period_adjust); in ene_isr()
794 if (!dev->hw_extra_buffer && !hw_sample) { in ene_isr()
795 dev->r_pointer = dev->w_pointer; in ene_isr()
803 ir_raw_event_store_with_filter(dev->rdev, &ev); in ene_isr()
806 ir_raw_event_handle(dev->rdev); in ene_isr()
808 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_isr()
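Inside the ISR each raw sample byte is turned into a pulse/space flag and a duration: the value is multiplied by the sample period and, when rx_period_adjust is set, corrected by a percentage (the visible division by 100 + rx_period_adjust on line 790 implies a matching multiplication by 100 just above it). A sketch of that decoding; the space-flag bit and the 75 µs period are assumptions, not the driver's constants:

#include <stdint.h>
#include <stdio.h>

#define SAMPLE_PERIOD    75	/* assumed CIR sample period in microseconds */
#define SAMPLE_SPACE_BIT 0x80	/* assumed "this is a space" flag in the raw byte */

/* Convert one raw CIR sample byte into a duration in microseconds and a
 * pulse/space flag, then apply the percentage correction used when the PLL
 * is not running at the default frequency. */
static unsigned int decode_sample(uint8_t raw, int period_adjust, int *is_pulse)
{
	unsigned int us;

	*is_pulse = !(raw & SAMPLE_SPACE_BIT);
	us = (raw & ~SAMPLE_SPACE_BIT) * SAMPLE_PERIOD;

	if (period_adjust) {		/* scale by 100 / (100 + adjust), as around line 790 */
		us *= 100;
		us /= 100 + period_adjust;
	}
	return us;
}

int main(void)
{
	int pulse;
	unsigned int us = decode_sample(0x24, 4, &pulse);	/* arbitrary raw byte, 4% adjust */

	printf("%s for %u us\n", pulse ? "pulse" : "space", us);
	return 0;
}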
813 static void ene_setup_default_settings(struct ene_device *dev) in ene_setup_default_settings() argument
815 dev->tx_period = 32; in ene_setup_default_settings()
816 dev->tx_duty_cycle = 50; /*%*/ in ene_setup_default_settings()
817 dev->transmitter_mask = 0x03; in ene_setup_default_settings()
818 dev->learning_mode_enabled = learning_mode_force; in ene_setup_default_settings()
821 dev->rdev->timeout = MS_TO_US(150); in ene_setup_default_settings()
825 static void ene_setup_hw_settings(struct ene_device *dev) in ene_setup_hw_settings() argument
827 if (dev->hw_learning_and_tx_capable) { in ene_setup_hw_settings()
828 ene_tx_set_carrier(dev); in ene_setup_hw_settings()
829 ene_tx_set_transmitters(dev); in ene_setup_hw_settings()
832 ene_rx_setup(dev); in ene_setup_hw_settings()
838 struct ene_device *dev = rdev->priv; in ene_open() local
841 spin_lock_irqsave(&dev->hw_lock, flags); in ene_open()
842 ene_rx_enable(dev); in ene_open()
843 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_open()
850 struct ene_device *dev = rdev->priv; in ene_close() local
852 spin_lock_irqsave(&dev->hw_lock, flags); in ene_close()
854 ene_rx_disable(dev); in ene_close()
855 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_close()
861 struct ene_device *dev = rdev->priv; in ene_set_tx_mask() local
871 dev->transmitter_mask = tx_mask; in ene_set_tx_mask()
872 ene_tx_set_transmitters(dev); in ene_set_tx_mask()
879 struct ene_device *dev = rdev->priv; in ene_set_tx_carrier() local
895 dev->tx_period = period; in ene_set_tx_carrier()
896 ene_tx_set_carrier(dev); in ene_set_tx_carrier()
903 struct ene_device *dev = rdev->priv; in ene_set_tx_duty_cycle() local
905 dev->tx_duty_cycle = duty_cycle; in ene_set_tx_duty_cycle()
906 ene_tx_set_carrier(dev); in ene_set_tx_duty_cycle()
913 struct ene_device *dev = rdev->priv; in ene_set_learning_mode() local
915 if (enable == dev->learning_mode_enabled) in ene_set_learning_mode()
918 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_learning_mode()
919 dev->learning_mode_enabled = enable; in ene_set_learning_mode()
920 ene_rx_disable(dev); in ene_set_learning_mode()
921 ene_rx_setup(dev); in ene_set_learning_mode()
922 ene_rx_enable(dev); in ene_set_learning_mode()
923 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_learning_mode()
929 struct ene_device *dev = rdev->priv; in ene_set_carrier_report() local
932 if (enable == dev->carrier_detect_enabled) in ene_set_carrier_report()
935 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_carrier_report()
936 dev->carrier_detect_enabled = enable; in ene_set_carrier_report()
937 ene_rx_disable(dev); in ene_set_carrier_report()
938 ene_rx_setup(dev); in ene_set_carrier_report()
939 ene_rx_enable(dev); in ene_set_carrier_report()
940 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_carrier_report()
947 struct ene_device *dev = rdev->priv; in ene_set_idle() local
950 ene_rx_reset(dev); in ene_set_idle()
958 struct ene_device *dev = rdev->priv; in ene_transmit() local
961 dev->tx_buffer = buf; in ene_transmit()
962 dev->tx_len = n; in ene_transmit()
963 dev->tx_pos = 0; in ene_transmit()
964 dev->tx_reg = 0; in ene_transmit()
965 dev->tx_done = 0; in ene_transmit()
966 dev->tx_sample = 0; in ene_transmit()
967 dev->tx_sample_pulse = false; in ene_transmit()
969 dbg("TX: %d samples", dev->tx_len); in ene_transmit()
971 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
973 ene_tx_enable(dev); in ene_transmit()
976 ene_tx_sample(dev); in ene_transmit()
977 ene_tx_sample(dev); in ene_transmit()
979 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
981 if (wait_for_completion_timeout(&dev->tx_complete, 2 * HZ) == 0) { in ene_transmit()
983 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
984 ene_tx_disable(dev); in ene_transmit()
985 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
996 struct ene_device *dev; in ene_probe() local
999 dev = kzalloc(sizeof(struct ene_device), GFP_KERNEL); in ene_probe()
1001 if (!dev || !rdev) in ene_probe()
1008 dev->hw_io = -1; in ene_probe()
1009 dev->irq = -1; in ene_probe()
1018 spin_lock_init(&dev->hw_lock); in ene_probe()
1020 dev->hw_io = pnp_port_start(pnp_dev, 0); in ene_probe()
1021 dev->irq = pnp_irq(pnp_dev, 0); in ene_probe()
1024 pnp_set_drvdata(pnp_dev, dev); in ene_probe()
1025 dev->pnp_dev = pnp_dev; in ene_probe()
1032 error = ene_hw_detect(dev); in ene_probe()
1036 if (!dev->hw_learning_and_tx_capable && txsim) { in ene_probe()
1037 dev->hw_learning_and_tx_capable = true; in ene_probe()
1038 timer_setup(&dev->tx_sim_timer, ene_tx_irqsim, 0); in ene_probe()
1042 if (!dev->hw_learning_and_tx_capable) in ene_probe()
1046 rdev->priv = dev; in ene_probe()
1054 if (dev->hw_learning_and_tx_capable) { in ene_probe()
1056 init_completion(&dev->tx_complete); in ene_probe()
1065 dev->rdev = rdev; in ene_probe()
1067 ene_rx_setup_hw_buffer(dev); in ene_probe()
1068 ene_setup_default_settings(dev); in ene_probe()
1069 ene_setup_hw_settings(dev); in ene_probe()
1071 device_set_wakeup_capable(&pnp_dev->dev, true); in ene_probe()
1072 device_set_wakeup_enable(&pnp_dev->dev, true); in ene_probe()
1080 if (!request_region(dev->hw_io, ENE_IO_SIZE, ENE_DRIVER_NAME)) { in ene_probe()
1084 if (request_irq(dev->irq, ene_isr, in ene_probe()
1085 IRQF_SHARED, ENE_DRIVER_NAME, (void *)dev)) { in ene_probe()
1093 release_region(dev->hw_io, ENE_IO_SIZE); in ene_probe()
1099 kfree(dev); in ene_probe()
1106 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_remove() local
1109 rc_unregister_device(dev->rdev); in ene_remove()
1110 del_timer_sync(&dev->tx_sim_timer); in ene_remove()
1111 spin_lock_irqsave(&dev->hw_lock, flags); in ene_remove()
1112 ene_rx_disable(dev); in ene_remove()
1113 ene_rx_restore_hw_buffer(dev); in ene_remove()
1114 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_remove()
1116 free_irq(dev->irq, dev); in ene_remove()
1117 release_region(dev->hw_io, ENE_IO_SIZE); in ene_remove()
1118 kfree(dev); in ene_remove()
1122 static void ene_enable_wake(struct ene_device *dev, bool enable) in ene_enable_wake() argument
1125 ene_set_clear_reg_mask(dev, ENE_FW1, ENE_FW1_WAKE, enable); in ene_enable_wake()
1131 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_suspend() local
1132 bool wake = device_may_wakeup(&dev->pnp_dev->dev); in ene_suspend()
1134 if (!wake && dev->rx_enabled) in ene_suspend()
1135 ene_rx_disable_hw(dev); in ene_suspend()
1137 ene_enable_wake(dev, wake); in ene_suspend()
1143 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_resume() local
1144 ene_setup_hw_settings(dev); in ene_resume()
1146 if (dev->rx_enabled) in ene_resume()
1147 ene_rx_enable(dev); in ene_resume()
1149 ene_enable_wake(dev, false); in ene_resume()
1156 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_shutdown() local
1157 ene_enable_wake(dev, true); in ene_shutdown()