Lines Matching refs:dev

45 static void ene_set_reg_addr(struct ene_device *dev, u16 reg)  in ene_set_reg_addr()  argument
47 outb(reg >> 8, dev->hw_io + ENE_ADDR_HI); in ene_set_reg_addr()
48 outb(reg & 0xFF, dev->hw_io + ENE_ADDR_LO); in ene_set_reg_addr()
52 static u8 ene_read_reg(struct ene_device *dev, u16 reg) in ene_read_reg() argument
55 ene_set_reg_addr(dev, reg); in ene_read_reg()
56 retval = inb(dev->hw_io + ENE_IO); in ene_read_reg()
62 static void ene_write_reg(struct ene_device *dev, u16 reg, u8 value) in ene_write_reg() argument
65 ene_set_reg_addr(dev, reg); in ene_write_reg()
66 outb(value, dev->hw_io + ENE_IO); in ene_write_reg()
70 static void ene_set_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_set_reg_mask() argument
73 ene_set_reg_addr(dev, reg); in ene_set_reg_mask()
74 outb(inb(dev->hw_io + ENE_IO) | mask, dev->hw_io + ENE_IO); in ene_set_reg_mask()
78 static void ene_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_clear_reg_mask() argument
81 ene_set_reg_addr(dev, reg); in ene_clear_reg_mask()
82 outb(inb(dev->hw_io + ENE_IO) & ~mask, dev->hw_io + ENE_IO); in ene_clear_reg_mask()
86 static void ene_set_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask, in ene_set_clear_reg_mask() argument
90 ene_set_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
92 ene_clear_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
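
The six helpers above (source lines 45-92) are the driver's basic path to the hardware: the 16-bit register index is latched byte-wise through the ENE_ADDR_HI/ENE_ADDR_LO ports, the data byte then moves through ENE_IO, and the mask helpers are read-modify-write layered on top. Below is a minimal userspace model of that indexed-I/O pattern; the port numbers and the in-memory register file are stand-ins for the real hw_io-relative outb()/inb() calls, not the driver's actual values.

#include <stdbool.h>
#include <stdint.h>

/* Port offsets and register-file size are placeholders for this sketch. */
enum { ENE_ADDR_HI = 0, ENE_ADDR_LO = 1, ENE_IO = 2 };

static uint8_t fake_regs[0x10000];	/* simulated chip register space */
static uint16_t latched_addr;		/* address latch behind ADDR_HI/LO */

static void outb_sim(uint8_t val, int port)
{
	if (port == ENE_ADDR_HI)
		latched_addr = (latched_addr & 0x00FF) | (val << 8);
	else if (port == ENE_ADDR_LO)
		latched_addr = (latched_addr & 0xFF00) | val;
	else	/* ENE_IO */
		fake_regs[latched_addr] = val;
}

static uint8_t inb_sim(int port)
{
	return port == ENE_IO ? fake_regs[latched_addr] : 0;
}

/* Same shape as ene_set_reg_addr()/ene_read_reg()/ene_write_reg(). */
static void set_reg_addr(uint16_t reg)
{
	outb_sim(reg >> 8, ENE_ADDR_HI);
	outb_sim(reg & 0xFF, ENE_ADDR_LO);
}

static uint8_t read_reg(uint16_t reg)
{
	set_reg_addr(reg);
	return inb_sim(ENE_IO);
}

static void write_reg(uint16_t reg, uint8_t value)
{
	set_reg_addr(reg);
	outb_sim(value, ENE_IO);
}

/* Read-modify-write, mirroring ene_set_clear_reg_mask() at lines 86-92. */
static void set_clear_reg_mask(uint16_t reg, uint8_t mask, bool set)
{
	write_reg(reg, set ? (read_reg(reg) | mask) : (read_reg(reg) & ~mask));
}

int main(void)
{
	write_reg(0x0123, 0x5A);
	set_clear_reg_mask(0x0123, 0x01, true);
	return read_reg(0x0123) == 0x5B ? 0 : 1;	/* exits 0 on success */
}

The address latch makes the interface stateful, which fits with the dev->hw_lock spinlock that the paths listed further down hold around bursts of register traffic.
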
96 static int ene_hw_detect(struct ene_device *dev) in ene_hw_detect() argument
102 ene_clear_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
103 chip_major = ene_read_reg(dev, ENE_ECVER_MAJOR); in ene_hw_detect()
104 chip_minor = ene_read_reg(dev, ENE_ECVER_MINOR); in ene_hw_detect()
105 ene_set_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
107 hw_revision = ene_read_reg(dev, ENE_ECHV); in ene_hw_detect()
108 old_ver = ene_read_reg(dev, ENE_HW_VER_OLD); in ene_hw_detect()
110 dev->pll_freq = (ene_read_reg(dev, ENE_PLLFRH) << 4) + in ene_hw_detect()
111 (ene_read_reg(dev, ENE_PLLFRL) >> 4); in ene_hw_detect()
114 dev->rx_period_adjust = in ene_hw_detect()
115 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 2 : 4; in ene_hw_detect()
127 pr_notice("PLL freq = %d\n", dev->pll_freq); in ene_hw_detect()
135 dev->hw_revision = ENE_HW_C; in ene_hw_detect()
138 dev->hw_revision = ENE_HW_B; in ene_hw_detect()
141 dev->hw_revision = ENE_HW_D; in ene_hw_detect()
146 if (dev->hw_revision < ENE_HW_C) in ene_hw_detect()
149 fw_reg1 = ene_read_reg(dev, ENE_FW1); in ene_hw_detect()
150 fw_reg2 = ene_read_reg(dev, ENE_FW2); in ene_hw_detect()
154 dev->hw_use_gpio_0a = !!(fw_reg2 & ENE_FW2_GP0A); in ene_hw_detect()
155 dev->hw_learning_and_tx_capable = !!(fw_reg2 & ENE_FW2_LEARNING); in ene_hw_detect()
156 dev->hw_extra_buffer = !!(fw_reg1 & ENE_FW1_HAS_EXTRA_BUF); in ene_hw_detect()
158 if (dev->hw_learning_and_tx_capable) in ene_hw_detect()
159 dev->hw_fan_input = !!(fw_reg2 & ENE_FW2_FAN_INPUT); in ene_hw_detect()
163 if (dev->hw_learning_and_tx_capable) { in ene_hw_detect()
172 dev->hw_use_gpio_0a ? "40" : "0A"); in ene_hw_detect()
174 if (dev->hw_fan_input) in ene_hw_detect()
178 if (!dev->hw_fan_input) in ene_hw_detect()
180 dev->hw_use_gpio_0a ? "0A" : "40"); in ene_hw_detect()
182 if (dev->hw_extra_buffer) in ene_hw_detect()
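
Two small computations in ene_hw_detect() are easy to misread in isolation: the PLL frequency is a 12-bit value assembled from ENE_PLLFRH shifted left by four plus the top nibble of ENE_PLLFRL (source lines 110-111), and the firmware capability bits are normalized to 0/1 with the !! idiom (lines 154-156). Here is a standalone worked example of that arithmetic; the register bytes and the three flag constants are invented purely for illustration.

#include <stdint.h>
#include <stdio.h>

/* Illustrative bit positions only; the real ones live in the driver header. */
#define ENE_FW2_GP0A		0x01
#define ENE_FW2_LEARNING	0x02
#define ENE_FW1_HAS_EXTRA_BUF	0x04

int main(void)
{
	/* pretend these four bytes came back from ene_read_reg() */
	uint8_t pllfrh = 0x4B, pllfrl = 0x80;
	uint8_t fw_reg1 = 0x04, fw_reg2 = 0x03;

	/* lines 110-111: high byte supplies bits 4..11, low nibble bits 0..3 */
	unsigned pll_freq = (pllfrh << 4) + (pllfrl >> 4);

	/* lines 154-156: !! collapses any non-zero mask result to exactly 1 */
	int hw_use_gpio_0a  = !!(fw_reg2 & ENE_FW2_GP0A);
	int hw_learning_tx  = !!(fw_reg2 & ENE_FW2_LEARNING);
	int hw_extra_buffer = !!(fw_reg1 & ENE_FW1_HAS_EXTRA_BUF);

	printf("PLL freq = %u\n", pll_freq);	/* 0x4B0 + 0x8 = 1208 */
	printf("gpio0a=%d learning/tx=%d extra_buf=%d\n",
	       hw_use_gpio_0a, hw_learning_tx, hw_extra_buffer);
	return 0;
}

Whether pll_freq matches ENE_DEFAULT_PLL_FREQ is what selects rx_period_adjust = 2 rather than 4 at lines 114-115.
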
188 static void ene_rx_setup_hw_buffer(struct ene_device *dev) in ene_rx_setup_hw_buffer() argument
192 ene_rx_read_hw_pointer(dev); in ene_rx_setup_hw_buffer()
193 dev->r_pointer = dev->w_pointer; in ene_rx_setup_hw_buffer()
195 if (!dev->hw_extra_buffer) { in ene_rx_setup_hw_buffer()
196 dev->buffer_len = ENE_FW_PACKET_SIZE * 2; in ene_rx_setup_hw_buffer()
200 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER); in ene_rx_setup_hw_buffer()
201 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER+1) << 8; in ene_rx_setup_hw_buffer()
202 dev->extra_buf1_address = tmp; in ene_rx_setup_hw_buffer()
204 dev->extra_buf1_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 2); in ene_rx_setup_hw_buffer()
206 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 3); in ene_rx_setup_hw_buffer()
207 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 4) << 8; in ene_rx_setup_hw_buffer()
208 dev->extra_buf2_address = tmp; in ene_rx_setup_hw_buffer()
210 dev->extra_buf2_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 5); in ene_rx_setup_hw_buffer()
212 dev->buffer_len = dev->extra_buf1_len + dev->extra_buf2_len + 8; in ene_rx_setup_hw_buffer()
216 dev->extra_buf1_address, dev->extra_buf1_len); in ene_rx_setup_hw_buffer()
218 dev->extra_buf2_address, dev->extra_buf2_len); in ene_rx_setup_hw_buffer()
220 pr_notice("Total buffer len = %d\n", dev->buffer_len); in ene_rx_setup_hw_buffer()
222 if (dev->buffer_len > 64 || dev->buffer_len < 16) in ene_rx_setup_hw_buffer()
225 if (dev->extra_buf1_address > 0xFBFC || in ene_rx_setup_hw_buffer()
226 dev->extra_buf1_address < 0xEC00) in ene_rx_setup_hw_buffer()
229 if (dev->extra_buf2_address > 0xFBFC || in ene_rx_setup_hw_buffer()
230 dev->extra_buf2_address < 0xEC00) in ene_rx_setup_hw_buffer()
233 if (dev->r_pointer > dev->buffer_len) in ene_rx_setup_hw_buffer()
236 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
240 dev->hw_extra_buffer = false; in ene_rx_setup_hw_buffer()
241 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
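
ene_rx_setup_hw_buffer() reads a six-byte descriptor starting at ENE_FW_SAMPLE_BUFFER: a little-endian address and a length byte for each of the two extra sample buffers, followed by sanity checks before the driver claims the buffers with ENE_FW1_EXTRA_BUF_HND (or falls back to the plain two-packet buffer when a check fails, lines 240-241). The sketch below models only the parsing and validation; the descriptor bytes are invented, and the r_pointer consistency check at line 233 is left out.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct extra_bufs {
	uint16_t buf1_address, buf2_address;
	uint8_t  buf1_len, buf2_len;
	unsigned buffer_len;
};

/* desc[] stands in for the six bytes at ENE_FW_SAMPLE_BUFFER + 0..5. */
static bool parse_extra_bufs(const uint8_t desc[6], struct extra_bufs *b)
{
	b->buf1_address = desc[0] | (desc[1] << 8);	/* +0 low, +1 high */
	b->buf1_len     = desc[2];			/* +2 */
	b->buf2_address = desc[3] | (desc[4] << 8);	/* +3 low, +4 high */
	b->buf2_len     = desc[5];			/* +5 */

	/* line 212: the ring is an 8-byte main area plus both extras */
	b->buffer_len = b->buf1_len + b->buf2_len + 8;

	/* bounds checks mirrored from lines 222-230 */
	if (b->buffer_len > 64 || b->buffer_len < 16)
		return false;
	if (b->buf1_address > 0xFBFC || b->buf1_address < 0xEC00)
		return false;
	if (b->buf2_address > 0xFBFC || b->buf2_address < 0xEC00)
		return false;
	return true;
}

int main(void)
{
	const uint8_t desc[6] = { 0x00, 0xF8, 24, 0x20, 0xF8, 24 };	/* invented */
	struct extra_bufs b;

	if (parse_extra_bufs(desc, &b))
		printf("buf1 @0x%04x len %u, buf2 @0x%04x len %u, total %u\n",
		       b.buf1_address, b.buf1_len,
		       b.buf2_address, b.buf2_len, b.buffer_len);
	else
		puts("descriptor rejected; fall back to the two-packet buffer");
	return 0;
}
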
246 static void ene_rx_restore_hw_buffer(struct ene_device *dev) in ene_rx_restore_hw_buffer() argument
248 if (!dev->hw_extra_buffer) in ene_rx_restore_hw_buffer()
251 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 0, in ene_rx_restore_hw_buffer()
252 dev->extra_buf1_address & 0xFF); in ene_rx_restore_hw_buffer()
253 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 1, in ene_rx_restore_hw_buffer()
254 dev->extra_buf1_address >> 8); in ene_rx_restore_hw_buffer()
255 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 2, dev->extra_buf1_len); in ene_rx_restore_hw_buffer()
257 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 3, in ene_rx_restore_hw_buffer()
258 dev->extra_buf2_address & 0xFF); in ene_rx_restore_hw_buffer()
259 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 4, in ene_rx_restore_hw_buffer()
260 dev->extra_buf2_address >> 8); in ene_rx_restore_hw_buffer()
261 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 5, in ene_rx_restore_hw_buffer()
262 dev->extra_buf2_len); in ene_rx_restore_hw_buffer()
263 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_restore_hw_buffer()
267 static void ene_rx_read_hw_pointer(struct ene_device *dev) in ene_rx_read_hw_pointer() argument
269 if (dev->hw_extra_buffer) in ene_rx_read_hw_pointer()
270 dev->w_pointer = ene_read_reg(dev, ENE_FW_RX_POINTER); in ene_rx_read_hw_pointer()
272 dev->w_pointer = ene_read_reg(dev, ENE_FW2) in ene_rx_read_hw_pointer()
276 dev->w_pointer, dev->r_pointer); in ene_rx_read_hw_pointer()
280 static int ene_rx_get_sample_reg(struct ene_device *dev) in ene_rx_get_sample_reg() argument
284 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
286 ene_rx_read_hw_pointer(dev); in ene_rx_get_sample_reg()
289 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
290 dbg_verbose("RB: end of data at %d", dev->r_pointer); in ene_rx_get_sample_reg()
294 dbg_verbose("RB: reading at offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
295 r_pointer = dev->r_pointer; in ene_rx_get_sample_reg()
297 dev->r_pointer++; in ene_rx_get_sample_reg()
298 if (dev->r_pointer == dev->buffer_len) in ene_rx_get_sample_reg()
299 dev->r_pointer = 0; in ene_rx_get_sample_reg()
301 dbg_verbose("RB: next read will be from offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
310 if (r_pointer < dev->extra_buf1_len) { in ene_rx_get_sample_reg()
312 return dev->extra_buf1_address + r_pointer; in ene_rx_get_sample_reg()
315 r_pointer -= dev->extra_buf1_len; in ene_rx_get_sample_reg()
317 if (r_pointer < dev->extra_buf2_len) { in ene_rx_get_sample_reg()
319 return dev->extra_buf2_address + r_pointer; in ene_rx_get_sample_reg()
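
ene_rx_get_sample_reg() treats everything as one logical ring of buffer_len bytes: the read pointer advances with wraparound, and the logical offset is then translated into a physical register address, first inside the main packet area and then inside the two extra buffers. A sketch of that translation follows; the main-area handling does not reference dev and so is absent from this listing, which means both MAIN_AREA_BASE and the 8-byte MAIN_AREA_LEN (inferred from line 212) are assumptions here.

#include <stdint.h>
#include <stdio.h>

#define MAIN_AREA_BASE	0xF8F0	/* assumption: base of the main sample area */
#define MAIN_AREA_LEN	8	/* assumption, inferred from the "+ 8" at line 212 */

struct rx_ring {
	unsigned r_pointer, w_pointer, buffer_len;
	uint16_t extra_buf1_address, extra_buf2_address;
	uint8_t  extra_buf1_len, extra_buf2_len;
};

/* Return the register address holding the next sample, or 0 if drained. */
static unsigned next_sample_reg(struct rx_ring *r)
{
	unsigned off;

	if (r->r_pointer == r->w_pointer)	/* lines 284-290: no new data */
		return 0;

	off = r->r_pointer;

	/* lines 297-299: advance the read pointer with wraparound */
	if (++r->r_pointer == r->buffer_len)
		r->r_pointer = 0;

	if (off < MAIN_AREA_LEN)		/* main firmware packet area */
		return MAIN_AREA_BASE + off;
	off -= MAIN_AREA_LEN;

	if (off < r->extra_buf1_len)		/* lines 310-312 */
		return r->extra_buf1_address + off;
	off -= r->extra_buf1_len;		/* line 315 */

	if (off < r->extra_buf2_len)		/* lines 317-319 */
		return r->extra_buf2_address + off;

	return 0;				/* past the end: nothing valid */
}

int main(void)
{
	struct rx_ring r = {
		.w_pointer = 10, .buffer_len = 56,
		.extra_buf1_address = 0xF000, .extra_buf1_len = 24,
		.extra_buf2_address = 0xF100, .extra_buf2_len = 24,
	};

	while (r.r_pointer != r.w_pointer)	/* drains offsets 0..9 */
		printf("sample register: 0x%04x\n", next_sample_reg(&r));
	return 0;
}
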
327 static void ene_rx_sense_carrier(struct ene_device *dev) in ene_rx_sense_carrier() argument
332 int period = ene_read_reg(dev, ENE_CIRCAR_PRD); in ene_rx_sense_carrier()
333 int hperiod = ene_read_reg(dev, ENE_CIRCAR_HPRD); in ene_rx_sense_carrier()
350 if (dev->carrier_detect_enabled) { in ene_rx_sense_carrier()
354 ir_raw_event_store(dev->rdev, &ev); in ene_rx_sense_carrier()
359 static void ene_rx_enable_cir_engine(struct ene_device *dev, bool enable) in ene_rx_enable_cir_engine() argument
361 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_rx_enable_cir_engine()
366 static void ene_rx_select_input(struct ene_device *dev, bool gpio_0a) in ene_rx_select_input() argument
368 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_GPIO0A, gpio_0a); in ene_rx_select_input()
375 static void ene_rx_enable_fan_input(struct ene_device *dev, bool enable) in ene_rx_enable_fan_input() argument
377 if (!dev->hw_fan_input) in ene_rx_enable_fan_input()
381 ene_write_reg(dev, ENE_FAN_AS_IN1, 0); in ene_rx_enable_fan_input()
383 ene_write_reg(dev, ENE_FAN_AS_IN1, ENE_FAN_AS_IN1_EN); in ene_rx_enable_fan_input()
384 ene_write_reg(dev, ENE_FAN_AS_IN2, ENE_FAN_AS_IN2_EN); in ene_rx_enable_fan_input()
389 static void ene_rx_setup(struct ene_device *dev) in ene_rx_setup() argument
391 bool learning_mode = dev->learning_mode_enabled || in ene_rx_setup()
392 dev->carrier_detect_enabled; in ene_rx_setup()
399 ene_write_reg(dev, ENE_CIRCFG2, 0x00); in ene_rx_setup()
404 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 1 : 2; in ene_rx_setup()
406 ene_write_reg(dev, ENE_CIRRLC_CFG, in ene_rx_setup()
410 if (dev->hw_revision < ENE_HW_C) in ene_rx_setup()
415 WARN_ON(!dev->hw_learning_and_tx_capable); in ene_rx_setup()
422 ene_rx_select_input(dev, !dev->hw_use_gpio_0a); in ene_rx_setup()
423 dev->rx_fan_input_inuse = false; in ene_rx_setup()
426 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
429 ene_write_reg(dev, ENE_CIRCAR_PULS, 0x63); in ene_rx_setup()
430 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT, in ene_rx_setup()
431 dev->carrier_detect_enabled || debug); in ene_rx_setup()
433 if (dev->hw_fan_input) in ene_rx_setup()
434 dev->rx_fan_input_inuse = true; in ene_rx_setup()
436 ene_rx_select_input(dev, dev->hw_use_gpio_0a); in ene_rx_setup()
439 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
440 ene_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT); in ene_rx_setup()
444 if (dev->rx_fan_input_inuse) { in ene_rx_setup()
445 dev->rdev->rx_resolution = US_TO_NS(ENE_FW_SAMPLE_PERIOD_FAN); in ene_rx_setup()
449 dev->rdev->min_timeout = dev->rdev->max_timeout = in ene_rx_setup()
453 dev->rdev->rx_resolution = US_TO_NS(sample_period); in ene_rx_setup()
460 dev->rdev->min_timeout = US_TO_NS(127 * sample_period); in ene_rx_setup()
461 dev->rdev->max_timeout = US_TO_NS(200000); in ene_rx_setup()
464 if (dev->hw_learning_and_tx_capable) in ene_rx_setup()
465 dev->rdev->tx_resolution = US_TO_NS(sample_period); in ene_rx_setup()
467 if (dev->rdev->timeout > dev->rdev->max_timeout) in ene_rx_setup()
468 dev->rdev->timeout = dev->rdev->max_timeout; in ene_rx_setup()
469 if (dev->rdev->timeout < dev->rdev->min_timeout) in ene_rx_setup()
470 dev->rdev->timeout = dev->rdev->min_timeout; in ene_rx_setup()
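
The tail of ene_rx_setup() translates the sampling granularity into the values rc-core expects: rx_resolution is one sample period (line 453), the minimum timeout is 127 sample periods (line 460), the maximum is pinned at 200 ms (line 461), and the currently configured timeout is clamped into that window (lines 467-470). The fan-input path (lines 444-449) instead uses the fixed ENE_FW_SAMPLE_PERIOD_FAN and pins min and max together. Below is a small worked example with US_TO_NS expanded by hand; the sample period is an example value, since its derivation is not visible in this listing.

#include <stdint.h>
#include <stdio.h>

/* microseconds to nanoseconds, matching what the kernel's US_TO_NS() yields */
#define US_TO_NS(us)	((uint64_t)(us) * 1000)

struct rx_timing {
	uint64_t rx_resolution, min_timeout, max_timeout, timeout;
};

/* sample_period is in microseconds */
static void setup_timeouts(struct rx_timing *t, unsigned sample_period)
{
	t->rx_resolution = US_TO_NS(sample_period);	/* line 453 */
	t->min_timeout = US_TO_NS(127 * sample_period);	/* line 460 */
	t->max_timeout = US_TO_NS(200000);		/* line 461 */

	/* lines 467-470: clamp the configured timeout into the valid window */
	if (t->timeout > t->max_timeout)
		t->timeout = t->max_timeout;
	if (t->timeout < t->min_timeout)
		t->timeout = t->min_timeout;
}

int main(void)
{
	/* 150000 us is the default timeout set at line 831 */
	struct rx_timing t = { .timeout = US_TO_NS(150000) };

	setup_timeouts(&t, 75);		/* 75 us sample period: example value */
	printf("res=%llu ns window=[%llu, %llu] ns timeout=%llu ns\n",
	       (unsigned long long)t.rx_resolution,
	       (unsigned long long)t.min_timeout,
	       (unsigned long long)t.max_timeout,
	       (unsigned long long)t.timeout);
	return 0;
}
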
474 static void ene_rx_enable_hw(struct ene_device *dev) in ene_rx_enable_hw() argument
479 if (dev->hw_revision < ENE_HW_C) { in ene_rx_enable_hw()
480 ene_write_reg(dev, ENEB_IRQ, dev->irq << 1); in ene_rx_enable_hw()
481 ene_write_reg(dev, ENEB_IRQ_UNK1, 0x01); in ene_rx_enable_hw()
483 reg_value = ene_read_reg(dev, ENE_IRQ) & 0xF0; in ene_rx_enable_hw()
486 reg_value |= (dev->irq & ENE_IRQ_MASK); in ene_rx_enable_hw()
487 ene_write_reg(dev, ENE_IRQ, reg_value); in ene_rx_enable_hw()
491 ene_rx_enable_fan_input(dev, dev->rx_fan_input_inuse); in ene_rx_enable_hw()
492 ene_rx_enable_cir_engine(dev, !dev->rx_fan_input_inuse); in ene_rx_enable_hw()
495 ene_irq_status(dev); in ene_rx_enable_hw()
498 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_enable_hw()
501 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_enable_hw()
505 static void ene_rx_enable(struct ene_device *dev) in ene_rx_enable() argument
507 ene_rx_enable_hw(dev); in ene_rx_enable()
508 dev->rx_enabled = true; in ene_rx_enable()
512 static void ene_rx_disable_hw(struct ene_device *dev) in ene_rx_disable_hw() argument
515 ene_rx_enable_cir_engine(dev, false); in ene_rx_disable_hw()
516 ene_rx_enable_fan_input(dev, false); in ene_rx_disable_hw()
519 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_disable_hw()
520 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_disable_hw()
524 static void ene_rx_disable(struct ene_device *dev) in ene_rx_disable() argument
526 ene_rx_disable_hw(dev); in ene_rx_disable()
527 dev->rx_enabled = false; in ene_rx_disable()
533 static void ene_rx_reset(struct ene_device *dev) in ene_rx_reset() argument
535 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
536 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
540 static void ene_tx_set_carrier(struct ene_device *dev) in ene_tx_set_carrier() argument
545 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_carrier()
547 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_tx_set_carrier()
548 ENE_CIRCFG_TX_CARR, dev->tx_period > 0); in ene_tx_set_carrier()
550 if (!dev->tx_period) in ene_tx_set_carrier()
553 BUG_ON(dev->tx_duty_cycle >= 100 || dev->tx_duty_cycle <= 0); in ene_tx_set_carrier()
555 tx_puls_width = dev->tx_period / (100 / dev->tx_duty_cycle); in ene_tx_set_carrier()
560 dbg("TX: pulse distance = %d * 500 ns", dev->tx_period); in ene_tx_set_carrier()
563 ene_write_reg(dev, ENE_CIRMOD_PRD, dev->tx_period | ENE_CIRMOD_PRD_POL); in ene_tx_set_carrier()
564 ene_write_reg(dev, ENE_CIRMOD_HPRD, tx_puls_width); in ene_tx_set_carrier()
566 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_carrier()
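
ene_tx_set_carrier() programs the carrier in 500 ns ticks (see the debug message at line 560): ENE_CIRCFG_TX_CARR is only set while tx_period is non-zero, the duty cycle must lie strictly between 0 and 100 (the BUG_ON at line 553), and the pulse width written to ENE_CIRMOD_HPRD is tx_period / (100 / duty_cycle), an integer division at both levels. A tiny worked example with the driver's defaults of period 32 and 50% duty cycle (lines 825-826); the ENE_CIRMOD_PRD_POL bit OR'ed into the period register is left out.

#include <stdio.h>

/* Pulse width in 500 ns ticks, as computed at line 555. */
static int carrier_pulse_width(int tx_period, int tx_duty_cycle)
{
	/* the driver BUG()s outside of (0, 100), line 553 */
	if (tx_duty_cycle <= 0 || tx_duty_cycle >= 100)
		return -1;

	/* note the nested integer division: 100 / duty first, then period / that */
	return tx_period / (100 / tx_duty_cycle);
}

int main(void)
{
	int period = 32, duty = 50;	/* defaults from lines 825-826 */
	int width = carrier_pulse_width(period, duty);

	/* 32 ticks * 500 ns = 16 us period; 16 ticks = 8 us of mark time */
	printf("period %d ns, pulse %d ns\n", period * 500, width * 500);
	return 0;
}
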
570 static void ene_tx_set_transmitters(struct ene_device *dev) in ene_tx_set_transmitters() argument
574 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_transmitters()
575 ene_set_clear_reg_mask(dev, ENE_GPIOFS8, ENE_GPIOFS8_GPIO41, in ene_tx_set_transmitters()
576 !!(dev->transmitter_mask & 0x01)); in ene_tx_set_transmitters()
577 ene_set_clear_reg_mask(dev, ENE_GPIOFS1, ENE_GPIOFS1_GPIO0D, in ene_tx_set_transmitters()
578 !!(dev->transmitter_mask & 0x02)); in ene_tx_set_transmitters()
579 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_transmitters()
583 static void ene_tx_enable(struct ene_device *dev) in ene_tx_enable() argument
585 u8 conf1 = ene_read_reg(dev, ENE_CIRCFG); in ene_tx_enable()
586 u8 fwreg2 = ene_read_reg(dev, ENE_FW2); in ene_tx_enable()
588 dev->saved_conf1 = conf1; in ene_tx_enable()
601 if (dev->hw_revision == ENE_HW_C) in ene_tx_enable()
606 ene_write_reg(dev, ENE_CIRCFG, conf1); in ene_tx_enable()
610 static void ene_tx_disable(struct ene_device *dev) in ene_tx_disable() argument
612 ene_write_reg(dev, ENE_CIRCFG, dev->saved_conf1); in ene_tx_disable()
613 dev->tx_buffer = NULL; in ene_tx_disable()
618 static void ene_tx_sample(struct ene_device *dev) in ene_tx_sample() argument
622 bool pulse = dev->tx_sample_pulse; in ene_tx_sample()
624 if (!dev->tx_buffer) { in ene_tx_sample()
630 if (!dev->tx_sample) { in ene_tx_sample()
632 if (dev->tx_pos == dev->tx_len) { in ene_tx_sample()
633 if (!dev->tx_done) { in ene_tx_sample()
635 dev->tx_done = true; in ene_tx_sample()
639 ene_tx_disable(dev); in ene_tx_sample()
640 complete(&dev->tx_complete); in ene_tx_sample()
645 sample = dev->tx_buffer[dev->tx_pos++]; in ene_tx_sample()
646 dev->tx_sample_pulse = !dev->tx_sample_pulse; in ene_tx_sample()
648 dev->tx_sample = DIV_ROUND_CLOSEST(sample, sample_period); in ene_tx_sample()
650 if (!dev->tx_sample) in ene_tx_sample()
651 dev->tx_sample = 1; in ene_tx_sample()
654 raw_tx = min(dev->tx_sample, (unsigned int)ENE_CIRRLC_OUT_MASK); in ene_tx_sample()
655 dev->tx_sample -= raw_tx; in ene_tx_sample()
662 ene_write_reg(dev, in ene_tx_sample()
663 dev->tx_reg ? ENE_CIRRLC_OUT1 : ENE_CIRRLC_OUT0, raw_tx); in ene_tx_sample()
665 dev->tx_reg = !dev->tx_reg; in ene_tx_sample()
669 mod_timer(&dev->tx_sim_timer, jiffies + HZ / 500); in ene_tx_sample()
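
ene_tx_sample() feeds the transmitter one chunk at a time: each buffered duration is converted to hardware ticks with DIV_ROUND_CLOSEST, forced to at least 1, clipped to ENE_CIRRLC_OUT_MASK per write, and the writes alternate between ENE_CIRRLC_OUT0 and ENE_CIRRLC_OUT1 as tx_reg toggles; in the txsim fallback a timer at HZ/500 stands in for the hardware interrupt. The model below compresses this into a loop per sample (the real code emits one chunk per interrupt or timer tick, keeping the remainder in dev->tx_sample), the pulse/space marking of the written value is not visible in this listing and is omitted, and the tick length and mask value are assumptions.

#include <stdio.h>

#define SAMPLE_PERIOD		75	/* tick length in us: example value */
#define ENE_CIRRLC_OUT_MASK	0x7F	/* max ticks per write: assumed value */

/* round-to-nearest division, like the kernel's DIV_ROUND_CLOSEST() */
static unsigned div_round_closest(unsigned n, unsigned d)
{
	return (n + d / 2) / d;
}

/* One rc-core sample becomes one or more clipped writes, alternating OUT0/OUT1. */
static void emit_sample(unsigned duration_us, int *tx_reg)
{
	unsigned ticks = div_round_closest(duration_us, SAMPLE_PERIOD);

	if (!ticks)
		ticks = 1;	/* lines 650-651: never program a zero-length sample */

	while (ticks) {
		unsigned raw_tx = ticks < ENE_CIRRLC_OUT_MASK ?
				  ticks : ENE_CIRRLC_OUT_MASK;	/* line 654 */

		ticks -= raw_tx;				/* line 655 */
		printf("write %3u ticks to CIRRLC_OUT%d\n", raw_tx, *tx_reg);
		*tx_reg = !*tx_reg;				/* line 665 */
	}
}

int main(void)
{
	unsigned buf[] = { 9000, 4500, 560 };	/* made-up pulse/space durations */
	int tx_reg = 0;

	for (unsigned i = 0; i < sizeof(buf) / sizeof(buf[0]); i++)
		emit_sample(buf[i], &tx_reg);
	return 0;
}
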
675 struct ene_device *dev = from_timer(dev, t, tx_sim_timer); in ene_tx_irqsim() local
678 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_irqsim()
679 ene_tx_sample(dev); in ene_tx_irqsim()
680 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_irqsim()
685 static int ene_irq_status(struct ene_device *dev) in ene_irq_status() argument
691 fw_flags2 = ene_read_reg(dev, ENE_FW2); in ene_irq_status()
693 if (dev->hw_revision < ENE_HW_C) { in ene_irq_status()
694 irq_status = ene_read_reg(dev, ENEB_IRQ_STATUS); in ene_irq_status()
699 ene_clear_reg_mask(dev, ENEB_IRQ_STATUS, ENEB_IRQ_STATUS_IR); in ene_irq_status()
703 irq_status = ene_read_reg(dev, ENE_IRQ); in ene_irq_status()
708 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
709 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
714 ene_write_reg(dev, ENE_FW2, fw_flags2 & ~ENE_FW2_RXIRQ); in ene_irq_status()
718 fw_flags1 = ene_read_reg(dev, ENE_FW1); in ene_irq_status()
720 ene_write_reg(dev, ENE_FW1, fw_flags1 & ~ENE_FW1_TXIRQ); in ene_irq_status()
735 struct ene_device *dev = (struct ene_device *)data; in ene_isr() local
738 spin_lock_irqsave(&dev->hw_lock, flags); in ene_isr()
741 ene_rx_read_hw_pointer(dev); in ene_isr()
742 irq_status = ene_irq_status(dev); in ene_isr()
751 if (!dev->hw_learning_and_tx_capable) { in ene_isr()
755 ene_tx_sample(dev); in ene_isr()
763 if (dev->hw_learning_and_tx_capable) in ene_isr()
764 ene_rx_sense_carrier(dev); in ene_isr()
768 if (!dev->hw_extra_buffer) in ene_isr()
769 dev->r_pointer = dev->w_pointer == 0 ? ENE_FW_PACKET_SIZE : 0; in ene_isr()
773 reg = ene_rx_get_sample_reg(dev); in ene_isr()
779 hw_value = ene_read_reg(dev, reg); in ene_isr()
781 if (dev->rx_fan_input_inuse) { in ene_isr()
786 hw_value |= ene_read_reg(dev, reg + offset) << 8; in ene_isr()
798 if (dev->rx_period_adjust) { in ene_isr()
800 hw_sample /= (100 + dev->rx_period_adjust); in ene_isr()
804 if (!dev->hw_extra_buffer && !hw_sample) { in ene_isr()
805 dev->r_pointer = dev->w_pointer; in ene_isr()
813 ir_raw_event_store_with_filter(dev->rdev, &ev); in ene_isr()
816 ir_raw_event_handle(dev->rdev); in ene_isr()
818 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_isr()
823 static void ene_setup_default_settings(struct ene_device *dev) in ene_setup_default_settings() argument
825 dev->tx_period = 32; in ene_setup_default_settings()
826 dev->tx_duty_cycle = 50; /*%*/ in ene_setup_default_settings()
827 dev->transmitter_mask = 0x03; in ene_setup_default_settings()
828 dev->learning_mode_enabled = learning_mode_force; in ene_setup_default_settings()
831 dev->rdev->timeout = US_TO_NS(150000); in ene_setup_default_settings()
835 static void ene_setup_hw_settings(struct ene_device *dev) in ene_setup_hw_settings() argument
837 if (dev->hw_learning_and_tx_capable) { in ene_setup_hw_settings()
838 ene_tx_set_carrier(dev); in ene_setup_hw_settings()
839 ene_tx_set_transmitters(dev); in ene_setup_hw_settings()
842 ene_rx_setup(dev); in ene_setup_hw_settings()
848 struct ene_device *dev = rdev->priv; in ene_open() local
851 spin_lock_irqsave(&dev->hw_lock, flags); in ene_open()
852 ene_rx_enable(dev); in ene_open()
853 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_open()
860 struct ene_device *dev = rdev->priv; in ene_close() local
862 spin_lock_irqsave(&dev->hw_lock, flags); in ene_close()
864 ene_rx_disable(dev); in ene_close()
865 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_close()
871 struct ene_device *dev = rdev->priv; in ene_set_tx_mask() local
881 dev->transmitter_mask = tx_mask; in ene_set_tx_mask()
882 ene_tx_set_transmitters(dev); in ene_set_tx_mask()
889 struct ene_device *dev = rdev->priv; in ene_set_tx_carrier() local
905 dev->tx_period = period; in ene_set_tx_carrier()
906 ene_tx_set_carrier(dev); in ene_set_tx_carrier()
913 struct ene_device *dev = rdev->priv; in ene_set_tx_duty_cycle() local
915 dev->tx_duty_cycle = duty_cycle; in ene_set_tx_duty_cycle()
916 ene_tx_set_carrier(dev); in ene_set_tx_duty_cycle()
923 struct ene_device *dev = rdev->priv; in ene_set_learning_mode() local
925 if (enable == dev->learning_mode_enabled) in ene_set_learning_mode()
928 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_learning_mode()
929 dev->learning_mode_enabled = enable; in ene_set_learning_mode()
930 ene_rx_disable(dev); in ene_set_learning_mode()
931 ene_rx_setup(dev); in ene_set_learning_mode()
932 ene_rx_enable(dev); in ene_set_learning_mode()
933 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_learning_mode()
939 struct ene_device *dev = rdev->priv; in ene_set_carrier_report() local
942 if (enable == dev->carrier_detect_enabled) in ene_set_carrier_report()
945 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_carrier_report()
946 dev->carrier_detect_enabled = enable; in ene_set_carrier_report()
947 ene_rx_disable(dev); in ene_set_carrier_report()
948 ene_rx_setup(dev); in ene_set_carrier_report()
949 ene_rx_enable(dev); in ene_set_carrier_report()
950 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_carrier_report()
957 struct ene_device *dev = rdev->priv; in ene_set_idle() local
960 ene_rx_reset(dev); in ene_set_idle()
968 struct ene_device *dev = rdev->priv; in ene_transmit() local
971 dev->tx_buffer = buf; in ene_transmit()
972 dev->tx_len = n; in ene_transmit()
973 dev->tx_pos = 0; in ene_transmit()
974 dev->tx_reg = 0; in ene_transmit()
975 dev->tx_done = 0; in ene_transmit()
976 dev->tx_sample = 0; in ene_transmit()
977 dev->tx_sample_pulse = false; in ene_transmit()
979 dbg("TX: %d samples", dev->tx_len); in ene_transmit()
981 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
983 ene_tx_enable(dev); in ene_transmit()
986 ene_tx_sample(dev); in ene_transmit()
987 ene_tx_sample(dev); in ene_transmit()
989 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
991 if (wait_for_completion_timeout(&dev->tx_complete, 2 * HZ) == 0) { in ene_transmit()
993 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
994 ene_tx_disable(dev); in ene_transmit()
995 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
1006 struct ene_device *dev; in ene_probe() local
1009 dev = kzalloc(sizeof(struct ene_device), GFP_KERNEL); in ene_probe()
1011 if (!dev || !rdev) in ene_probe()
1018 dev->hw_io = -1; in ene_probe()
1019 dev->irq = -1; in ene_probe()
1028 spin_lock_init(&dev->hw_lock); in ene_probe()
1030 dev->hw_io = pnp_port_start(pnp_dev, 0); in ene_probe()
1031 dev->irq = pnp_irq(pnp_dev, 0); in ene_probe()
1034 pnp_set_drvdata(pnp_dev, dev); in ene_probe()
1035 dev->pnp_dev = pnp_dev; in ene_probe()
1042 error = ene_hw_detect(dev); in ene_probe()
1046 if (!dev->hw_learning_and_tx_capable && txsim) { in ene_probe()
1047 dev->hw_learning_and_tx_capable = true; in ene_probe()
1048 timer_setup(&dev->tx_sim_timer, ene_tx_irqsim, 0); in ene_probe()
1052 if (!dev->hw_learning_and_tx_capable) in ene_probe()
1056 rdev->priv = dev; in ene_probe()
1064 if (dev->hw_learning_and_tx_capable) { in ene_probe()
1066 init_completion(&dev->tx_complete); in ene_probe()
1075 dev->rdev = rdev; in ene_probe()
1077 ene_rx_setup_hw_buffer(dev); in ene_probe()
1078 ene_setup_default_settings(dev); in ene_probe()
1079 ene_setup_hw_settings(dev); in ene_probe()
1081 device_set_wakeup_capable(&pnp_dev->dev, true); in ene_probe()
1082 device_set_wakeup_enable(&pnp_dev->dev, true); in ene_probe()
1090 if (!request_region(dev->hw_io, ENE_IO_SIZE, ENE_DRIVER_NAME)) { in ene_probe()
1094 if (request_irq(dev->irq, ene_isr, in ene_probe()
1095 IRQF_SHARED, ENE_DRIVER_NAME, (void *)dev)) { in ene_probe()
1103 release_region(dev->hw_io, ENE_IO_SIZE); in ene_probe()
1109 kfree(dev); in ene_probe()
1116 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_remove() local
1119 spin_lock_irqsave(&dev->hw_lock, flags); in ene_remove()
1120 ene_rx_disable(dev); in ene_remove()
1121 ene_rx_restore_hw_buffer(dev); in ene_remove()
1122 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_remove()
1124 free_irq(dev->irq, dev); in ene_remove()
1125 release_region(dev->hw_io, ENE_IO_SIZE); in ene_remove()
1126 rc_unregister_device(dev->rdev); in ene_remove()
1127 kfree(dev); in ene_remove()
1131 static void ene_enable_wake(struct ene_device *dev, bool enable) in ene_enable_wake() argument
1134 ene_set_clear_reg_mask(dev, ENE_FW1, ENE_FW1_WAKE, enable); in ene_enable_wake()
1140 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_suspend() local
1141 bool wake = device_may_wakeup(&dev->pnp_dev->dev); in ene_suspend()
1143 if (!wake && dev->rx_enabled) in ene_suspend()
1144 ene_rx_disable_hw(dev); in ene_suspend()
1146 ene_enable_wake(dev, wake); in ene_suspend()
1152 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_resume() local
1153 ene_setup_hw_settings(dev); in ene_resume()
1155 if (dev->rx_enabled) in ene_resume()
1156 ene_rx_enable(dev); in ene_resume()
1158 ene_enable_wake(dev, false); in ene_resume()
1165 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_shutdown() local
1166 ene_enable_wake(dev, true); in ene_shutdown()