Lines matching refs: fmdev — every listed line references struct fmdev; the format is <source line number> <code fragment> in <enclosing function>.
66 static void fm_irq_send_flag_getcmd(struct fmdev *);
67 static void fm_irq_handle_flag_getcmd_resp(struct fmdev *);
68 static void fm_irq_handle_hw_malfunction(struct fmdev *);
69 static void fm_irq_handle_rds_start(struct fmdev *);
70 static void fm_irq_send_rdsdata_getcmd(struct fmdev *);
71 static void fm_irq_handle_rdsdata_getcmd_resp(struct fmdev *);
72 static void fm_irq_handle_rds_finish(struct fmdev *);
73 static void fm_irq_handle_tune_op_ended(struct fmdev *);
74 static void fm_irq_handle_power_enb(struct fmdev *);
75 static void fm_irq_handle_low_rssi_start(struct fmdev *);
76 static void fm_irq_afjump_set_pi(struct fmdev *);
77 static void fm_irq_handle_set_pi_resp(struct fmdev *);
78 static void fm_irq_afjump_set_pimask(struct fmdev *);
79 static void fm_irq_handle_set_pimask_resp(struct fmdev *);
80 static void fm_irq_afjump_setfreq(struct fmdev *);
81 static void fm_irq_handle_setfreq_resp(struct fmdev *);
82 static void fm_irq_afjump_enableint(struct fmdev *);
83 static void fm_irq_afjump_enableint_resp(struct fmdev *);
84 static void fm_irq_start_afjump(struct fmdev *);
85 static void fm_irq_handle_start_afjump_resp(struct fmdev *);
86 static void fm_irq_afjump_rd_freq(struct fmdev *);
87 static void fm_irq_afjump_rd_freq_resp(struct fmdev *);
88 static void fm_irq_handle_low_rssi_finish(struct fmdev *);
89 static void fm_irq_send_intmsk_cmd(struct fmdev *);
90 static void fm_irq_handle_intmsk_cmd_resp(struct fmdev *);
168 static inline void fm_irq_call(struct fmdev *fmdev) in fm_irq_call() argument
170 fmdev->irq_info.handlers[fmdev->irq_info.stage](fmdev); in fm_irq_call()
174 static inline void fm_irq_call_stage(struct fmdev *fmdev, u8 stage) in fm_irq_call_stage() argument
176 fmdev->irq_info.stage = stage; in fm_irq_call_stage()
177 fm_irq_call(fmdev); in fm_irq_call_stage()
180 static inline void fm_irq_timeout_stage(struct fmdev *fmdev, u8 stage) in fm_irq_timeout_stage() argument
182 fmdev->irq_info.stage = stage; in fm_irq_timeout_stage()
183 mod_timer(&fmdev->irq_info.timer, jiffies + FM_DRV_TX_TIMEOUT); in fm_irq_timeout_stage()
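The three helpers above are the whole engine of the interrupt state machine: irq_info.stage indexes a table of handler functions, fm_irq_call() dispatches through that table, fm_irq_call_stage() stores the next stage and dispatches in one step, and fm_irq_timeout_stage() arms a retry timer instead of calling immediately. A minimal userspace sketch of the table-driven pattern follows; it is illustrative only (struct irq_sm, stage_fn and the stage_* functions are invented names, not the driver's), and it leaves the timer out.

    /* Sketch of a stage-indexed handler table, modelled on fm_irq_call(). */
    #include <stdio.h>

    struct irq_sm;
    typedef void (*stage_fn)(struct irq_sm *);

    struct irq_sm {
        unsigned char stage;       /* index into handlers[] */
        const stage_fn *handlers;  /* table owned by the core */
    };

    static void sm_call(struct irq_sm *sm)
    {
        sm->handlers[sm->stage](sm);   /* mirrors fm_irq_call() */
    }

    static void sm_call_stage(struct irq_sm *sm, unsigned char stage)
    {
        sm->stage = stage;             /* mirrors fm_irq_call_stage() */
        sm_call(sm);
    }

    static void stage_get_flag(struct irq_sm *sm)
    {
        printf("stage %u: send FLAG_GET\n", (unsigned)sm->stage);
    }

    static void stage_handle_resp(struct irq_sm *sm)
    {
        printf("stage %u: handle FLAG_GET response\n", (unsigned)sm->stage);
    }

    int main(void)
    {
        const stage_fn table[] = { stage_get_flag, stage_handle_resp };
        struct irq_sm sm = { .stage = 0, .handlers = table };

        sm_call(&sm);           /* run the current stage */
        sm_call_stage(&sm, 1);  /* advance to the response stage and run it */
        return 0;
    }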
238 void fmc_update_region_info(struct fmdev *fmdev, u8 region_to_set) in fmc_update_region_info() argument
240 fmdev->rx.region = region_configs[region_to_set]; in fmc_update_region_info()
249 struct fmdev *fmdev; in recv_tasklet() local
256 fmdev = (struct fmdev *)arg; in recv_tasklet()
257 irq_info = &fmdev->irq_info; in recv_tasklet()
259 while ((skb = skb_dequeue(&fmdev->rx_q))) { in recv_tasklet()
274 if (!test_bit(FM_INTTASK_RUNNING, &fmdev->flag)) { in recv_tasklet()
275 set_bit(FM_INTTASK_RUNNING, &fmdev->flag); in recv_tasklet()
285 irq_info->handlers[irq_info->stage](fmdev); in recv_tasklet()
287 set_bit(FM_INTTASK_SCHEDULE_PENDING, &fmdev->flag); in recv_tasklet()
292 else if (evt_hdr->op == fmdev->pre_op && fmdev->resp_comp != NULL) { in recv_tasklet()
294 spin_lock_irqsave(&fmdev->resp_skb_lock, flags); in recv_tasklet()
295 fmdev->resp_skb = skb; in recv_tasklet()
296 spin_unlock_irqrestore(&fmdev->resp_skb_lock, flags); in recv_tasklet()
297 complete(fmdev->resp_comp); in recv_tasklet()
299 fmdev->resp_comp = NULL; in recv_tasklet()
300 atomic_set(&fmdev->tx_cnt, 1); in recv_tasklet()
303 else if (evt_hdr->op == fmdev->pre_op && fmdev->resp_comp == NULL) { in recv_tasklet()
304 if (fmdev->resp_skb != NULL) in recv_tasklet()
307 spin_lock_irqsave(&fmdev->resp_skb_lock, flags); in recv_tasklet()
308 fmdev->resp_skb = skb; in recv_tasklet()
309 spin_unlock_irqrestore(&fmdev->resp_skb_lock, flags); in recv_tasklet()
312 irq_info->handlers[irq_info->stage](fmdev); in recv_tasklet()
315 atomic_set(&fmdev->tx_cnt, 1); in recv_tasklet()
324 if (num_fm_hci_cmds && atomic_read(&fmdev->tx_cnt)) in recv_tasklet()
325 if (!skb_queue_empty(&fmdev->tx_q)) in recv_tasklet()
326 tasklet_schedule(&fmdev->tx_task); in recv_tasklet()
333 struct fmdev *fmdev; in send_tasklet() local
337 fmdev = (struct fmdev *)arg; in send_tasklet()
339 if (!atomic_read(&fmdev->tx_cnt)) in send_tasklet()
343 if ((jiffies - fmdev->last_tx_jiffies) > FM_DRV_TX_TIMEOUT) { in send_tasklet()
345 atomic_set(&fmdev->tx_cnt, 1); in send_tasklet()
349 skb = skb_dequeue(&fmdev->tx_q); in send_tasklet()
353 atomic_dec(&fmdev->tx_cnt); in send_tasklet()
354 fmdev->pre_op = fm_cb(skb)->fm_op; in send_tasklet()
356 if (fmdev->resp_comp != NULL) in send_tasklet()
359 fmdev->resp_comp = fm_cb(skb)->completion; in send_tasklet()
365 fmdev->resp_comp = NULL; in send_tasklet()
367 atomic_set(&fmdev->tx_cnt, 1); in send_tasklet()
369 fmdev->last_tx_jiffies = jiffies; in send_tasklet()
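send_tasklet() and recv_tasklet() together enforce one outstanding FM command at a time: tx_cnt acts as a single credit that send_tasklet() consumes with atomic_dec() when it dequeues a command, and that the response path (or the last_tx_jiffies timeout check) restores with atomic_set(..., 1). A small standalone model of that credit, with invented names and plain ints standing in for atomics and skb queues:

    /* One-command-in-flight credit, modelled on fmdev->tx_cnt. */
    #include <stdbool.h>
    #include <stdio.h>

    struct tx_credit { int count; };   /* stands in for atomic_t tx_cnt */

    static bool try_send(struct tx_credit *c, const char *cmd)
    {
        if (c->count == 0)             /* a command is still outstanding */
            return false;
        c->count--;                    /* like atomic_dec(&fmdev->tx_cnt) */
        printf("sent: %s\n", cmd);
        return true;
    }

    static void on_response(struct tx_credit *c)
    {
        c->count = 1;                  /* like atomic_set(&fmdev->tx_cnt, 1) */
    }

    int main(void)
    {
        struct tx_credit credit = { .count = 1 };

        try_send(&credit, "FLAG_GET");     /* succeeds, credit drops to 0 */
        try_send(&credit, "RDS_DATA_GET"); /* refused until the response arrives */
        on_response(&credit);
        try_send(&credit, "RDS_DATA_GET"); /* now succeeds */
        return 0;
    }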
377 static int fm_send_cmd(struct fmdev *fmdev, u8 fm_op, u16 type, void *payload, in fm_send_cmd() argument
388 if (test_bit(FM_FW_DW_INPROGRESS, &fmdev->flag) && payload == NULL) { in fm_send_cmd()
392 if (!test_bit(FM_FW_DW_INPROGRESS, &fmdev->flag)) in fm_send_cmd()
407 if (!test_bit(FM_FW_DW_INPROGRESS, &fmdev->flag) || in fm_send_cmd()
408 test_bit(FM_INTTASK_RUNNING, &fmdev->flag)) { in fm_send_cmd()
439 skb_queue_tail(&fmdev->tx_q, skb); in fm_send_cmd()
440 tasklet_schedule(&fmdev->tx_task); in fm_send_cmd()
446 int fmc_send_cmd(struct fmdev *fmdev, u8 fm_op, u16 type, void *payload, in fmc_send_cmd() argument
454 init_completion(&fmdev->maintask_comp); in fmc_send_cmd()
455 ret = fm_send_cmd(fmdev, fm_op, type, payload, payload_len, in fmc_send_cmd()
456 &fmdev->maintask_comp); in fmc_send_cmd()
460 if (!wait_for_completion_timeout(&fmdev->maintask_comp, in fmc_send_cmd()
466 if (!fmdev->resp_skb) { in fmc_send_cmd()
470 spin_lock_irqsave(&fmdev->resp_skb_lock, flags); in fmc_send_cmd()
471 skb = fmdev->resp_skb; in fmc_send_cmd()
472 fmdev->resp_skb = NULL; in fmc_send_cmd()
473 spin_unlock_irqrestore(&fmdev->resp_skb_lock, flags); in fmc_send_cmd()
498 static inline int check_cmdresp_status(struct fmdev *fmdev, in check_cmdresp_status() argument
504 del_timer(&fmdev->irq_info.timer); in check_cmdresp_status()
506 spin_lock_irqsave(&fmdev->resp_skb_lock, flags); in check_cmdresp_status()
507 *skb = fmdev->resp_skb; in check_cmdresp_status()
508 fmdev->resp_skb = NULL; in check_cmdresp_status()
509 spin_unlock_irqrestore(&fmdev->resp_skb_lock, flags); in check_cmdresp_status()
516 mod_timer(&fmdev->irq_info.timer, jiffies + FM_DRV_TX_TIMEOUT); in check_cmdresp_status()
523 static inline void fm_irq_common_cmd_resp_helper(struct fmdev *fmdev, u8 stage) in fm_irq_common_cmd_resp_helper() argument
527 if (!check_cmdresp_status(fmdev, &skb)) in fm_irq_common_cmd_resp_helper()
528 fm_irq_call_stage(fmdev, stage); in fm_irq_common_cmd_resp_helper()
540 struct fmdev *fmdev; in int_timeout_handler() local
544 fmdev = from_timer(fmdev, t, irq_info.timer); in int_timeout_handler()
545 fmirq = &fmdev->irq_info; in int_timeout_handler()
556 fm_irq_call_stage(fmdev, FM_SEND_INTMSK_CMD_IDX); in int_timeout_handler()
560 static void fm_irq_send_flag_getcmd(struct fmdev *fmdev) in fm_irq_send_flag_getcmd() argument
565 if (!fm_send_cmd(fmdev, FLAG_GET, REG_RD, NULL, sizeof(flag), NULL)) in fm_irq_send_flag_getcmd()
566 fm_irq_timeout_stage(fmdev, FM_HANDLE_FLAG_GETCMD_RESP_IDX); in fm_irq_send_flag_getcmd()
569 static void fm_irq_handle_flag_getcmd_resp(struct fmdev *fmdev) in fm_irq_handle_flag_getcmd_resp() argument
574 if (check_cmdresp_status(fmdev, &skb)) in fm_irq_handle_flag_getcmd_resp()
578 if (fm_evt_hdr->dlen > sizeof(fmdev->irq_info.flag)) in fm_irq_handle_flag_getcmd_resp()
583 memcpy(&fmdev->irq_info.flag, skb->data, fm_evt_hdr->dlen); in fm_irq_handle_flag_getcmd_resp()
585 fmdev->irq_info.flag = be16_to_cpu((__force __be16)fmdev->irq_info.flag); in fm_irq_handle_flag_getcmd_resp()
586 fmdbg("irq: flag register(0x%x)\n", fmdev->irq_info.flag); in fm_irq_handle_flag_getcmd_resp()
589 fm_irq_call_stage(fmdev, FM_HW_MAL_FUNC_IDX); in fm_irq_handle_flag_getcmd_resp()
592 static void fm_irq_handle_hw_malfunction(struct fmdev *fmdev) in fm_irq_handle_hw_malfunction() argument
594 if (fmdev->irq_info.flag & FM_MAL_EVENT & fmdev->irq_info.mask) in fm_irq_handle_hw_malfunction()
598 fm_irq_call_stage(fmdev, FM_RDS_START_IDX); in fm_irq_handle_hw_malfunction()
601 static void fm_irq_handle_rds_start(struct fmdev *fmdev) in fm_irq_handle_rds_start() argument
603 if (fmdev->irq_info.flag & FM_RDS_EVENT & fmdev->irq_info.mask) { in fm_irq_handle_rds_start()
605 fmdev->irq_info.stage = FM_RDS_SEND_RDS_GETCMD_IDX; in fm_irq_handle_rds_start()
608 fmdev->irq_info.stage = FM_HW_TUNE_OP_ENDED_IDX; in fm_irq_handle_rds_start()
611 fm_irq_call(fmdev); in fm_irq_handle_rds_start()
614 static void fm_irq_send_rdsdata_getcmd(struct fmdev *fmdev) in fm_irq_send_rdsdata_getcmd() argument
617 if (!fm_send_cmd(fmdev, RDS_DATA_GET, REG_RD, NULL, in fm_irq_send_rdsdata_getcmd()
619 fm_irq_timeout_stage(fmdev, FM_RDS_HANDLE_RDS_GETCMD_RESP_IDX); in fm_irq_send_rdsdata_getcmd()
623 static void fm_rx_update_af_cache(struct fmdev *fmdev, u8 af) in fm_rx_update_af_cache() argument
625 struct tuned_station_info *stat_info = &fmdev->rx.stat_info; in fm_rx_update_af_cache()
626 u8 reg_idx = fmdev->rx.region.fm_band; in fm_rx_update_af_cache()
632 fmdev->rx.stat_info.af_list_max = (af - FM_RDS_1_AF_FOLLOWS + 1); in fm_rx_update_af_cache()
633 fmdev->rx.stat_info.afcache_size = 0; in fm_rx_update_af_cache()
634 fmdbg("No of expected AF : %d\n", fmdev->rx.stat_info.af_list_max); in fm_rx_update_af_cache()
645 freq = fmdev->rx.region.bot_freq + (af * 100); in fm_rx_update_af_cache()
646 if (freq == fmdev->rx.freq) { in fm_rx_update_af_cache()
648 fmdev->rx.freq, freq); in fm_rx_update_af_cache()
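A quick worked example of the conversion shown above (freq = bot_freq + af * 100, i.e. AF codes advance in 100 kHz steps; taking 87500 kHz as the band bottom is an assumption matching the Europe/US region): an AF code of 23 yields 87500 + 23 * 100 = 89800 kHz, or 89.8 MHz. If that frequency equals the currently tuned one, the alternate frequency is only logged and skipped rather than added to the cache.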
676 static void fm_rdsparse_swapbytes(struct fmdev *fmdev, in fm_rdsparse_swapbytes() argument
687 if (fmdev->asci_id != 0x6350) { in fm_rdsparse_swapbytes()
696 static void fm_irq_handle_rdsdata_getcmd_resp(struct fmdev *fmdev) in fm_irq_handle_rdsdata_getcmd_resp() argument
700 struct fm_rds *rds = &fmdev->rx.rds; in fm_irq_handle_rdsdata_getcmd_resp()
707 if (check_cmdresp_status(fmdev, &skb)) in fm_irq_handle_rdsdata_getcmd_resp()
745 fm_rdsparse_swapbytes(fmdev, &rds_fmt); in fm_irq_handle_rdsdata_getcmd_resp()
752 if (fmdev->rx.stat_info.picode != cur_picode) in fm_irq_handle_rdsdata_getcmd_resp()
753 fmdev->rx.stat_info.picode = cur_picode; in fm_irq_handle_rdsdata_getcmd_resp()
763 fm_rx_update_af_cache(fmdev, rds_fmt.data.group0A.af[0]); in fm_irq_handle_rdsdata_getcmd_resp()
764 fm_rx_update_af_cache(fmdev, rds_fmt.data.group0A.af[1]); in fm_irq_handle_rdsdata_getcmd_resp()
775 spin_lock_irqsave(&fmdev->rds_buff_lock, flags); in fm_irq_handle_rdsdata_getcmd_resp()
803 spin_unlock_irqrestore(&fmdev->rds_buff_lock, flags); in fm_irq_handle_rdsdata_getcmd_resp()
809 fm_irq_call_stage(fmdev, FM_RDS_FINISH_IDX); in fm_irq_handle_rdsdata_getcmd_resp()
812 static void fm_irq_handle_rds_finish(struct fmdev *fmdev) in fm_irq_handle_rds_finish() argument
814 fm_irq_call_stage(fmdev, FM_HW_TUNE_OP_ENDED_IDX); in fm_irq_handle_rds_finish()
817 static void fm_irq_handle_tune_op_ended(struct fmdev *fmdev) in fm_irq_handle_tune_op_ended() argument
819 if (fmdev->irq_info.flag & (FM_FR_EVENT | FM_BL_EVENT) & fmdev-> in fm_irq_handle_tune_op_ended()
822 if (test_and_clear_bit(FM_AF_SWITCH_INPROGRESS, &fmdev->flag)) { in fm_irq_handle_tune_op_ended()
823 fmdev->irq_info.stage = FM_AF_JUMP_RD_FREQ_IDX; in fm_irq_handle_tune_op_ended()
825 complete(&fmdev->maintask_comp); in fm_irq_handle_tune_op_ended()
826 fmdev->irq_info.stage = FM_HW_POWER_ENB_IDX; in fm_irq_handle_tune_op_ended()
829 fmdev->irq_info.stage = FM_HW_POWER_ENB_IDX; in fm_irq_handle_tune_op_ended()
831 fm_irq_call(fmdev); in fm_irq_handle_tune_op_ended()
834 static void fm_irq_handle_power_enb(struct fmdev *fmdev) in fm_irq_handle_power_enb() argument
836 if (fmdev->irq_info.flag & FM_POW_ENB_EVENT) { in fm_irq_handle_power_enb()
838 complete(&fmdev->maintask_comp); in fm_irq_handle_power_enb()
841 fm_irq_call_stage(fmdev, FM_LOW_RSSI_START_IDX); in fm_irq_handle_power_enb()
844 static void fm_irq_handle_low_rssi_start(struct fmdev *fmdev) in fm_irq_handle_low_rssi_start() argument
846 if ((fmdev->rx.af_mode == FM_RX_RDS_AF_SWITCH_MODE_ON) && in fm_irq_handle_low_rssi_start()
847 (fmdev->irq_info.flag & FM_LEV_EVENT & fmdev->irq_info.mask) && in fm_irq_handle_low_rssi_start()
848 (fmdev->rx.freq != FM_UNDEFINED_FREQ) && in fm_irq_handle_low_rssi_start()
849 (fmdev->rx.stat_info.afcache_size != 0)) { in fm_irq_handle_low_rssi_start()
853 fmdev->irq_info.mask &= ~FM_LEV_EVENT; in fm_irq_handle_low_rssi_start()
855 fmdev->rx.afjump_idx = 0; in fm_irq_handle_low_rssi_start()
856 fmdev->rx.freq_before_jump = fmdev->rx.freq; in fm_irq_handle_low_rssi_start()
857 fmdev->irq_info.stage = FM_AF_JUMP_SETPI_IDX; in fm_irq_handle_low_rssi_start()
860 fmdev->irq_info.stage = FM_SEND_INTMSK_CMD_IDX; in fm_irq_handle_low_rssi_start()
863 fm_irq_call(fmdev); in fm_irq_handle_low_rssi_start()
866 static void fm_irq_afjump_set_pi(struct fmdev *fmdev) in fm_irq_afjump_set_pi() argument
871 payload = fmdev->rx.stat_info.picode; in fm_irq_afjump_set_pi()
872 if (!fm_send_cmd(fmdev, RDS_PI_SET, REG_WR, &payload, sizeof(payload), NULL)) in fm_irq_afjump_set_pi()
873 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_HANDLE_SETPI_RESP_IDX); in fm_irq_afjump_set_pi()
876 static void fm_irq_handle_set_pi_resp(struct fmdev *fmdev) in fm_irq_handle_set_pi_resp() argument
878 fm_irq_common_cmd_resp_helper(fmdev, FM_AF_JUMP_SETPI_MASK_IDX); in fm_irq_handle_set_pi_resp()
886 static void fm_irq_afjump_set_pimask(struct fmdev *fmdev) in fm_irq_afjump_set_pimask() argument
891 if (!fm_send_cmd(fmdev, RDS_PI_MASK_SET, REG_WR, &payload, sizeof(payload), NULL)) in fm_irq_afjump_set_pimask()
892 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_HANDLE_SETPI_MASK_RESP_IDX); in fm_irq_afjump_set_pimask()
895 static void fm_irq_handle_set_pimask_resp(struct fmdev *fmdev) in fm_irq_handle_set_pimask_resp() argument
897 fm_irq_common_cmd_resp_helper(fmdev, FM_AF_JUMP_SET_AF_FREQ_IDX); in fm_irq_handle_set_pimask_resp()
900 static void fm_irq_afjump_setfreq(struct fmdev *fmdev) in fm_irq_afjump_setfreq() argument
905 fmdbg("Switch to %d KHz\n", fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx]); in fm_irq_afjump_setfreq()
906 frq_index = (fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx] - in fm_irq_afjump_setfreq()
907 fmdev->rx.region.bot_freq) / FM_FREQ_MUL; in fm_irq_afjump_setfreq()
910 if (!fm_send_cmd(fmdev, AF_FREQ_SET, REG_WR, &payload, sizeof(payload), NULL)) in fm_irq_afjump_setfreq()
911 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_HANDLE_SET_AFFREQ_RESP_IDX); in fm_irq_afjump_setfreq()
914 static void fm_irq_handle_setfreq_resp(struct fmdev *fmdev) in fm_irq_handle_setfreq_resp() argument
916 fm_irq_common_cmd_resp_helper(fmdev, FM_AF_JUMP_ENABLE_INT_IDX); in fm_irq_handle_setfreq_resp()
919 static void fm_irq_afjump_enableint(struct fmdev *fmdev) in fm_irq_afjump_enableint() argument
925 if (!fm_send_cmd(fmdev, INT_MASK_SET, REG_WR, &payload, sizeof(payload), NULL)) in fm_irq_afjump_enableint()
926 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_ENABLE_INT_RESP_IDX); in fm_irq_afjump_enableint()
929 static void fm_irq_afjump_enableint_resp(struct fmdev *fmdev) in fm_irq_afjump_enableint_resp() argument
931 fm_irq_common_cmd_resp_helper(fmdev, FM_AF_JUMP_START_AFJUMP_IDX); in fm_irq_afjump_enableint_resp()
934 static void fm_irq_start_afjump(struct fmdev *fmdev) in fm_irq_start_afjump() argument
939 if (!fm_send_cmd(fmdev, TUNER_MODE_SET, REG_WR, &payload, in fm_irq_start_afjump()
941 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_HANDLE_START_AFJUMP_RESP_IDX); in fm_irq_start_afjump()
944 static void fm_irq_handle_start_afjump_resp(struct fmdev *fmdev) in fm_irq_handle_start_afjump_resp() argument
948 if (check_cmdresp_status(fmdev, &skb)) in fm_irq_handle_start_afjump_resp()
951 fmdev->irq_info.stage = FM_SEND_FLAG_GETCMD_IDX; in fm_irq_handle_start_afjump_resp()
952 set_bit(FM_AF_SWITCH_INPROGRESS, &fmdev->flag); in fm_irq_handle_start_afjump_resp()
953 clear_bit(FM_INTTASK_RUNNING, &fmdev->flag); in fm_irq_handle_start_afjump_resp()
956 static void fm_irq_afjump_rd_freq(struct fmdev *fmdev) in fm_irq_afjump_rd_freq() argument
960 if (!fm_send_cmd(fmdev, FREQ_SET, REG_RD, NULL, sizeof(payload), NULL)) in fm_irq_afjump_rd_freq()
961 fm_irq_timeout_stage(fmdev, FM_AF_JUMP_RD_FREQ_RESP_IDX); in fm_irq_afjump_rd_freq()
964 static void fm_irq_afjump_rd_freq_resp(struct fmdev *fmdev) in fm_irq_afjump_rd_freq_resp() argument
970 if (check_cmdresp_status(fmdev, &skb)) in fm_irq_afjump_rd_freq_resp()
977 curr_freq = fmdev->rx.region.bot_freq + ((u32)read_freq * FM_FREQ_MUL); in fm_irq_afjump_rd_freq_resp()
979 jumped_freq = fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx]; in fm_irq_afjump_rd_freq_resp()
982 if ((curr_freq != fmdev->rx.freq_before_jump) && (curr_freq == jumped_freq)) { in fm_irq_afjump_rd_freq_resp()
984 fmdev->rx.freq = curr_freq; in fm_irq_afjump_rd_freq_resp()
985 fm_rx_reset_rds_cache(fmdev); in fm_irq_afjump_rd_freq_resp()
988 if (fmdev->rx.af_mode == FM_RX_RDS_AF_SWITCH_MODE_ON) in fm_irq_afjump_rd_freq_resp()
989 fmdev->irq_info.mask |= FM_LEV_EVENT; in fm_irq_afjump_rd_freq_resp()
991 fmdev->irq_info.stage = FM_LOW_RSSI_FINISH_IDX; in fm_irq_afjump_rd_freq_resp()
993 fmdev->rx.afjump_idx++; in fm_irq_afjump_rd_freq_resp()
996 if (fmdev->rx.afjump_idx >= fmdev->rx.stat_info.afcache_size) { in fm_irq_afjump_rd_freq_resp()
998 fmdev->irq_info.stage = FM_LOW_RSSI_FINISH_IDX; in fm_irq_afjump_rd_freq_resp()
1002 fmdev->irq_info.stage = FM_AF_JUMP_SETPI_IDX; in fm_irq_afjump_rd_freq_resp()
1005 fm_irq_call(fmdev); in fm_irq_afjump_rd_freq_resp()
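To make the AF-jump bookkeeping concrete (a hedged example; FM_FREQ_MUL is the hardware channel-spacing constant and its value is not shown in this listing): fm_irq_afjump_setfreq() converts the cached AF frequency to a hardware index, frq_index = (af_freq - bot_freq) / FM_FREQ_MUL, and fm_irq_afjump_rd_freq_resp() applies the inverse, curr_freq = bot_freq + read_freq * FM_FREQ_MUL. Because the two conversions are inverses, comparing curr_freq against the cached entry (jumped_freq) and against freq_before_jump is enough to tell whether the tuner actually landed on the alternate frequency, which decides between finishing (FM_LOW_RSSI_FINISH_IDX) and retrying with the next cache entry (FM_AF_JUMP_SETPI_IDX).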
1008 static void fm_irq_handle_low_rssi_finish(struct fmdev *fmdev) in fm_irq_handle_low_rssi_finish() argument
1010 fm_irq_call_stage(fmdev, FM_SEND_INTMSK_CMD_IDX); in fm_irq_handle_low_rssi_finish()
1013 static void fm_irq_send_intmsk_cmd(struct fmdev *fmdev) in fm_irq_send_intmsk_cmd() argument
1018 payload = fmdev->irq_info.mask; in fm_irq_send_intmsk_cmd()
1020 if (!fm_send_cmd(fmdev, INT_MASK_SET, REG_WR, &payload, in fm_irq_send_intmsk_cmd()
1022 fm_irq_timeout_stage(fmdev, FM_HANDLE_INTMSK_CMD_RESP_IDX); in fm_irq_send_intmsk_cmd()
1025 static void fm_irq_handle_intmsk_cmd_resp(struct fmdev *fmdev) in fm_irq_handle_intmsk_cmd_resp() argument
1029 if (check_cmdresp_status(fmdev, &skb)) in fm_irq_handle_intmsk_cmd_resp()
1035 fmdev->irq_info.stage = FM_SEND_FLAG_GETCMD_IDX; in fm_irq_handle_intmsk_cmd_resp()
1038 if (test_and_clear_bit(FM_INTTASK_SCHEDULE_PENDING, &fmdev->flag)) in fm_irq_handle_intmsk_cmd_resp()
1039 fmdev->irq_info.handlers[fmdev->irq_info.stage](fmdev); in fm_irq_handle_intmsk_cmd_resp()
1041 clear_bit(FM_INTTASK_RUNNING, &fmdev->flag); in fm_irq_handle_intmsk_cmd_resp()
1045 int fmc_is_rds_data_available(struct fmdev *fmdev, struct file *file, in fmc_is_rds_data_available() argument
1048 poll_wait(file, &fmdev->rx.rds.read_queue, pts); in fmc_is_rds_data_available()
1049 if (fmdev->rx.rds.rd_idx != fmdev->rx.rds.wr_idx) in fmc_is_rds_data_available()
1056 int fmc_transfer_rds_from_internal_buff(struct fmdev *fmdev, struct file *file, in fmc_transfer_rds_from_internal_buff() argument
1064 if (fmdev->rx.rds.wr_idx == fmdev->rx.rds.rd_idx) { in fmc_transfer_rds_from_internal_buff()
1068 ret = wait_event_interruptible(fmdev->rx.rds.read_queue, in fmc_transfer_rds_from_internal_buff()
1069 (fmdev->rx.rds.wr_idx != fmdev->rx.rds.rd_idx)); in fmc_transfer_rds_from_internal_buff()
1080 spin_lock_irqsave(&fmdev->rds_buff_lock, flags); in fmc_transfer_rds_from_internal_buff()
1082 if (fmdev->rx.rds.wr_idx == fmdev->rx.rds.rd_idx) { in fmc_transfer_rds_from_internal_buff()
1083 spin_unlock_irqrestore(&fmdev->rds_buff_lock, flags); in fmc_transfer_rds_from_internal_buff()
1086 memcpy(tmpbuf, &fmdev->rx.rds.buff[fmdev->rx.rds.rd_idx], in fmc_transfer_rds_from_internal_buff()
1088 fmdev->rx.rds.rd_idx += FM_RDS_BLK_SIZE; in fmc_transfer_rds_from_internal_buff()
1089 if (fmdev->rx.rds.rd_idx >= fmdev->rx.rds.buf_size) in fmc_transfer_rds_from_internal_buff()
1090 fmdev->rx.rds.rd_idx = 0; in fmc_transfer_rds_from_internal_buff()
1092 spin_unlock_irqrestore(&fmdev->rds_buff_lock, flags); in fmc_transfer_rds_from_internal_buff()
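fmc_transfer_rds_from_internal_buff() drains a classic ring buffer: rd_idx chases wr_idx in FM_RDS_BLK_SIZE steps (a V4L2 RDS block is 3 bytes) and wraps to zero at buf_size, all under rds_buff_lock. The following standalone sketch models only that read side; the locking, the copy_to_user() step and the blocking wait are omitted, and the buffer sizes are arbitrary:

    /* Simplified read side of the RDS ring buffer. */
    #include <stdio.h>
    #include <string.h>

    #define BLK_SIZE 3     /* one RDS block: lsb, msb, block id */
    #define BUF_BLKS 4

    struct rds_ring {
        unsigned char buff[BLK_SIZE * BUF_BLKS];
        unsigned int rd_idx, wr_idx, buf_size;
    };

    /* Copy at most one block out of the ring; return bytes copied. */
    static size_t ring_read_block(struct rds_ring *r, unsigned char *dst)
    {
        if (r->rd_idx == r->wr_idx)     /* empty */
            return 0;
        memcpy(dst, &r->buff[r->rd_idx], BLK_SIZE);
        r->rd_idx += BLK_SIZE;
        if (r->rd_idx >= r->buf_size)   /* wrap around */
            r->rd_idx = 0;
        return BLK_SIZE;
    }

    int main(void)
    {
        struct rds_ring ring = { .rd_idx = 0, .wr_idx = 2 * BLK_SIZE,
                                 .buf_size = sizeof(ring.buff) };
        unsigned char blk[BLK_SIZE];
        size_t n;

        while ((n = ring_read_block(&ring, blk)) != 0)
            printf("read %zu-byte RDS block\n", n);
        return 0;
    }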
1104 int fmc_set_freq(struct fmdev *fmdev, u32 freq_to_set) in fmc_set_freq() argument
1106 switch (fmdev->curr_fmmode) { in fmc_set_freq()
1108 return fm_rx_set_freq(fmdev, freq_to_set); in fmc_set_freq()
1111 return fm_tx_set_freq(fmdev, freq_to_set); in fmc_set_freq()
1118 int fmc_get_freq(struct fmdev *fmdev, u32 *cur_tuned_frq) in fmc_get_freq() argument
1120 if (fmdev->rx.freq == FM_UNDEFINED_FREQ) { in fmc_get_freq()
1129 switch (fmdev->curr_fmmode) { in fmc_get_freq()
1131 *cur_tuned_frq = fmdev->rx.freq; in fmc_get_freq()
1144 int fmc_set_region(struct fmdev *fmdev, u8 region_to_set) in fmc_set_region() argument
1146 switch (fmdev->curr_fmmode) { in fmc_set_region()
1148 return fm_rx_set_region(fmdev, region_to_set); in fmc_set_region()
1151 return fm_tx_set_region(fmdev, region_to_set); in fmc_set_region()
1158 int fmc_set_mute_mode(struct fmdev *fmdev, u8 mute_mode_toset) in fmc_set_mute_mode() argument
1160 switch (fmdev->curr_fmmode) { in fmc_set_mute_mode()
1162 return fm_rx_set_mute_mode(fmdev, mute_mode_toset); in fmc_set_mute_mode()
1165 return fm_tx_set_mute_mode(fmdev, mute_mode_toset); in fmc_set_mute_mode()
1172 int fmc_set_stereo_mono(struct fmdev *fmdev, u16 mode) in fmc_set_stereo_mono() argument
1174 switch (fmdev->curr_fmmode) { in fmc_set_stereo_mono()
1176 return fm_rx_set_stereo_mono(fmdev, mode); in fmc_set_stereo_mono()
1179 return fm_tx_set_stereo_mono(fmdev, mode); in fmc_set_stereo_mono()
1186 int fmc_set_rds_mode(struct fmdev *fmdev, u8 rds_en_dis) in fmc_set_rds_mode() argument
1188 switch (fmdev->curr_fmmode) { in fmc_set_rds_mode()
1190 return fm_rx_set_rds_mode(fmdev, rds_en_dis); in fmc_set_rds_mode()
1193 return fm_tx_set_rds_mode(fmdev, rds_en_dis); in fmc_set_rds_mode()
1201 static int fm_power_down(struct fmdev *fmdev) in fm_power_down() argument
1206 if (!test_bit(FM_CORE_READY, &fmdev->flag)) { in fm_power_down()
1210 if (fmdev->curr_fmmode == FM_MODE_OFF) { in fm_power_down()
1216 ret = fmc_send_cmd(fmdev, FM_POWER_MODE, REG_WR, &payload, in fm_power_down()
1221 return fmc_release(fmdev); in fm_power_down()
1225 static int fm_download_firmware(struct fmdev *fmdev, const u8 *fw_name) in fm_download_firmware() argument
1235 set_bit(FM_FW_DW_INPROGRESS, &fmdev->flag); in fm_download_firmware()
1238 &fmdev->radio_dev->dev); in fm_download_firmware()
1265 ret = fmc_send_cmd(fmdev, 0, 0, action->data, in fm_download_firmware()
1285 clear_bit(FM_FW_DW_INPROGRESS, &fmdev->flag); in fm_download_firmware()
1291 static int load_default_rx_configuration(struct fmdev *fmdev) in load_default_rx_configuration() argument
1295 ret = fm_rx_set_volume(fmdev, FM_DEFAULT_RX_VOLUME); in load_default_rx_configuration()
1299 return fm_rx_set_rssi_threshold(fmdev, FM_DEFAULT_RSSI_THRESHOLD); in load_default_rx_configuration()
1303 static int fm_power_up(struct fmdev *fmdev, u8 mode) in fm_power_up() argument
1319 ret = fmc_prepare(fmdev); in fm_power_up()
1326 if (fmc_send_cmd(fmdev, FM_POWER_MODE, REG_WR, &payload, in fm_power_up()
1333 if (fmc_send_cmd(fmdev, ASIC_ID_GET, REG_RD, NULL, in fm_power_up()
1337 if (fmc_send_cmd(fmdev, ASIC_VER_GET, REG_RD, NULL, in fm_power_up()
1347 ret = fm_download_firmware(fmdev, fw_name); in fm_power_up()
1356 ret = fm_download_firmware(fmdev, fw_name); in fm_power_up()
1363 return fmc_release(fmdev); in fm_power_up()
1367 int fmc_set_mode(struct fmdev *fmdev, u8 fm_mode) in fmc_set_mode() argument
1375 if (fmdev->curr_fmmode == fm_mode) { in fmc_set_mode()
1382 ret = fm_power_down(fmdev); in fmc_set_mode()
1392 if (fmdev->curr_fmmode != FM_MODE_OFF) { in fmc_set_mode()
1393 ret = fm_power_down(fmdev); in fmc_set_mode()
1400 ret = fm_power_up(fmdev, fm_mode); in fmc_set_mode()
1406 fmdev->curr_fmmode = fm_mode; in fmc_set_mode()
1409 if (fmdev->curr_fmmode == FM_MODE_RX) { in fmc_set_mode()
1411 ret = load_default_rx_configuration(fmdev); in fmc_set_mode()
1420 int fmc_get_mode(struct fmdev *fmdev, u8 *fmmode) in fmc_get_mode() argument
1422 if (!test_bit(FM_CORE_READY, &fmdev->flag)) { in fmc_get_mode()
1431 *fmmode = fmdev->curr_fmmode; in fmc_get_mode()
1438 struct fmdev *fmdev; in fm_st_receive() local
1440 fmdev = (struct fmdev *)arg; in fm_st_receive()
1453 skb_queue_tail(&fmdev->rx_q, skb); in fm_st_receive()
1454 tasklet_schedule(&fmdev->rx_task); in fm_st_receive()
1465 struct fmdev *fmdev; in fm_st_reg_comp_cb() local
1467 fmdev = (struct fmdev *)arg; in fm_st_reg_comp_cb()
1468 fmdev->streg_cbdata = data; in fm_st_reg_comp_cb()
1476 int fmc_prepare(struct fmdev *fmdev) in fmc_prepare() argument
1481 if (test_bit(FM_CORE_READY, &fmdev->flag)) { in fmc_prepare()
1491 fm_st_proto.priv_data = fmdev; in fmc_prepare()
1502 fmdev->streg_cbdata = -EINPROGRESS; in fmc_prepare()
1511 if (fmdev->streg_cbdata != 0) { in fmc_prepare()
1513 fmdev->streg_cbdata); in fmc_prepare()
1533 spin_lock_init(&fmdev->rds_buff_lock); in fmc_prepare()
1534 spin_lock_init(&fmdev->resp_skb_lock); in fmc_prepare()
1537 skb_queue_head_init(&fmdev->tx_q); in fmc_prepare()
1538 tasklet_init(&fmdev->tx_task, send_tasklet, (unsigned long)fmdev); in fmc_prepare()
1541 skb_queue_head_init(&fmdev->rx_q); in fmc_prepare()
1542 tasklet_init(&fmdev->rx_task, recv_tasklet, (unsigned long)fmdev); in fmc_prepare()
1544 fmdev->irq_info.stage = 0; in fmc_prepare()
1545 atomic_set(&fmdev->tx_cnt, 1); in fmc_prepare()
1546 fmdev->resp_comp = NULL; in fmc_prepare()
1548 timer_setup(&fmdev->irq_info.timer, int_timeout_handler, 0); in fmc_prepare()
1550 fmdev->irq_info.mask = FM_MAL_EVENT; in fmc_prepare()
1553 fmdev->rx.region = region_configs[default_radio_region]; in fmc_prepare()
1555 fmdev->rx.mute_mode = FM_MUTE_OFF; in fmc_prepare()
1556 fmdev->rx.rf_depend_mute = FM_RX_RF_DEPENDENT_MUTE_OFF; in fmc_prepare()
1557 fmdev->rx.rds.flag = FM_RDS_DISABLE; in fmc_prepare()
1558 fmdev->rx.freq = FM_UNDEFINED_FREQ; in fmc_prepare()
1559 fmdev->rx.rds_mode = FM_RDS_SYSTEM_RDS; in fmc_prepare()
1560 fmdev->rx.af_mode = FM_RX_RDS_AF_SWITCH_MODE_OFF; in fmc_prepare()
1561 fmdev->irq_info.retry = 0; in fmc_prepare()
1563 fm_rx_reset_rds_cache(fmdev); in fmc_prepare()
1564 init_waitqueue_head(&fmdev->rx.rds.read_queue); in fmc_prepare()
1566 fm_rx_reset_station_info(fmdev); in fmc_prepare()
1567 set_bit(FM_CORE_READY, &fmdev->flag); in fmc_prepare()
1576 int fmc_release(struct fmdev *fmdev) in fmc_release() argument
1581 if (!test_bit(FM_CORE_READY, &fmdev->flag)) { in fmc_release()
1586 wake_up_interruptible(&fmdev->rx.rds.read_queue); in fmc_release()
1588 tasklet_kill(&fmdev->tx_task); in fmc_release()
1589 tasklet_kill(&fmdev->rx_task); in fmc_release()
1591 skb_queue_purge(&fmdev->tx_q); in fmc_release()
1592 skb_queue_purge(&fmdev->rx_q); in fmc_release()
1594 fmdev->resp_comp = NULL; in fmc_release()
1595 fmdev->rx.freq = 0; in fmc_release()
1607 clear_bit(FM_CORE_READY, &fmdev->flag); in fmc_release()
1617 struct fmdev *fmdev = NULL; in fm_drv_init() local
1622 fmdev = kzalloc(sizeof(struct fmdev), GFP_KERNEL); in fm_drv_init()
1623 if (NULL == fmdev) { in fm_drv_init()
1627 fmdev->rx.rds.buf_size = default_rds_buf * FM_RDS_BLK_SIZE; in fm_drv_init()
1628 fmdev->rx.rds.buff = kzalloc(fmdev->rx.rds.buf_size, GFP_KERNEL); in fm_drv_init()
1629 if (NULL == fmdev->rx.rds.buff) { in fm_drv_init()
1634 ret = fm_v4l2_init_video_device(fmdev, radio_nr); in fm_drv_init()
1638 fmdev->irq_info.handlers = int_handler_table; in fm_drv_init()
1639 fmdev->curr_fmmode = FM_MODE_OFF; in fm_drv_init()
1640 fmdev->tx_data.pwr_lvl = FM_PWR_LVL_DEF; in fm_drv_init()
1641 fmdev->tx_data.preemph = FM_TX_PREEMPH_50US; in fm_drv_init()
1645 kfree(fmdev->rx.rds.buff); in fm_drv_init()
1647 kfree(fmdev); in fm_drv_init()
1655 struct fmdev *fmdev = NULL; in fm_drv_exit() local
1657 fmdev = fm_v4l2_deinit_video_device(); in fm_drv_exit()
1658 if (fmdev != NULL) { in fm_drv_exit()
1659 kfree(fmdev->rx.rds.buff); in fm_drv_exit()
1660 kfree(fmdev); in fm_drv_exit()
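fm_drv_init() and fm_drv_exit() bracket the driver's lifetime: the fmdev context is allocated first, then the RDS ring buffer, and the failure paths and the exit path free them in reverse order. A userspace sketch of that allocate-then-unwind pairing, with calloc()/free() standing in for kzalloc()/kfree() and invented names throughout:

    /* Allocate-then-unwind pairing, modelled on fm_drv_init()/fm_drv_exit(). */
    #include <stdlib.h>

    struct fm_ctx {
        unsigned char *rds_buff;
        size_t rds_buf_size;
    };

    static struct fm_ctx *ctx_create(size_t rds_bytes)
    {
        struct fm_ctx *ctx = calloc(1, sizeof(*ctx));

        if (!ctx)
            return NULL;
        ctx->rds_buf_size = rds_bytes;
        ctx->rds_buff = calloc(1, rds_bytes);
        if (!ctx->rds_buff)
            goto err_ctx;              /* unwind only what was allocated */
        return ctx;

    err_ctx:
        free(ctx);
        return NULL;
    }

    static void ctx_destroy(struct fm_ctx *ctx)
    {
        if (!ctx)
            return;
        free(ctx->rds_buff);           /* reverse order of allocation */
        free(ctx);
    }

    int main(void)
    {
        struct fm_ctx *ctx = ctx_create(64 * 3);

        ctx_destroy(ctx);
        return 0;
    }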