Lines Matching refs:t7xx_dev

61 static void t7xx_dev_set_sleep_capability(struct t7xx_pci_dev *t7xx_dev, bool enable)  in t7xx_dev_set_sleep_capability()  argument
63 void __iomem *ctrl_reg = IREG_BASE(t7xx_dev) + T7XX_PCIE_MISC_CTRL; in t7xx_dev_set_sleep_capability()
76 static int t7xx_wait_pm_config(struct t7xx_pci_dev *t7xx_dev) in t7xx_wait_pm_config() argument
83 IREG_BASE(t7xx_dev) + T7XX_PCIE_RESOURCE_STATUS); in t7xx_wait_pm_config()
85 dev_err(&t7xx_dev->pdev->dev, "PM configuration timed out\n"); in t7xx_wait_pm_config()
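The two lines above only show the tail of the wait; a minimal sketch of the poll they imply, assuming read_poll_timeout() from <linux/iopoll.h> and a status mask named T7XX_PCIE_RESOURCE_STS_MSK (the mask name and poll intervals are assumptions, not shown in the listing):

static int wait_pm_config_sketch(struct t7xx_pci_dev *t7xx_dev)
{
	u32 val;
	int ret;

	/* Poll the PCIe resource-status register until the resource bits are set. */
	ret = read_poll_timeout(ioread32, val,
				(val & T7XX_PCIE_RESOURCE_STS_MSK) == T7XX_PCIE_RESOURCE_STS_MSK,
				20, 50000, false,	/* 20 us poll, 50 ms budget (assumed) */
				IREG_BASE(t7xx_dev) + T7XX_PCIE_RESOURCE_STATUS);
	if (ret == -ETIMEDOUT)
		dev_err(&t7xx_dev->pdev->dev, "PM configuration timed out\n");

	return ret;
}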
90 static int t7xx_pci_pm_init(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_pm_init() argument
92 struct pci_dev *pdev = t7xx_dev->pdev; in t7xx_pci_pm_init()
94 INIT_LIST_HEAD(&t7xx_dev->md_pm_entities); in t7xx_pci_pm_init()
95 mutex_init(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_init()
96 spin_lock_init(&t7xx_dev->md_pm_lock); in t7xx_pci_pm_init()
97 init_completion(&t7xx_dev->sleep_lock_acquire); in t7xx_pci_pm_init()
98 init_completion(&t7xx_dev->pm_sr_ack); in t7xx_pci_pm_init()
99 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_INIT); in t7xx_pci_pm_init()
105 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR); in t7xx_pci_pm_init()
109 return t7xx_wait_pm_config(t7xx_dev); in t7xx_pci_pm_init()
112 void t7xx_pci_pm_init_late(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_pm_init_late() argument
115 t7xx_mhccif_mask_clr(t7xx_dev, in t7xx_pci_pm_init_late()
121 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in t7xx_pci_pm_init_late()
122 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_RESUMED); in t7xx_pci_pm_init_late()
124 pm_runtime_put_noidle(&t7xx_dev->pdev->dev); in t7xx_pci_pm_init_late()
127 static int t7xx_pci_pm_reinit(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_pm_reinit() argument
132 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_INIT); in t7xx_pci_pm_reinit()
134 pm_runtime_get_noresume(&t7xx_dev->pdev->dev); in t7xx_pci_pm_reinit()
136 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR); in t7xx_pci_pm_reinit()
137 return t7xx_wait_pm_config(t7xx_dev); in t7xx_pci_pm_reinit()
140 void t7xx_pci_pm_exp_detected(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_pm_exp_detected() argument
142 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR); in t7xx_pci_pm_exp_detected()
143 t7xx_wait_pm_config(t7xx_dev); in t7xx_pci_pm_exp_detected()
144 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_EXCEPTION); in t7xx_pci_pm_exp_detected()
147 int t7xx_pci_pm_entity_register(struct t7xx_pci_dev *t7xx_dev, struct md_pm_entity *pm_entity) in t7xx_pci_pm_entity_register() argument
151 mutex_lock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_register()
152 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in t7xx_pci_pm_entity_register()
154 mutex_unlock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_register()
159 list_add_tail(&pm_entity->entity, &t7xx_dev->md_pm_entities); in t7xx_pci_pm_entity_register()
160 mutex_unlock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_register()
164 int t7xx_pci_pm_entity_unregister(struct t7xx_pci_dev *t7xx_dev, struct md_pm_entity *pm_entity) in t7xx_pci_pm_entity_unregister() argument
168 mutex_lock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_unregister()
169 list_for_each_entry_safe(entity, tmp_entity, &t7xx_dev->md_pm_entities, entity) { in t7xx_pci_pm_entity_unregister()
172 mutex_unlock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_unregister()
177 mutex_unlock(&t7xx_dev->md_pm_entity_mtx); in t7xx_pci_pm_entity_unregister()
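Taken together, the register/unregister lines above describe a mutex-protected list of PM entities. A condensed sketch of that pattern follows; only the locking and list calls are taken from the listing, while the "id" discriminator and the error codes are assumptions:

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/errno.h>

static int pm_entity_register_sketch(struct t7xx_pci_dev *t7xx_dev,
				     struct md_pm_entity *pm_entity)
{
	struct md_pm_entity *entity;

	mutex_lock(&t7xx_dev->md_pm_entity_mtx);
	list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) {
		if (entity->id == pm_entity->id) {	/* already on the list */
			mutex_unlock(&t7xx_dev->md_pm_entity_mtx);
			return -EEXIST;
		}
	}
	list_add_tail(&pm_entity->entity, &t7xx_dev->md_pm_entities);
	mutex_unlock(&t7xx_dev->md_pm_entity_mtx);
	return 0;
}

static int pm_entity_unregister_sketch(struct t7xx_pci_dev *t7xx_dev,
				       struct md_pm_entity *pm_entity)
{
	struct md_pm_entity *entity, *tmp_entity;

	mutex_lock(&t7xx_dev->md_pm_entity_mtx);
	list_for_each_entry_safe(entity, tmp_entity, &t7xx_dev->md_pm_entities, entity) {
		if (entity->id == pm_entity->id) {
			list_del(&pm_entity->entity);
			mutex_unlock(&t7xx_dev->md_pm_entity_mtx);
			return 0;
		}
	}
	mutex_unlock(&t7xx_dev->md_pm_entity_mtx);
	return -ENXIO;		/* assumed: entity was not registered */
}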
182 int t7xx_pci_sleep_disable_complete(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_sleep_disable_complete() argument
184 struct device *dev = &t7xx_dev->pdev->dev; in t7xx_pci_sleep_disable_complete()
187 ret = wait_for_completion_timeout(&t7xx_dev->sleep_lock_acquire, in t7xx_pci_sleep_disable_complete()
204 void t7xx_pci_disable_sleep(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_disable_sleep() argument
208 spin_lock_irqsave(&t7xx_dev->md_pm_lock, flags); in t7xx_pci_disable_sleep()
209 t7xx_dev->sleep_disable_count++; in t7xx_pci_disable_sleep()
210 if (atomic_read(&t7xx_dev->md_pm_state) < MTK_PM_RESUMED) in t7xx_pci_disable_sleep()
213 if (t7xx_dev->sleep_disable_count == 1) { in t7xx_pci_disable_sleep()
216 reinit_completion(&t7xx_dev->sleep_lock_acquire); in t7xx_pci_disable_sleep()
217 t7xx_dev_set_sleep_capability(t7xx_dev, false); in t7xx_pci_disable_sleep()
219 status = ioread32(IREG_BASE(t7xx_dev) + T7XX_PCIE_RESOURCE_STATUS); in t7xx_pci_disable_sleep()
223 t7xx_mhccif_h2d_swint_trigger(t7xx_dev, H2D_CH_DS_LOCK); in t7xx_pci_disable_sleep()
225 spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags); in t7xx_pci_disable_sleep()
229 spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags); in t7xx_pci_disable_sleep()
230 complete_all(&t7xx_dev->sleep_lock_acquire); in t7xx_pci_disable_sleep()
239 void t7xx_pci_enable_sleep(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_enable_sleep() argument
243 spin_lock_irqsave(&t7xx_dev->md_pm_lock, flags); in t7xx_pci_enable_sleep()
244 t7xx_dev->sleep_disable_count--; in t7xx_pci_enable_sleep()
245 if (atomic_read(&t7xx_dev->md_pm_state) < MTK_PM_RESUMED) in t7xx_pci_enable_sleep()
248 if (t7xx_dev->sleep_disable_count == 0) in t7xx_pci_enable_sleep()
249 t7xx_dev_set_sleep_capability(t7xx_dev, true); in t7xx_pci_enable_sleep()
252 spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags); in t7xx_pci_enable_sleep()
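The disable/enable lines above form a refcounted sleep lock: the first caller drops the device's sleep capability and rings the DS-lock doorbell, the last caller restores it. A condensed sketch under those assumptions, with the resource-status fast path elided and helper names copied from the listing:

static void sleep_disable_sketch(struct t7xx_pci_dev *t7xx_dev)
{
	unsigned long flags;

	spin_lock_irqsave(&t7xx_dev->md_pm_lock, flags);
	t7xx_dev->sleep_disable_count++;
	if (atomic_read(&t7xx_dev->md_pm_state) < MTK_PM_RESUMED)
		goto unlock_and_complete;	/* PM not active: nothing to negotiate */

	if (t7xx_dev->sleep_disable_count == 1) {
		reinit_completion(&t7xx_dev->sleep_lock_acquire);
		t7xx_dev_set_sleep_capability(t7xx_dev, false);
		/* Resource-status fast path elided; ask the modem for the DS lock. */
		t7xx_mhccif_h2d_swint_trigger(t7xx_dev, H2D_CH_DS_LOCK);
	}
	spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags);
	return;				/* the D2H ack handler completes sleep_lock_acquire */

unlock_and_complete:
	spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags);
	complete_all(&t7xx_dev->sleep_lock_acquire);
}

static void sleep_enable_sketch(struct t7xx_pci_dev *t7xx_dev)
{
	unsigned long flags;

	spin_lock_irqsave(&t7xx_dev->md_pm_lock, flags);
	t7xx_dev->sleep_disable_count--;
	if (atomic_read(&t7xx_dev->md_pm_state) >= MTK_PM_RESUMED &&
	    t7xx_dev->sleep_disable_count == 0)
		t7xx_dev_set_sleep_capability(t7xx_dev, true);
	spin_unlock_irqrestore(&t7xx_dev->md_pm_lock, flags);
}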
255 static int t7xx_send_pm_request(struct t7xx_pci_dev *t7xx_dev, u32 request) in t7xx_send_pm_request() argument
259 reinit_completion(&t7xx_dev->pm_sr_ack); in t7xx_send_pm_request()
260 t7xx_mhccif_h2d_swint_trigger(t7xx_dev, request); in t7xx_send_pm_request()
261 wait_ret = wait_for_completion_timeout(&t7xx_dev->pm_sr_ack, in t7xx_send_pm_request()
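The three lines above outline a simple request/ack handshake: rearm the ack completion, ring the host-to-device MHCCIF doorbell, then wait with a timeout. A sketch of that flow, where the timeout value is an assumption:

static int send_pm_request_sketch(struct t7xx_pci_dev *t7xx_dev, u32 request)
{
	unsigned long wait_ret;

	reinit_completion(&t7xx_dev->pm_sr_ack);
	t7xx_mhccif_h2d_swint_trigger(t7xx_dev, request);
	wait_ret = wait_for_completion_timeout(&t7xx_dev->pm_sr_ack,
					       msecs_to_jiffies(500));	/* timeout assumed */
	return wait_ret ? 0 : -ETIMEDOUT;
}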
272 struct t7xx_pci_dev *t7xx_dev; in __t7xx_pci_pm_suspend() local
276 t7xx_dev = pci_get_drvdata(pdev); in __t7xx_pci_pm_suspend()
277 if (atomic_read(&t7xx_dev->md_pm_state) <= MTK_PM_INIT) { in __t7xx_pci_pm_suspend()
282 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR); in __t7xx_pci_pm_suspend()
283 ret = t7xx_wait_pm_config(t7xx_dev); in __t7xx_pci_pm_suspend()
285 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in __t7xx_pci_pm_suspend()
289 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_SUSPENDED); in __t7xx_pci_pm_suspend()
290 t7xx_pcie_mac_clear_int(t7xx_dev, SAP_RGU_INT); in __t7xx_pci_pm_suspend()
291 t7xx_dev->rgu_pci_irq_en = false; in __t7xx_pci_pm_suspend()
293 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in __t7xx_pci_pm_suspend()
297 ret = entity->suspend(t7xx_dev, entity->entity_param); in __t7xx_pci_pm_suspend()
305 ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_SUSPEND_REQ); in __t7xx_pci_pm_suspend()
311 ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_SUSPEND_REQ_AP); in __t7xx_pci_pm_suspend()
313 t7xx_send_pm_request(t7xx_dev, H2D_CH_RESUME_REQ); in __t7xx_pci_pm_suspend()
318 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in __t7xx_pci_pm_suspend()
320 entity->suspend_late(t7xx_dev, entity->entity_param); in __t7xx_pci_pm_suspend()
323 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in __t7xx_pci_pm_suspend()
327 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in __t7xx_pci_pm_suspend()
332 entity->resume(t7xx_dev, entity->entity_param); in __t7xx_pci_pm_suspend()
335 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in __t7xx_pci_pm_suspend()
336 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_RESUMED); in __t7xx_pci_pm_suspend()
337 t7xx_pcie_mac_set_int(t7xx_dev, SAP_RGU_INT); in __t7xx_pci_pm_suspend()
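A condensed sketch of the suspend ordering visible in the __t7xx_pci_pm_suspend() lines above; error unwinding (resuming entities and sending H2D_CH_RESUME_REQ on failure) is trimmed, so this is an outline rather than the driver's full routine:

static int pm_suspend_sketch(struct t7xx_pci_dev *t7xx_dev)
{
	struct md_pm_entity *entity;
	int ret;

	/* 1. Hold the device out of L1/low-power while the handshake runs. */
	iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR);
	ret = t7xx_wait_pm_config(t7xx_dev);
	if (ret) {
		iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR);
		return ret;
	}

	/* 2. Mark suspended and quiesce the reset (RGU) interrupt. */
	atomic_set(&t7xx_dev->md_pm_state, MTK_PM_SUSPENDED);
	t7xx_pcie_mac_clear_int(t7xx_dev, SAP_RGU_INT);
	t7xx_dev->rgu_pci_irq_en = false;

	/* 3. Let every registered PM entity suspend first ... */
	list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) {
		if (entity->suspend)
			entity->suspend(t7xx_dev, entity->entity_param);
	}

	/* 4. ... then ask the modem and AP sides to suspend. */
	ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_SUSPEND_REQ);
	if (!ret)
		ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_SUSPEND_REQ_AP);

	/* 5. Late-suspend the entities and re-enable low-power states. */
	list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) {
		if (entity->suspend_late)
			entity->suspend_late(t7xx_dev, entity->entity_param);
	}
	iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR);
	return ret;
}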
341 static void t7xx_pcie_interrupt_reinit(struct t7xx_pci_dev *t7xx_dev) in t7xx_pcie_interrupt_reinit() argument
343 t7xx_pcie_set_mac_msix_cfg(t7xx_dev, EXT_INT_NUM); in t7xx_pcie_interrupt_reinit()
346 iowrite32(MSIX_MSK_SET_ALL, IREG_BASE(t7xx_dev) + IMASK_HOST_MSIX_CLR_GRP0_0); in t7xx_pcie_interrupt_reinit()
351 t7xx_pcie_mac_interrupts_en(t7xx_dev); in t7xx_pcie_interrupt_reinit()
352 t7xx_pcie_mac_set_int(t7xx_dev, MHCCIF_INT); in t7xx_pcie_interrupt_reinit()
355 static int t7xx_pcie_reinit(struct t7xx_pci_dev *t7xx_dev, bool is_d3) in t7xx_pcie_reinit() argument
359 ret = pcim_enable_device(t7xx_dev->pdev); in t7xx_pcie_reinit()
363 t7xx_pcie_mac_atr_init(t7xx_dev); in t7xx_pcie_reinit()
364 t7xx_pcie_interrupt_reinit(t7xx_dev); in t7xx_pcie_reinit()
367 t7xx_mhccif_init(t7xx_dev); in t7xx_pcie_reinit()
368 return t7xx_pci_pm_reinit(t7xx_dev); in t7xx_pcie_reinit()
374 static int t7xx_send_fsm_command(struct t7xx_pci_dev *t7xx_dev, u32 event) in t7xx_send_fsm_command() argument
376 struct t7xx_fsm_ctl *fsm_ctl = t7xx_dev->md->fsm_ctl; in t7xx_send_fsm_command()
377 struct device *dev = &t7xx_dev->pdev->dev; in t7xx_send_fsm_command()
386 t7xx_pcie_mac_clear_int(t7xx_dev, SAP_RGU_INT); in t7xx_send_fsm_command()
387 t7xx_pcie_mac_clear_int_status(t7xx_dev, SAP_RGU_INT); in t7xx_send_fsm_command()
388 t7xx_dev->rgu_pci_irq_en = true; in t7xx_send_fsm_command()
389 t7xx_pcie_mac_set_int(t7xx_dev, SAP_RGU_INT); in t7xx_send_fsm_command()
405 struct t7xx_pci_dev *t7xx_dev; in __t7xx_pci_pm_resume() local
410 t7xx_dev = pci_get_drvdata(pdev); in __t7xx_pci_pm_resume()
411 if (atomic_read(&t7xx_dev->md_pm_state) <= MTK_PM_INIT) { in __t7xx_pci_pm_resume()
412 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in __t7xx_pci_pm_resume()
416 t7xx_pcie_mac_interrupts_en(t7xx_dev); in __t7xx_pci_pm_resume()
417 prev_state = ioread32(IREG_BASE(t7xx_dev) + T7XX_PCIE_PM_RESUME_STATE); in __t7xx_pci_pm_resume()
424 u32 atr_reg_val = ioread32(IREG_BASE(t7xx_dev) + in __t7xx_pci_pm_resume()
429 ret = t7xx_send_fsm_command(t7xx_dev, FSM_CMD_STOP); in __t7xx_pci_pm_resume()
433 ret = t7xx_pcie_reinit(t7xx_dev, true); in __t7xx_pci_pm_resume()
437 t7xx_clear_rgu_irq(t7xx_dev); in __t7xx_pci_pm_resume()
438 return t7xx_send_fsm_command(t7xx_dev, FSM_CMD_START); in __t7xx_pci_pm_resume()
444 ret = t7xx_pcie_reinit(t7xx_dev, false); in __t7xx_pci_pm_resume()
449 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_SUSPENDED); in __t7xx_pci_pm_resume()
450 t7xx_dev->rgu_pci_irq_en = true; in __t7xx_pci_pm_resume()
451 t7xx_pcie_mac_set_int(t7xx_dev, SAP_RGU_INT); in __t7xx_pci_pm_resume()
453 t7xx_mhccif_mask_clr(t7xx_dev, in __t7xx_pci_pm_resume()
464 ret = t7xx_pcie_reinit(t7xx_dev, false); in __t7xx_pci_pm_resume()
470 ret = t7xx_send_fsm_command(t7xx_dev, FSM_CMD_STOP); in __t7xx_pci_pm_resume()
474 t7xx_clear_rgu_irq(t7xx_dev); in __t7xx_pci_pm_resume()
475 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_SUSPENDED); in __t7xx_pci_pm_resume()
480 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + DISABLE_ASPM_LOWPWR); in __t7xx_pci_pm_resume()
481 t7xx_wait_pm_config(t7xx_dev); in __t7xx_pci_pm_resume()
483 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in __t7xx_pci_pm_resume()
485 entity->resume_early(t7xx_dev, entity->entity_param); in __t7xx_pci_pm_resume()
488 ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_RESUME_REQ); in __t7xx_pci_pm_resume()
492 ret = t7xx_send_pm_request(t7xx_dev, H2D_CH_RESUME_REQ_AP); in __t7xx_pci_pm_resume()
496 list_for_each_entry(entity, &t7xx_dev->md_pm_entities, entity) { in __t7xx_pci_pm_resume()
498 ret = entity->resume(t7xx_dev, entity->entity_param); in __t7xx_pci_pm_resume()
505 t7xx_dev->rgu_pci_irq_en = true; in __t7xx_pci_pm_resume()
506 t7xx_pcie_mac_set_int(t7xx_dev, SAP_RGU_INT); in __t7xx_pci_pm_resume()
507 iowrite32(T7XX_L1_BIT(0), IREG_BASE(t7xx_dev) + ENABLE_ASPM_LOWPWR); in __t7xx_pci_pm_resume()
509 atomic_set(&t7xx_dev->md_pm_state, MTK_PM_RESUMED); in __t7xx_pci_pm_resume()
517 struct t7xx_pci_dev *t7xx_dev; in t7xx_pci_pm_resume_noirq() local
519 t7xx_dev = pci_get_drvdata(pdev); in t7xx_pci_pm_resume_noirq()
520 t7xx_pcie_mac_interrupts_dis(t7xx_dev); in t7xx_pci_pm_resume_noirq()
570 struct t7xx_pci_dev *t7xx_dev; in t7xx_request_irq() local
573 t7xx_dev = pci_get_drvdata(pdev); in t7xx_request_irq()
579 if (!t7xx_dev->intr_handler[i]) in t7xx_request_irq()
590 ret = request_threaded_irq(irq_vec, t7xx_dev->intr_handler[i], in t7xx_request_irq()
591 t7xx_dev->intr_thread[i], 0, irq_descr, in t7xx_request_irq()
592 t7xx_dev->callback_param[i]); in t7xx_request_irq()
601 if (!t7xx_dev->intr_handler[i]) in t7xx_request_irq()
604 free_irq(pci_irq_vector(pdev, i), t7xx_dev->callback_param[i]); in t7xx_request_irq()
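The t7xx_request_irq() lines above show a request/rollback pattern: one threaded handler per MSI-X vector, unused slots skipped, and everything already requested freed on failure. A sketch of that pattern, where EXT_INT_NUM and the handler arrays come from the listing and the IRQ descriptor string is an assumption:

static int request_irqs_sketch(struct t7xx_pci_dev *t7xx_dev)
{
	struct pci_dev *pdev = t7xx_dev->pdev;
	int ret = 0, i;

	for (i = 0; i < EXT_INT_NUM; i++) {
		int irq_vec;

		if (!t7xx_dev->intr_handler[i])
			continue;	/* slot has no handler registered */

		irq_vec = pci_irq_vector(pdev, i);
		ret = request_threaded_irq(irq_vec, t7xx_dev->intr_handler[i],
					   t7xx_dev->intr_thread[i], 0,
					   dev_name(&pdev->dev),	/* descriptor assumed */
					   t7xx_dev->callback_param[i]);
		if (ret)
			goto err_undo;
	}
	return 0;

err_undo:
	while (--i >= 0) {
		if (!t7xx_dev->intr_handler[i])
			continue;
		free_irq(pci_irq_vector(pdev, i), t7xx_dev->callback_param[i]);
	}
	return ret;
}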
611 static int t7xx_setup_msix(struct t7xx_pci_dev *t7xx_dev) in t7xx_setup_msix() argument
613 struct pci_dev *pdev = t7xx_dev->pdev; in t7xx_setup_msix()
629 t7xx_pcie_set_mac_msix_cfg(t7xx_dev, EXT_INT_NUM); in t7xx_setup_msix()
633 static int t7xx_interrupt_init(struct t7xx_pci_dev *t7xx_dev) in t7xx_interrupt_init() argument
637 if (!t7xx_dev->pdev->msix_cap) in t7xx_interrupt_init()
640 ret = t7xx_setup_msix(t7xx_dev); in t7xx_interrupt_init()
646 t7xx_pcie_mac_set_int(t7xx_dev, i); in t7xx_interrupt_init()
651 static void t7xx_pci_infracfg_ao_calc(struct t7xx_pci_dev *t7xx_dev) in t7xx_pci_infracfg_ao_calc() argument
653 t7xx_dev->base_addr.infracfg_ao_base = t7xx_dev->base_addr.pcie_ext_reg_base + in t7xx_pci_infracfg_ao_calc()
655 t7xx_dev->base_addr.pcie_dev_reg_trsl_addr; in t7xx_pci_infracfg_ao_calc()
660 struct t7xx_pci_dev *t7xx_dev; in t7xx_pci_probe() local
663 t7xx_dev = devm_kzalloc(&pdev->dev, sizeof(*t7xx_dev), GFP_KERNEL); in t7xx_pci_probe()
664 if (!t7xx_dev) in t7xx_pci_probe()
667 pci_set_drvdata(pdev, t7xx_dev); in t7xx_pci_probe()
668 t7xx_dev->pdev = pdev; in t7xx_pci_probe()
695 IREG_BASE(t7xx_dev) = pcim_iomap_table(pdev)[T7XX_PCI_IREG_BASE]; in t7xx_pci_probe()
696 t7xx_dev->base_addr.pcie_ext_reg_base = pcim_iomap_table(pdev)[T7XX_PCI_EREG_BASE]; in t7xx_pci_probe()
698 ret = t7xx_pci_pm_init(t7xx_dev); in t7xx_pci_probe()
702 t7xx_pcie_mac_atr_init(t7xx_dev); in t7xx_pci_probe()
703 t7xx_pci_infracfg_ao_calc(t7xx_dev); in t7xx_pci_probe()
704 t7xx_mhccif_init(t7xx_dev); in t7xx_pci_probe()
706 ret = t7xx_md_init(t7xx_dev); in t7xx_pci_probe()
710 t7xx_pcie_mac_interrupts_dis(t7xx_dev); in t7xx_pci_probe()
712 ret = t7xx_interrupt_init(t7xx_dev); in t7xx_pci_probe()
714 t7xx_md_exit(t7xx_dev); in t7xx_pci_probe()
718 t7xx_pcie_mac_set_int(t7xx_dev, MHCCIF_INT); in t7xx_pci_probe()
719 t7xx_pcie_mac_interrupts_en(t7xx_dev); in t7xx_pci_probe()
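An ordering sketch of t7xx_pci_probe() as the lines above show it; BAR request/mapping and DMA-mask setup are elided because the listing does not show them, so this is an outline, not the full probe routine:

static int probe_order_sketch(struct pci_dev *pdev)
{
	struct t7xx_pci_dev *t7xx_dev;
	int ret;

	t7xx_dev = devm_kzalloc(&pdev->dev, sizeof(*t7xx_dev), GFP_KERNEL);
	if (!t7xx_dev)
		return -ENOMEM;

	pci_set_drvdata(pdev, t7xx_dev);
	t7xx_dev->pdev = pdev;

	/* ... BAR request/mapping elided ... */
	IREG_BASE(t7xx_dev) = pcim_iomap_table(pdev)[T7XX_PCI_IREG_BASE];
	t7xx_dev->base_addr.pcie_ext_reg_base = pcim_iomap_table(pdev)[T7XX_PCI_EREG_BASE];

	ret = t7xx_pci_pm_init(t7xx_dev);
	if (ret)
		return ret;

	t7xx_pcie_mac_atr_init(t7xx_dev);	/* address translation before register access */
	t7xx_pci_infracfg_ao_calc(t7xx_dev);
	t7xx_mhccif_init(t7xx_dev);

	ret = t7xx_md_init(t7xx_dev);		/* modem core */
	if (ret)
		return ret;

	t7xx_pcie_mac_interrupts_dis(t7xx_dev);	/* keep IRQs off until handlers exist */
	ret = t7xx_interrupt_init(t7xx_dev);
	if (ret) {
		t7xx_md_exit(t7xx_dev);
		return ret;
	}

	t7xx_pcie_mac_set_int(t7xx_dev, MHCCIF_INT);
	t7xx_pcie_mac_interrupts_en(t7xx_dev);
	return 0;
}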
726 struct t7xx_pci_dev *t7xx_dev; in t7xx_pci_remove() local
729 t7xx_dev = pci_get_drvdata(pdev); in t7xx_pci_remove()
730 t7xx_md_exit(t7xx_dev); in t7xx_pci_remove()
733 if (!t7xx_dev->intr_handler[i]) in t7xx_pci_remove()
736 free_irq(pci_irq_vector(pdev, i), t7xx_dev->callback_param[i]); in t7xx_pci_remove()
739 pci_free_irq_vectors(t7xx_dev->pdev); in t7xx_pci_remove()