Lines matching refs: trans_pcie
Identifier cross-reference for trans_pcie in the iwlwifi PCIe transport (pcie/trans.c). Each hit shows the source line number, the matching code, and the enclosing function; a trailing "local" or "argument" marks how the identifier is bound on that line.
99 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_regs() local
100 struct pci_dev *pdev = trans_pcie->pci_dev; in iwl_trans_pcie_dump_regs()
104 if (trans_pcie->pcie_dbg_dumped_once) in iwl_trans_pcie_dump_regs()
179 trans_pcie->pcie_dbg_dumped_once = 1; in iwl_trans_pcie_dump_regs()
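The iwl_trans_pcie_dump_regs() hits (source lines 99-179) outline a dump-once guard: an expensive PCI config-space dump that must run at most once per device lifetime, keyed on the pcie_dbg_dumped_once flag. A minimal sketch of the pattern, assuming the driver's struct layout from pcie/internal.h (the dump body itself is elided in the listing):

    static void iwl_trans_pcie_dump_regs(struct iwl_trans *trans)
    {
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
            struct pci_dev *pdev = trans_pcie->pci_dev;

            if (trans_pcie->pcie_dbg_dumped_once)
                    return;                 /* already dumped once, stay quiet */

            /* ... read and log PCI config space via pdev ... */

            trans_pcie->pcie_dbg_dumped_once = 1;   /* arm the guard */
    }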
193 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_free_fw_monitor() local
195 if (!trans_pcie->fw_mon_page) in iwl_pcie_free_fw_monitor()
198 dma_unmap_page(trans->dev, trans_pcie->fw_mon_phys, in iwl_pcie_free_fw_monitor()
199 trans_pcie->fw_mon_size, DMA_FROM_DEVICE); in iwl_pcie_free_fw_monitor()
200 __free_pages(trans_pcie->fw_mon_page, in iwl_pcie_free_fw_monitor()
201 get_order(trans_pcie->fw_mon_size)); in iwl_pcie_free_fw_monitor()
202 trans_pcie->fw_mon_page = NULL; in iwl_pcie_free_fw_monitor()
203 trans_pcie->fw_mon_phys = 0; in iwl_pcie_free_fw_monitor()
204 trans_pcie->fw_mon_size = 0; in iwl_pcie_free_fw_monitor()
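iwl_pcie_free_fw_monitor() shows the canonical teardown order for a DMA buffer built from raw pages: unmap first so the device can no longer write, then free the pages, then clear the bookkeeping so a second call is a harmless no-op. Reconstructed from the lines above:

    #include <linux/dma-mapping.h>
    #include <linux/gfp.h>

    static void iwl_pcie_free_fw_monitor(struct iwl_trans *trans)
    {
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);

            if (!trans_pcie->fw_mon_page)
                    return;                         /* nothing allocated */

            /* Unmap before freeing: stop device DMA into the buffer. */
            dma_unmap_page(trans->dev, trans_pcie->fw_mon_phys,
                           trans_pcie->fw_mon_size, DMA_FROM_DEVICE);
            __free_pages(trans_pcie->fw_mon_page,
                         get_order(trans_pcie->fw_mon_size));

            /* Reset bookkeeping; a repeat call becomes a no-op. */
            trans_pcie->fw_mon_page = NULL;
            trans_pcie->fw_mon_phys = 0;
            trans_pcie->fw_mon_size = 0;
    }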
209 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_alloc_fw_monitor() local
227 if (trans_pcie->fw_mon_page) { in iwl_pcie_alloc_fw_monitor()
228 dma_sync_single_for_device(trans->dev, trans_pcie->fw_mon_phys, in iwl_pcie_alloc_fw_monitor()
229 trans_pcie->fw_mon_size, in iwl_pcie_alloc_fw_monitor()
267 trans_pcie->fw_mon_page = page; in iwl_pcie_alloc_fw_monitor()
268 trans_pcie->fw_mon_phys = phys; in iwl_pcie_alloc_fw_monitor()
269 trans_pcie->fw_mon_size = size; in iwl_pcie_alloc_fw_monitor()
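The allocation side (the body between source lines 209 and 267 is elided here) pairs with the free path above. The usual shape is a descending-order retry: try the largest contiguous block, and on allocation or mapping failure drop to the next power of two; the three assignments at lines 267-269 record the winner. A sketch under that assumption (the retry bounds and GFP flags are illustrative, not confirmed by the listing):

    struct page *page;
    dma_addr_t phys;
    u32 size;
    u8 power;

    for (power = max_power; power >= 11; power--) {
            size = BIT(power);
            page = alloc_pages(GFP_KERNEL | __GFP_NOWARN | __GFP_ZERO,
                               get_order(size));
            if (!page)
                    continue;               /* try a smaller block */

            phys = dma_map_page(trans->dev, page, 0, size,
                                DMA_FROM_DEVICE);
            if (dma_mapping_error(trans->dev, phys)) {
                    __free_pages(page, get_order(size));
                    continue;               /* mapping failed, shrink */
            }
            break;                          /* success */
    }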
306 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_apm_config() local
318 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_LNKCTL, &lctl); in iwl_pcie_apm_config()
325 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_DEVCTL2, &cap); in iwl_pcie_apm_config()
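iwl_pcie_apm_config() reads two PCIe capability registers: Link Control, to learn whether the platform left ASPM L1 enabled, and Device Control 2, for the LTR bit. A minimal sketch of the capability reads, assuming the standard PCI_EXP_* flag names from <uapi/linux/pci_regs.h>:

    #include <linux/pci.h>

    u16 lctl, cap;
    bool l1_active, ltr_enabled;

    pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_LNKCTL, &lctl);
    l1_active = !!(lctl & PCI_EXP_LNKCTL_ASPM_L1);   /* BIOS left L1 on? */

    pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_DEVCTL2, &cap);
    ltr_enabled = !!(cap & PCI_EXP_DEVCTL2_LTR_EN);  /* LTR negotiated? */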
611 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_nic_init() local
615 spin_lock(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
617 spin_unlock(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
739 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_load_firmware_chunk() local
743 trans_pcie->ucode_write_complete = false; in iwl_pcie_load_firmware_chunk()
752 ret = wait_event_timeout(trans_pcie->ucode_write_waitq, in iwl_pcie_load_firmware_chunk()
753 trans_pcie->ucode_write_complete, 5 * HZ); in iwl_pcie_load_firmware_chunk()
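iwl_pcie_load_firmware_chunk() is a classic flag-plus-waitqueue handshake: clear the completion flag, kick the DMA engine, then sleep until the interrupt handler sets the flag and wakes the queue, with a 5-second timeout as a safety net. Sketch (the wake-up half lives in the driver's ISR, not in this listing):

    /* submitter side */
    trans_pcie->ucode_write_complete = false;
    /* ... program the FH DMA channel to copy this chunk ... */
    ret = wait_event_timeout(trans_pcie->ucode_write_waitq,
                             trans_pcie->ucode_write_complete, 5 * HZ);
    if (!ret)
            return -ETIMEDOUT;      /* DMA never signalled completion */

    /* ISR side (elsewhere in the driver) */
    trans_pcie->ucode_write_complete = true;
    wake_up(&trans_pcie->ucode_write_waitq);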
933 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_apply_destination() local
984 if (dest->monitor_mode == EXTERNAL_MODE && trans_pcie->fw_mon_size) { in iwl_pcie_apply_destination()
986 trans_pcie->fw_mon_phys >> dest->base_shift); in iwl_pcie_apply_destination()
989 (trans_pcie->fw_mon_phys + in iwl_pcie_apply_destination()
990 trans_pcie->fw_mon_size - 256) >> in iwl_pcie_apply_destination()
994 (trans_pcie->fw_mon_phys + in iwl_pcie_apply_destination()
995 trans_pcie->fw_mon_size) >> in iwl_pcie_apply_destination()
1003 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_load_given_ucode() local
1033 if (trans_pcie->fw_mon_size) { in iwl_pcie_load_given_ucode()
1035 trans_pcie->fw_mon_phys >> 4); in iwl_pcie_load_given_ucode()
1037 (trans_pcie->fw_mon_phys + in iwl_pcie_load_given_ucode()
1038 trans_pcie->fw_mon_size) >> 4); in iwl_pcie_load_given_ucode()
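Both iwl_pcie_apply_destination() and iwl_pcie_load_given_ucode() program the monitor window into device registers as shifted physical addresses: the hardware stores base and end in units of 2^shift bytes, so the CPU-side dma_addr_t is right-shifted before the write (a fixed >> 4, i.e. 16-byte units, in the legacy path at lines 1035-1038; firmware-supplied base_shift/end_shift in the TLV path). In the external-monitor case the end register backs off 256 bytes so the hardware write pointer cannot run past the buffer. Schematically, for the TLV path:

    /* TLV-described destination: shifts come from the firmware image. */
    iwl_write_prph(trans, le32_to_cpu(dest->base_reg),
                   trans_pcie->fw_mon_phys >> dest->base_shift);
    iwl_write_prph(trans, le32_to_cpu(dest->end_reg),
                   (trans_pcie->fw_mon_phys +
                    trans_pcie->fw_mon_size - 256) >> dest->end_shift);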
1091 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_check_hw_rf_kill() local
1101 if (trans_pcie->opmode_down) in iwl_pcie_check_hw_rf_kill()
1155 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_non_rx_causes() local
1156 int val = trans_pcie->def_irq | MSIX_NON_AUTO_CLEAR_CAUSE; in iwl_pcie_map_non_rx_causes()
1179 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_rx_causes() local
1181 trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 1 : 0; in iwl_pcie_map_rx_causes()
1199 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_NON_RX) in iwl_pcie_map_rx_causes()
1203 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS) in iwl_pcie_map_rx_causes()
1207 void iwl_pcie_conf_msix_hw(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_conf_msix_hw() argument
1209 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_conf_msix_hw()
1211 if (!trans_pcie->msix_enabled) { in iwl_pcie_conf_msix_hw()
1238 static void iwl_pcie_init_msix(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix() argument
1240 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_init_msix()
1242 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_pcie_init_msix()
1244 if (!trans_pcie->msix_enabled) in iwl_pcie_init_msix()
1247 trans_pcie->fh_init_mask = ~iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD); in iwl_pcie_init_msix()
1248 trans_pcie->fh_mask = trans_pcie->fh_init_mask; in iwl_pcie_init_msix()
1249 trans_pcie->hw_init_mask = ~iwl_read32(trans, CSR_MSIX_HW_INT_MASK_AD); in iwl_pcie_init_msix()
1250 trans_pcie->hw_mask = trans_pcie->hw_init_mask; in iwl_pcie_init_msix()
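iwl_pcie_init_msix() caches the reset-time interrupt masks: right after the causes are programmed it reads each MSIX *_INT_MASK_AD register once and stores the complement as the "everything enabled" baseline, so later enable/disable paths can restore a known-good state without re-deriving it. The listed lines reassemble to:

    iwl_pcie_conf_msix_hw(trans_pcie);              /* program the causes */

    if (!trans_pcie->msix_enabled)
            return;                                 /* legacy INTx/MSI path */

    /* The registers hold "masked" bits; invert to get the enabled set. */
    trans_pcie->fh_init_mask = ~iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD);
    trans_pcie->fh_mask      = trans_pcie->fh_init_mask;
    trans_pcie->hw_init_mask = ~iwl_read32(trans, CSR_MSIX_HW_INT_MASK_AD);
    trans_pcie->hw_mask      = trans_pcie->hw_init_mask;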
1255 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_stop_device() local
1257 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_stop_device()
1259 if (trans_pcie->is_down) in _iwl_trans_pcie_stop_device()
1262 trans_pcie->is_down = true; in _iwl_trans_pcie_stop_device()
1316 iwl_pcie_conf_msix_hw(trans_pcie); in _iwl_trans_pcie_stop_device()
1344 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_synchronize_irqs() local
1346 if (trans_pcie->msix_enabled) { in iwl_pcie_synchronize_irqs()
1349 for (i = 0; i < trans_pcie->alloc_vecs; i++) in iwl_pcie_synchronize_irqs()
1350 synchronize_irq(trans_pcie->msix_entries[i].vector); in iwl_pcie_synchronize_irqs()
1352 synchronize_irq(trans_pcie->pci_dev->irq); in iwl_pcie_synchronize_irqs()
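iwl_pcie_synchronize_irqs() must quiesce every vector before shared state is torn down; with MSI-X that means one synchronize_irq() per allocated vector, while MSI/INTx has the single pci_dev->irq. Reconstructed from the lines above:

    static void iwl_pcie_synchronize_irqs(struct iwl_trans *trans)
    {
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);

            if (trans_pcie->msix_enabled) {
                    int i;

                    /* Wait out in-flight handlers on every vector. */
                    for (i = 0; i < trans_pcie->alloc_vecs; i++)
                            synchronize_irq(trans_pcie->msix_entries[i].vector);
            } else {
                    synchronize_irq(trans_pcie->pci_dev->irq);
            }
    }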
1359 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_fw() local
1384 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1394 if (trans_pcie->is_down) { in iwl_trans_pcie_start_fw()
1440 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1481 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_stop_device() local
1484 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1485 trans_pcie->opmode_down = true; in iwl_trans_pcie_stop_device()
1489 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1494 struct iwl_trans_pcie __maybe_unused *trans_pcie = in iwl_trans_pcie_rf_kill() local
1497 lockdep_assert_held(&trans_pcie->mutex); in iwl_trans_pcie_rf_kill()
1554 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_d3_resume() local
1590 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_trans_pcie_d3_resume()
1591 if (!trans_pcie->msix_enabled) in iwl_trans_pcie_d3_resume()
1626 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_set_interrupt_capa() local
1635 trans_pcie->msix_entries[i].entry = i; in iwl_pcie_set_interrupt_capa()
1637 num_irqs = pci_enable_msix_range(pdev, trans_pcie->msix_entries, in iwl_pcie_set_interrupt_capa()
1646 trans_pcie->def_irq = (num_irqs == max_irqs) ? num_irqs - 1 : 0; in iwl_pcie_set_interrupt_capa()
1660 trans_pcie->trans->num_rx_queues = num_irqs + 1; in iwl_pcie_set_interrupt_capa()
1661 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX | in iwl_pcie_set_interrupt_capa()
1664 trans_pcie->trans->num_rx_queues = num_irqs; in iwl_pcie_set_interrupt_capa()
1665 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX; in iwl_pcie_set_interrupt_capa()
1667 trans_pcie->trans->num_rx_queues = num_irqs - 1; in iwl_pcie_set_interrupt_capa()
1669 WARN_ON(trans_pcie->trans->num_rx_queues > IWL_MAX_RX_HW_QUEUES); in iwl_pcie_set_interrupt_capa()
1671 trans_pcie->alloc_vecs = num_irqs; in iwl_pcie_set_interrupt_capa()
1672 trans_pcie->msix_enabled = true; in iwl_pcie_set_interrupt_capa()
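iwl_pcie_set_interrupt_capa() requests a range of MSI-X vectors and adapts to whatever the platform grants: with the full set, the last vector is dedicated to non-RX causes (def_irq = num_irqs - 1, line 1646); with fewer, vector 0 is shared, shared_vec_mask records what rides on it, and num_rx_queues shrinks accordingly (lines 1660-1667). A sketch of the negotiation step; the minimum-vector constant and max_irqs formula are the driver's, treat specifics as assumptions:

    #include <linux/pci.h>

    int max_irqs = min_t(u32, num_online_cpus() + 2, IWL_MAX_RX_HW_QUEUES);
    int num_irqs, i;

    for (i = 0; i < max_irqs; i++)
            trans_pcie->msix_entries[i].entry = i;

    /* Accept anything between a shared vector and the full request. */
    num_irqs = pci_enable_msix_range(pdev, trans_pcie->msix_entries,
                                     MSIX_MIN_INTERRUPT_VECTORS, max_irqs);
    if (num_irqs < 0)
            return;                 /* fall back to MSI / INTx */

    trans_pcie->def_irq = (num_irqs == max_irqs) ? num_irqs - 1 : 0;
    trans_pcie->alloc_vecs = num_irqs;
    trans_pcie->msix_enabled = true;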
1691 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_irq_set_affinity() local
1693 i = trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 0 : 1; in iwl_pcie_irq_set_affinity()
1694 iter_rx_q = trans_pcie->trans->num_rx_queues - 1 + i; in iwl_pcie_irq_set_affinity()
1702 cpumask_set_cpu(cpu, &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1703 ret = irq_set_affinity_hint(trans_pcie->msix_entries[i].vector, in iwl_pcie_irq_set_affinity()
1704 &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1706 IWL_ERR(trans_pcie->trans, in iwl_pcie_irq_set_affinity()
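iwl_pcie_irq_set_affinity() pins each RX vector to its own CPU. The index arithmetic at lines 1693-1694 decides whether vector 0 participates: if the first vector also carries RSS traffic (IWL_SHARED_IRQ_FIRST_RSS), RX iteration starts at 0, otherwise at 1. Per-vector pinning then follows the standard cpumask-plus-hint recipe; the loop bounds below are illustrative, the driver walks its own vector indices:

    #include <linux/interrupt.h>
    #include <linux/cpumask.h>

    int cpu, ret;

    for (cpu = 0; i < iter_rx_q; i++, cpu++) {
            /* One online CPU per RX vector. */
            cpumask_set_cpu(cpu, &trans_pcie->affinity_mask[i]);
            ret = irq_set_affinity_hint(trans_pcie->msix_entries[i].vector,
                                        &trans_pcie->affinity_mask[i]);
            if (ret)
                    IWL_ERR(trans_pcie->trans,
                            "Failed to set affinity mask for IRQ %d\n",
                            trans_pcie->msix_entries[i].vector);
    }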
1713 struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix_handler() argument
1717 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_pcie_init_msix_handler()
1720 const char *qname = queue_name(&pdev->dev, trans_pcie, i); in iwl_pcie_init_msix_handler()
1725 msix_entry = &trans_pcie->msix_entries[i]; in iwl_pcie_init_msix_handler()
1729 (i == trans_pcie->def_irq) ? in iwl_pcie_init_msix_handler()
1736 IWL_ERR(trans_pcie->trans, in iwl_pcie_init_msix_handler()
1742 iwl_pcie_irq_set_affinity(trans_pcie->trans); in iwl_pcie_init_msix_handler()
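iwl_pcie_init_msix_handler() requests a threaded handler for each vector; the default vector (def_irq) gets the full cause-dispatch thread while the others get an RX-only thread, and devm_request_threaded_irq() ties the registration's lifetime to the PCI device. The shape, with the handler function names taken on the assumption they match the driver's ISR code:

    #include <linux/interrupt.h>

    for (i = 0; i < trans_pcie->alloc_vecs; i++) {
            struct msix_entry *msix_entry = &trans_pcie->msix_entries[i];
            const char *qname = queue_name(&pdev->dev, trans_pcie, i);

            ret = devm_request_threaded_irq(&pdev->dev, msix_entry->vector,
                                            iwl_pcie_msix_isr,
                                            (i == trans_pcie->def_irq) ?
                                            iwl_pcie_irq_msix_handler :
                                            iwl_pcie_irq_rx_msix_handler,
                                            IRQF_SHARED, qname, msix_entry);
            if (ret) {
                    IWL_ERR(trans_pcie->trans,
                            "Error allocating IRQ %d\n", i);
                    return ret;
            }
    }
    iwl_pcie_irq_set_affinity(trans_pcie->trans);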
1749 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_start_hw() local
1752 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_start_hw()
1766 iwl_pcie_init_msix(trans_pcie); in _iwl_trans_pcie_start_hw()
1771 trans_pcie->opmode_down = false; in _iwl_trans_pcie_start_hw()
1774 trans_pcie->is_down = false; in _iwl_trans_pcie_start_hw()
1788 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_hw() local
1791 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1793 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1800 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_op_mode_leave() local
1802 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1813 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1851 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_configure() local
1853 trans_pcie->cmd_queue = trans_cfg->cmd_queue; in iwl_trans_pcie_configure()
1854 trans_pcie->cmd_fifo = trans_cfg->cmd_fifo; in iwl_trans_pcie_configure()
1855 trans_pcie->cmd_q_wdg_timeout = trans_cfg->cmd_q_wdg_timeout; in iwl_trans_pcie_configure()
1857 trans_pcie->n_no_reclaim_cmds = 0; in iwl_trans_pcie_configure()
1859 trans_pcie->n_no_reclaim_cmds = trans_cfg->n_no_reclaim_cmds; in iwl_trans_pcie_configure()
1860 if (trans_pcie->n_no_reclaim_cmds) in iwl_trans_pcie_configure()
1861 memcpy(trans_pcie->no_reclaim_cmds, trans_cfg->no_reclaim_cmds, in iwl_trans_pcie_configure()
1862 trans_pcie->n_no_reclaim_cmds * sizeof(u8)); in iwl_trans_pcie_configure()
1864 trans_pcie->rx_buf_size = trans_cfg->rx_buf_size; in iwl_trans_pcie_configure()
1865 trans_pcie->rx_page_order = in iwl_trans_pcie_configure()
1866 iwl_trans_get_rb_size_order(trans_pcie->rx_buf_size); in iwl_trans_pcie_configure()
1868 trans_pcie->bc_table_dword = trans_cfg->bc_table_dword; in iwl_trans_pcie_configure()
1869 trans_pcie->scd_set_active = trans_cfg->scd_set_active; in iwl_trans_pcie_configure()
1870 trans_pcie->sw_csum_tx = trans_cfg->sw_csum_tx; in iwl_trans_pcie_configure()
1872 trans_pcie->page_offs = trans_cfg->cb_data_offs; in iwl_trans_pcie_configure()
1873 trans_pcie->dev_cmd_offs = trans_cfg->cb_data_offs + sizeof(void *); in iwl_trans_pcie_configure()
1883 if (trans_pcie->napi_dev.reg_state != NETREG_DUMMY) in iwl_trans_pcie_configure()
1884 init_dummy_netdev(&trans_pcie->napi_dev); in iwl_trans_pcie_configure()
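The last two configure lines use a well-known NAPI trick: NAPI needs a struct net_device to attach to, but this transport has no real netdev, so it embeds a dummy one and initializes it at most once. The guard works because init_dummy_netdev() sets reg_state = NETREG_DUMMY, which survives repeated configure calls:

    #include <linux/netdevice.h>

    /* One-time init: skipped on any later reconfigure. */
    if (trans_pcie->napi_dev.reg_state != NETREG_DUMMY)
            init_dummy_netdev(&trans_pcie->napi_dev);

    /* RX queues can then netif_napi_add() against &trans_pcie->napi_dev. */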
1889 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_free() local
1900 if (trans_pcie->rba.alloc_wq) { in iwl_trans_pcie_free()
1901 destroy_workqueue(trans_pcie->rba.alloc_wq); in iwl_trans_pcie_free()
1902 trans_pcie->rba.alloc_wq = NULL; in iwl_trans_pcie_free()
1905 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_free()
1906 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_trans_pcie_free()
1908 trans_pcie->msix_entries[i].vector, in iwl_trans_pcie_free()
1912 trans_pcie->msix_enabled = false; in iwl_trans_pcie_free()
1921 per_cpu_ptr(trans_pcie->tso_hdr_page, i); in iwl_trans_pcie_free()
1927 free_percpu(trans_pcie->tso_hdr_page); in iwl_trans_pcie_free()
1928 mutex_destroy(&trans_pcie->mutex); in iwl_trans_pcie_free()
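iwl_trans_pcie_free() tears down in reverse order of setup: stop the RB-allocator workqueue before RX state goes away, release each MSI-X vector after clearing its affinity hint (per the irq_set_affinity_hint() contract, the hint must be NULLed before the vector is freed), then free the per-CPU TSO header cache and destroy the mutex. A compressed sketch of the IRQ and percpu portion:

    if (trans_pcie->rba.alloc_wq) {
            destroy_workqueue(trans_pcie->rba.alloc_wq);  /* flushes work */
            trans_pcie->rba.alloc_wq = NULL;
    }

    if (trans_pcie->msix_enabled) {
            for (i = 0; i < trans_pcie->alloc_vecs; i++)
                    /* Clear the hint before the vector disappears. */
                    irq_set_affinity_hint(trans_pcie->msix_entries[i].vector,
                                          NULL);
            trans_pcie->msix_enabled = false;
    }

    /* Per-CPU TSO pages are freed via per_cpu_ptr() first (lines
     * 1921-1927), then the percpu allocation itself. */
    free_percpu(trans_pcie->tso_hdr_page);
    mutex_destroy(&trans_pcie->mutex);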
1967 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_grab_nic_access() local
1969 spin_lock_irqsave(&trans_pcie->reg_lock, *flags); in iwl_trans_pcie_grab_nic_access()
1971 if (trans_pcie->cmd_hold_nic_awake) in iwl_trans_pcie_grab_nic_access()
2016 if (trans_pcie->scheduled_for_removal) in iwl_trans_pcie_grab_nic_access()
2042 trans_pcie->scheduled_for_removal = true; in iwl_trans_pcie_grab_nic_access()
2054 spin_unlock_irqrestore(&trans_pcie->reg_lock, *flags); in iwl_trans_pcie_grab_nic_access()
2063 __release(&trans_pcie->reg_lock); in iwl_trans_pcie_grab_nic_access()
2070 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_release_nic_access() local
2072 lockdep_assert_held(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2078 __acquire(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2080 if (trans_pcie->cmd_hold_nic_awake) in iwl_trans_pcie_release_nic_access()
2093 spin_unlock_irqrestore(&trans_pcie->reg_lock, *flags); in iwl_trans_pcie_release_nic_access()
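grab_nic_access()/release_nic_access() are an asymmetric lock pair: grab takes reg_lock with irqsave and, on success, returns with it held so the caller can issue raw register accesses while the NIC is held awake; release drops it. The __release()/__acquire() calls at lines 2063 and 2078 are sparse annotations only; they tell static analysis that lock ownership legitimately crosses the function boundary and compile to nothing. A typical caller, using the iwl_trans_grab_nic_access() wrapper from iwl-trans.h:

    unsigned long flags;

    if (iwl_trans_grab_nic_access(trans, &flags)) {
            /* reg_lock is held and the NIC is awake here. */
            u32 val = iwl_read32(trans, CSR_GP_CNTRL);

            iwl_trans_release_nic_access(trans, &flags);
    }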
2137 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_freeze_txq_timer() local
2141 struct iwl_txq *txq = trans_pcie->txq[queue]; in iwl_trans_pcie_freeze_txq_timer()
2189 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_block_txq_ptrs() local
2193 struct iwl_txq *txq = trans_pcie->txq[i]; in iwl_trans_pcie_block_txq_ptrs()
2195 if (i == trans_pcie->cmd_queue) in iwl_trans_pcie_block_txq_ptrs()
2249 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_rxq_dma_data() local
2251 if (queue >= trans->num_rx_queues || !trans_pcie->rxq) in iwl_trans_pcie_rxq_dma_data()
2254 data->fr_bd_cb = trans_pcie->rxq[queue].bd_dma; in iwl_trans_pcie_rxq_dma_data()
2255 data->urbd_stts_wrptr = trans_pcie->rxq[queue].rb_stts_dma; in iwl_trans_pcie_rxq_dma_data()
2256 data->ur_bd_cb = trans_pcie->rxq[queue].used_bd_dma; in iwl_trans_pcie_rxq_dma_data()
2264 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_wait_txq_empty() local
2269 if (!test_bit(txq_idx, trans_pcie->queue_used)) in iwl_trans_pcie_wait_txq_empty()
2273 txq = trans_pcie->txq[txq_idx]; in iwl_trans_pcie_wait_txq_empty()
2302 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_wait_txqs_empty() local
2309 if (cnt == trans_pcie->cmd_queue) in iwl_trans_pcie_wait_txqs_empty()
2311 if (!test_bit(cnt, trans_pcie->queue_used)) in iwl_trans_pcie_wait_txqs_empty()
2327 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_set_bits_mask() local
2330 spin_lock_irqsave(&trans_pcie->reg_lock, flags); in iwl_trans_pcie_set_bits_mask()
2332 spin_unlock_irqrestore(&trans_pcie->reg_lock, flags); in iwl_trans_pcie_set_bits_mask()
2337 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_ref() local
2342 pm_runtime_get(&trans_pcie->pci_dev->dev); in iwl_trans_pcie_ref()
2346 atomic_read(&trans_pcie->pci_dev->dev.power.usage_count)); in iwl_trans_pcie_ref()
2352 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_unref() local
2357 pm_runtime_mark_last_busy(&trans_pcie->pci_dev->dev); in iwl_trans_pcie_unref()
2358 pm_runtime_put_autosuspend(&trans_pcie->pci_dev->dev); in iwl_trans_pcie_unref()
2362 atomic_read(&trans_pcie->pci_dev->dev.power.usage_count)); in iwl_trans_pcie_unref()
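The ref/unref pair maps the transport's reference counting onto runtime PM: ref takes an asynchronous usage reference to keep the device in D0, and unref marks the device busy and arms the autosuspend timer instead of suspending immediately. The pattern from the lines above:

    #include <linux/pm_runtime.h>

    /* take: keep the device powered while work is outstanding */
    pm_runtime_get(&trans_pcie->pci_dev->dev);

    /* release: defer suspend until the autosuspend delay expires */
    pm_runtime_mark_last_busy(&trans_pcie->pci_dev->dev);
    pm_runtime_put_autosuspend(&trans_pcie->pci_dev->dev);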
2475 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_tx_queue_read() local
2485 if (!trans_pcie->txq_memory) in iwl_dbgfs_tx_queue_read()
2493 txq = trans_pcie->txq[cnt]; in iwl_dbgfs_tx_queue_read()
2497 !!test_bit(cnt, trans_pcie->queue_used), in iwl_dbgfs_tx_queue_read()
2498 !!test_bit(cnt, trans_pcie->queue_stopped), in iwl_dbgfs_tx_queue_read()
2500 (cnt == trans_pcie->cmd_queue ? " HCMD" : "")); in iwl_dbgfs_tx_queue_read()
2512 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rx_queue_read() local
2519 if (!trans_pcie->rxq) in iwl_dbgfs_rx_queue_read()
2527 struct iwl_rxq *rxq = &trans_pcie->rxq[i]; in iwl_dbgfs_rx_queue_read()
2563 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_read() local
2564 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_read()
2621 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_write() local
2622 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_write()
2669 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_read() local
2674 trans_pcie->debug_rfkill, in iwl_dbgfs_rfkill_read()
2686 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_write() local
2687 bool old = trans_pcie->debug_rfkill; in iwl_dbgfs_rfkill_write()
2690 ret = kstrtobool_from_user(user_buf, count, &trans_pcie->debug_rfkill); in iwl_dbgfs_rfkill_write()
2693 if (old == trans_pcie->debug_rfkill) in iwl_dbgfs_rfkill_write()
2696 old, trans_pcie->debug_rfkill); in iwl_dbgfs_rfkill_write()
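The rfkill debugfs write handler shows the idiomatic way to parse a boolean from userspace: kstrtobool_from_user() accepts 0/1/y/n/on/off straight from the user buffer, and the old-versus-new comparison turns redundant writes into no-ops. A sketch, assuming the driver's usual convention that file->private_data holds the trans pointer:

    static ssize_t iwl_dbgfs_rfkill_write(struct file *file,
                                          const char __user *user_buf,
                                          size_t count, loff_t *ppos)
    {
            struct iwl_trans *trans = file->private_data;
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
            bool old = trans_pcie->debug_rfkill;
            int ret;

            ret = kstrtobool_from_user(user_buf, count,
                                       &trans_pcie->debug_rfkill);
            if (ret)
                    return ret;
            if (old == trans_pcie->debug_rfkill)
                    return count;           /* unchanged: nothing to do */

            /* ... re-evaluate rfkill state with the new override ... */
            return count;
    }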
2730 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_get_cmdlen() local
2734 for (i = 0; i < trans_pcie->max_tbs; i++) in iwl_trans_pcie_get_cmdlen()
2744 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_rbs() local
2745 int max_len = PAGE_SIZE << trans_pcie->rx_page_order; in iwl_trans_pcie_dump_rbs()
2747 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_rbs()
2864 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_monitor() local
2867 if ((trans_pcie->fw_mon_page && in iwl_trans_pcie_dump_monitor()
2895 if (trans_pcie->fw_mon_page) { in iwl_trans_pcie_dump_monitor()
2903 trans_pcie->fw_mon_phys, in iwl_trans_pcie_dump_monitor()
2904 trans_pcie->fw_mon_size, in iwl_trans_pcie_dump_monitor()
2907 page_address(trans_pcie->fw_mon_page), in iwl_trans_pcie_dump_monitor()
2908 trans_pcie->fw_mon_size); in iwl_trans_pcie_dump_monitor()
2910 monitor_len = trans_pcie->fw_mon_size; in iwl_trans_pcie_dump_monitor()
2950 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_data() local
2952 struct iwl_txq *cmdq = trans_pcie->txq[trans_pcie->cmd_queue]; in iwl_trans_pcie_dump_data()
2970 if (trans_pcie->fw_mon_page) { in iwl_trans_pcie_dump_data()
2972 trans_pcie->fw_mon_size; in iwl_trans_pcie_dump_data()
2973 monitor_len = trans_pcie->fw_mon_size; in iwl_trans_pcie_dump_data()
3045 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_data()
3053 (PAGE_SIZE << trans_pcie->rx_page_order)); in iwl_trans_pcie_dump_data()
3059 for (i = 0; i < trans_pcie->init_dram.paging_cnt; i++) in iwl_trans_pcie_dump_data()
3062 trans_pcie->init_dram.paging[i].size; in iwl_trans_pcie_dump_data()
3072 u16 tfd_size = trans_pcie->tfd_size; in iwl_trans_pcie_dump_data()
3115 for (i = 0; i < trans_pcie->init_dram.paging_cnt; i++) { in iwl_trans_pcie_dump_data()
3118 trans_pcie->init_dram.paging[i].physical; in iwl_trans_pcie_dump_data()
3119 u32 page_len = trans_pcie->init_dram.paging[i].size; in iwl_trans_pcie_dump_data()
3128 trans_pcie->init_dram.paging[i].block, page_len); in iwl_trans_pcie_dump_data()
3237 struct iwl_trans_pcie *trans_pcie; in iwl_trans_pcie_alloc() local
3254 trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_alloc()
3256 trans_pcie->trans = trans; in iwl_trans_pcie_alloc()
3257 trans_pcie->opmode_down = true; in iwl_trans_pcie_alloc()
3258 spin_lock_init(&trans_pcie->irq_lock); in iwl_trans_pcie_alloc()
3259 spin_lock_init(&trans_pcie->reg_lock); in iwl_trans_pcie_alloc()
3260 mutex_init(&trans_pcie->mutex); in iwl_trans_pcie_alloc()
3261 init_waitqueue_head(&trans_pcie->ucode_write_waitq); in iwl_trans_pcie_alloc()
3262 trans_pcie->tso_hdr_page = alloc_percpu(struct iwl_tso_hdr_page); in iwl_trans_pcie_alloc()
3263 if (!trans_pcie->tso_hdr_page) { in iwl_trans_pcie_alloc()
3282 trans_pcie->max_tbs = IWL_TFH_NUM_TBS; in iwl_trans_pcie_alloc()
3283 trans_pcie->tfd_size = sizeof(struct iwl_tfh_tfd); in iwl_trans_pcie_alloc()
3286 trans_pcie->max_tbs = IWL_NUM_OF_TBS; in iwl_trans_pcie_alloc()
3287 trans_pcie->tfd_size = sizeof(struct iwl_tfd); in iwl_trans_pcie_alloc()
3289 trans->max_skb_frags = IWL_PCIE_MAX_FRAGS(trans_pcie); in iwl_trans_pcie_alloc()
3315 trans_pcie->hw_base = pcim_iomap_table(pdev)[0]; in iwl_trans_pcie_alloc()
3316 if (!trans_pcie->hw_base) { in iwl_trans_pcie_alloc()
3326 trans_pcie->pci_dev = pdev; in iwl_trans_pcie_alloc()
3422 init_waitqueue_head(&trans_pcie->wait_command_queue); in iwl_trans_pcie_alloc()
3424 init_waitqueue_head(&trans_pcie->d0i3_waitq); in iwl_trans_pcie_alloc()
3426 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_alloc()
3427 ret = iwl_pcie_init_msix_handler(pdev, trans_pcie); in iwl_trans_pcie_alloc()
3443 trans_pcie->inta_mask = CSR_INI_SET_MASK; in iwl_trans_pcie_alloc()
3446 trans_pcie->rba.alloc_wq = alloc_workqueue("rb_allocator", in iwl_trans_pcie_alloc()
3448 INIT_WORK(&trans_pcie->rba.rx_alloc, iwl_pcie_rx_allocator_work); in iwl_trans_pcie_alloc()
3461 free_percpu(trans_pcie->tso_hdr_page); in iwl_trans_pcie_alloc()
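iwl_trans_pcie_alloc() closes the listing with the construction path: locks and waitqueues are initialized before any interrupt can fire, the per-CPU TSO header cache is allocated early and is the first thing unwound on error (line 3461), BAR 0 is mapped through the managed pcim_iomap_table(), and the RB-allocator workqueue plus its work item come last. The key discipline is the goto-based unwind, where each failure releases exactly what was acquired so far; a skeleton under that reading (the full path also frees the trans object itself):

    trans_pcie->tso_hdr_page = alloc_percpu(struct iwl_tso_hdr_page);
    if (!trans_pcie->tso_hdr_page) {
            ret = -ENOMEM;
            goto out_no_pci;                /* nothing else to undo yet */
    }

    trans_pcie->hw_base = pcim_iomap_table(pdev)[0];
    if (!trans_pcie->hw_base) {
            ret = -ENODEV;
            goto out_no_pci;                /* pcim_* mappings self-release */
    }

    trans_pcie->rba.alloc_wq = alloc_workqueue("rb_allocator",
                                               WQ_HIGHPRI | WQ_UNBOUND, 1);
    INIT_WORK(&trans_pcie->rba.rx_alloc, iwl_pcie_rx_allocator_work);

    return trans;

    out_no_pci:
            free_percpu(trans_pcie->tso_hdr_page);
            return ERR_PTR(ret);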