Lines matching refs: ha — all references to the struct qla_hw_data pointer ha in drivers/scsi/qla2xxx/qla_os.c. Each entry gives the source line number, the matching statement, and the enclosing function (declarations are tagged local or argument).
389 struct qla_hw_data *ha = vha->hw; in qla_init_base_qpair() local
391 rsp->qpair = ha->base_qpair; in qla_init_base_qpair()
393 ha->base_qpair->hw = ha; in qla_init_base_qpair()
394 ha->base_qpair->req = req; in qla_init_base_qpair()
395 ha->base_qpair->rsp = rsp; in qla_init_base_qpair()
396 ha->base_qpair->vha = vha; in qla_init_base_qpair()
397 ha->base_qpair->qp_lock_ptr = &ha->hardware_lock; in qla_init_base_qpair()
398 ha->base_qpair->use_shadow_reg = IS_SHADOW_REG_CAPABLE(ha) ? 1 : 0; in qla_init_base_qpair()
399 ha->base_qpair->msix = &ha->msix_entries[QLA_MSIX_RSP_Q]; in qla_init_base_qpair()
400 ha->base_qpair->srb_mempool = ha->srb_mempool; in qla_init_base_qpair()
401 INIT_LIST_HEAD(&ha->base_qpair->hints_list); in qla_init_base_qpair()
402 ha->base_qpair->enable_class_2 = ql2xenableclass2; in qla_init_base_qpair()
405 ha->base_qpair->pdev = ha->pdev; in qla_init_base_qpair()
407 if (IS_QLA27XX(ha) || IS_QLA83XX(ha) || IS_QLA28XX(ha)) in qla_init_base_qpair()
408 ha->base_qpair->reqq_start_iocbs = qla_83xx_start_iocbs; in qla_init_base_qpair()
411 static int qla2x00_alloc_queues(struct qla_hw_data *ha, struct req_que *req, in qla2x00_alloc_queues() argument
414 scsi_qla_host_t *vha = pci_get_drvdata(ha->pdev); in qla2x00_alloc_queues()
416 ha->req_q_map = kcalloc(ha->max_req_queues, sizeof(struct req_que *), in qla2x00_alloc_queues()
418 if (!ha->req_q_map) { in qla2x00_alloc_queues()
424 ha->rsp_q_map = kcalloc(ha->max_rsp_queues, sizeof(struct rsp_que *), in qla2x00_alloc_queues()
426 if (!ha->rsp_q_map) { in qla2x00_alloc_queues()
432 ha->base_qpair = kzalloc(sizeof(struct qla_qpair), GFP_KERNEL); in qla2x00_alloc_queues()
433 if (ha->base_qpair == NULL) { in qla2x00_alloc_queues()
441 if ((ql2xmqsupport || ql2xnvmeenable) && ha->max_qpairs) { in qla2x00_alloc_queues()
442 ha->queue_pair_map = kcalloc(ha->max_qpairs, sizeof(struct qla_qpair *), in qla2x00_alloc_queues()
444 if (!ha->queue_pair_map) { in qla2x00_alloc_queues()
455 ha->rsp_q_map[0] = rsp; in qla2x00_alloc_queues()
456 ha->req_q_map[0] = req; in qla2x00_alloc_queues()
457 set_bit(0, ha->rsp_qid_map); in qla2x00_alloc_queues()
458 set_bit(0, ha->req_qid_map); in qla2x00_alloc_queues()
462 kfree(ha->base_qpair); in qla2x00_alloc_queues()
463 ha->base_qpair = NULL; in qla2x00_alloc_queues()
465 kfree(ha->rsp_q_map); in qla2x00_alloc_queues()
466 ha->rsp_q_map = NULL; in qla2x00_alloc_queues()
468 kfree(ha->req_q_map); in qla2x00_alloc_queues()
469 ha->req_q_map = NULL; in qla2x00_alloc_queues()
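
The qla2x00_alloc_queues() references above follow a plain allocate-then-unwind shape: each failed allocation jumps to a label that frees everything allocated before it. A condensed sketch of that pattern, reusing the driver's field names and assuming its struct definitions (the queue_pair_map branch and logging are omitted):

	#include <linux/slab.h>
	#include <linux/errno.h>

	static int sketch_alloc_queues(struct qla_hw_data *ha)
	{
		ha->req_q_map = kcalloc(ha->max_req_queues,
					sizeof(struct req_que *), GFP_KERNEL);
		if (!ha->req_q_map)
			goto fail_req_map;

		ha->rsp_q_map = kcalloc(ha->max_rsp_queues,
					sizeof(struct rsp_que *), GFP_KERNEL);
		if (!ha->rsp_q_map)
			goto fail_rsp_map;

		ha->base_qpair = kzalloc(sizeof(*ha->base_qpair), GFP_KERNEL);
		if (!ha->base_qpair)
			goto fail_base_qpair;

		return 0;			/* QLA_SUCCESS in the driver */

	fail_base_qpair:
		kfree(ha->rsp_q_map);
		ha->rsp_q_map = NULL;
	fail_rsp_map:
		kfree(ha->req_q_map);
		ha->req_q_map = NULL;
	fail_req_map:
		return -ENOMEM;
	}
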
474 static void qla2x00_free_req_que(struct qla_hw_data *ha, struct req_que *req) in qla2x00_free_req_que() argument
476 if (IS_QLAFX00(ha)) { in qla2x00_free_req_que()
478 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_req_que()
482 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_req_que()
492 static void qla2x00_free_rsp_que(struct qla_hw_data *ha, struct rsp_que *rsp) in qla2x00_free_rsp_que() argument
494 if (IS_QLAFX00(ha)) { in qla2x00_free_rsp_que()
496 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_rsp_que()
500 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_rsp_que()
507 static void qla2x00_free_queues(struct qla_hw_data *ha) in qla2x00_free_queues() argument
514 if (ha->queue_pair_map) { in qla2x00_free_queues()
515 kfree(ha->queue_pair_map); in qla2x00_free_queues()
516 ha->queue_pair_map = NULL; in qla2x00_free_queues()
518 if (ha->base_qpair) { in qla2x00_free_queues()
519 kfree(ha->base_qpair); in qla2x00_free_queues()
520 ha->base_qpair = NULL; in qla2x00_free_queues()
523 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
524 for (cnt = 0; cnt < ha->max_req_queues; cnt++) { in qla2x00_free_queues()
525 if (!test_bit(cnt, ha->req_qid_map)) in qla2x00_free_queues()
528 req = ha->req_q_map[cnt]; in qla2x00_free_queues()
529 clear_bit(cnt, ha->req_qid_map); in qla2x00_free_queues()
530 ha->req_q_map[cnt] = NULL; in qla2x00_free_queues()
532 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
533 qla2x00_free_req_que(ha, req); in qla2x00_free_queues()
534 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
536 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
538 kfree(ha->req_q_map); in qla2x00_free_queues()
539 ha->req_q_map = NULL; in qla2x00_free_queues()
542 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
543 for (cnt = 0; cnt < ha->max_rsp_queues; cnt++) { in qla2x00_free_queues()
544 if (!test_bit(cnt, ha->rsp_qid_map)) in qla2x00_free_queues()
547 rsp = ha->rsp_q_map[cnt]; in qla2x00_free_queues()
548 clear_bit(cnt, ha->rsp_qid_map); in qla2x00_free_queues()
549 ha->rsp_q_map[cnt] = NULL; in qla2x00_free_queues()
550 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
551 qla2x00_free_rsp_que(ha, rsp); in qla2x00_free_queues()
552 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
554 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
556 kfree(ha->rsp_q_map); in qla2x00_free_queues()
557 ha->rsp_q_map = NULL; in qla2x00_free_queues()
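
qla2x00_free_queues() walks the request/response qid bitmaps under hardware_lock but drops the lock around each per-queue free, since the free helpers end in dma_free_coherent(). A condensed sketch of the request-queue half of that loop (driver types and the qla2x00_free_req_que() helper are assumed):

	#include <linux/spinlock.h>
	#include <linux/bitops.h>
	#include <linux/slab.h>

	static void sketch_free_req_queues(struct qla_hw_data *ha)
	{
		struct req_que *req;
		unsigned long flags;
		int cnt;

		spin_lock_irqsave(&ha->hardware_lock, flags);
		for (cnt = 0; cnt < ha->max_req_queues; cnt++) {
			if (!test_bit(cnt, ha->req_qid_map))
				continue;

			req = ha->req_q_map[cnt];
			clear_bit(cnt, ha->req_qid_map);
			ha->req_q_map[cnt] = NULL;

			/* free the DMA ring without holding the spinlock */
			spin_unlock_irqrestore(&ha->hardware_lock, flags);
			qla2x00_free_req_que(ha, req);
			spin_lock_irqsave(&ha->hardware_lock, flags);
		}
		spin_unlock_irqrestore(&ha->hardware_lock, flags);

		kfree(ha->req_q_map);
		ha->req_q_map = NULL;
	}
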
563 struct qla_hw_data *ha = vha->hw; in qla2x00_pci_info_str() local
569 pci_bus = (ha->pci_attr & (BIT_9 | BIT_10)) >> 9; in qla2x00_pci_info_str()
574 pci_bus = (ha->pci_attr & BIT_8) >> 8; in qla2x00_pci_info_str()
587 struct qla_hw_data *ha = vha->hw; in qla24xx_pci_info_str() local
590 if (pci_is_pcie(ha->pdev)) { in qla24xx_pci_info_str()
594 pcie_capability_read_dword(ha->pdev, PCI_EXP_LNKCAP, &lstat); in qla24xx_pci_info_str()
620 pci_bus = (ha->pci_attr & CSRX_PCIX_BUS_MODE_MASK) >> 8; in qla24xx_pci_info_str()
636 struct qla_hw_data *ha = vha->hw; in qla2x00_fw_version_str() local
638 snprintf(str, size, "%d.%02d.%02d ", ha->fw_major_version, in qla2x00_fw_version_str()
639 ha->fw_minor_version, ha->fw_subminor_version); in qla2x00_fw_version_str()
641 if (ha->fw_attributes & BIT_9) { in qla2x00_fw_version_str()
646 switch (ha->fw_attributes & 0xFF) { in qla2x00_fw_version_str()
660 sprintf(un_str, "(%x)", ha->fw_attributes); in qla2x00_fw_version_str()
664 if (ha->fw_attributes & 0x100) in qla2x00_fw_version_str()
673 struct qla_hw_data *ha = vha->hw; in qla24xx_fw_version_str() local
675 snprintf(str, size, "%d.%02d.%02d (%x)", ha->fw_major_version, in qla24xx_fw_version_str()
676 ha->fw_minor_version, ha->fw_subminor_version, ha->fw_attributes); in qla24xx_fw_version_str()
682 struct qla_hw_data *ha = sp->vha->hw; in qla2x00_sp_free_dma() local
691 dma_unmap_sg(&ha->pdev->dev, scsi_prot_sglist(cmd), in qla2x00_sp_free_dma()
698 qla2x00_clean_dsd_pool(ha, sp->u.scmd.crc_ctx); in qla2x00_sp_free_dma()
705 dma_pool_free(ha->dl_dma_pool, ctx0, ctx0->crc_ctx_dma); in qla2x00_sp_free_dma()
712 dma_pool_free(ha->fcp_cmnd_dma_pool, ctx1->fcp_cmnd, in qla2x00_sp_free_dma()
714 list_splice(&ctx1->dsd_list, &ha->gbl_dsd_list); in qla2x00_sp_free_dma()
715 ha->gbl_dsd_inuse -= ctx1->dsd_use_cnt; in qla2x00_sp_free_dma()
716 ha->gbl_dsd_avail += ctx1->dsd_use_cnt; in qla2x00_sp_free_dma()
717 mempool_free(ctx1, ha->ctx_mempool); in qla2x00_sp_free_dma()
737 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2xxx_qpair_sp_free_dma() local
745 dma_unmap_sg(&ha->pdev->dev, scsi_prot_sglist(cmd), in qla2xxx_qpair_sp_free_dma()
752 qla2x00_clean_dsd_pool(ha, sp->u.scmd.crc_ctx); in qla2xxx_qpair_sp_free_dma()
763 dma_pool_free(ha->dif_bundl_pool, dif_dsd->dsd_addr, in qla2xxx_qpair_sp_free_dma()
772 dma_pool_free(ha->dl_dma_pool, dif_dsd->dsd_addr, in qla2xxx_qpair_sp_free_dma()
795 dma_pool_free(ha->fcp_cmnd_dma_pool, ctx1->fcp_cmnd, in qla2xxx_qpair_sp_free_dma()
797 list_splice(&ctx1->dsd_list, &ha->gbl_dsd_list); in qla2xxx_qpair_sp_free_dma()
798 ha->gbl_dsd_inuse -= ctx1->dsd_use_cnt; in qla2xxx_qpair_sp_free_dma()
799 ha->gbl_dsd_avail += ctx1->dsd_use_cnt; in qla2xxx_qpair_sp_free_dma()
800 mempool_free(ctx1, ha->ctx_mempool); in qla2xxx_qpair_sp_free_dma()
807 dma_pool_free(ha->dl_dma_pool, ctx0, ctx0->crc_ctx_dma); in qla2xxx_qpair_sp_free_dma()
831 struct qla_hw_data *ha = vha->hw; in qla2xxx_queuecommand() local
832 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_queuecommand()
842 if (ha->mqenable) { in qla2xxx_queuecommand()
849 qpair = ha->queue_pair_map[hwq]; in qla2xxx_queuecommand()
855 if (ha->flags.eeh_busy) { in qla2xxx_queuecommand()
856 if (ha->flags.pci_channel_io_perm_failure) { in qla2xxx_queuecommand()
926 rval = ha->isp_ops->start_scsi(sp); in qla2xxx_queuecommand()
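
In qla2xxx_queuecommand() above, the mqenable path picks a queue pair by mapping the command's block-layer tag back to a hardware-queue index, which indexes ha->queue_pair_map. A minimal sketch, assuming the pre-5.16 scsi_cmnd::request field and the driver's types:

	#include <linux/blk-mq.h>
	#include <scsi/scsi_cmnd.h>

	static struct qla_qpair *sketch_pick_qpair(struct qla_hw_data *ha,
						   struct scsi_cmnd *cmd)
	{
		u32 tag;
		u16 hwq;

		if (!ha->mqenable)
			return ha->base_qpair;

		tag = blk_mq_unique_tag(cmd->request);
		hwq = blk_mq_unique_tag_to_hwq(tag);
		return ha->queue_pair_map[hwq];
	}
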
955 struct qla_hw_data *ha = vha->hw; in qla2xxx_mqueuecommand() local
956 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_mqueuecommand()
1007 rval = ha->isp_ops->start_scsi_mq(sp); in qla2xxx_mqueuecommand()
1054 struct qla_hw_data *ha = vha->hw; in qla2x00_eh_wait_on_command() local
1057 if (unlikely(pci_channel_offline(ha->pdev)) || ha->flags.eeh_busy) { in qla2x00_eh_wait_on_command()
1094 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_hba_online() local
1095 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_hba_online()
1101 ha->dpc_active) && time_before(jiffies, wait_online)) { in qla2x00_wait_for_hba_online()
1115 struct qla_hw_data *ha = vha->hw; in test_fcport_count() local
1119 spin_lock_irqsave(&ha->tgt.sess_lock, flags); in test_fcport_count()
1124 spin_unlock_irqrestore(&ha->tgt.sess_lock, flags); in test_fcport_count()
1164 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_hba_ready() local
1165 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_hba_ready()
1167 while ((qla2x00_reset_active(vha) || ha->dpc_active || in qla2x00_wait_for_hba_ready()
1168 ha->flags.mbox_busy) || in qla2x00_wait_for_hba_ready()
1182 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_chip_reset() local
1183 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_chip_reset()
1189 ha->dpc_active) && time_before(jiffies, wait_reset)) { in qla2x00_wait_for_chip_reset()
1194 ha->flags.chip_reset_done) in qla2x00_wait_for_chip_reset()
1197 if (ha->flags.chip_reset_done) in qla2x00_wait_for_chip_reset()
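
The qla2x00_wait_for_hba_online()/_ready()/_chip_reset() references share one polling shape: compute a jiffies deadline, then sleep-poll while reset or DPC activity persists. A condensed sketch (the real helpers also check loop_state, the online flag, and return QLA_SUCCESS/QLA_FUNCTION_FAILED; the timeout constant is illustrative):

	#include <linux/jiffies.h>
	#include <linux/delay.h>

	#define SKETCH_ONLINE_WAIT	30	/* seconds; illustrative */

	static int sketch_wait_for_hba_online(scsi_qla_host_t *vha)
	{
		struct qla_hw_data *ha = vha->hw;
		unsigned long deadline = jiffies + SKETCH_ONLINE_WAIT * HZ;

		while ((qla2x00_reset_active(vha) || ha->dpc_active) &&
		       time_before(jiffies, deadline))
			msleep(1000);

		return !qla2x00_reset_active(vha) && !ha->dpc_active;
	}
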
1222 uint32_t qla2x00_isp_reg_stat(struct qla_hw_data *ha) in qla2x00_isp_reg_stat() argument
1224 struct device_reg_24xx __iomem *reg = &ha->iobase->isp24; in qla2x00_isp_reg_stat()
1225 struct device_reg_82xx __iomem *reg82 = &ha->iobase->isp82; in qla2x00_isp_reg_stat()
1227 if (IS_P3P_TYPE(ha)) in qla2x00_isp_reg_stat()
1259 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_abort() local
1264 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_abort()
1297 rval = ha->isp_ops->abort_command(sp); in qla2xxx_eh_abort()
1303 ratov_j = ha->r_a_tov/10 * 4 * 1000; in qla2xxx_eh_abort()
1310 __func__, ha->r_a_tov/10); in qla2xxx_eh_abort()
1339 struct qla_hw_data *ha = vha->hw; in qla2x00_eh_wait_for_pending_commands() local
1346 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1374 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1376 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1378 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1452 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_device_reset() local
1454 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_device_reset()
1461 ha->isp_ops->lun_reset); in qla2xxx_eh_device_reset()
1468 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_target_reset() local
1470 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_target_reset()
1477 ha->isp_ops->target_reset); in qla2xxx_eh_target_reset()
1503 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_bus_reset() local
1505 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_bus_reset()
1576 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_host_reset() local
1580 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_eh_host_reset()
1582 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_host_reset()
1585 schedule_work(&ha->board_disable); in qla2xxx_eh_host_reset()
1599 if (qla2x00_reset_active(vha) || ha->optrom_state != QLA_SWAITING) in qla2xxx_eh_host_reset()
1614 if (ha->wq) in qla2xxx_eh_host_reset()
1615 flush_workqueue(ha->wq); in qla2xxx_eh_host_reset()
1618 if (ha->isp_ops->abort_isp(base_vha)) { in qla2xxx_eh_host_reset()
1660 struct qla_hw_data *ha = vha->hw; in qla2x00_loop_reset() local
1662 if (IS_QLAFX00(ha)) { in qla2x00_loop_reset()
1666 if (ql2xtargetreset == 1 && ha->flags.enable_target_reset) { in qla2x00_loop_reset()
1671 ret = ha->isp_ops->target_reset(fcport, 0, 0); in qla2x00_loop_reset()
1681 if (ha->flags.enable_lip_full_login && !IS_CNA_CAPABLE(ha)) { in qla2x00_loop_reset()
1692 if (ha->flags.enable_lip_reset) { in qla2x00_loop_reset()
1716 struct qla_hw_data *ha = vha->hw; in qla2x00_abort_srb() local
1730 (sp->type == SRB_SCSI_CMD && !ha->flags.eeh_busy && in qla2x00_abort_srb()
1732 !qla2x00_isp_reg_stat(ha))) { in qla2x00_abort_srb()
1741 rval = ha->isp_ops->abort_command(sp); in qla2x00_abort_srb()
1744 ratov_j = ha->r_a_tov/10 * 4 * 1000; in qla2x00_abort_srb()
1751 __func__, ha->r_a_tov/10); in qla2x00_abort_srb()
1780 struct qla_hw_data *ha = vha->hw; in __qla2x00_abort_all_cmds() local
1785 if (!ha->req_q_map) in __qla2x00_abort_all_cmds()
1827 struct qla_hw_data *ha = vha->hw; in qla2x00_abort_all_cmds() local
1830 if (!ha->base_qpair) in qla2x00_abort_all_cmds()
1832 __qla2x00_abort_all_cmds(ha->base_qpair, res); in qla2x00_abort_all_cmds()
1834 if (!ha->queue_pair_map) in qla2x00_abort_all_cmds()
1836 for (que = 0; que < ha->max_qpairs; que++) { in qla2x00_abort_all_cmds()
1837 if (!ha->queue_pair_map[que]) in qla2x00_abort_all_cmds()
1840 __qla2x00_abort_all_cmds(ha->queue_pair_map[que], res); in qla2x00_abort_all_cmds()
1884 qla2x00_config_dma_addressing(struct qla_hw_data *ha) in qla2x00_config_dma_addressing() argument
1887 ha->flags.enable_64bit_addressing = 0; in qla2x00_config_dma_addressing()
1889 if (!dma_set_mask(&ha->pdev->dev, DMA_BIT_MASK(64))) { in qla2x00_config_dma_addressing()
1891 if (MSD(dma_get_required_mask(&ha->pdev->dev)) && in qla2x00_config_dma_addressing()
1892 !dma_set_coherent_mask(&ha->pdev->dev, DMA_BIT_MASK(64))) { in qla2x00_config_dma_addressing()
1894 ha->flags.enable_64bit_addressing = 1; in qla2x00_config_dma_addressing()
1895 ha->isp_ops->calc_req_entries = qla2x00_calc_iocbs_64; in qla2x00_config_dma_addressing()
1896 ha->isp_ops->build_iocbs = qla2x00_build_scsi_iocbs_64; in qla2x00_config_dma_addressing()
1901 dma_set_mask(&ha->pdev->dev, DMA_BIT_MASK(32)); in qla2x00_config_dma_addressing()
1902 dma_set_coherent_mask(&ha->pdev->dev, DMA_BIT_MASK(32)); in qla2x00_config_dma_addressing()
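
qla2x00_config_dma_addressing() tries a 64-bit DMA mask first and falls back to 32-bit, recording the choice so the 64-bit IOCB builders get installed. The driver sets the streaming and coherent masks separately (with a dma_get_required_mask() check); the sketch below uses the combined dma_set_mask_and_coherent() helper for brevity:

	#include <linux/dma-mapping.h>

	static void sketch_config_dma(struct qla_hw_data *ha)
	{
		ha->flags.enable_64bit_addressing = 0;

		if (!dma_set_mask_and_coherent(&ha->pdev->dev, DMA_BIT_MASK(64))) {
			/* any mapping may be above 4GB; use 64-bit descriptors */
			ha->flags.enable_64bit_addressing = 1;
			ha->isp_ops->calc_req_entries = qla2x00_calc_iocbs_64;
			ha->isp_ops->build_iocbs = qla2x00_build_scsi_iocbs_64;
			return;
		}

		dma_set_mask_and_coherent(&ha->pdev->dev, DMA_BIT_MASK(32));
	}
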
1906 qla2x00_enable_intrs(struct qla_hw_data *ha) in qla2x00_enable_intrs() argument
1909 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2x00_enable_intrs()
1911 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_enable_intrs()
1912 ha->interrupts_on = 1; in qla2x00_enable_intrs()
1916 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_enable_intrs()
1921 qla2x00_disable_intrs(struct qla_hw_data *ha) in qla2x00_disable_intrs() argument
1924 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2x00_disable_intrs()
1926 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_disable_intrs()
1927 ha->interrupts_on = 0; in qla2x00_disable_intrs()
1931 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_disable_intrs()
1935 qla24xx_enable_intrs(struct qla_hw_data *ha) in qla24xx_enable_intrs() argument
1938 struct device_reg_24xx __iomem *reg = &ha->iobase->isp24; in qla24xx_enable_intrs()
1940 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_enable_intrs()
1941 ha->interrupts_on = 1; in qla24xx_enable_intrs()
1944 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_enable_intrs()
1948 qla24xx_disable_intrs(struct qla_hw_data *ha) in qla24xx_disable_intrs() argument
1951 struct device_reg_24xx __iomem *reg = &ha->iobase->isp24; in qla24xx_disable_intrs()
1953 if (IS_NOPOLLING_TYPE(ha)) in qla24xx_disable_intrs()
1955 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_disable_intrs()
1956 ha->interrupts_on = 0; in qla24xx_disable_intrs()
1959 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_disable_intrs()
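
The four *_intrs() helpers above share one pattern: the interrupts_on flag and the chip's interrupt-control register are updated together under hardware_lock. A minimal sketch with the chip-specific register write left as a comment, since the accessor names and ICR bits differ between ISP2xxx and ISP24xx:

	#include <linux/spinlock.h>

	static void sketch_enable_intrs(struct qla_hw_data *ha)
	{
		unsigned long flags;

		spin_lock_irqsave(&ha->hardware_lock, flags);
		ha->interrupts_on = 1;
		/* chip-specific: set the "enable RISC interrupt" bit in the
		 * interrupt control register, then read it back to flush
		 * the posted write */
		spin_unlock_irqrestore(&ha->hardware_lock, flags);
	}
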
1963 qla2x00_iospace_config(struct qla_hw_data *ha) in qla2x00_iospace_config() argument
1968 if (pci_request_selected_regions(ha->pdev, ha->bars, in qla2x00_iospace_config()
1970 ql_log_pci(ql_log_fatal, ha->pdev, 0x0011, in qla2x00_iospace_config()
1972 pci_name(ha->pdev)); in qla2x00_iospace_config()
1975 if (!(ha->bars & 1)) in qla2x00_iospace_config()
1979 pio = pci_resource_start(ha->pdev, 0); in qla2x00_iospace_config()
1980 if (pci_resource_flags(ha->pdev, 0) & IORESOURCE_IO) { in qla2x00_iospace_config()
1981 if (pci_resource_len(ha->pdev, 0) < MIN_IOBASE_LEN) { in qla2x00_iospace_config()
1982 ql_log_pci(ql_log_warn, ha->pdev, 0x0012, in qla2x00_iospace_config()
1984 pci_name(ha->pdev)); in qla2x00_iospace_config()
1988 ql_log_pci(ql_log_warn, ha->pdev, 0x0013, in qla2x00_iospace_config()
1990 pci_name(ha->pdev)); in qla2x00_iospace_config()
1993 ha->pio_address = pio; in qla2x00_iospace_config()
1994 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0014, in qla2x00_iospace_config()
1996 (unsigned long long)ha->pio_address); in qla2x00_iospace_config()
2000 if (!(pci_resource_flags(ha->pdev, 1) & IORESOURCE_MEM)) { in qla2x00_iospace_config()
2001 ql_log_pci(ql_log_fatal, ha->pdev, 0x0015, in qla2x00_iospace_config()
2003 pci_name(ha->pdev)); in qla2x00_iospace_config()
2006 if (pci_resource_len(ha->pdev, 1) < MIN_IOBASE_LEN) { in qla2x00_iospace_config()
2007 ql_log_pci(ql_log_fatal, ha->pdev, 0x0016, in qla2x00_iospace_config()
2009 pci_name(ha->pdev)); in qla2x00_iospace_config()
2013 ha->iobase = ioremap(pci_resource_start(ha->pdev, 1), MIN_IOBASE_LEN); in qla2x00_iospace_config()
2014 if (!ha->iobase) { in qla2x00_iospace_config()
2015 ql_log_pci(ql_log_fatal, ha->pdev, 0x0017, in qla2x00_iospace_config()
2017 pci_name(ha->pdev)); in qla2x00_iospace_config()
2022 ha->max_req_queues = ha->max_rsp_queues = 1; in qla2x00_iospace_config()
2023 ha->msix_count = QLA_BASE_VECTORS; in qla2x00_iospace_config()
2026 if (!(ha->fw_attributes & BIT_6)) in qla2x00_iospace_config()
2030 (!IS_QLA25XX(ha) && !IS_QLA81XX(ha))) in qla2x00_iospace_config()
2033 ha->mqiobase = ioremap(pci_resource_start(ha->pdev, 3), in qla2x00_iospace_config()
2034 pci_resource_len(ha->pdev, 3)); in qla2x00_iospace_config()
2035 if (ha->mqiobase) { in qla2x00_iospace_config()
2036 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0018, in qla2x00_iospace_config()
2037 "MQIO Base=%p.\n", ha->mqiobase); in qla2x00_iospace_config()
2039 pci_read_config_word(ha->pdev, QLA_PCI_MSIX_CONTROL, &msix); in qla2x00_iospace_config()
2040 ha->msix_count = msix + 1; in qla2x00_iospace_config()
2043 ha->max_req_queues = ha->msix_count - 1; in qla2x00_iospace_config()
2044 ha->max_rsp_queues = ha->max_req_queues; in qla2x00_iospace_config()
2046 ha->max_qpairs = ha->max_rsp_queues - 1; in qla2x00_iospace_config()
2047 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0188, in qla2x00_iospace_config()
2048 "Max no of queues pairs: %d.\n", ha->max_qpairs); in qla2x00_iospace_config()
2050 ql_log_pci(ql_log_info, ha->pdev, 0x001a, in qla2x00_iospace_config()
2051 "MSI-X vector count: %d.\n", ha->msix_count); in qla2x00_iospace_config()
2053 ql_log_pci(ql_log_info, ha->pdev, 0x001b, in qla2x00_iospace_config()
2057 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x001c, in qla2x00_iospace_config()
2058 "MSIX Count: %d.\n", ha->msix_count); in qla2x00_iospace_config()
2067 qla83xx_iospace_config(struct qla_hw_data *ha) in qla83xx_iospace_config() argument
2071 if (pci_request_selected_regions(ha->pdev, ha->bars, in qla83xx_iospace_config()
2073 ql_log_pci(ql_log_fatal, ha->pdev, 0x0117, in qla83xx_iospace_config()
2075 pci_name(ha->pdev)); in qla83xx_iospace_config()
2081 if (!(pci_resource_flags(ha->pdev, 0) & IORESOURCE_MEM)) { in qla83xx_iospace_config()
2082 ql_log_pci(ql_log_warn, ha->pdev, 0x0118, in qla83xx_iospace_config()
2084 pci_name(ha->pdev)); in qla83xx_iospace_config()
2087 if (pci_resource_len(ha->pdev, 0) < MIN_IOBASE_LEN) { in qla83xx_iospace_config()
2088 ql_log_pci(ql_log_warn, ha->pdev, 0x0119, in qla83xx_iospace_config()
2090 pci_name(ha->pdev)); in qla83xx_iospace_config()
2094 ha->iobase = ioremap(pci_resource_start(ha->pdev, 0), MIN_IOBASE_LEN); in qla83xx_iospace_config()
2095 if (!ha->iobase) { in qla83xx_iospace_config()
2096 ql_log_pci(ql_log_fatal, ha->pdev, 0x011a, in qla83xx_iospace_config()
2098 pci_name(ha->pdev)); in qla83xx_iospace_config()
2105 ha->max_req_queues = ha->max_rsp_queues = 1; in qla83xx_iospace_config()
2106 ha->msix_count = QLA_BASE_VECTORS; in qla83xx_iospace_config()
2107 ha->mqiobase = ioremap(pci_resource_start(ha->pdev, 4), in qla83xx_iospace_config()
2108 pci_resource_len(ha->pdev, 4)); in qla83xx_iospace_config()
2110 if (!ha->mqiobase) { in qla83xx_iospace_config()
2111 ql_log_pci(ql_log_fatal, ha->pdev, 0x011d, in qla83xx_iospace_config()
2116 ha->msixbase = ioremap(pci_resource_start(ha->pdev, 2), in qla83xx_iospace_config()
2117 pci_resource_len(ha->pdev, 2)); in qla83xx_iospace_config()
2118 if (ha->msixbase) { in qla83xx_iospace_config()
2120 pci_read_config_word(ha->pdev, in qla83xx_iospace_config()
2122 ha->msix_count = (msix & PCI_MSIX_FLAGS_QSIZE) + 1; in qla83xx_iospace_config()
2129 ha->max_req_queues = ha->msix_count - 1; in qla83xx_iospace_config()
2133 ha->max_req_queues--; in qla83xx_iospace_config()
2135 ha->max_rsp_queues = ha->max_req_queues; in qla83xx_iospace_config()
2139 ha->max_qpairs = ha->max_req_queues - 1; in qla83xx_iospace_config()
2140 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x00e3, in qla83xx_iospace_config()
2141 "Max no of queues pairs: %d.\n", ha->max_qpairs); in qla83xx_iospace_config()
2143 ql_log_pci(ql_log_info, ha->pdev, 0x011c, in qla83xx_iospace_config()
2144 "MSI-X vector count: %d.\n", ha->msix_count); in qla83xx_iospace_config()
2146 ql_log_pci(ql_log_info, ha->pdev, 0x011e, in qla83xx_iospace_config()
2150 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011f, in qla83xx_iospace_config()
2151 "MSIX Count: %d.\n", ha->msix_count); in qla83xx_iospace_config()
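
Both iospace_config() variants follow the same shape: claim the BARs, validate and ioremap the register window, then derive the MSI-X vector count from the PCI MSI-X table-size field (stored N-1 encoded), which in turn bounds max_req_queues/max_rsp_queues. A condensed sketch; the MSI-X config offset and region name here are stand-ins for the driver's QLA_PCI_MSIX_CONTROL constant and driver name, and MIN_IOBASE_LEN is the driver's own define:

	#include <linux/pci.h>
	#include <linux/io.h>

	static int sketch_iospace_config(struct qla_hw_data *ha)
	{
		u16 msix;

		if (pci_request_selected_regions(ha->pdev, ha->bars, "qla2xxx"))
			return -ENODEV;

		if (!(pci_resource_flags(ha->pdev, 0) & IORESOURCE_MEM) ||
		    pci_resource_len(ha->pdev, 0) < MIN_IOBASE_LEN)
			goto out_release;

		ha->iobase = ioremap(pci_resource_start(ha->pdev, 0),
				     MIN_IOBASE_LEN);
		if (!ha->iobase)
			goto out_release;

		/* table size is encoded as N-1 in the MSI-X message control word */
		pci_read_config_word(ha->pdev,
				     ha->pdev->msix_cap + PCI_MSIX_FLAGS, &msix);
		ha->msix_count = (msix & PCI_MSIX_FLAGS_QSIZE) + 1;
		ha->max_req_queues = ha->max_rsp_queues = ha->msix_count - 1;
		return 0;

	out_release:
		pci_release_selected_regions(ha->pdev, ha->bars);
		return -ENODEV;
	}
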
2550 qla2x00_set_isp_flags(struct qla_hw_data *ha) in qla2x00_set_isp_flags() argument
2552 ha->device_type = DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2553 switch (ha->pdev->device) { in qla2x00_set_isp_flags()
2555 ha->isp_type |= DT_ISP2100; in qla2x00_set_isp_flags()
2556 ha->device_type &= ~DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2557 ha->fw_srisc_address = RISC_START_ADDRESS_2100; in qla2x00_set_isp_flags()
2560 ha->isp_type |= DT_ISP2200; in qla2x00_set_isp_flags()
2561 ha->device_type &= ~DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2562 ha->fw_srisc_address = RISC_START_ADDRESS_2100; in qla2x00_set_isp_flags()
2565 ha->isp_type |= DT_ISP2300; in qla2x00_set_isp_flags()
2566 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2567 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2570 ha->isp_type |= DT_ISP2312; in qla2x00_set_isp_flags()
2571 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2572 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2575 ha->isp_type |= DT_ISP2322; in qla2x00_set_isp_flags()
2576 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2577 if (ha->pdev->subsystem_vendor == 0x1028 && in qla2x00_set_isp_flags()
2578 ha->pdev->subsystem_device == 0x0170) in qla2x00_set_isp_flags()
2579 ha->device_type |= DT_OEM_001; in qla2x00_set_isp_flags()
2580 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2583 ha->isp_type |= DT_ISP6312; in qla2x00_set_isp_flags()
2584 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2587 ha->isp_type |= DT_ISP6322; in qla2x00_set_isp_flags()
2588 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2591 ha->isp_type |= DT_ISP2422; in qla2x00_set_isp_flags()
2592 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2593 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2594 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2595 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2598 ha->isp_type |= DT_ISP2432; in qla2x00_set_isp_flags()
2599 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2600 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2601 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2602 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2605 ha->isp_type |= DT_ISP8432; in qla2x00_set_isp_flags()
2606 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2607 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2608 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2609 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2612 ha->isp_type |= DT_ISP5422; in qla2x00_set_isp_flags()
2613 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2614 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2617 ha->isp_type |= DT_ISP5432; in qla2x00_set_isp_flags()
2618 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2619 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2622 ha->isp_type |= DT_ISP2532; in qla2x00_set_isp_flags()
2623 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2624 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2625 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2626 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2629 ha->isp_type |= DT_ISP8001; in qla2x00_set_isp_flags()
2630 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2631 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2632 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2633 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2636 ha->isp_type |= DT_ISP8021; in qla2x00_set_isp_flags()
2637 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2638 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2639 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2641 qla82xx_init_flags(ha); in qla2x00_set_isp_flags()
2644 ha->isp_type |= DT_ISP8044; in qla2x00_set_isp_flags()
2645 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2646 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2647 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2649 qla82xx_init_flags(ha); in qla2x00_set_isp_flags()
2652 ha->isp_type |= DT_ISP2031; in qla2x00_set_isp_flags()
2653 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2654 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2655 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2656 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2657 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2660 ha->isp_type |= DT_ISP8031; in qla2x00_set_isp_flags()
2661 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2662 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2663 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2664 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2665 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2668 ha->isp_type |= DT_ISPFX00; in qla2x00_set_isp_flags()
2671 ha->isp_type |= DT_ISP2071; in qla2x00_set_isp_flags()
2672 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2673 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2674 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2675 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2676 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2679 ha->isp_type |= DT_ISP2271; in qla2x00_set_isp_flags()
2680 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2681 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2682 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2683 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2684 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2687 ha->isp_type |= DT_ISP2261; in qla2x00_set_isp_flags()
2688 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2689 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2690 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2691 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2692 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2696 ha->isp_type |= DT_ISP2081; in qla2x00_set_isp_flags()
2697 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2698 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2699 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2700 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2701 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2705 ha->isp_type |= DT_ISP2281; in qla2x00_set_isp_flags()
2706 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2707 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2708 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2709 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2710 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2714 if (IS_QLA82XX(ha)) in qla2x00_set_isp_flags()
2715 ha->port_no = ha->portnum & 1; in qla2x00_set_isp_flags()
2718 pci_read_config_byte(ha->pdev, PCI_INTERRUPT_PIN, &ha->port_no); in qla2x00_set_isp_flags()
2719 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || in qla2x00_set_isp_flags()
2720 IS_QLA27XX(ha) || IS_QLA28XX(ha)) in qla2x00_set_isp_flags()
2721 ha->port_no--; in qla2x00_set_isp_flags()
2723 ha->port_no = !(ha->port_no & 1); in qla2x00_set_isp_flags()
2726 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x000b, in qla2x00_set_isp_flags()
2728 ha->device_type, ha->port_no, ha->fw_srisc_address); in qla2x00_set_isp_flags()
2764 struct qla_hw_data *ha = vha->hw; in qla2x00_iocb_work_fn() local
2765 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_iocb_work_fn()
2791 struct qla_hw_data *ha; in qla2x00_probe_one() local
2841 ha = kzalloc(sizeof(struct qla_hw_data), GFP_KERNEL); in qla2x00_probe_one()
2842 if (!ha) { in qla2x00_probe_one()
2848 "Memory allocated for ha=%p.\n", ha); in qla2x00_probe_one()
2849 ha->pdev = pdev; in qla2x00_probe_one()
2850 INIT_LIST_HEAD(&ha->tgt.q_full_list); in qla2x00_probe_one()
2851 spin_lock_init(&ha->tgt.q_full_lock); in qla2x00_probe_one()
2852 spin_lock_init(&ha->tgt.sess_lock); in qla2x00_probe_one()
2853 spin_lock_init(&ha->tgt.atio_lock); in qla2x00_probe_one()
2855 atomic_set(&ha->nvme_active_aen_cnt, 0); in qla2x00_probe_one()
2858 ha->bars = bars; in qla2x00_probe_one()
2859 ha->mem_only = mem_only; in qla2x00_probe_one()
2860 spin_lock_init(&ha->hardware_lock); in qla2x00_probe_one()
2861 spin_lock_init(&ha->vport_slock); in qla2x00_probe_one()
2862 mutex_init(&ha->selflogin_lock); in qla2x00_probe_one()
2863 mutex_init(&ha->optrom_mutex); in qla2x00_probe_one()
2866 qla2x00_set_isp_flags(ha); in qla2x00_probe_one()
2869 if (IS_QLA24XX(ha) || IS_QLA25XX(ha) || IS_QLA81XX(ha) || in qla2x00_probe_one()
2870 IS_QLA83XX(ha) || IS_QLA27XX(ha) || IS_QLA28XX(ha)) in qla2x00_probe_one()
2873 ha->prev_topology = 0; in qla2x00_probe_one()
2874 ha->init_cb_size = sizeof(init_cb_t); in qla2x00_probe_one()
2875 ha->link_data_rate = PORT_SPEED_UNKNOWN; in qla2x00_probe_one()
2876 ha->optrom_size = OPTROM_SIZE_2300; in qla2x00_probe_one()
2877 ha->max_exchg = FW_MAX_EXCHANGES_CNT; in qla2x00_probe_one()
2878 atomic_set(&ha->num_pend_mbx_stage1, 0); in qla2x00_probe_one()
2879 atomic_set(&ha->num_pend_mbx_stage2, 0); in qla2x00_probe_one()
2880 atomic_set(&ha->num_pend_mbx_stage3, 0); in qla2x00_probe_one()
2881 atomic_set(&ha->zio_threshold, DEFAULT_ZIO_THRESHOLD); in qla2x00_probe_one()
2882 ha->last_zio_threshold = DEFAULT_ZIO_THRESHOLD; in qla2x00_probe_one()
2885 if (IS_QLA2100(ha)) { in qla2x00_probe_one()
2886 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2887 ha->mbx_count = MAILBOX_REGISTER_COUNT_2100; in qla2x00_probe_one()
2890 ha->max_loop_id = SNS_LAST_LOOP_ID_2100; in qla2x00_probe_one()
2891 ha->gid_list_info_size = 4; in qla2x00_probe_one()
2892 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2893 ha->flash_data_off = ~0; in qla2x00_probe_one()
2894 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2895 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2896 ha->isp_ops = &qla2100_isp_ops; in qla2x00_probe_one()
2897 } else if (IS_QLA2200(ha)) { in qla2x00_probe_one()
2898 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2899 ha->mbx_count = MAILBOX_REGISTER_COUNT_2200; in qla2x00_probe_one()
2902 ha->max_loop_id = SNS_LAST_LOOP_ID_2100; in qla2x00_probe_one()
2903 ha->gid_list_info_size = 4; in qla2x00_probe_one()
2904 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2905 ha->flash_data_off = ~0; in qla2x00_probe_one()
2906 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2907 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2908 ha->isp_ops = &qla2100_isp_ops; in qla2x00_probe_one()
2909 } else if (IS_QLA23XX(ha)) { in qla2x00_probe_one()
2910 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2911 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2914 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2915 ha->gid_list_info_size = 6; in qla2x00_probe_one()
2916 if (IS_QLA2322(ha) || IS_QLA6322(ha)) in qla2x00_probe_one()
2917 ha->optrom_size = OPTROM_SIZE_2322; in qla2x00_probe_one()
2918 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2919 ha->flash_data_off = ~0; in qla2x00_probe_one()
2920 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2921 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2922 ha->isp_ops = &qla2300_isp_ops; in qla2x00_probe_one()
2923 } else if (IS_QLA24XX_TYPE(ha)) { in qla2x00_probe_one()
2924 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2925 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2928 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
2929 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2930 ha->init_cb_size = sizeof(struct mid_init_cb_24xx); in qla2x00_probe_one()
2931 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2932 ha->optrom_size = OPTROM_SIZE_24XX; in qla2x00_probe_one()
2933 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA24XX; in qla2x00_probe_one()
2934 ha->isp_ops = &qla24xx_isp_ops; in qla2x00_probe_one()
2935 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2936 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2937 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
2938 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
2939 } else if (IS_QLA25XX(ha)) { in qla2x00_probe_one()
2940 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2941 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2944 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
2945 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2946 ha->init_cb_size = sizeof(struct mid_init_cb_24xx); in qla2x00_probe_one()
2947 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2948 ha->optrom_size = OPTROM_SIZE_25XX; in qla2x00_probe_one()
2949 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
2950 ha->isp_ops = &qla25xx_isp_ops; in qla2x00_probe_one()
2951 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2952 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2953 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
2954 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
2955 } else if (IS_QLA81XX(ha)) { in qla2x00_probe_one()
2956 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2957 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2960 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
2961 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2962 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
2963 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2964 ha->optrom_size = OPTROM_SIZE_81XX; in qla2x00_probe_one()
2965 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
2966 ha->isp_ops = &qla81xx_isp_ops; in qla2x00_probe_one()
2967 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
2968 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
2969 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2970 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2971 } else if (IS_QLA82XX(ha)) { in qla2x00_probe_one()
2972 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2973 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2976 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2977 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
2978 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2979 ha->optrom_size = OPTROM_SIZE_82XX; in qla2x00_probe_one()
2980 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
2981 ha->isp_ops = &qla82xx_isp_ops; in qla2x00_probe_one()
2982 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2983 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2984 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
2985 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
2986 } else if (IS_QLA8044(ha)) { in qla2x00_probe_one()
2987 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2988 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2991 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2992 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
2993 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2994 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
2995 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
2996 ha->isp_ops = &qla8044_isp_ops; in qla2x00_probe_one()
2997 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2998 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2999 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
3000 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
3001 } else if (IS_QLA83XX(ha)) { in qla2x00_probe_one()
3002 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3003 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3004 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3007 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3008 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3009 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3010 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3011 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
3012 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3013 ha->isp_ops = &qla83xx_isp_ops; in qla2x00_probe_one()
3014 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
3015 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
3016 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3017 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3018 } else if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3019 ha->max_fibre_devices = MAX_FIBRE_DEVICES_FX00; in qla2x00_probe_one()
3020 ha->mbx_count = MAILBOX_REGISTER_COUNT_FX00; in qla2x00_probe_one()
3021 ha->aen_mbx_count = AEN_MAILBOX_REGISTER_COUNT_FX00; in qla2x00_probe_one()
3024 ha->isp_ops = &qlafx00_isp_ops; in qla2x00_probe_one()
3025 ha->port_down_retry_count = 30; /* default value */ in qla2x00_probe_one()
3026 ha->mr.fw_hbt_cnt = QLAFX00_HEARTBEAT_INTERVAL; in qla2x00_probe_one()
3027 ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL; in qla2x00_probe_one()
3028 ha->mr.fw_critemp_timer_tick = QLAFX00_CRITEMP_INTERVAL; in qla2x00_probe_one()
3029 ha->mr.fw_hbt_en = 1; in qla2x00_probe_one()
3030 ha->mr.host_info_resend = false; in qla2x00_probe_one()
3031 ha->mr.hinfo_resend_timer_tick = QLAFX00_HINFO_RESEND_INTERVAL; in qla2x00_probe_one()
3032 } else if (IS_QLA27XX(ha)) { in qla2x00_probe_one()
3033 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3034 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3035 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3038 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3039 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3040 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3041 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3042 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
3043 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3044 ha->isp_ops = &qla27xx_isp_ops; in qla2x00_probe_one()
3045 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
3046 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
3047 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3048 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3049 } else if (IS_QLA28XX(ha)) { in qla2x00_probe_one()
3050 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3051 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3052 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3055 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3056 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3057 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3058 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3059 ha->optrom_size = OPTROM_SIZE_28XX; in qla2x00_probe_one()
3060 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3061 ha->isp_ops = &qla27xx_isp_ops; in qla2x00_probe_one()
3062 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_28XX; in qla2x00_probe_one()
3063 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_28XX; in qla2x00_probe_one()
3064 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3065 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3073 ha->mbx_count, req_length, rsp_length, ha->max_loop_id, in qla2x00_probe_one()
3074 ha->init_cb_size, ha->gid_list_info_size, ha->optrom_size, in qla2x00_probe_one()
3075 ha->nvram_npiv_size, ha->max_fibre_devices); in qla2x00_probe_one()
3079 ha->isp_ops, ha->flash_conf_off, ha->flash_data_off, in qla2x00_probe_one()
3080 ha->nvram_conf_off, ha->nvram_data_off); in qla2x00_probe_one()
3083 ret = ha->isp_ops->iospace_config(ha); in qla2x00_probe_one()
3089 pdev->device, pdev->irq, ha->iobase); in qla2x00_probe_one()
3090 mutex_init(&ha->vport_lock); in qla2x00_probe_one()
3091 mutex_init(&ha->mq_lock); in qla2x00_probe_one()
3092 init_completion(&ha->mbx_cmd_comp); in qla2x00_probe_one()
3093 complete(&ha->mbx_cmd_comp); in qla2x00_probe_one()
3094 init_completion(&ha->mbx_intr_comp); in qla2x00_probe_one()
3095 init_completion(&ha->dcbx_comp); in qla2x00_probe_one()
3096 init_completion(&ha->lb_portup_comp); in qla2x00_probe_one()
3098 set_bit(0, (unsigned long *) ha->vp_idx_map); in qla2x00_probe_one()
3100 qla2x00_config_dma_addressing(ha); in qla2x00_probe_one()
3103 ha->flags.enable_64bit_addressing ? "enable" : in qla2x00_probe_one()
3105 ret = qla2x00_mem_alloc(ha, req_length, rsp_length, &req, &rsp); in qla2x00_probe_one()
3118 base_vha = qla2x00_create_host(sht, ha); in qla2x00_probe_one()
3129 if (IS_QLA2XXX_MIDTYPE(ha)) in qla2x00_probe_one()
3137 ha->mr.fcport.vha = base_vha; in qla2x00_probe_one()
3138 ha->mr.fcport.port_type = FCT_UNKNOWN; in qla2x00_probe_one()
3139 ha->mr.fcport.loop_id = FC_NO_LOOP_ID; in qla2x00_probe_one()
3140 qla2x00_set_fcport_state(&ha->mr.fcport, FCS_UNCONFIGURED); in qla2x00_probe_one()
3141 ha->mr.fcport.supported_classes = FC_COS_UNSPECIFIED; in qla2x00_probe_one()
3142 ha->mr.fcport.scan_state = 1; in qla2x00_probe_one()
3145 if (!IS_FWI2_CAPABLE(ha)) { in qla2x00_probe_one()
3146 if (IS_QLA2100(ha)) in qla2x00_probe_one()
3149 if (!IS_QLA82XX(ha)) in qla2x00_probe_one()
3152 host->max_id = ha->max_fibre_devices; in qla2x00_probe_one()
3155 if (IS_T10_PI_CAPABLE(ha) && ql2xenabledif) in qla2x00_probe_one()
3161 if (!IS_QLAFX00(ha) && !IS_FWI2_CAPABLE(ha) && in qla2x00_probe_one()
3180 ret = qla2x00_request_irqs(ha, rsp); in qla2x00_probe_one()
3185 ret = qla2x00_alloc_queues(ha, req, rsp); in qla2x00_probe_one()
3194 if (ha->mqenable) { in qla2x00_probe_one()
3196 host->nr_hw_queues = ha->max_qpairs; in qla2x00_probe_one()
3202 host->nr_hw_queues = ha->max_qpairs; in qla2x00_probe_one()
3212 qlt_probe_one_stage1(base_vha, ha); in qla2x00_probe_one()
3220 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3221 ha->rsp_q_map[0] = rsp; in qla2x00_probe_one()
3222 ha->req_q_map[0] = req; in qla2x00_probe_one()
3223 set_bit(0, ha->req_qid_map); in qla2x00_probe_one()
3224 set_bit(0, ha->rsp_qid_map); in qla2x00_probe_one()
3228 req->req_q_in = &ha->iobase->isp24.req_q_in; in qla2x00_probe_one()
3229 req->req_q_out = &ha->iobase->isp24.req_q_out; in qla2x00_probe_one()
3230 rsp->rsp_q_in = &ha->iobase->isp24.rsp_q_in; in qla2x00_probe_one()
3231 rsp->rsp_q_out = &ha->iobase->isp24.rsp_q_out; in qla2x00_probe_one()
3232 if (ha->mqenable || IS_QLA83XX(ha) || IS_QLA27XX(ha) || in qla2x00_probe_one()
3233 IS_QLA28XX(ha)) { in qla2x00_probe_one()
3234 req->req_q_in = &ha->mqiobase->isp25mq.req_q_in; in qla2x00_probe_one()
3235 req->req_q_out = &ha->mqiobase->isp25mq.req_q_out; in qla2x00_probe_one()
3236 rsp->rsp_q_in = &ha->mqiobase->isp25mq.rsp_q_in; in qla2x00_probe_one()
3237 rsp->rsp_q_out = &ha->mqiobase->isp25mq.rsp_q_out; in qla2x00_probe_one()
3240 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3241 req->req_q_in = &ha->iobase->ispfx00.req_q_in; in qla2x00_probe_one()
3242 req->req_q_out = &ha->iobase->ispfx00.req_q_out; in qla2x00_probe_one()
3243 rsp->rsp_q_in = &ha->iobase->ispfx00.rsp_q_in; in qla2x00_probe_one()
3244 rsp->rsp_q_out = &ha->iobase->ispfx00.rsp_q_out; in qla2x00_probe_one()
3247 if (IS_P3P_TYPE(ha)) { in qla2x00_probe_one()
3248 req->req_q_out = &ha->iobase->isp82.req_q_out[0]; in qla2x00_probe_one()
3249 rsp->rsp_q_in = &ha->iobase->isp82.rsp_q_in[0]; in qla2x00_probe_one()
3250 rsp->rsp_q_out = &ha->iobase->isp82.rsp_q_out[0]; in qla2x00_probe_one()
3255 ha->rsp_q_map, ha->req_q_map, rsp->req, req->rsp); in qla2x00_probe_one()
3263 ha->rsp_q_map, ha->req_q_map, rsp->req, req->rsp); in qla2x00_probe_one()
3268 ha->wq = alloc_workqueue("qla2xxx_wq", 0, 0); in qla2x00_probe_one()
3269 if (unlikely(!ha->wq)) { in qla2x00_probe_one()
3274 if (ha->isp_ops->initialize_adapter(base_vha)) { in qla2x00_probe_one()
3279 if (IS_QLA82XX(ha)) { in qla2x00_probe_one()
3280 qla82xx_idc_lock(ha); in qla2x00_probe_one()
3281 qla82xx_wr_32(ha, QLA82XX_CRB_DEV_STATE, in qla2x00_probe_one()
3283 qla82xx_idc_unlock(ha); in qla2x00_probe_one()
3286 } else if (IS_QLA8044(ha)) { in qla2x00_probe_one()
3287 qla8044_idc_lock(ha); in qla2x00_probe_one()
3291 qla8044_idc_unlock(ha); in qla2x00_probe_one()
3300 if (IS_QLAFX00(ha)) in qla2x00_probe_one()
3310 if (ha->mqenable) { in qla2x00_probe_one()
3320 for (i = 0; i < ha->max_qpairs; i++) in qla2x00_probe_one()
3325 if (ha->flags.running_gold_fw) in qla2x00_probe_one()
3331 ha->dpc_thread = kthread_create(qla2x00_do_dpc, ha, in qla2x00_probe_one()
3333 if (IS_ERR(ha->dpc_thread)) { in qla2x00_probe_one()
3336 ret = PTR_ERR(ha->dpc_thread); in qla2x00_probe_one()
3337 ha->dpc_thread = NULL; in qla2x00_probe_one()
3351 INIT_WORK(&ha->board_disable, qla2x00_disable_board_on_pci_error); in qla2x00_probe_one()
3353 if (IS_QLA8031(ha) || IS_MCTP_CAPABLE(ha)) { in qla2x00_probe_one()
3355 ha->dpc_lp_wq = create_singlethread_workqueue(wq_name); in qla2x00_probe_one()
3356 INIT_WORK(&ha->idc_aen, qla83xx_service_idc_aen); in qla2x00_probe_one()
3359 ha->dpc_hp_wq = create_singlethread_workqueue(wq_name); in qla2x00_probe_one()
3360 INIT_WORK(&ha->nic_core_reset, qla83xx_nic_core_reset_work); in qla2x00_probe_one()
3361 INIT_WORK(&ha->idc_state_handler, in qla2x00_probe_one()
3363 INIT_WORK(&ha->nic_core_unrecoverable, in qla2x00_probe_one()
3368 list_add_tail(&base_vha->list, &ha->vp_list); in qla2x00_probe_one()
3369 base_vha->host->irq = ha->pdev->irq; in qla2x00_probe_one()
3378 ha); in qla2x00_probe_one()
3380 if (IS_T10_PI_CAPABLE(ha) && ql2xenabledif) { in qla2x00_probe_one()
3381 if (ha->fw_attributes & BIT_4) { in qla2x00_probe_one()
3402 if (IS_PI_IPGUARD_CAPABLE(ha) && in qla2x00_probe_one()
3403 (ql2xenabledif > 1 || IS_PI_DIFB_DIX0_CAPABLE(ha))) in qla2x00_probe_one()
3414 ha->isp_ops->enable_intrs(ha); in qla2x00_probe_one()
3416 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3419 host->sg_tablesize = (ha->mr.extended_io_enabled) ? in qla2x00_probe_one()
3429 ha->prev_minidump_failed = 0; in qla2x00_probe_one()
3443 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3457 "QLogic %s - %s.\n", ha->model_number, ha->model_desc); in qla2x00_probe_one()
3460 pdev->device, ha->isp_ops->pci_info_str(base_vha, pci_info, in qla2x00_probe_one()
3462 pci_name(pdev), ha->flags.enable_64bit_addressing ? '+' : '-', in qla2x00_probe_one()
3464 ha->isp_ops->fw_version_str(base_vha, fw_str, sizeof(fw_str))); in qla2x00_probe_one()
3466 qlt_add_target(ha, base_vha); in qla2x00_probe_one()
3477 dma_free_coherent(&ha->pdev->dev, base_vha->gnl.size, in qla2x00_probe_one()
3485 if (ha->dpc_thread) { in qla2x00_probe_one()
3486 struct task_struct *t = ha->dpc_thread; in qla2x00_probe_one()
3488 ha->dpc_thread = NULL; in qla2x00_probe_one()
3504 qla2x00_mem_free(ha); in qla2x00_probe_one()
3505 qla2x00_free_req_que(ha, req); in qla2x00_probe_one()
3506 qla2x00_free_rsp_que(ha, rsp); in qla2x00_probe_one()
3507 qla2x00_clear_drv_active(ha); in qla2x00_probe_one()
3510 if (IS_P3P_TYPE(ha)) { in qla2x00_probe_one()
3511 if (!ha->nx_pcibase) in qla2x00_probe_one()
3512 iounmap((device_reg_t *)ha->nx_pcibase); in qla2x00_probe_one()
3514 iounmap((device_reg_t *)ha->nxdb_wr_ptr); in qla2x00_probe_one()
3516 if (ha->iobase) in qla2x00_probe_one()
3517 iounmap(ha->iobase); in qla2x00_probe_one()
3518 if (ha->cregbase) in qla2x00_probe_one()
3519 iounmap(ha->cregbase); in qla2x00_probe_one()
3521 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_probe_one()
3522 kfree(ha); in qla2x00_probe_one()
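
The long qla2x00_probe_one() listing above reduces to a fairly linear bring-up sequence. A heavily condensed outline, not compilable as-is: return-value checks, the per-ISP branches, scsi_add_host() and the error-unwinding labels are all elided, and req_length/rsp_length are set per ISP type in the real code:

	#include <linux/pci.h>
	#include <linux/slab.h>
	#include <linux/kthread.h>

	static int sketch_probe_one(struct pci_dev *pdev)
	{
		uint16_t req_length = 0, rsp_length = 0;
		struct req_que *req = NULL;
		struct rsp_que *rsp = NULL;
		scsi_qla_host_t *base_vha;
		struct qla_hw_data *ha;

		ha = kzalloc(sizeof(*ha), GFP_KERNEL);	/* per-adapter hardware state */
		ha->pdev = pdev;

		qla2x00_set_isp_flags(ha);		/* classify ISP from the PCI device ID */
		ha->isp_ops->iospace_config(ha);	/* map BARs, size MSI-X and queues */
		qla2x00_config_dma_addressing(ha);	/* 64-bit DMA with 32-bit fallback */
		qla2x00_mem_alloc(ha, req_length, rsp_length, &req, &rsp);

		base_vha = qla2x00_create_host(&qla2xxx_driver_template, ha);
		qla2x00_request_irqs(ha, rsp);
		qla2x00_alloc_queues(ha, req, rsp);	/* qid maps, base qpair */

		ha->isp_ops->initialize_adapter(base_vha);
		ha->dpc_thread = kthread_create(qla2x00_do_dpc, ha, "%s_dpc",
						base_vha->host_str);
		ha->isp_ops->enable_intrs(ha);
		qlt_add_target(ha, base_vha);		/* target-mode hookup */
		/* scsi_add_host(), DPC wakeup and the scan follow in the real probe */
		return 0;
	}
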
3533 struct qla_hw_data *ha; in __qla_set_remove_flag() local
3538 ha = base_vha->hw; in __qla_set_remove_flag()
3540 spin_lock_irqsave(&ha->vport_slock, flags); in __qla_set_remove_flag()
3541 list_for_each_entry(vp, &ha->vp_list, list) in __qla_set_remove_flag()
3549 spin_unlock_irqrestore(&ha->vport_slock, flags); in __qla_set_remove_flag()
3556 struct qla_hw_data *ha; in qla2x00_shutdown() local
3559 ha = vha->hw; in qla2x00_shutdown()
3569 cancel_work_sync(&ha->board_disable); in qla2x00_shutdown()
3575 if (IS_QLAFX00(ha)) in qla2x00_shutdown()
3579 if (ha->flags.fce_enabled) { in qla2x00_shutdown()
3581 ha->flags.fce_enabled = 0; in qla2x00_shutdown()
3585 if (ha->eft) in qla2x00_shutdown()
3588 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_shutdown()
3589 IS_QLA28XX(ha)) { in qla2x00_shutdown()
3590 if (ha->flags.fw_started) in qla2x00_shutdown()
3605 if (ha->interrupts_on) { in qla2x00_shutdown()
3607 ha->isp_ops->disable_intrs(ha); in qla2x00_shutdown()
3612 qla2x00_free_fw_dump(ha); in qla2x00_shutdown()
3621 qla2x00_delete_all_vps(struct qla_hw_data *ha, scsi_qla_host_t *base_vha) in qla2x00_delete_all_vps() argument
3626 mutex_lock(&ha->vport_lock); in qla2x00_delete_all_vps()
3627 while (ha->cur_vport_count) { in qla2x00_delete_all_vps()
3628 spin_lock_irqsave(&ha->vport_slock, flags); in qla2x00_delete_all_vps()
3630 BUG_ON(base_vha->list.next == &ha->vp_list); in qla2x00_delete_all_vps()
3635 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla2x00_delete_all_vps()
3636 mutex_unlock(&ha->vport_lock); in qla2x00_delete_all_vps()
3643 mutex_lock(&ha->vport_lock); in qla2x00_delete_all_vps()
3645 mutex_unlock(&ha->vport_lock); in qla2x00_delete_all_vps()
3650 qla2x00_destroy_deferred_work(struct qla_hw_data *ha) in qla2x00_destroy_deferred_work() argument
3653 if (ha->dpc_lp_wq) { in qla2x00_destroy_deferred_work()
3654 cancel_work_sync(&ha->idc_aen); in qla2x00_destroy_deferred_work()
3655 destroy_workqueue(ha->dpc_lp_wq); in qla2x00_destroy_deferred_work()
3656 ha->dpc_lp_wq = NULL; in qla2x00_destroy_deferred_work()
3659 if (ha->dpc_hp_wq) { in qla2x00_destroy_deferred_work()
3660 cancel_work_sync(&ha->nic_core_reset); in qla2x00_destroy_deferred_work()
3661 cancel_work_sync(&ha->idc_state_handler); in qla2x00_destroy_deferred_work()
3662 cancel_work_sync(&ha->nic_core_unrecoverable); in qla2x00_destroy_deferred_work()
3663 destroy_workqueue(ha->dpc_hp_wq); in qla2x00_destroy_deferred_work()
3664 ha->dpc_hp_wq = NULL; in qla2x00_destroy_deferred_work()
3668 if (ha->dpc_thread) { in qla2x00_destroy_deferred_work()
3669 struct task_struct *t = ha->dpc_thread; in qla2x00_destroy_deferred_work()
3675 ha->dpc_thread = NULL; in qla2x00_destroy_deferred_work()
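
The DPC kthread teardown in qla2x00_destroy_deferred_work() (also visible in the probe error path above) clears ha->dpc_thread before stopping it, so qla2xxx_wake_dpc() no longer sees the dying thread. A minimal sketch:

	#include <linux/kthread.h>

	static void sketch_stop_dpc(struct qla_hw_data *ha)
	{
		if (ha->dpc_thread) {
			struct task_struct *t = ha->dpc_thread;

			ha->dpc_thread = NULL;	/* hide it from qla2xxx_wake_dpc() */
			kthread_stop(t);	/* blocks until qla2x00_do_dpc() exits */
		}
	}
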
3681 qla2x00_unmap_iobases(struct qla_hw_data *ha) in qla2x00_unmap_iobases() argument
3683 if (IS_QLA82XX(ha)) { in qla2x00_unmap_iobases()
3685 iounmap((device_reg_t *)ha->nx_pcibase); in qla2x00_unmap_iobases()
3687 iounmap((device_reg_t *)ha->nxdb_wr_ptr); in qla2x00_unmap_iobases()
3689 if (ha->iobase) in qla2x00_unmap_iobases()
3690 iounmap(ha->iobase); in qla2x00_unmap_iobases()
3692 if (ha->cregbase) in qla2x00_unmap_iobases()
3693 iounmap(ha->cregbase); in qla2x00_unmap_iobases()
3695 if (ha->mqiobase) in qla2x00_unmap_iobases()
3696 iounmap(ha->mqiobase); in qla2x00_unmap_iobases()
3698 if ((IS_QLA83XX(ha) || IS_QLA27XX(ha) || IS_QLA28XX(ha)) && in qla2x00_unmap_iobases()
3699 ha->msixbase) in qla2x00_unmap_iobases()
3700 iounmap(ha->msixbase); in qla2x00_unmap_iobases()
3705 qla2x00_clear_drv_active(struct qla_hw_data *ha) in qla2x00_clear_drv_active() argument
3707 if (IS_QLA8044(ha)) { in qla2x00_clear_drv_active()
3708 qla8044_idc_lock(ha); in qla2x00_clear_drv_active()
3709 qla8044_clear_drv_active(ha); in qla2x00_clear_drv_active()
3710 qla8044_idc_unlock(ha); in qla2x00_clear_drv_active()
3711 } else if (IS_QLA82XX(ha)) { in qla2x00_clear_drv_active()
3712 qla82xx_idc_lock(ha); in qla2x00_clear_drv_active()
3713 qla82xx_clear_drv_active(ha); in qla2x00_clear_drv_active()
3714 qla82xx_idc_unlock(ha); in qla2x00_clear_drv_active()
3722 struct qla_hw_data *ha; in qla2x00_remove_one() local
3725 ha = base_vha->hw; in qla2x00_remove_one()
3729 cancel_work_sync(&ha->board_disable); in qla2x00_remove_one()
3737 dma_free_coherent(&ha->pdev->dev, base_vha->gnl.size, in qla2x00_remove_one()
3741 kfree(ha); in qla2x00_remove_one()
3754 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_remove_one()
3755 IS_QLA28XX(ha)) { in qla2x00_remove_one()
3756 if (ha->flags.fw_started) in qla2x00_remove_one()
3758 } else if (!IS_QLAFX00(ha)) { in qla2x00_remove_one()
3759 if (IS_QLA8031(ha)) { in qla2x00_remove_one()
3774 dma_free_coherent(&ha->pdev->dev, in qla2x00_remove_one()
3781 if (IS_QLAFX00(ha)) in qla2x00_remove_one()
3784 qla2x00_delete_all_vps(ha, base_vha); in qla2x00_remove_one()
3797 if (ha->exlogin_buf) in qla2x00_remove_one()
3798 qla2x00_free_exlogin_buffer(ha); in qla2x00_remove_one()
3801 if (ha->exchoffld_buf) in qla2x00_remove_one()
3802 qla2x00_free_exchoffld_buffer(ha); in qla2x00_remove_one()
3804 qla2x00_destroy_deferred_work(ha); in qla2x00_remove_one()
3806 qlt_remove_target(ha, base_vha); in qla2x00_remove_one()
3811 qlt_remove_target_resources(ha); in qla2x00_remove_one()
3817 qla2x00_clear_drv_active(ha); in qla2x00_remove_one()
3821 qla2x00_unmap_iobases(ha); in qla2x00_remove_one()
3823 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_remove_one()
3824 kfree(ha); in qla2x00_remove_one()
3848 struct qla_hw_data *ha = vha->hw; in qla2x00_free_device() local
3860 if (ha->interrupts_on) { in qla2x00_free_device()
3862 ha->isp_ops->disable_intrs(ha); in qla2x00_free_device()
3870 if (ha->wq) { in qla2x00_free_device()
3871 flush_workqueue(ha->wq); in qla2x00_free_device()
3872 destroy_workqueue(ha->wq); in qla2x00_free_device()
3873 ha->wq = NULL; in qla2x00_free_device()
3879 qla2x00_mem_free(ha); in qla2x00_free_device()
3883 qla2x00_free_queues(ha); in qla2x00_free_device()
3962 static void qla2x00_set_reserved_loop_ids(struct qla_hw_data *ha) in qla2x00_set_reserved_loop_ids() argument
3966 if (IS_FWI2_CAPABLE(ha)) in qla2x00_set_reserved_loop_ids()
3970 set_bit(i, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
3971 set_bit(MANAGEMENT_SERVER, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
3972 set_bit(BROADCAST, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
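
qla2x00_set_reserved_loop_ids() pre-sets the firmware-reserved loop IDs in a bitmap so the ID allocator will never hand them out; the bitmap itself is the kcalloc()'d loop_id_map that appears in the qla2x00_mem_alloc() matches further down. A self-contained sketch of that reserve-in-a-bitmap pattern, with made-up sizes and ID values:

    #include <linux/bitops.h>
    #include <linux/slab.h>

    #define MY_MAP_SIZE  2048       /* illustrative, not the driver's value */
    #define MY_MGMT_ID   0x7fa
    #define MY_BCAST_ID  0x7ff

    static unsigned long *my_alloc_loop_id_map(void)
    {
        unsigned long *map = kcalloc(BITS_TO_LONGS(MY_MAP_SIZE),
                                     sizeof(long), GFP_KERNEL);
        if (!map)
            return NULL;

        /* Reserve well-known IDs so find_first_zero_bit() skips them. */
        set_bit(MY_MGMT_ID, map);
        set_bit(MY_BCAST_ID, map);
        return map;
    }
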
3984 qla2x00_mem_alloc(struct qla_hw_data *ha, uint16_t req_len, uint16_t rsp_len, in qla2x00_mem_alloc() argument
3989 ha->init_cb = dma_alloc_coherent(&ha->pdev->dev, ha->init_cb_size, in qla2x00_mem_alloc()
3990 &ha->init_cb_dma, GFP_KERNEL); in qla2x00_mem_alloc()
3991 if (!ha->init_cb) in qla2x00_mem_alloc()
3994 if (qlt_mem_alloc(ha) < 0) in qla2x00_mem_alloc()
3997 ha->gid_list = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
3998 qla2x00_gid_list_size(ha), &ha->gid_list_dma, GFP_KERNEL); in qla2x00_mem_alloc()
3999 if (!ha->gid_list) in qla2x00_mem_alloc()
4002 ha->srb_mempool = mempool_create_slab_pool(SRB_MIN_REQ, srb_cachep); in qla2x00_mem_alloc()
4003 if (!ha->srb_mempool) in qla2x00_mem_alloc()
4006 if (IS_P3P_TYPE(ha)) { in qla2x00_mem_alloc()
4015 ha->ctx_mempool = mempool_create_slab_pool(SRB_MIN_REQ, in qla2x00_mem_alloc()
4017 if (!ha->ctx_mempool) in qla2x00_mem_alloc()
4019 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0021, in qla2x00_mem_alloc()
4021 ctx_cachep, ha->ctx_mempool); in qla2x00_mem_alloc()
4025 ha->nvram = kzalloc(MAX_NVRAM_SIZE, GFP_KERNEL); in qla2x00_mem_alloc()
4026 if (!ha->nvram) in qla2x00_mem_alloc()
4030 ha->pdev->device); in qla2x00_mem_alloc()
4031 ha->s_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4033 if (!ha->s_dma_pool) in qla2x00_mem_alloc()
4036 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0022, in qla2x00_mem_alloc()
4038 ha->init_cb, ha->gid_list, ha->srb_mempool, ha->s_dma_pool); in qla2x00_mem_alloc()
4040 if (IS_P3P_TYPE(ha) || ql2xenabledif) { in qla2x00_mem_alloc()
4041 ha->dl_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4043 if (!ha->dl_dma_pool) { in qla2x00_mem_alloc()
4044 ql_log_pci(ql_log_fatal, ha->pdev, 0x0023, in qla2x00_mem_alloc()
4049 ha->fcp_cmnd_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4051 if (!ha->fcp_cmnd_dma_pool) { in qla2x00_mem_alloc()
4052 ql_log_pci(ql_log_fatal, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4062 ha->dif_bundl_pool = dma_pool_create(name, in qla2x00_mem_alloc()
4063 &ha->pdev->dev, DIF_BUNDLING_DMA_POOL_SIZE, 8, 0); in qla2x00_mem_alloc()
4064 if (!ha->dif_bundl_pool) { in qla2x00_mem_alloc()
4065 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4071 INIT_LIST_HEAD(&ha->pool.good.head); in qla2x00_mem_alloc()
4072 INIT_LIST_HEAD(&ha->pool.unusable.head); in qla2x00_mem_alloc()
4073 ha->pool.good.count = 0; in qla2x00_mem_alloc()
4074 ha->pool.unusable.count = 0; in qla2x00_mem_alloc()
4078 ql_dbg_pci(ql_dbg_init, ha->pdev, in qla2x00_mem_alloc()
4083 ha->dif_bundle_kallocs++; in qla2x00_mem_alloc()
4086 ha->dif_bundl_pool, GFP_ATOMIC, in qla2x00_mem_alloc()
4089 ql_dbg_pci(ql_dbg_init, ha->pdev, in qla2x00_mem_alloc()
4094 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4097 ha->dif_bundle_dma_allocs++; in qla2x00_mem_alloc()
4106 &ha->pool.unusable.head); in qla2x00_mem_alloc()
4107 ha->pool.unusable.count++; in qla2x00_mem_alloc()
4110 &ha->pool.good.head); in qla2x00_mem_alloc()
4111 ha->pool.good.count++; in qla2x00_mem_alloc()
4117 &ha->pool.good.head, list) { in qla2x00_mem_alloc()
4119 dma_pool_free(ha->dif_bundl_pool, in qla2x00_mem_alloc()
4121 ha->dif_bundle_dma_allocs--; in qla2x00_mem_alloc()
4123 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4126 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4128 __func__, ha->pool.good.count, in qla2x00_mem_alloc()
4129 ha->pool.unusable.count); in qla2x00_mem_alloc()
4132 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0025, in qla2x00_mem_alloc()
4134 ha->dl_dma_pool, ha->fcp_cmnd_dma_pool, in qla2x00_mem_alloc()
4135 ha->dif_bundl_pool); in qla2x00_mem_alloc()
4139 if (IS_QLA2100(ha) || IS_QLA2200(ha)) { in qla2x00_mem_alloc()
4141 ha->sns_cmd = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4142 sizeof(struct sns_cmd_pkt), &ha->sns_cmd_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4143 if (!ha->sns_cmd) in qla2x00_mem_alloc()
4145 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0026, in qla2x00_mem_alloc()
4146 "sns_cmd: %p.\n", ha->sns_cmd); in qla2x00_mem_alloc()
4149 ha->ms_iocb = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4150 &ha->ms_iocb_dma); in qla2x00_mem_alloc()
4151 if (!ha->ms_iocb) in qla2x00_mem_alloc()
4154 ha->ct_sns = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4155 sizeof(struct ct_sns_pkt), &ha->ct_sns_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4156 if (!ha->ct_sns) in qla2x00_mem_alloc()
4158 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0027, in qla2x00_mem_alloc()
4160 ha->ms_iocb, ha->ct_sns); in qla2x00_mem_alloc()
4166 ql_log_pci(ql_log_fatal, ha->pdev, 0x0028, in qla2x00_mem_alloc()
4171 (*req)->ring = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4175 ql_log_pci(ql_log_fatal, ha->pdev, 0x0029, in qla2x00_mem_alloc()
4182 ql_log_pci(ql_log_fatal, ha->pdev, 0x002a, in qla2x00_mem_alloc()
4186 (*rsp)->hw = ha; in qla2x00_mem_alloc()
4188 (*rsp)->ring = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4192 ql_log_pci(ql_log_fatal, ha->pdev, 0x002b, in qla2x00_mem_alloc()
4198 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002c, in qla2x00_mem_alloc()
4204 if (ha->nvram_npiv_size) { in qla2x00_mem_alloc()
4205 ha->npiv_info = kcalloc(ha->nvram_npiv_size, in qla2x00_mem_alloc()
4208 if (!ha->npiv_info) { in qla2x00_mem_alloc()
4209 ql_log_pci(ql_log_fatal, ha->pdev, 0x002d, in qla2x00_mem_alloc()
4214 ha->npiv_info = NULL; in qla2x00_mem_alloc()
4217 if (IS_CNA_CAPABLE(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_mem_alloc()
4218 IS_QLA28XX(ha)) { in qla2x00_mem_alloc()
4219 ha->ex_init_cb = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4220 &ha->ex_init_cb_dma); in qla2x00_mem_alloc()
4221 if (!ha->ex_init_cb) in qla2x00_mem_alloc()
4223 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002e, in qla2x00_mem_alloc()
4224 "ex_init_cb=%p.\n", ha->ex_init_cb); in qla2x00_mem_alloc()
4228 if (IS_QLA27XX(ha) || IS_QLA28XX(ha)) { in qla2x00_mem_alloc()
4229 ha->sf_init_cb = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4230 &ha->sf_init_cb_dma); in qla2x00_mem_alloc()
4231 if (!ha->sf_init_cb) in qla2x00_mem_alloc()
4233 memset(ha->sf_init_cb, 0, sizeof(struct init_sf_cb)); in qla2x00_mem_alloc()
4234 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0199, in qla2x00_mem_alloc()
4235 "sf_init_cb=%p.\n", ha->sf_init_cb); in qla2x00_mem_alloc()
4238 INIT_LIST_HEAD(&ha->gbl_dsd_list); in qla2x00_mem_alloc()
4241 if (!IS_FWI2_CAPABLE(ha)) { in qla2x00_mem_alloc()
4242 ha->async_pd = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4243 &ha->async_pd_dma); in qla2x00_mem_alloc()
4244 if (!ha->async_pd) in qla2x00_mem_alloc()
4246 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002f, in qla2x00_mem_alloc()
4247 "async_pd=%p.\n", ha->async_pd); in qla2x00_mem_alloc()
4250 INIT_LIST_HEAD(&ha->vp_list); in qla2x00_mem_alloc()
4253 ha->loop_id_map = kcalloc(BITS_TO_LONGS(LOOPID_MAP_SIZE), in qla2x00_mem_alloc()
4256 if (!ha->loop_id_map) in qla2x00_mem_alloc()
4259 qla2x00_set_reserved_loop_ids(ha); in qla2x00_mem_alloc()
4260 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0123, in qla2x00_mem_alloc()
4261 "loop_id_map=%p.\n", ha->loop_id_map); in qla2x00_mem_alloc()
4264 ha->sfp_data = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4265 SFP_DEV_SIZE, &ha->sfp_data_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4266 if (!ha->sfp_data) { in qla2x00_mem_alloc()
4267 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011b, in qla2x00_mem_alloc()
4272 ha->flt = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4273 sizeof(struct qla_flt_header) + FLT_REGIONS_SIZE, &ha->flt_dma, in qla2x00_mem_alloc()
4275 if (!ha->flt) { in qla2x00_mem_alloc()
4276 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011b, in qla2x00_mem_alloc()
4284 dma_free_coherent(&ha->pdev->dev, SFP_DEV_SIZE, in qla2x00_mem_alloc()
4285 ha->sfp_data, ha->sfp_data_dma); in qla2x00_mem_alloc()
4287 kfree(ha->loop_id_map); in qla2x00_mem_alloc()
4289 dma_pool_free(ha->s_dma_pool, ha->async_pd, ha->async_pd_dma); in qla2x00_mem_alloc()
4291 dma_pool_free(ha->s_dma_pool, ha->sf_init_cb, ha->sf_init_cb_dma); in qla2x00_mem_alloc()
4293 dma_pool_free(ha->s_dma_pool, ha->ex_init_cb, ha->ex_init_cb_dma); in qla2x00_mem_alloc()
4295 kfree(ha->npiv_info); in qla2x00_mem_alloc()
4297 dma_free_coherent(&ha->pdev->dev, ((*rsp)->length + 1) * in qla2x00_mem_alloc()
4305 dma_free_coherent(&ha->pdev->dev, ((*req)->length + 1) * in qla2x00_mem_alloc()
4313 dma_free_coherent(&ha->pdev->dev, sizeof(struct ct_sns_pkt), in qla2x00_mem_alloc()
4314 ha->ct_sns, ha->ct_sns_dma); in qla2x00_mem_alloc()
4315 ha->ct_sns = NULL; in qla2x00_mem_alloc()
4316 ha->ct_sns_dma = 0; in qla2x00_mem_alloc()
4318 dma_pool_free(ha->s_dma_pool, ha->ms_iocb, ha->ms_iocb_dma); in qla2x00_mem_alloc()
4319 ha->ms_iocb = NULL; in qla2x00_mem_alloc()
4320 ha->ms_iocb_dma = 0; in qla2x00_mem_alloc()
4322 if (ha->sns_cmd) in qla2x00_mem_alloc()
4323 dma_free_coherent(&ha->pdev->dev, sizeof(struct sns_cmd_pkt), in qla2x00_mem_alloc()
4324 ha->sns_cmd, ha->sns_cmd_dma); in qla2x00_mem_alloc()
4329 list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head, in qla2x00_mem_alloc()
4332 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_alloc()
4334 ha->dif_bundle_dma_allocs--; in qla2x00_mem_alloc()
4336 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4337 ha->pool.unusable.count--; in qla2x00_mem_alloc()
4339 dma_pool_destroy(ha->dif_bundl_pool); in qla2x00_mem_alloc()
4340 ha->dif_bundl_pool = NULL; in qla2x00_mem_alloc()
4344 if (IS_QLA82XX(ha) || ql2xenabledif) { in qla2x00_mem_alloc()
4345 dma_pool_destroy(ha->fcp_cmnd_dma_pool); in qla2x00_mem_alloc()
4346 ha->fcp_cmnd_dma_pool = NULL; in qla2x00_mem_alloc()
4349 if (IS_QLA82XX(ha) || ql2xenabledif) { in qla2x00_mem_alloc()
4350 dma_pool_destroy(ha->dl_dma_pool); in qla2x00_mem_alloc()
4351 ha->dl_dma_pool = NULL; in qla2x00_mem_alloc()
4354 dma_pool_destroy(ha->s_dma_pool); in qla2x00_mem_alloc()
4355 ha->s_dma_pool = NULL; in qla2x00_mem_alloc()
4357 kfree(ha->nvram); in qla2x00_mem_alloc()
4358 ha->nvram = NULL; in qla2x00_mem_alloc()
4360 mempool_destroy(ha->ctx_mempool); in qla2x00_mem_alloc()
4361 ha->ctx_mempool = NULL; in qla2x00_mem_alloc()
4363 mempool_destroy(ha->srb_mempool); in qla2x00_mem_alloc()
4364 ha->srb_mempool = NULL; in qla2x00_mem_alloc()
4366 dma_free_coherent(&ha->pdev->dev, qla2x00_gid_list_size(ha), in qla2x00_mem_alloc()
4367 ha->gid_list, in qla2x00_mem_alloc()
4368 ha->gid_list_dma); in qla2x00_mem_alloc()
4369 ha->gid_list = NULL; in qla2x00_mem_alloc()
4370 ha->gid_list_dma = 0; in qla2x00_mem_alloc()
4372 qlt_mem_free(ha); in qla2x00_mem_alloc()
4374 dma_free_coherent(&ha->pdev->dev, ha->init_cb_size, ha->init_cb, in qla2x00_mem_alloc()
4375 ha->init_cb_dma); in qla2x00_mem_alloc()
4376 ha->init_cb = NULL; in qla2x00_mem_alloc()
4377 ha->init_cb_dma = 0; in qla2x00_mem_alloc()
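
qla2x00_mem_alloc() is the classic "allocate everything up front, unwind with gotos" routine: coherent buffers (init_cb, gid_list, sfp_data, flt, ...), a slab-backed mempool for SRBs, several dma_pool_create() pools, and the loop-ID bitmap, each paired with a fail_* label that releases whatever was allocated before it. A much-reduced sketch of that structure, with invented names and sizes (my_hw, my_srb_cachep, the 256-byte pool and the error labels are all illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/dmapool.h>
    #include <linux/mempool.h>
    #include <linux/pci.h>
    #include <linux/slab.h>

    struct my_hw {
        struct pci_dev *pdev;
        void *init_cb;
        dma_addr_t init_cb_dma;
        struct dma_pool *s_dma_pool;
        mempool_t *srb_mempool;
    };

    extern struct kmem_cache *my_srb_cachep;    /* created at module init */

    static int my_mem_alloc(struct my_hw *hw, size_t init_cb_size)
    {
        hw->init_cb = dma_alloc_coherent(&hw->pdev->dev, init_cb_size,
                                         &hw->init_cb_dma, GFP_KERNEL);
        if (!hw->init_cb)
            return -ENOMEM;

        hw->s_dma_pool = dma_pool_create("my_small", &hw->pdev->dev, 256, 8, 0);
        if (!hw->s_dma_pool)
            goto fail_free_init_cb;

        hw->srb_mempool = mempool_create_slab_pool(16, my_srb_cachep);
        if (!hw->srb_mempool)
            goto fail_free_s_dma_pool;

        return 0;

        /*
         * Unwind in reverse allocation order; each label frees exactly one
         * object and falls through to the labels for the earlier ones.
         */
    fail_free_s_dma_pool:
        dma_pool_destroy(hw->s_dma_pool);
        hw->s_dma_pool = NULL;
    fail_free_init_cb:
        dma_free_coherent(&hw->pdev->dev, init_cb_size,
                          hw->init_cb, hw->init_cb_dma);
        hw->init_cb = NULL;
        return -ENOMEM;
    }

Pointers are cleared after each free so a qla2x00_mem_free()-style cleanup can still run safely after a partial allocation.
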
4390 struct qla_hw_data *ha = vha->hw; in qla2x00_set_exlogins_buffer() local
4396 if (!IS_EXLOGIN_OFFLD_CAPABLE(ha)) in qla2x00_set_exlogins_buffer()
4403 ql_log_pci(ql_log_fatal, ha->pdev, 0xd029, in qla2x00_set_exlogins_buffer()
4411 if (temp != ha->exlogin_size) { in qla2x00_set_exlogins_buffer()
4412 qla2x00_free_exlogin_buffer(ha); in qla2x00_set_exlogins_buffer()
4413 ha->exlogin_size = temp; in qla2x00_set_exlogins_buffer()
4420 "EXLOGIN: requested size=0x%x\n", ha->exlogin_size); in qla2x00_set_exlogins_buffer()
4423 ha->exlogin_buf = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_set_exlogins_buffer()
4424 ha->exlogin_size, &ha->exlogin_buf_dma, GFP_KERNEL); in qla2x00_set_exlogins_buffer()
4425 if (!ha->exlogin_buf) { in qla2x00_set_exlogins_buffer()
4426 ql_log_pci(ql_log_fatal, ha->pdev, 0xd02a, in qla2x00_set_exlogins_buffer()
4433 rval = qla_set_exlogin_mem_cfg(vha, ha->exlogin_buf_dma); in qla2x00_set_exlogins_buffer()
4437 qla2x00_free_exlogin_buffer(ha); in qla2x00_set_exlogins_buffer()
4450 qla2x00_free_exlogin_buffer(struct qla_hw_data *ha) in qla2x00_free_exlogin_buffer() argument
4452 if (ha->exlogin_buf) { in qla2x00_free_exlogin_buffer()
4453 dma_free_coherent(&ha->pdev->dev, ha->exlogin_size, in qla2x00_free_exlogin_buffer()
4454 ha->exlogin_buf, ha->exlogin_buf_dma); in qla2x00_free_exlogin_buffer()
4455 ha->exlogin_buf = NULL; in qla2x00_free_exlogin_buffer()
4456 ha->exlogin_size = 0; in qla2x00_free_exlogin_buffer()
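
qla2x00_set_exlogins_buffer() and qla2x00_free_exlogin_buffer() implement a free-and-reallocate resize for the extended-login DMA buffer: when the firmware reports a different size requirement, the old coherent buffer is released, a new one is allocated, and its bus address is handed back to the firmware. A hedged sketch of that pattern (my_hw and its fields are placeholders, and the firmware-configuration call is omitted):

    #include <linux/dma-mapping.h>
    #include <linux/pci.h>

    struct my_hw {
        struct pci_dev *pdev;
        void *exlogin_buf;
        dma_addr_t exlogin_buf_dma;
        size_t exlogin_size;
    };

    static int my_resize_exlogin_buf(struct my_hw *hw, size_t want)
    {
        if (want == hw->exlogin_size)
            return 0;                           /* already the right size */

        if (hw->exlogin_buf) {
            dma_free_coherent(&hw->pdev->dev, hw->exlogin_size,
                              hw->exlogin_buf, hw->exlogin_buf_dma);
            hw->exlogin_buf = NULL;
            hw->exlogin_size = 0;
        }

        hw->exlogin_buf = dma_alloc_coherent(&hw->pdev->dev, want,
                                             &hw->exlogin_buf_dma, GFP_KERNEL);
        if (!hw->exlogin_buf)
            return -ENOMEM;

        hw->exlogin_size = want;
        return 0;       /* caller then reports exlogin_buf_dma to the firmware */
    }
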
4505 struct qla_hw_data *ha = vha->hw; in qla2x00_set_exchoffld_buffer() local
4507 if (!ha->flags.exchoffld_enabled) in qla2x00_set_exchoffld_buffer()
4510 if (!IS_EXCHG_OFFLD_CAPABLE(ha)) in qla2x00_set_exchoffld_buffer()
4516 ql_log_pci(ql_log_fatal, ha->pdev, 0xd012, in qla2x00_set_exchoffld_buffer()
4527 if (totsz != ha->exchoffld_size) { in qla2x00_set_exchoffld_buffer()
4528 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4530 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4531 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4535 ha->exchoffld_size = totsz; in qla2x00_set_exchoffld_buffer()
4543 ha->exchoffld_size); in qla2x00_set_exchoffld_buffer()
4546 ha->exchoffld_buf = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_set_exchoffld_buffer()
4547 ha->exchoffld_size, &ha->exchoffld_buf_dma, GFP_KERNEL); in qla2x00_set_exchoffld_buffer()
4548 if (!ha->exchoffld_buf) { in qla2x00_set_exchoffld_buffer()
4549 ql_log_pci(ql_log_fatal, ha->pdev, 0xd013, in qla2x00_set_exchoffld_buffer()
4552 if (ha->max_exchg > in qla2x00_set_exchoffld_buffer()
4554 ha->max_exchg -= REDUCE_EXCHANGES_CNT; in qla2x00_set_exchoffld_buffer()
4555 } else if (ha->max_exchg > in qla2x00_set_exchoffld_buffer()
4557 ha->max_exchg -= 512; in qla2x00_set_exchoffld_buffer()
4559 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4560 ql_log_pci(ql_log_fatal, ha->pdev, 0xd013, in qla2x00_set_exchoffld_buffer()
4563 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4567 } else if (!ha->exchoffld_buf || (actual_cnt <= FW_DEF_EXCHANGES_CNT)) { in qla2x00_set_exchoffld_buffer()
4569 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4570 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4571 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4574 ha->exchoffld_size, actual_cnt, size, totsz); in qla2x00_set_exchoffld_buffer()
4583 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4586 struct init_cb_81xx *icb = (struct init_cb_81xx *)ha->init_cb; in qla2x00_set_exchoffld_buffer()
4604 qla2x00_free_exchoffld_buffer(struct qla_hw_data *ha) in qla2x00_free_exchoffld_buffer() argument
4606 if (ha->exchoffld_buf) { in qla2x00_free_exchoffld_buffer()
4607 dma_free_coherent(&ha->pdev->dev, ha->exchoffld_size, in qla2x00_free_exchoffld_buffer()
4608 ha->exchoffld_buf, ha->exchoffld_buf_dma); in qla2x00_free_exchoffld_buffer()
4609 ha->exchoffld_buf = NULL; in qla2x00_free_exchoffld_buffer()
4610 ha->exchoffld_size = 0; in qla2x00_free_exchoffld_buffer()
4622 qla2x00_free_fw_dump(struct qla_hw_data *ha) in qla2x00_free_fw_dump() argument
4624 struct fwdt *fwdt = ha->fwdt; in qla2x00_free_fw_dump()
4627 if (ha->fce) in qla2x00_free_fw_dump()
4628 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_fw_dump()
4629 FCE_SIZE, ha->fce, ha->fce_dma); in qla2x00_free_fw_dump()
4631 if (ha->eft) in qla2x00_free_fw_dump()
4632 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_fw_dump()
4633 EFT_SIZE, ha->eft, ha->eft_dma); in qla2x00_free_fw_dump()
4635 if (ha->fw_dump) in qla2x00_free_fw_dump()
4636 vfree(ha->fw_dump); in qla2x00_free_fw_dump()
4638 ha->fce = NULL; in qla2x00_free_fw_dump()
4639 ha->fce_dma = 0; in qla2x00_free_fw_dump()
4640 ha->flags.fce_enabled = 0; in qla2x00_free_fw_dump()
4641 ha->eft = NULL; in qla2x00_free_fw_dump()
4642 ha->eft_dma = 0; in qla2x00_free_fw_dump()
4643 ha->fw_dumped = false; in qla2x00_free_fw_dump()
4644 ha->fw_dump_cap_flags = 0; in qla2x00_free_fw_dump()
4645 ha->fw_dump_reading = 0; in qla2x00_free_fw_dump()
4646 ha->fw_dump = NULL; in qla2x00_free_fw_dump()
4647 ha->fw_dump_len = 0; in qla2x00_free_fw_dump()
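
qla2x00_free_fw_dump() mixes two allocators: the FCE/EFT trace buffers are coherent DMA memory released with dma_free_coherent(), while the firmware dump itself is vmalloc()'d and released with vfree(); every pointer and flag is then cleared so the routine can be called again safely. A sketch with invented fields and size:

    #include <linux/dma-mapping.h>
    #include <linux/pci.h>
    #include <linux/vmalloc.h>

    #define MY_FCE_SIZE  (64 * 1024)    /* illustrative size only */

    struct my_hw {
        struct pci_dev *pdev;
        void *fce;                      /* coherent DMA trace buffer */
        dma_addr_t fce_dma;
        void *fw_dump;                  /* vmalloc()'d dump area */
        u32 fw_dump_len;
    };

    static void my_free_fw_dump(struct my_hw *hw)
    {
        if (hw->fce)
            dma_free_coherent(&hw->pdev->dev, MY_FCE_SIZE,
                              hw->fce, hw->fce_dma);
        vfree(hw->fw_dump);             /* vfree(NULL) is a harmless no-op */

        hw->fce = NULL;
        hw->fce_dma = 0;
        hw->fw_dump = NULL;
        hw->fw_dump_len = 0;
    }
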
4665 qla2x00_mem_free(struct qla_hw_data *ha) in qla2x00_mem_free() argument
4667 qla2x00_free_fw_dump(ha); in qla2x00_mem_free()
4669 if (ha->mctp_dump) in qla2x00_mem_free()
4670 dma_free_coherent(&ha->pdev->dev, MCTP_DUMP_SIZE, ha->mctp_dump, in qla2x00_mem_free()
4671 ha->mctp_dump_dma); in qla2x00_mem_free()
4672 ha->mctp_dump = NULL; in qla2x00_mem_free()
4674 mempool_destroy(ha->srb_mempool); in qla2x00_mem_free()
4675 ha->srb_mempool = NULL; in qla2x00_mem_free()
4677 if (ha->dcbx_tlv) in qla2x00_mem_free()
4678 dma_free_coherent(&ha->pdev->dev, DCBX_TLV_DATA_SIZE, in qla2x00_mem_free()
4679 ha->dcbx_tlv, ha->dcbx_tlv_dma); in qla2x00_mem_free()
4680 ha->dcbx_tlv = NULL; in qla2x00_mem_free()
4682 if (ha->xgmac_data) in qla2x00_mem_free()
4683 dma_free_coherent(&ha->pdev->dev, XGMAC_DATA_SIZE, in qla2x00_mem_free()
4684 ha->xgmac_data, ha->xgmac_data_dma); in qla2x00_mem_free()
4685 ha->xgmac_data = NULL; in qla2x00_mem_free()
4687 if (ha->sns_cmd) in qla2x00_mem_free()
4688 dma_free_coherent(&ha->pdev->dev, sizeof(struct sns_cmd_pkt), in qla2x00_mem_free()
4689 ha->sns_cmd, ha->sns_cmd_dma); in qla2x00_mem_free()
4690 ha->sns_cmd = NULL; in qla2x00_mem_free()
4691 ha->sns_cmd_dma = 0; in qla2x00_mem_free()
4693 if (ha->ct_sns) in qla2x00_mem_free()
4694 dma_free_coherent(&ha->pdev->dev, sizeof(struct ct_sns_pkt), in qla2x00_mem_free()
4695 ha->ct_sns, ha->ct_sns_dma); in qla2x00_mem_free()
4696 ha->ct_sns = NULL; in qla2x00_mem_free()
4697 ha->ct_sns_dma = 0; in qla2x00_mem_free()
4699 if (ha->sfp_data) in qla2x00_mem_free()
4700 dma_free_coherent(&ha->pdev->dev, SFP_DEV_SIZE, ha->sfp_data, in qla2x00_mem_free()
4701 ha->sfp_data_dma); in qla2x00_mem_free()
4702 ha->sfp_data = NULL; in qla2x00_mem_free()
4704 if (ha->flt) in qla2x00_mem_free()
4705 dma_free_coherent(&ha->pdev->dev, in qla2x00_mem_free()
4707 ha->flt, ha->flt_dma); in qla2x00_mem_free()
4708 ha->flt = NULL; in qla2x00_mem_free()
4709 ha->flt_dma = 0; in qla2x00_mem_free()
4711 if (ha->ms_iocb) in qla2x00_mem_free()
4712 dma_pool_free(ha->s_dma_pool, ha->ms_iocb, ha->ms_iocb_dma); in qla2x00_mem_free()
4713 ha->ms_iocb = NULL; in qla2x00_mem_free()
4714 ha->ms_iocb_dma = 0; in qla2x00_mem_free()
4716 if (ha->sf_init_cb) in qla2x00_mem_free()
4717 dma_pool_free(ha->s_dma_pool, in qla2x00_mem_free()
4718 ha->sf_init_cb, ha->sf_init_cb_dma); in qla2x00_mem_free()
4720 if (ha->ex_init_cb) in qla2x00_mem_free()
4721 dma_pool_free(ha->s_dma_pool, in qla2x00_mem_free()
4722 ha->ex_init_cb, ha->ex_init_cb_dma); in qla2x00_mem_free()
4723 ha->ex_init_cb = NULL; in qla2x00_mem_free()
4724 ha->ex_init_cb_dma = 0; in qla2x00_mem_free()
4726 if (ha->async_pd) in qla2x00_mem_free()
4727 dma_pool_free(ha->s_dma_pool, ha->async_pd, ha->async_pd_dma); in qla2x00_mem_free()
4728 ha->async_pd = NULL; in qla2x00_mem_free()
4729 ha->async_pd_dma = 0; in qla2x00_mem_free()
4731 dma_pool_destroy(ha->s_dma_pool); in qla2x00_mem_free()
4732 ha->s_dma_pool = NULL; in qla2x00_mem_free()
4734 if (ha->gid_list) in qla2x00_mem_free()
4735 dma_free_coherent(&ha->pdev->dev, qla2x00_gid_list_size(ha), in qla2x00_mem_free()
4736 ha->gid_list, ha->gid_list_dma); in qla2x00_mem_free()
4737 ha->gid_list = NULL; in qla2x00_mem_free()
4738 ha->gid_list_dma = 0; in qla2x00_mem_free()
4740 if (IS_QLA82XX(ha)) { in qla2x00_mem_free()
4741 if (!list_empty(&ha->gbl_dsd_list)) { in qla2x00_mem_free()
4746 tdsd_ptr, &ha->gbl_dsd_list, list) { in qla2x00_mem_free()
4747 dma_pool_free(ha->dl_dma_pool, in qla2x00_mem_free()
4755 dma_pool_destroy(ha->dl_dma_pool); in qla2x00_mem_free()
4756 ha->dl_dma_pool = NULL; in qla2x00_mem_free()
4758 dma_pool_destroy(ha->fcp_cmnd_dma_pool); in qla2x00_mem_free()
4759 ha->fcp_cmnd_dma_pool = NULL; in qla2x00_mem_free()
4761 mempool_destroy(ha->ctx_mempool); in qla2x00_mem_free()
4762 ha->ctx_mempool = NULL; in qla2x00_mem_free()
4764 if (ql2xenabledif && ha->dif_bundl_pool) { in qla2x00_mem_free()
4767 list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head, in qla2x00_mem_free()
4770 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_free()
4772 ha->dif_bundle_dma_allocs--; in qla2x00_mem_free()
4774 ha->dif_bundle_kallocs--; in qla2x00_mem_free()
4775 ha->pool.unusable.count--; in qla2x00_mem_free()
4777 list_for_each_entry_safe(dsd, nxt, &ha->pool.good.head, list) { in qla2x00_mem_free()
4779 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_free()
4781 ha->dif_bundle_dma_allocs--; in qla2x00_mem_free()
4783 ha->dif_bundle_kallocs--; in qla2x00_mem_free()
4787 dma_pool_destroy(ha->dif_bundl_pool); in qla2x00_mem_free()
4788 ha->dif_bundl_pool = NULL; in qla2x00_mem_free()
4790 qlt_mem_free(ha); in qla2x00_mem_free()
4792 if (ha->init_cb) in qla2x00_mem_free()
4793 dma_free_coherent(&ha->pdev->dev, ha->init_cb_size, in qla2x00_mem_free()
4794 ha->init_cb, ha->init_cb_dma); in qla2x00_mem_free()
4795 ha->init_cb = NULL; in qla2x00_mem_free()
4796 ha->init_cb_dma = 0; in qla2x00_mem_free()
4798 vfree(ha->optrom_buffer); in qla2x00_mem_free()
4799 ha->optrom_buffer = NULL; in qla2x00_mem_free()
4800 kfree(ha->nvram); in qla2x00_mem_free()
4801 ha->nvram = NULL; in qla2x00_mem_free()
4802 kfree(ha->npiv_info); in qla2x00_mem_free()
4803 ha->npiv_info = NULL; in qla2x00_mem_free()
4804 kfree(ha->swl); in qla2x00_mem_free()
4805 ha->swl = NULL; in qla2x00_mem_free()
4806 kfree(ha->loop_id_map); in qla2x00_mem_free()
4807 ha->sf_init_cb = NULL; in qla2x00_mem_free()
4808 ha->sf_init_cb_dma = 0; in qla2x00_mem_free()
4809 ha->loop_id_map = NULL; in qla2x00_mem_free()
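
qla2x00_mem_free() mirrors qla2x00_mem_alloc(): every object is freed behind a NULL check, the pointer and its DMA handle are zeroed afterwards (keeping the function idempotent), and descriptor lists such as gbl_dsd_list and the DIF-bundle good/unusable lists are drained with list_for_each_entry_safe() before their backing dma_pool is destroyed. A small sketch of that drain-then-destroy step (struct my_dsd is invented for illustration):

    #include <linux/dmapool.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    struct my_dsd {
        struct list_head list;
        void *dsd_addr;                 /* element from the dma_pool */
        dma_addr_t dsd_list_dma;
    };

    static void my_drain_dsd_list(struct dma_pool *pool, struct list_head *head)
    {
        struct my_dsd *dsd, *nxt;

        /* _safe variant: the current entry is freed inside the loop body. */
        list_for_each_entry_safe(dsd, nxt, head, list) {
            list_del(&dsd->list);
            dma_pool_free(pool, dsd->dsd_addr, dsd->dsd_list_dma);
            kfree(dsd);
        }
        dma_pool_destroy(pool);         /* only once every element is back */
    }
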
4813 struct qla_hw_data *ha) in qla2x00_create_host() argument
4820 ql_log_pci(ql_log_fatal, ha->pdev, 0x0107, in qla2x00_create_host()
4831 vha->hw = ha; in qla2x00_create_host()
4858 (ha->max_loop_id + 1); in qla2x00_create_host()
4859 vha->gnl.l = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_create_host()
4869 vha->scan.size = ha->max_fibre_devices * sizeof(struct fab_scan_rp); in qla2x00_create_host()
4874 dma_free_coherent(&ha->pdev->dev, vha->gnl.size, in qla2x00_create_host()
4886 dev_name(&(ha->pdev->dev))); in qla2x00_create_host()
5452 struct qla_hw_data *ha = base_vha->hw; in qla83xx_schedule_work() local
5456 if (ha->dpc_lp_wq) in qla83xx_schedule_work()
5457 queue_work(ha->dpc_lp_wq, &ha->idc_aen); in qla83xx_schedule_work()
5461 if (!ha->flags.nic_core_reset_hdlr_active) { in qla83xx_schedule_work()
5462 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5463 queue_work(ha->dpc_hp_wq, &ha->nic_core_reset); in qla83xx_schedule_work()
5470 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5471 queue_work(ha->dpc_hp_wq, &ha->idc_state_handler); in qla83xx_schedule_work()
5474 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5475 queue_work(ha->dpc_hp_wq, &ha->nic_core_unrecoverable); in qla83xx_schedule_work()
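
qla83xx_schedule_work() fans IDC events out to two dedicated workqueues, a low-priority one for AEN handling and a high-priority one for reset and state work, always checking that the queue still exists before queueing. A generic sketch of that dispatch (event names, queues and work items are placeholders):

    #include <linux/workqueue.h>

    enum my_event { MY_EVT_AEN, MY_EVT_RESET };

    struct my_hw {
        struct workqueue_struct *dpc_lp_wq;     /* low priority */
        struct workqueue_struct *dpc_hp_wq;     /* high priority */
        struct work_struct idc_aen;
        struct work_struct nic_core_reset;
    };

    static void my_schedule_work(struct my_hw *hw, enum my_event evt)
    {
        switch (evt) {
        case MY_EVT_AEN:
            if (hw->dpc_lp_wq)
                queue_work(hw->dpc_lp_wq, &hw->idc_aen);
            break;
        case MY_EVT_RESET:
            if (hw->dpc_hp_wq)
                queue_work(hw->dpc_hp_wq, &hw->nic_core_reset);
            break;
        }
    }
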
5489 struct qla_hw_data *ha = in qla83xx_nic_core_unrecoverable_work() local
5491 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_nic_core_unrecoverable_work()
5497 if (ha->flags.nic_core_reset_owner) { in qla83xx_nic_core_unrecoverable_work()
5498 ha->flags.nic_core_reset_owner = 0; in qla83xx_nic_core_unrecoverable_work()
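
The IDC work handlers that follow (qla83xx_nic_core_unrecoverable_work() and friends) recover the hardware-data structure from the embedded work_struct and then look up the base host via pci_get_drvdata(). That is the standard container_of() idiom for deferred work; a minimal self-contained version:

    #include <linux/pci.h>
    #include <linux/workqueue.h>

    struct my_hw {
        struct pci_dev *pdev;
        struct work_struct nic_core_unrecoverable;
    };

    static void my_unrecoverable_work(struct work_struct *work)
    {
        /* Recover the owning structure from the member the work was queued on. */
        struct my_hw *hw = container_of(work, struct my_hw,
                                        nic_core_unrecoverable);
        void *host = pci_get_drvdata(hw->pdev); /* the base port in the real driver */

        /* ... act on hw/host ... */
        (void)host;
    }
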
5511 struct qla_hw_data *ha = in qla83xx_idc_state_handler_work() local
5513 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_idc_state_handler_work()
5557 struct qla_hw_data *ha = in qla83xx_nic_core_reset_work() local
5559 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_nic_core_reset_work()
5562 if (IS_QLA2031(ha)) { in qla83xx_nic_core_reset_work()
5569 if (!ha->flags.nic_core_reset_hdlr_active) { in qla83xx_nic_core_reset_work()
5582 ha->flags.nic_core_reset_hdlr_active = 1; in qla83xx_nic_core_reset_work()
5588 ha->flags.nic_core_reset_hdlr_active = 0; in qla83xx_nic_core_reset_work()
5596 struct qla_hw_data *ha = in qla83xx_service_idc_aen() local
5598 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_service_idc_aen()
5649 struct qla_hw_data *ha = base_vha->hw; in qla83xx_force_lock_recovery() local
5661 data = (IDC_LOCK_RECOVERY_STAGE1) | (ha->portnum << 2); in qla83xx_force_lock_recovery()
5674 if (((data & idc_lck_rcvry_owner_mask) >> 2) == ha->portnum) { in qla83xx_force_lock_recovery()
5744 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_lock() local
5753 ha->portnum); in qla83xx_idc_lock()
5835 struct qla_hw_data *ha = vha->hw; in qla24xx_process_purex_rdp() local
5866 rsp_els = dma_alloc_coherent(&ha->pdev->dev, sizeof(*rsp_els), in qla24xx_process_purex_rdp()
5874 rsp_payload = dma_alloc_coherent(&ha->pdev->dev, sizeof(*rsp_payload), in qla24xx_process_purex_rdp()
5882 sfp = dma_alloc_coherent(&ha->pdev->dev, SFP_RTDI_LEN, in qla24xx_process_purex_rdp()
5885 stat = dma_alloc_coherent(&ha->pdev->dev, sizeof(*stat), in qla24xx_process_purex_rdp()
5983 qla25xx_fdmi_port_speed_capability(ha)); in qla24xx_process_purex_rdp()
5985 qla25xx_fdmi_port_speed_currently(ha)); in qla24xx_process_purex_rdp()
6041 if (ha->flags.plogi_template_valid) { in qla24xx_process_purex_rdp()
6043 be16_to_cpu(ha->plogi_els_payld.fl_csp.sp_bb_cred); in qla24xx_process_purex_rdp()
6223 dma_free_coherent(&ha->pdev->dev, sizeof(*stat), in qla24xx_process_purex_rdp()
6226 dma_free_coherent(&ha->pdev->dev, SFP_RTDI_LEN, in qla24xx_process_purex_rdp()
6229 dma_free_coherent(&ha->pdev->dev, sizeof(*rsp_payload), in qla24xx_process_purex_rdp()
6232 dma_free_coherent(&ha->pdev->dev, sizeof(*rsp_els), in qla24xx_process_purex_rdp()
6270 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_unlock() local
6279 if (data == ha->portnum) { in qla83xx_idc_unlock()
6327 struct qla_hw_data *ha = vha->hw; in __qla83xx_set_drv_presence() local
6332 drv_presence |= (1 << ha->portnum); in __qla83xx_set_drv_presence()
6356 struct qla_hw_data *ha = vha->hw; in __qla83xx_clear_drv_presence() local
6361 drv_presence &= ~(1 << ha->portnum); in __qla83xx_clear_drv_presence()
6384 struct qla_hw_data *ha = vha->hw; in qla83xx_need_reset_handler() local
6389 ack_timeout = jiffies + (ha->fcoe_reset_timeout * HZ); in qla83xx_need_reset_handler()
6457 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_state_handler() local
6463 dev_init_timeout = jiffies + (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6482 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6485 ha->flags.nic_core_reset_owner = 0; in qla83xx_idc_state_handler()
6488 ha->portnum); in qla83xx_idc_state_handler()
6491 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6507 if (!ql2xdontresethba && ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6517 (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6527 if (ha->flags.quiesce_owner) in qla83xx_idc_state_handler()
6534 (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6537 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6540 ha->flags.nic_core_reset_owner = 0; in qla83xx_idc_state_handler()
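
qla83xx_need_reset_handler() and qla83xx_idc_state_handler() bound their waits with jiffies deadlines built from fcoe_reset_timeout/fcoe_dev_init_timeout (seconds * HZ), pushing the deadline out when another function makes progress. A small hedged sketch of that deadline idiom using time_after() (the helper and its polling interval are invented):

    #include <linux/delay.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>
    #include <linux/types.h>

    /* Poll ready() until it holds or timeout_secs elapse. */
    static int my_wait_for_ready(bool (*ready)(void), unsigned int timeout_secs)
    {
        unsigned long deadline = jiffies + timeout_secs * HZ;

        while (!ready()) {
            if (time_after(jiffies, deadline))
                return -ETIMEDOUT;
            msleep(100);
        }
        return 0;
    }
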
6570 struct qla_hw_data *ha = container_of(work, struct qla_hw_data, in qla2x00_disable_board_on_pci_error() local
6572 struct pci_dev *pdev = ha->pdev; in qla2x00_disable_board_on_pci_error()
6573 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_disable_board_on_pci_error()
6594 qla2x00_delete_all_vps(ha, base_vha); in qla2x00_disable_board_on_pci_error()
6607 qla2x00_destroy_deferred_work(ha); in qla2x00_disable_board_on_pci_error()
6623 qla2x00_mem_free(ha); in qla2x00_disable_board_on_pci_error()
6625 qla2x00_free_queues(ha); in qla2x00_disable_board_on_pci_error()
6627 qla2x00_unmap_iobases(ha); in qla2x00_disable_board_on_pci_error()
6629 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_disable_board_on_pci_error()
6655 struct qla_hw_data *ha; in qla2x00_do_dpc() local
6659 ha = (struct qla_hw_data *)data; in qla2x00_do_dpc()
6660 base_vha = pci_get_drvdata(ha->pdev); in qla2x00_do_dpc()
6671 if (!base_vha->flags.init_done || ha->flags.mbox_busy) in qla2x00_do_dpc()
6674 if (ha->flags.eeh_busy) { in qla2x00_do_dpc()
6676 "eeh_busy=%d.\n", ha->flags.eeh_busy); in qla2x00_do_dpc()
6680 ha->dpc_active = 1; in qla2x00_do_dpc()
6689 if (IS_P3P_TYPE(ha)) { in qla2x00_do_dpc()
6690 if (IS_QLA8044(ha)) { in qla2x00_do_dpc()
6693 qla8044_idc_lock(ha); in qla2x00_do_dpc()
6697 qla8044_idc_unlock(ha); in qla2x00_do_dpc()
6707 qla82xx_idc_lock(ha); in qla2x00_do_dpc()
6708 qla82xx_wr_32(ha, QLA82XX_CRB_DEV_STATE, in qla2x00_do_dpc()
6710 qla82xx_idc_unlock(ha); in qla2x00_do_dpc()
6739 } else if (IS_QLAFX00(ha)) { in qla2x00_do_dpc()
6803 !ha->flags.fw_started) in qla2x00_do_dpc()
6808 !ha->flags.fw_started) in qla2x00_do_dpc()
6820 if (ha->isp_ops->abort_isp(base_vha)) { in qla2x00_do_dpc()
6846 if (IS_QLAFX00(ha)) in qla2x00_do_dpc()
6852 if (IS_P3P_TYPE(ha)) { in qla2x00_do_dpc()
6853 if (IS_QLA82XX(ha)) in qla2x00_do_dpc()
6855 if (IS_QLA8044(ha)) in qla2x00_do_dpc()
6859 if (!ha->flags.quiesce_owner) { in qla2x00_do_dpc()
6861 if (IS_QLA82XX(ha)) { in qla2x00_do_dpc()
6862 qla82xx_idc_lock(ha); in qla2x00_do_dpc()
6865 qla82xx_idc_unlock(ha); in qla2x00_do_dpc()
6866 } else if (IS_QLA8044(ha)) { in qla2x00_do_dpc()
6867 qla8044_idc_lock(ha); in qla2x00_do_dpc()
6870 qla8044_idc_unlock(ha); in qla2x00_do_dpc()
6929 if (IS_QLAFX00(ha)) in qla2x00_do_dpc()
6939 if (!ha->interrupts_on) in qla2x00_do_dpc()
6940 ha->isp_ops->enable_intrs(ha); in qla2x00_do_dpc()
6944 if (ha->beacon_blink_led == 1) in qla2x00_do_dpc()
6945 ha->isp_ops->beacon_blink(base_vha); in qla2x00_do_dpc()
6951 if (ha->flags.eeh_busy || in qla2x00_do_dpc()
6952 ha->flags.pci_channel_io_perm_failure) in qla2x00_do_dpc()
6957 mutex_lock(&ha->mq_lock); in qla2x00_do_dpc()
6961 mutex_unlock(&ha->mq_lock); in qla2x00_do_dpc()
6968 ha->nvme_last_rptd_aen); in qla2x00_do_dpc()
6970 ha->nvme_last_rptd_aen)) { in qla2x00_do_dpc()
6973 ha->nvme_last_rptd_aen); in qla2x00_do_dpc()
6981 ha->last_zio_threshold); in qla2x00_do_dpc()
6983 ha->last_zio_threshold); in qla2x00_do_dpc()
6986 if (!IS_QLAFX00(ha)) in qla2x00_do_dpc()
6994 ha->dpc_active = 0; in qla2x00_do_dpc()
7006 ha->dpc_active = 0; in qla2x00_do_dpc()
7017 struct qla_hw_data *ha = vha->hw; in qla2xxx_wake_dpc() local
7018 struct task_struct *t = ha->dpc_thread; in qla2xxx_wake_dpc()
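
qla2x00_do_dpc() is the body of the DPC kernel thread: it sleeps until woken, skips work while the mailbox is busy or EEH recovery is in flight, marks itself active, services whichever flag bits are set (ISP abort, loop resync, qpair online/offline, ZIO threshold and NVMe AEN updates, ...), then clears dpc_active and sleeps again until kthread_stop(); the companion qla2xxx_wake_dpc() just wakes the stored dpc_thread task visible in the matches above. A stripped-down skeleton of that loop (the flag bit and handler are placeholders):

    #include <linux/bitops.h>
    #include <linux/kthread.h>
    #include <linux/sched.h>

    #define MY_DPC_RESET_NEEDED  0

    struct my_hw {
        unsigned long dpc_flags;
        int dpc_active;
    };

    static int my_do_dpc(void *data)
    {
        struct my_hw *hw = data;

        set_current_state(TASK_INTERRUPTIBLE);
        while (!kthread_should_stop()) {
            schedule();                         /* sleep until wake_up_process() */
            __set_current_state(TASK_RUNNING);

            hw->dpc_active = 1;
            if (test_and_clear_bit(MY_DPC_RESET_NEEDED, &hw->dpc_flags)) {
                /* run the reset/abort handler here */
            }
            hw->dpc_active = 0;

            set_current_state(TASK_INTERRUPTIBLE);
        }
        __set_current_state(TASK_RUNNING);
        return 0;
    }
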
7067 struct qla_hw_data *ha = vha->hw; in qla2x00_timer() local
7070 if (ha->flags.eeh_busy) { in qla2x00_timer()
7073 ha->flags.eeh_busy); in qla2x00_timer()
7082 if (!pci_channel_offline(ha->pdev)) { in qla2x00_timer()
7083 pci_read_config_word(ha->pdev, PCI_VENDOR_ID, &w); in qla2x00_timer()
7088 if (!vha->vp_idx && IS_P3P_TYPE(ha)) { in qla2x00_timer()
7091 if (IS_QLA82XX(ha)) in qla2x00_timer()
7093 else if (IS_QLA8044(ha)) in qla2x00_timer()
7097 if (!vha->vp_idx && IS_QLAFX00(ha)) in qla2x00_timer()
7112 if (!IS_QLA2100(ha) && vha->link_down_timeout) in qla2x00_timer()
7121 spin_lock_irqsave(&ha->hardware_lock, in qla2x00_timer()
7123 req = ha->req_q_map[0]; in qla2x00_timer()
7140 if (IS_QLA82XX(ha)) in qla2x00_timer()
7148 spin_unlock_irqrestore(&ha->hardware_lock, in qla2x00_timer()
7160 if (IS_QLA82XX(ha)) in qla2x00_timer()
7173 if (!vha->vp_idx && (ha->beacon_blink_led == 1)) { in qla2x00_timer()
7175 if (!IS_P3P_TYPE(ha)) { in qla2x00_timer()
7198 index = atomic_read(&ha->nvme_active_aen_cnt); in qla2x00_timer()
7200 (index != ha->nvme_last_rptd_aen) && in qla2x00_timer()
7202 ha->zio_mode == QLA_ZIO_MODE_6 && in qla2x00_timer()
7203 !ha->flags.host_shutting_down) { in qla2x00_timer()
7206 ha->nvme_last_rptd_aen); in qla2x00_timer()
7207 ha->nvme_last_rptd_aen = atomic_read(&ha->nvme_active_aen_cnt); in qla2x00_timer()
7213 atomic_read(&ha->zio_threshold) != ha->last_zio_threshold && in qla2x00_timer()
7214 IS_ZIO_THRESHOLD_CAPABLE(ha)) { in qla2x00_timer()
7217 ha->last_zio_threshold); in qla2x00_timer()
7218 ha->last_zio_threshold = atomic_read(&ha->zio_threshold); in qla2x00_timer()
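
qla2x00_timer() runs roughly once a second: it backs off while EEH recovery is in progress, reads PCI_VENDOR_ID from config space to detect a surprise-removed adapter, checks beacon/ZIO/NVMe-AEN conditions, wakes the DPC thread when anything needs doing, and re-arms itself. A compact sketch of such a self-rearming one-second timer using the timer_setup()/mod_timer() API (fields and interval are illustrative):

    #include <linux/jiffies.h>
    #include <linux/pci.h>
    #include <linux/timer.h>

    struct my_hw {
        struct pci_dev *pdev;
        struct timer_list timer;
    };

    static void my_timer_fn(struct timer_list *t)
    {
        struct my_hw *hw = from_timer(hw, t, timer);
        u16 vendor;

        /* All-ones config-space reads usually mean the device fell off the bus. */
        if (!pci_channel_offline(hw->pdev)) {
            pci_read_config_word(hw->pdev, PCI_VENDOR_ID, &vendor);
            if (vendor == 0xffff)
                return;                 /* stop re-arming; let error handling run */
        }

        /* ... periodic housekeeping, e.g. waking a DPC thread ... */

        mod_timer(&hw->timer, jiffies + HZ);    /* fire again in one second */
    }

    static void my_start_timer(struct my_hw *hw)
    {
        timer_setup(&hw->timer, my_timer_fn, 0);
        mod_timer(&hw->timer, jiffies + HZ);
    }
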
7310 struct qla_hw_data *ha = vha->hw; in qla2x00_request_firmware() local
7313 if (IS_QLA2100(ha)) { in qla2x00_request_firmware()
7315 } else if (IS_QLA2200(ha)) { in qla2x00_request_firmware()
7317 } else if (IS_QLA2300(ha) || IS_QLA2312(ha) || IS_QLA6312(ha)) { in qla2x00_request_firmware()
7319 } else if (IS_QLA2322(ha) || IS_QLA6322(ha)) { in qla2x00_request_firmware()
7321 } else if (IS_QLA24XX_TYPE(ha)) { in qla2x00_request_firmware()
7323 } else if (IS_QLA25XX(ha)) { in qla2x00_request_firmware()
7325 } else if (IS_QLA81XX(ha)) { in qla2x00_request_firmware()
7327 } else if (IS_QLA82XX(ha)) { in qla2x00_request_firmware()
7329 } else if (IS_QLA2031(ha)) { in qla2x00_request_firmware()
7331 } else if (IS_QLA8031(ha)) { in qla2x00_request_firmware()
7333 } else if (IS_QLA27XX(ha)) { in qla2x00_request_firmware()
7335 } else if (IS_QLA28XX(ha)) { in qla2x00_request_firmware()
7348 if (request_firmware(&blob->fw, blob->name, &ha->pdev->dev)) { in qla2x00_request_firmware()
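
qla2x00_request_firmware() picks an image name per chip family and loads it with request_firmware(), caching the result in a per-chip blob (the blob->fw / blob->name seen above). A minimal example of the core request/release calls, with an arbitrary file name:

    #include <linux/device.h>
    #include <linux/firmware.h>

    static int my_load_fw(struct device *dev)
    {
        const struct firmware *fw;
        int rc;

        rc = request_firmware(&fw, "my_vendor/my_chip_fw.bin", dev);
        if (rc)
            return rc;              /* image not found in the firmware search path */

        /* ... parse/copy fw->data (fw->size bytes) into the adapter ... */

        release_firmware(fw);
        return 0;
    }

The driver-level code keeps the cached blob around for later resets rather than releasing it immediately, as the blob->fw field above suggests.
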
7373 struct qla_hw_data *ha = vha->hw; in qla_pci_error_cleanup() local
7374 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla_pci_error_cleanup()
7381 ha->chip_reset++; in qla_pci_error_cleanup()
7383 ha->base_qpair->chip_reset = ha->chip_reset; in qla_pci_error_cleanup()
7384 for (i = 0; i < ha->max_qpairs; i++) { in qla_pci_error_cleanup()
7385 if (ha->queue_pair_map[i]) in qla_pci_error_cleanup()
7386 ha->queue_pair_map[i]->chip_reset = in qla_pci_error_cleanup()
7387 ha->base_qpair->chip_reset; in qla_pci_error_cleanup()
7391 if (atomic_read(&ha->num_pend_mbx_stage3)) { in qla_pci_error_cleanup()
7392 clear_bit(MBX_INTR_WAIT, &ha->mbx_cmd_flags); in qla_pci_error_cleanup()
7393 complete(&ha->mbx_intr_comp); in qla_pci_error_cleanup()
7398 while (atomic_read(&ha->num_pend_mbx_stage3) || in qla_pci_error_cleanup()
7399 atomic_read(&ha->num_pend_mbx_stage2) || in qla_pci_error_cleanup()
7400 atomic_read(&ha->num_pend_mbx_stage1)) { in qla_pci_error_cleanup()
7407 ha->flags.purge_mbox = 0; in qla_pci_error_cleanup()
7409 mutex_lock(&ha->mq_lock); in qla_pci_error_cleanup()
7412 mutex_unlock(&ha->mq_lock); in qla_pci_error_cleanup()
7416 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7417 list_for_each_entry(vp, &ha->vp_list, list) { in qla_pci_error_cleanup()
7419 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7421 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7424 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7430 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7431 list_for_each_entry(vp, &ha->vp_list, list) { in qla_pci_error_cleanup()
7433 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7436 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7439 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7447 struct qla_hw_data *ha = vha->hw; in qla2xxx_pci_error_detected() local
7460 ha->flags.eeh_busy = 0; in qla2xxx_pci_error_detected()
7467 ha->flags.eeh_busy = 1; in qla2xxx_pci_error_detected()
7471 ha->flags.pci_channel_io_perm_failure = 1; in qla2xxx_pci_error_detected()
7489 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_mmio_enabled() local
7490 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2xxx_pci_mmio_enabled()
7491 struct device_reg_24xx __iomem *reg24 = &ha->iobase->isp24; in qla2xxx_pci_mmio_enabled()
7493 if (IS_QLA82XX(ha)) in qla2xxx_pci_mmio_enabled()
7496 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2xxx_pci_mmio_enabled()
7497 if (IS_QLA2100(ha) || IS_QLA2200(ha)){ in qla2xxx_pci_mmio_enabled()
7501 } else if (IS_QLA23XX(ha)) { in qla2xxx_pci_mmio_enabled()
7505 } else if (IS_FWI2_CAPABLE(ha)) { in qla2xxx_pci_mmio_enabled()
7510 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2xxx_pci_mmio_enabled()
7527 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_slot_reset() local
7547 if (ha->mem_only) in qla2xxx_pci_slot_reset()
7559 if (ha->isp_ops->pci_config(base_vha)) in qla2xxx_pci_slot_reset()
7562 mutex_lock(&ha->mq_lock); in qla2xxx_pci_slot_reset()
7565 mutex_unlock(&ha->mq_lock); in qla2xxx_pci_slot_reset()
7569 if (ha->isp_ops->abort_isp(base_vha) == QLA_SUCCESS) in qla2xxx_pci_slot_reset()
7585 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_resume() local
7591 ha->flags.eeh_busy = 0; in qla2xxx_pci_resume()
7604 struct qla_hw_data *ha = base_vha->hw; in qla_pci_reset_prepare() local
7616 ha->flags.eeh_busy = 1; in qla_pci_reset_prepare()
7617 mutex_lock(&ha->mq_lock); in qla_pci_reset_prepare()
7620 mutex_unlock(&ha->mq_lock); in qla_pci_reset_prepare()
7631 struct qla_hw_data *ha = base_vha->hw; in qla_pci_reset_done() local
7640 ha->flags.eeh_busy = 0; in qla_pci_reset_done()
7641 mutex_lock(&ha->mq_lock); in qla_pci_reset_done()
7644 mutex_unlock(&ha->mq_lock); in qla_pci_reset_done()
7647 ha->isp_ops->abort_isp(base_vha); in qla_pci_reset_done()