Lines matching references to ha (struct qla_hw_data *)

394 	struct qla_hw_data *ha = vha->hw;  in qla_init_base_qpair()  local
396 rsp->qpair = ha->base_qpair; in qla_init_base_qpair()
398 ha->base_qpair->hw = ha; in qla_init_base_qpair()
399 ha->base_qpair->req = req; in qla_init_base_qpair()
400 ha->base_qpair->rsp = rsp; in qla_init_base_qpair()
401 ha->base_qpair->vha = vha; in qla_init_base_qpair()
402 ha->base_qpair->qp_lock_ptr = &ha->hardware_lock; in qla_init_base_qpair()
403 ha->base_qpair->use_shadow_reg = IS_SHADOW_REG_CAPABLE(ha) ? 1 : 0; in qla_init_base_qpair()
404 ha->base_qpair->msix = &ha->msix_entries[QLA_MSIX_RSP_Q]; in qla_init_base_qpair()
405 ha->base_qpair->srb_mempool = ha->srb_mempool; in qla_init_base_qpair()
406 INIT_LIST_HEAD(&ha->base_qpair->hints_list); in qla_init_base_qpair()
407 ha->base_qpair->enable_class_2 = ql2xenableclass2; in qla_init_base_qpair()
410 ha->base_qpair->pdev = ha->pdev; in qla_init_base_qpair()
412 if (IS_QLA27XX(ha) || IS_QLA83XX(ha) || IS_QLA28XX(ha)) in qla_init_base_qpair()
413 ha->base_qpair->reqq_start_iocbs = qla_83xx_start_iocbs; in qla_init_base_qpair()
416 static int qla2x00_alloc_queues(struct qla_hw_data *ha, struct req_que *req, in qla2x00_alloc_queues() argument
419 scsi_qla_host_t *vha = pci_get_drvdata(ha->pdev); in qla2x00_alloc_queues()
421 ha->req_q_map = kcalloc(ha->max_req_queues, sizeof(struct req_que *), in qla2x00_alloc_queues()
423 if (!ha->req_q_map) { in qla2x00_alloc_queues()
429 ha->rsp_q_map = kcalloc(ha->max_rsp_queues, sizeof(struct rsp_que *), in qla2x00_alloc_queues()
431 if (!ha->rsp_q_map) { in qla2x00_alloc_queues()
437 ha->base_qpair = kzalloc(sizeof(struct qla_qpair), GFP_KERNEL); in qla2x00_alloc_queues()
438 if (ha->base_qpair == NULL) { in qla2x00_alloc_queues()
446 if ((ql2xmqsupport || ql2xnvmeenable) && ha->max_qpairs) { in qla2x00_alloc_queues()
447 ha->queue_pair_map = kcalloc(ha->max_qpairs, sizeof(struct qla_qpair *), in qla2x00_alloc_queues()
449 if (!ha->queue_pair_map) { in qla2x00_alloc_queues()
460 ha->rsp_q_map[0] = rsp; in qla2x00_alloc_queues()
461 ha->req_q_map[0] = req; in qla2x00_alloc_queues()
462 set_bit(0, ha->rsp_qid_map); in qla2x00_alloc_queues()
463 set_bit(0, ha->req_qid_map); in qla2x00_alloc_queues()
467 kfree(ha->base_qpair); in qla2x00_alloc_queues()
468 ha->base_qpair = NULL; in qla2x00_alloc_queues()
470 kfree(ha->rsp_q_map); in qla2x00_alloc_queues()
471 ha->rsp_q_map = NULL; in qla2x00_alloc_queues()
473 kfree(ha->req_q_map); in qla2x00_alloc_queues()
474 ha->req_q_map = NULL; in qla2x00_alloc_queues()
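
The block above traces qla2x00_alloc_queues(): req_q_map, rsp_q_map and base_qpair are allocated in turn, and the error labels at the end unwind whatever was allocated before the failure. A minimal sketch of that allocate-then-unwind shape, using stand-in types (demo_hw, demo_qpair) rather than the driver's real structures:

#include <linux/slab.h>
#include <linux/errno.h>

/* Stand-ins for the driver's queue-map and queue-pair types. */
struct demo_qpair { int id; };

struct demo_hw {
	void **req_q_map;
	void **rsp_q_map;
	struct demo_qpair *base_qpair;
	unsigned int max_req_queues;
	unsigned int max_rsp_queues;
};

/* Condensed sketch of the allocate-then-unwind flow seen in
 * qla2x00_alloc_queues(): a failed allocation frees everything before it. */
static int demo_alloc_queues(struct demo_hw *ha)
{
	ha->req_q_map = kcalloc(ha->max_req_queues, sizeof(void *), GFP_KERNEL);
	if (!ha->req_q_map)
		goto fail;

	ha->rsp_q_map = kcalloc(ha->max_rsp_queues, sizeof(void *), GFP_KERNEL);
	if (!ha->rsp_q_map)
		goto fail_req_map;

	ha->base_qpair = kzalloc(sizeof(*ha->base_qpair), GFP_KERNEL);
	if (!ha->base_qpair)
		goto fail_rsp_map;

	return 0;

fail_rsp_map:
	kfree(ha->rsp_q_map);
	ha->rsp_q_map = NULL;
fail_req_map:
	kfree(ha->req_q_map);
	ha->req_q_map = NULL;
fail:
	return -ENOMEM;
}
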
479 static void qla2x00_free_req_que(struct qla_hw_data *ha, struct req_que *req) in qla2x00_free_req_que() argument
481 if (IS_QLAFX00(ha)) { in qla2x00_free_req_que()
483 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_req_que()
487 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_req_que()
497 static void qla2x00_free_rsp_que(struct qla_hw_data *ha, struct rsp_que *rsp) in qla2x00_free_rsp_que() argument
499 if (IS_QLAFX00(ha)) { in qla2x00_free_rsp_que()
501 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_rsp_que()
505 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_rsp_que()
512 static void qla2x00_free_queues(struct qla_hw_data *ha) in qla2x00_free_queues() argument
519 if (ha->queue_pair_map) { in qla2x00_free_queues()
520 kfree(ha->queue_pair_map); in qla2x00_free_queues()
521 ha->queue_pair_map = NULL; in qla2x00_free_queues()
523 if (ha->base_qpair) { in qla2x00_free_queues()
524 kfree(ha->base_qpair); in qla2x00_free_queues()
525 ha->base_qpair = NULL; in qla2x00_free_queues()
528 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
529 for (cnt = 0; cnt < ha->max_req_queues; cnt++) { in qla2x00_free_queues()
530 if (!test_bit(cnt, ha->req_qid_map)) in qla2x00_free_queues()
533 req = ha->req_q_map[cnt]; in qla2x00_free_queues()
534 clear_bit(cnt, ha->req_qid_map); in qla2x00_free_queues()
535 ha->req_q_map[cnt] = NULL; in qla2x00_free_queues()
537 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
538 qla2x00_free_req_que(ha, req); in qla2x00_free_queues()
539 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
541 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
543 kfree(ha->req_q_map); in qla2x00_free_queues()
544 ha->req_q_map = NULL; in qla2x00_free_queues()
547 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
548 for (cnt = 0; cnt < ha->max_rsp_queues; cnt++) { in qla2x00_free_queues()
549 if (!test_bit(cnt, ha->rsp_qid_map)) in qla2x00_free_queues()
552 rsp = ha->rsp_q_map[cnt]; in qla2x00_free_queues()
553 clear_bit(cnt, ha->rsp_qid_map); in qla2x00_free_queues()
554 ha->rsp_q_map[cnt] = NULL; in qla2x00_free_queues()
555 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
556 qla2x00_free_rsp_que(ha, rsp); in qla2x00_free_queues()
557 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_free_queues()
559 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_free_queues()
561 kfree(ha->rsp_q_map); in qla2x00_free_queues()
562 ha->rsp_q_map = NULL; in qla2x00_free_queues()
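
qla2x00_free_queues() shows the drop-the-lock-around-free idiom: hardware_lock protects the qid bitmaps and queue maps, but the per-queue free helpers end in dma_free_coherent(), which must not run with interrupts disabled, so the lock is released around each free and re-taken before the scan continues. A generic sketch of that idiom, with table, qid_map and free_entry() as stand-ins for the driver's fields:

#include <linux/spinlock.h>
#include <linux/bitops.h>

/* free_entry() stands in for qla2x00_free_req_que()/qla2x00_free_rsp_que(),
 * which call dma_free_coherent() and therefore must not run under the lock. */
static void demo_free_table(spinlock_t *lock, unsigned long *qid_map,
			    void **table, unsigned int count,
			    void (*free_entry)(void *))
{
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(lock, flags);
	for (i = 0; i < count; i++) {
		void *entry;

		if (!test_bit(i, qid_map))
			continue;

		entry = table[i];
		clear_bit(i, qid_map);
		table[i] = NULL;

		/* Drop the lock around the (potentially sleeping) free,
		 * then re-take it before scanning the next bit. */
		spin_unlock_irqrestore(lock, flags);
		free_entry(entry);
		spin_lock_irqsave(lock, flags);
	}
	spin_unlock_irqrestore(lock, flags);
}
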
568 struct qla_hw_data *ha = vha->hw; in qla2x00_pci_info_str() local
574 pci_bus = (ha->pci_attr & (BIT_9 | BIT_10)) >> 9; in qla2x00_pci_info_str()
579 pci_bus = (ha->pci_attr & BIT_8) >> 8; in qla2x00_pci_info_str()
592 struct qla_hw_data *ha = vha->hw; in qla24xx_pci_info_str() local
595 if (pci_is_pcie(ha->pdev)) { in qla24xx_pci_info_str()
599 pcie_capability_read_dword(ha->pdev, PCI_EXP_LNKCAP, &lstat); in qla24xx_pci_info_str()
625 pci_bus = (ha->pci_attr & CSRX_PCIX_BUS_MODE_MASK) >> 8; in qla24xx_pci_info_str()
641 struct qla_hw_data *ha = vha->hw; in qla2x00_fw_version_str() local
643 snprintf(str, size, "%d.%02d.%02d ", ha->fw_major_version, in qla2x00_fw_version_str()
644 ha->fw_minor_version, ha->fw_subminor_version); in qla2x00_fw_version_str()
646 if (ha->fw_attributes & BIT_9) { in qla2x00_fw_version_str()
651 switch (ha->fw_attributes & 0xFF) { in qla2x00_fw_version_str()
665 sprintf(un_str, "(%x)", ha->fw_attributes); in qla2x00_fw_version_str()
669 if (ha->fw_attributes & 0x100) in qla2x00_fw_version_str()
678 struct qla_hw_data *ha = vha->hw; in qla24xx_fw_version_str() local
680 snprintf(str, size, "%d.%02d.%02d (%x)", ha->fw_major_version, in qla24xx_fw_version_str()
681 ha->fw_minor_version, ha->fw_subminor_version, ha->fw_attributes); in qla24xx_fw_version_str()
687 struct qla_hw_data *ha = sp->vha->hw; in qla2x00_sp_free_dma() local
696 dma_unmap_sg(&ha->pdev->dev, scsi_prot_sglist(cmd), in qla2x00_sp_free_dma()
703 qla2x00_clean_dsd_pool(ha, sp->u.scmd.crc_ctx); in qla2x00_sp_free_dma()
710 dma_pool_free(ha->dl_dma_pool, ctx0, ctx0->crc_ctx_dma); in qla2x00_sp_free_dma()
717 dma_pool_free(ha->fcp_cmnd_dma_pool, ctx1->fcp_cmnd, in qla2x00_sp_free_dma()
719 list_splice(&ctx1->dsd_list, &ha->gbl_dsd_list); in qla2x00_sp_free_dma()
720 ha->gbl_dsd_inuse -= ctx1->dsd_use_cnt; in qla2x00_sp_free_dma()
721 ha->gbl_dsd_avail += ctx1->dsd_use_cnt; in qla2x00_sp_free_dma()
722 mempool_free(ctx1, ha->ctx_mempool); in qla2x00_sp_free_dma()
742 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2xxx_qpair_sp_free_dma() local
750 dma_unmap_sg(&ha->pdev->dev, scsi_prot_sglist(cmd), in qla2xxx_qpair_sp_free_dma()
757 qla2x00_clean_dsd_pool(ha, sp->u.scmd.crc_ctx); in qla2xxx_qpair_sp_free_dma()
768 dma_pool_free(ha->dif_bundl_pool, dif_dsd->dsd_addr, in qla2xxx_qpair_sp_free_dma()
777 dma_pool_free(ha->dl_dma_pool, dif_dsd->dsd_addr, in qla2xxx_qpair_sp_free_dma()
800 dma_pool_free(ha->fcp_cmnd_dma_pool, ctx1->fcp_cmnd, in qla2xxx_qpair_sp_free_dma()
802 list_splice(&ctx1->dsd_list, &ha->gbl_dsd_list); in qla2xxx_qpair_sp_free_dma()
803 ha->gbl_dsd_inuse -= ctx1->dsd_use_cnt; in qla2xxx_qpair_sp_free_dma()
804 ha->gbl_dsd_avail += ctx1->dsd_use_cnt; in qla2xxx_qpair_sp_free_dma()
805 mempool_free(ctx1, ha->ctx_mempool); in qla2xxx_qpair_sp_free_dma()
812 dma_pool_free(ha->dl_dma_pool, ctx0, ctx0->crc_ctx_dma); in qla2xxx_qpair_sp_free_dma()
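
Both sp_free_dma paths above hand per-command DMA resources back to pools owned by ha: contexts go back to their dma_pool, the command's DSD list is spliced onto ha->gbl_dsd_list (adjusting the in-use/available counters), and the context object returns to ctx_mempool. A simplified sketch of that release step, with demo_ctx standing in for the driver's CRC context and the pools passed in explicitly:

#include <linux/dmapool.h>
#include <linux/mempool.h>
#include <linux/list.h>

/* Simplified stand-in for the driver's CRC/FCP command context. */
struct demo_ctx {
	void *fcp_cmnd;
	dma_addr_t fcp_cmnd_dma;
	struct list_head dsd_list;
	int dsd_use_cnt;
};

/* Sketch of the release path mirrored by qla2x00_sp_free_dma() and
 * qla2xxx_qpair_sp_free_dma(): DMA-pool memory back to its pool, the DSD
 * list back to the adapter-wide free list, the context back to the mempool. */
static void demo_release_ctx(struct dma_pool *fcp_cmnd_pool,
			     mempool_t *ctx_mempool,
			     struct list_head *gbl_dsd_list,
			     int *gbl_dsd_inuse, int *gbl_dsd_avail,
			     struct demo_ctx *ctx)
{
	dma_pool_free(fcp_cmnd_pool, ctx->fcp_cmnd, ctx->fcp_cmnd_dma);
	list_splice(&ctx->dsd_list, gbl_dsd_list);
	*gbl_dsd_inuse -= ctx->dsd_use_cnt;
	*gbl_dsd_avail += ctx->dsd_use_cnt;
	mempool_free(ctx, ctx_mempool);
}
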
836 struct qla_hw_data *ha = vha->hw; in qla2xxx_queuecommand() local
837 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_queuecommand()
847 if (ha->mqenable) { in qla2xxx_queuecommand()
854 qpair = ha->queue_pair_map[hwq]; in qla2xxx_queuecommand()
860 if (ha->flags.eeh_busy) { in qla2xxx_queuecommand()
861 if (ha->flags.pci_channel_io_perm_failure) { in qla2xxx_queuecommand()
931 rval = ha->isp_ops->start_scsi(sp); in qla2xxx_queuecommand()
960 struct qla_hw_data *ha = vha->hw; in qla2xxx_mqueuecommand() local
961 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_mqueuecommand()
976 "qpair not online. eeh_busy=%d.\n", ha->flags.eeh_busy); in qla2xxx_mqueuecommand()
1019 rval = ha->isp_ops->start_scsi_mq(sp); in qla2xxx_mqueuecommand()
1059 struct qla_hw_data *ha = vha->hw; in qla2x00_eh_wait_on_command() local
1062 if (unlikely(pci_channel_offline(ha->pdev)) || ha->flags.eeh_busy) { in qla2x00_eh_wait_on_command()
1099 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_hba_online() local
1100 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_hba_online()
1106 ha->dpc_active) && time_before(jiffies, wait_online)) { in qla2x00_wait_for_hba_online()
1120 struct qla_hw_data *ha = vha->hw; in test_fcport_count() local
1125 spin_lock_irqsave(&ha->tgt.sess_lock, flags); in test_fcport_count()
1145 spin_unlock_irqrestore(&ha->tgt.sess_lock, flags); in test_fcport_count()
1185 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_hba_ready() local
1186 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_hba_ready()
1188 while ((qla2x00_reset_active(vha) || ha->dpc_active || in qla2x00_wait_for_hba_ready()
1189 ha->flags.mbox_busy) || in qla2x00_wait_for_hba_ready()
1203 struct qla_hw_data *ha = vha->hw; in qla2x00_wait_for_chip_reset() local
1204 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_wait_for_chip_reset()
1210 ha->dpc_active) && time_before(jiffies, wait_reset)) { in qla2x00_wait_for_chip_reset()
1215 ha->flags.chip_reset_done) in qla2x00_wait_for_chip_reset()
1218 if (ha->flags.chip_reset_done) in qla2x00_wait_for_chip_reset()
1251 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_abort() local
1257 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_abort()
1294 rval = ha->isp_ops->abort_command(sp); in qla2xxx_eh_abort()
1300 ratov_j = ha->r_a_tov/10 * 4 * 1000; in qla2xxx_eh_abort()
1307 __func__, ha->r_a_tov/10); in qla2xxx_eh_abort()
1336 struct qla_hw_data *ha = vha->hw; in qla2x00_eh_wait_for_pending_commands() local
1343 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1371 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1373 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1375 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_eh_wait_for_pending_commands()
1394 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_device_reset() local
1397 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_device_reset()
1426 if (ha->isp_ops->lun_reset(fcport, sdev->lun, 1) in qla2xxx_eh_device_reset()
1461 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_target_reset() local
1465 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_target_reset()
1494 if (ha->isp_ops->target_reset(fcport, 0, 0) != QLA_SUCCESS) { in qla2xxx_eh_target_reset()
1544 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_bus_reset() local
1546 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_bus_reset()
1609 struct qla_hw_data *ha = vha->hw; in qla2xxx_eh_host_reset() local
1613 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2xxx_eh_host_reset()
1615 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_eh_host_reset()
1632 if (qla2x00_reset_active(vha) || ha->optrom_state != QLA_SWAITING) in qla2xxx_eh_host_reset()
1647 if (ha->wq) in qla2xxx_eh_host_reset()
1648 flush_workqueue(ha->wq); in qla2xxx_eh_host_reset()
1651 if (ha->isp_ops->abort_isp(base_vha)) { in qla2xxx_eh_host_reset()
1692 struct qla_hw_data *ha = vha->hw; in qla2x00_loop_reset() local
1694 if (IS_QLAFX00(ha)) in qla2x00_loop_reset()
1697 if (ha->flags.enable_lip_full_login && !IS_CNA_CAPABLE(ha)) { in qla2x00_loop_reset()
1708 if (ha->flags.enable_lip_reset) { in qla2x00_loop_reset()
1732 struct qla_hw_data *ha = vha->hw; in qla2x00_abort_srb() local
1746 (sp->type == SRB_SCSI_CMD && !ha->flags.eeh_busy && in qla2x00_abort_srb()
1748 !qla2x00_isp_reg_stat(ha))) { in qla2x00_abort_srb()
1757 rval = ha->isp_ops->abort_command(sp); in qla2x00_abort_srb()
1760 ratov_j = ha->r_a_tov/10 * 4 * 1000; in qla2x00_abort_srb()
1767 __func__, ha->r_a_tov/10); in qla2x00_abort_srb()
1796 struct qla_hw_data *ha = vha->hw; in __qla2x00_abort_all_cmds() local
1801 if (!ha->req_q_map) in __qla2x00_abort_all_cmds()
1843 struct qla_hw_data *ha = vha->hw; in qla2x00_abort_all_cmds() local
1846 if (!ha->base_qpair) in qla2x00_abort_all_cmds()
1848 __qla2x00_abort_all_cmds(ha->base_qpair, res); in qla2x00_abort_all_cmds()
1850 if (!ha->queue_pair_map) in qla2x00_abort_all_cmds()
1852 for (que = 0; que < ha->max_qpairs; que++) { in qla2x00_abort_all_cmds()
1853 if (!ha->queue_pair_map[que]) in qla2x00_abort_all_cmds()
1856 __qla2x00_abort_all_cmds(ha->queue_pair_map[que], res); in qla2x00_abort_all_cmds()
1900 qla2x00_config_dma_addressing(struct qla_hw_data *ha) in qla2x00_config_dma_addressing() argument
1903 ha->flags.enable_64bit_addressing = 0; in qla2x00_config_dma_addressing()
1905 if (!dma_set_mask(&ha->pdev->dev, DMA_BIT_MASK(64))) { in qla2x00_config_dma_addressing()
1907 if (MSD(dma_get_required_mask(&ha->pdev->dev)) && in qla2x00_config_dma_addressing()
1908 !dma_set_coherent_mask(&ha->pdev->dev, DMA_BIT_MASK(64))) { in qla2x00_config_dma_addressing()
1910 ha->flags.enable_64bit_addressing = 1; in qla2x00_config_dma_addressing()
1911 ha->isp_ops->calc_req_entries = qla2x00_calc_iocbs_64; in qla2x00_config_dma_addressing()
1912 ha->isp_ops->build_iocbs = qla2x00_build_scsi_iocbs_64; in qla2x00_config_dma_addressing()
1917 dma_set_mask(&ha->pdev->dev, DMA_BIT_MASK(32)); in qla2x00_config_dma_addressing()
1918 dma_set_coherent_mask(&ha->pdev->dev, DMA_BIT_MASK(32)); in qla2x00_config_dma_addressing()
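
qla2x00_config_dma_addressing() follows the usual Linux DMA-mask negotiation: try a 64-bit streaming mask, keep 64-bit addressing only if the platform actually needs addresses above 4 GB (the MSD() check) and accepts a 64-bit coherent mask, otherwise fall back to 32 bits; the 64-bit path also switches isp_ops to the 64-bit IOCB builders. The core of that negotiation, reduced to the generic DMA API (demo_config_dma() is illustrative, not the driver's function):

#include <linux/dma-mapping.h>
#include <linux/pci.h>

/* Sketch of the 64-bit-with-32-bit-fallback mask setup performed by
 * qla2x00_config_dma_addressing(); returns true if 64-bit addressing is kept. */
static bool demo_config_dma(struct pci_dev *pdev)
{
	if (!dma_set_mask(&pdev->dev, DMA_BIT_MASK(64))) {
		/* Stay in 64-bit mode only if the platform can hand out
		 * addresses above 4 GB (the driver's MSD() test) and also
		 * accepts a 64-bit coherent mask. */
		if ((dma_get_required_mask(&pdev->dev) >> 32) &&
		    !dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(64)))
			return true;
	}

	dma_set_mask(&pdev->dev, DMA_BIT_MASK(32));
	dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32));
	return false;
}
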
1922 qla2x00_enable_intrs(struct qla_hw_data *ha) in qla2x00_enable_intrs() argument
1925 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2x00_enable_intrs()
1927 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_enable_intrs()
1928 ha->interrupts_on = 1; in qla2x00_enable_intrs()
1932 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_enable_intrs()
1937 qla2x00_disable_intrs(struct qla_hw_data *ha) in qla2x00_disable_intrs() argument
1940 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2x00_disable_intrs()
1942 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_disable_intrs()
1943 ha->interrupts_on = 0; in qla2x00_disable_intrs()
1947 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_disable_intrs()
1951 qla24xx_enable_intrs(struct qla_hw_data *ha) in qla24xx_enable_intrs() argument
1954 struct device_reg_24xx __iomem *reg = &ha->iobase->isp24; in qla24xx_enable_intrs()
1956 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_enable_intrs()
1957 ha->interrupts_on = 1; in qla24xx_enable_intrs()
1960 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_enable_intrs()
1964 qla24xx_disable_intrs(struct qla_hw_data *ha) in qla24xx_disable_intrs() argument
1967 struct device_reg_24xx __iomem *reg = &ha->iobase->isp24; in qla24xx_disable_intrs()
1969 if (IS_NOPOLLING_TYPE(ha)) in qla24xx_disable_intrs()
1971 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_disable_intrs()
1972 ha->interrupts_on = 0; in qla24xx_disable_intrs()
1975 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_disable_intrs()
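
The four *_intrs() helpers above share one shape: take hardware_lock, flip ha->interrupts_on, program the interrupt-control register for that ISP generation, release the lock (the register accesses themselves don't reference ha, so they don't appear in this listing). A generation-neutral sketch with the hardware write elided:

#include <linux/spinlock.h>
#include <linux/types.h>

/* Stand-in for the parts of qla_hw_data touched by the *_intrs() helpers. */
struct demo_intr_hw {
	spinlock_t hardware_lock;
	int interrupts_on;
};

static void demo_set_intrs(struct demo_intr_hw *ha, bool enable)
{
	unsigned long flags;

	spin_lock_irqsave(&ha->hardware_lock, flags);
	ha->interrupts_on = enable ? 1 : 0;
	/* Hardware-specific step elided: the real helpers write the ISP's
	 * interrupt-control register (device_reg_2xxx vs. device_reg_24xx)
	 * while still holding hardware_lock. */
	spin_unlock_irqrestore(&ha->hardware_lock, flags);
}
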
1979 qla2x00_iospace_config(struct qla_hw_data *ha) in qla2x00_iospace_config() argument
1984 if (pci_request_selected_regions(ha->pdev, ha->bars, in qla2x00_iospace_config()
1986 ql_log_pci(ql_log_fatal, ha->pdev, 0x0011, in qla2x00_iospace_config()
1988 pci_name(ha->pdev)); in qla2x00_iospace_config()
1991 if (!(ha->bars & 1)) in qla2x00_iospace_config()
1995 pio = pci_resource_start(ha->pdev, 0); in qla2x00_iospace_config()
1996 if (pci_resource_flags(ha->pdev, 0) & IORESOURCE_IO) { in qla2x00_iospace_config()
1997 if (pci_resource_len(ha->pdev, 0) < MIN_IOBASE_LEN) { in qla2x00_iospace_config()
1998 ql_log_pci(ql_log_warn, ha->pdev, 0x0012, in qla2x00_iospace_config()
2000 pci_name(ha->pdev)); in qla2x00_iospace_config()
2004 ql_log_pci(ql_log_warn, ha->pdev, 0x0013, in qla2x00_iospace_config()
2006 pci_name(ha->pdev)); in qla2x00_iospace_config()
2009 ha->pio_address = pio; in qla2x00_iospace_config()
2010 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0014, in qla2x00_iospace_config()
2012 (unsigned long long)ha->pio_address); in qla2x00_iospace_config()
2016 if (!(pci_resource_flags(ha->pdev, 1) & IORESOURCE_MEM)) { in qla2x00_iospace_config()
2017 ql_log_pci(ql_log_fatal, ha->pdev, 0x0015, in qla2x00_iospace_config()
2019 pci_name(ha->pdev)); in qla2x00_iospace_config()
2022 if (pci_resource_len(ha->pdev, 1) < MIN_IOBASE_LEN) { in qla2x00_iospace_config()
2023 ql_log_pci(ql_log_fatal, ha->pdev, 0x0016, in qla2x00_iospace_config()
2025 pci_name(ha->pdev)); in qla2x00_iospace_config()
2029 ha->iobase = ioremap(pci_resource_start(ha->pdev, 1), MIN_IOBASE_LEN); in qla2x00_iospace_config()
2030 if (!ha->iobase) { in qla2x00_iospace_config()
2031 ql_log_pci(ql_log_fatal, ha->pdev, 0x0017, in qla2x00_iospace_config()
2033 pci_name(ha->pdev)); in qla2x00_iospace_config()
2038 ha->max_req_queues = ha->max_rsp_queues = 1; in qla2x00_iospace_config()
2039 ha->msix_count = QLA_BASE_VECTORS; in qla2x00_iospace_config()
2042 if (!(ha->fw_attributes & BIT_6)) in qla2x00_iospace_config()
2046 (!IS_QLA25XX(ha) && !IS_QLA81XX(ha))) in qla2x00_iospace_config()
2049 ha->mqiobase = ioremap(pci_resource_start(ha->pdev, 3), in qla2x00_iospace_config()
2050 pci_resource_len(ha->pdev, 3)); in qla2x00_iospace_config()
2051 if (ha->mqiobase) { in qla2x00_iospace_config()
2052 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0018, in qla2x00_iospace_config()
2053 "MQIO Base=%p.\n", ha->mqiobase); in qla2x00_iospace_config()
2055 pci_read_config_word(ha->pdev, QLA_PCI_MSIX_CONTROL, &msix); in qla2x00_iospace_config()
2056 ha->msix_count = msix + 1; in qla2x00_iospace_config()
2059 ha->max_req_queues = ha->msix_count - 1; in qla2x00_iospace_config()
2060 ha->max_rsp_queues = ha->max_req_queues; in qla2x00_iospace_config()
2062 ha->max_qpairs = ha->max_rsp_queues - 1; in qla2x00_iospace_config()
2063 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0188, in qla2x00_iospace_config()
2064 "Max no of queues pairs: %d.\n", ha->max_qpairs); in qla2x00_iospace_config()
2066 ql_log_pci(ql_log_info, ha->pdev, 0x001a, in qla2x00_iospace_config()
2067 "MSI-X vector count: %d.\n", ha->msix_count); in qla2x00_iospace_config()
2069 ql_log_pci(ql_log_info, ha->pdev, 0x001b, in qla2x00_iospace_config()
2073 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x001c, in qla2x00_iospace_config()
2074 "MSIX Count: %d.\n", ha->msix_count); in qla2x00_iospace_config()
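
Both iospace_config variants follow the standard PCI bring-up steps: reserve the selected BARs, check that the register BAR is a memory resource of sufficient length, ioremap it into ha->iobase, and, when multiqueue is possible, map the MQ/MSI-X BARs and read the MSI-X control word to size msix_count and the queue limits. A sketch of the generic part, with demo-only names (DEMO_MIN_IOBASE_LEN, "demo-driver") in place of the driver's constants:

#include <linux/pci.h>
#include <linux/io.h>

#define DEMO_MIN_IOBASE_LEN 0x100	/* stand-in for the driver's MIN_IOBASE_LEN */

/* Sketch of the BAR validation + mapping done by qla2x00_iospace_config()
 * (memory BAR 1) and qla83xx_iospace_config() (memory BAR 0). */
static void __iomem *demo_map_registers(struct pci_dev *pdev, int bars, int bar)
{
	void __iomem *base;

	if (pci_request_selected_regions(pdev, bars, "demo-driver"))
		return NULL;

	if (!(pci_resource_flags(pdev, bar) & IORESOURCE_MEM))
		goto release;
	if (pci_resource_len(pdev, bar) < DEMO_MIN_IOBASE_LEN)
		goto release;

	base = ioremap(pci_resource_start(pdev, bar), DEMO_MIN_IOBASE_LEN);
	if (!base)
		goto release;
	return base;

release:
	pci_release_selected_regions(pdev, bars);
	return NULL;
}
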
2083 qla83xx_iospace_config(struct qla_hw_data *ha) in qla83xx_iospace_config() argument
2087 if (pci_request_selected_regions(ha->pdev, ha->bars, in qla83xx_iospace_config()
2089 ql_log_pci(ql_log_fatal, ha->pdev, 0x0117, in qla83xx_iospace_config()
2091 pci_name(ha->pdev)); in qla83xx_iospace_config()
2097 if (!(pci_resource_flags(ha->pdev, 0) & IORESOURCE_MEM)) { in qla83xx_iospace_config()
2098 ql_log_pci(ql_log_warn, ha->pdev, 0x0118, in qla83xx_iospace_config()
2100 pci_name(ha->pdev)); in qla83xx_iospace_config()
2103 if (pci_resource_len(ha->pdev, 0) < MIN_IOBASE_LEN) { in qla83xx_iospace_config()
2104 ql_log_pci(ql_log_warn, ha->pdev, 0x0119, in qla83xx_iospace_config()
2106 pci_name(ha->pdev)); in qla83xx_iospace_config()
2110 ha->iobase = ioremap(pci_resource_start(ha->pdev, 0), MIN_IOBASE_LEN); in qla83xx_iospace_config()
2111 if (!ha->iobase) { in qla83xx_iospace_config()
2112 ql_log_pci(ql_log_fatal, ha->pdev, 0x011a, in qla83xx_iospace_config()
2114 pci_name(ha->pdev)); in qla83xx_iospace_config()
2121 ha->max_req_queues = ha->max_rsp_queues = 1; in qla83xx_iospace_config()
2122 ha->msix_count = QLA_BASE_VECTORS; in qla83xx_iospace_config()
2123 ha->mqiobase = ioremap(pci_resource_start(ha->pdev, 4), in qla83xx_iospace_config()
2124 pci_resource_len(ha->pdev, 4)); in qla83xx_iospace_config()
2126 if (!ha->mqiobase) { in qla83xx_iospace_config()
2127 ql_log_pci(ql_log_fatal, ha->pdev, 0x011d, in qla83xx_iospace_config()
2132 ha->msixbase = ioremap(pci_resource_start(ha->pdev, 2), in qla83xx_iospace_config()
2133 pci_resource_len(ha->pdev, 2)); in qla83xx_iospace_config()
2134 if (ha->msixbase) { in qla83xx_iospace_config()
2136 pci_read_config_word(ha->pdev, in qla83xx_iospace_config()
2138 ha->msix_count = (msix & PCI_MSIX_FLAGS_QSIZE) + 1; in qla83xx_iospace_config()
2145 ha->max_req_queues = ha->msix_count - 1; in qla83xx_iospace_config()
2149 ha->max_req_queues--; in qla83xx_iospace_config()
2151 ha->max_rsp_queues = ha->max_req_queues; in qla83xx_iospace_config()
2155 ha->max_qpairs = ha->max_req_queues - 1; in qla83xx_iospace_config()
2156 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x00e3, in qla83xx_iospace_config()
2157 "Max no of queues pairs: %d.\n", ha->max_qpairs); in qla83xx_iospace_config()
2159 ql_log_pci(ql_log_info, ha->pdev, 0x011c, in qla83xx_iospace_config()
2160 "MSI-X vector count: %d.\n", ha->msix_count); in qla83xx_iospace_config()
2162 ql_log_pci(ql_log_info, ha->pdev, 0x011e, in qla83xx_iospace_config()
2166 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011f, in qla83xx_iospace_config()
2167 "MSIX Count: %d.\n", ha->msix_count); in qla83xx_iospace_config()
2566 qla2x00_set_isp_flags(struct qla_hw_data *ha) in qla2x00_set_isp_flags() argument
2568 ha->device_type = DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2569 switch (ha->pdev->device) { in qla2x00_set_isp_flags()
2571 ha->isp_type |= DT_ISP2100; in qla2x00_set_isp_flags()
2572 ha->device_type &= ~DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2573 ha->fw_srisc_address = RISC_START_ADDRESS_2100; in qla2x00_set_isp_flags()
2576 ha->isp_type |= DT_ISP2200; in qla2x00_set_isp_flags()
2577 ha->device_type &= ~DT_EXTENDED_IDS; in qla2x00_set_isp_flags()
2578 ha->fw_srisc_address = RISC_START_ADDRESS_2100; in qla2x00_set_isp_flags()
2581 ha->isp_type |= DT_ISP2300; in qla2x00_set_isp_flags()
2582 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2583 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2586 ha->isp_type |= DT_ISP2312; in qla2x00_set_isp_flags()
2587 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2588 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2591 ha->isp_type |= DT_ISP2322; in qla2x00_set_isp_flags()
2592 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2593 if (ha->pdev->subsystem_vendor == 0x1028 && in qla2x00_set_isp_flags()
2594 ha->pdev->subsystem_device == 0x0170) in qla2x00_set_isp_flags()
2595 ha->device_type |= DT_OEM_001; in qla2x00_set_isp_flags()
2596 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2599 ha->isp_type |= DT_ISP6312; in qla2x00_set_isp_flags()
2600 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2603 ha->isp_type |= DT_ISP6322; in qla2x00_set_isp_flags()
2604 ha->fw_srisc_address = RISC_START_ADDRESS_2300; in qla2x00_set_isp_flags()
2607 ha->isp_type |= DT_ISP2422; in qla2x00_set_isp_flags()
2608 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2609 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2610 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2611 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2614 ha->isp_type |= DT_ISP2432; in qla2x00_set_isp_flags()
2615 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2616 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2617 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2618 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2621 ha->isp_type |= DT_ISP8432; in qla2x00_set_isp_flags()
2622 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2623 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2624 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2625 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2628 ha->isp_type |= DT_ISP5422; in qla2x00_set_isp_flags()
2629 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2630 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2633 ha->isp_type |= DT_ISP5432; in qla2x00_set_isp_flags()
2634 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2635 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2638 ha->isp_type |= DT_ISP2532; in qla2x00_set_isp_flags()
2639 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2640 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2641 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2642 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2645 ha->isp_type |= DT_ISP8001; in qla2x00_set_isp_flags()
2646 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2647 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2648 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2649 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2652 ha->isp_type |= DT_ISP8021; in qla2x00_set_isp_flags()
2653 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2654 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2655 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2657 qla82xx_init_flags(ha); in qla2x00_set_isp_flags()
2660 ha->isp_type |= DT_ISP8044; in qla2x00_set_isp_flags()
2661 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2662 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2663 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2665 qla82xx_init_flags(ha); in qla2x00_set_isp_flags()
2668 ha->isp_type |= DT_ISP2031; in qla2x00_set_isp_flags()
2669 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2670 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2671 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2672 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2673 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2676 ha->isp_type |= DT_ISP8031; in qla2x00_set_isp_flags()
2677 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2678 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2679 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2680 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2681 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2684 ha->isp_type |= DT_ISPFX00; in qla2x00_set_isp_flags()
2687 ha->isp_type |= DT_ISP2071; in qla2x00_set_isp_flags()
2688 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2689 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2690 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2691 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2692 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2695 ha->isp_type |= DT_ISP2271; in qla2x00_set_isp_flags()
2696 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2697 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2698 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2699 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2700 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2703 ha->isp_type |= DT_ISP2261; in qla2x00_set_isp_flags()
2704 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2705 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2706 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2707 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2708 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2712 ha->isp_type |= DT_ISP2081; in qla2x00_set_isp_flags()
2713 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2714 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2715 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2716 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2717 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2721 ha->isp_type |= DT_ISP2281; in qla2x00_set_isp_flags()
2722 ha->device_type |= DT_ZIO_SUPPORTED; in qla2x00_set_isp_flags()
2723 ha->device_type |= DT_FWI2; in qla2x00_set_isp_flags()
2724 ha->device_type |= DT_IIDMA; in qla2x00_set_isp_flags()
2725 ha->device_type |= DT_T10_PI; in qla2x00_set_isp_flags()
2726 ha->fw_srisc_address = RISC_START_ADDRESS_2400; in qla2x00_set_isp_flags()
2730 if (IS_QLA82XX(ha)) in qla2x00_set_isp_flags()
2731 ha->port_no = ha->portnum & 1; in qla2x00_set_isp_flags()
2734 pci_read_config_byte(ha->pdev, PCI_INTERRUPT_PIN, &ha->port_no); in qla2x00_set_isp_flags()
2735 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || in qla2x00_set_isp_flags()
2736 IS_QLA27XX(ha) || IS_QLA28XX(ha)) in qla2x00_set_isp_flags()
2737 ha->port_no--; in qla2x00_set_isp_flags()
2739 ha->port_no = !(ha->port_no & 1); in qla2x00_set_isp_flags()
2742 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x000b, in qla2x00_set_isp_flags()
2744 ha->device_type, ha->port_no, ha->fw_srisc_address); in qla2x00_set_isp_flags()
2778 struct qla_hw_data *ha = container_of(work, in qla_heartbeat_work_fn() local
2780 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla_heartbeat_work_fn()
2782 if (!ha->flags.mbox_busy && base_vha->flags.init_done) in qla_heartbeat_work_fn()
2790 struct qla_hw_data *ha = vha->hw; in qla2x00_iocb_work_fn() local
2791 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_iocb_work_fn()
2817 struct qla_hw_data *ha; in qla2x00_probe_one() local
2872 ha = kzalloc(sizeof(struct qla_hw_data), GFP_KERNEL); in qla2x00_probe_one()
2873 if (!ha) { in qla2x00_probe_one()
2879 "Memory allocated for ha=%p.\n", ha); in qla2x00_probe_one()
2880 ha->pdev = pdev; in qla2x00_probe_one()
2881 INIT_LIST_HEAD(&ha->tgt.q_full_list); in qla2x00_probe_one()
2882 spin_lock_init(&ha->tgt.q_full_lock); in qla2x00_probe_one()
2883 spin_lock_init(&ha->tgt.sess_lock); in qla2x00_probe_one()
2884 spin_lock_init(&ha->tgt.atio_lock); in qla2x00_probe_one()
2886 spin_lock_init(&ha->sadb_lock); in qla2x00_probe_one()
2887 INIT_LIST_HEAD(&ha->sadb_tx_index_list); in qla2x00_probe_one()
2888 INIT_LIST_HEAD(&ha->sadb_rx_index_list); in qla2x00_probe_one()
2890 spin_lock_init(&ha->sadb_fp_lock); in qla2x00_probe_one()
2892 if (qla_edif_sadb_build_free_pool(ha)) { in qla2x00_probe_one()
2893 kfree(ha); in qla2x00_probe_one()
2897 atomic_set(&ha->nvme_active_aen_cnt, 0); in qla2x00_probe_one()
2900 ha->bars = bars; in qla2x00_probe_one()
2901 ha->mem_only = mem_only; in qla2x00_probe_one()
2902 spin_lock_init(&ha->hardware_lock); in qla2x00_probe_one()
2903 spin_lock_init(&ha->vport_slock); in qla2x00_probe_one()
2904 mutex_init(&ha->selflogin_lock); in qla2x00_probe_one()
2905 mutex_init(&ha->optrom_mutex); in qla2x00_probe_one()
2908 qla2x00_set_isp_flags(ha); in qla2x00_probe_one()
2911 if (IS_QLA24XX(ha) || IS_QLA25XX(ha) || IS_QLA81XX(ha) || in qla2x00_probe_one()
2912 IS_QLA83XX(ha) || IS_QLA27XX(ha) || IS_QLA28XX(ha)) in qla2x00_probe_one()
2915 ha->prev_topology = 0; in qla2x00_probe_one()
2916 ha->init_cb_size = sizeof(init_cb_t); in qla2x00_probe_one()
2917 ha->link_data_rate = PORT_SPEED_UNKNOWN; in qla2x00_probe_one()
2918 ha->optrom_size = OPTROM_SIZE_2300; in qla2x00_probe_one()
2919 ha->max_exchg = FW_MAX_EXCHANGES_CNT; in qla2x00_probe_one()
2920 atomic_set(&ha->num_pend_mbx_stage1, 0); in qla2x00_probe_one()
2921 atomic_set(&ha->num_pend_mbx_stage2, 0); in qla2x00_probe_one()
2922 atomic_set(&ha->num_pend_mbx_stage3, 0); in qla2x00_probe_one()
2923 atomic_set(&ha->zio_threshold, DEFAULT_ZIO_THRESHOLD); in qla2x00_probe_one()
2924 ha->last_zio_threshold = DEFAULT_ZIO_THRESHOLD; in qla2x00_probe_one()
2927 if (IS_QLA2100(ha)) { in qla2x00_probe_one()
2928 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2929 ha->mbx_count = MAILBOX_REGISTER_COUNT_2100; in qla2x00_probe_one()
2932 ha->max_loop_id = SNS_LAST_LOOP_ID_2100; in qla2x00_probe_one()
2933 ha->gid_list_info_size = 4; in qla2x00_probe_one()
2934 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2935 ha->flash_data_off = ~0; in qla2x00_probe_one()
2936 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2937 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2938 ha->isp_ops = &qla2100_isp_ops; in qla2x00_probe_one()
2939 } else if (IS_QLA2200(ha)) { in qla2x00_probe_one()
2940 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2941 ha->mbx_count = MAILBOX_REGISTER_COUNT_2200; in qla2x00_probe_one()
2944 ha->max_loop_id = SNS_LAST_LOOP_ID_2100; in qla2x00_probe_one()
2945 ha->gid_list_info_size = 4; in qla2x00_probe_one()
2946 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2947 ha->flash_data_off = ~0; in qla2x00_probe_one()
2948 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2949 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2950 ha->isp_ops = &qla2100_isp_ops; in qla2x00_probe_one()
2951 } else if (IS_QLA23XX(ha)) { in qla2x00_probe_one()
2952 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2100; in qla2x00_probe_one()
2953 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2956 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2957 ha->gid_list_info_size = 6; in qla2x00_probe_one()
2958 if (IS_QLA2322(ha) || IS_QLA6322(ha)) in qla2x00_probe_one()
2959 ha->optrom_size = OPTROM_SIZE_2322; in qla2x00_probe_one()
2960 ha->flash_conf_off = ~0; in qla2x00_probe_one()
2961 ha->flash_data_off = ~0; in qla2x00_probe_one()
2962 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
2963 ha->nvram_data_off = ~0; in qla2x00_probe_one()
2964 ha->isp_ops = &qla2300_isp_ops; in qla2x00_probe_one()
2965 } else if (IS_QLA24XX_TYPE(ha)) { in qla2x00_probe_one()
2966 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2967 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2970 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
2971 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2972 ha->init_cb_size = sizeof(struct mid_init_cb_24xx); in qla2x00_probe_one()
2973 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2974 ha->optrom_size = OPTROM_SIZE_24XX; in qla2x00_probe_one()
2975 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA24XX; in qla2x00_probe_one()
2976 ha->isp_ops = &qla24xx_isp_ops; in qla2x00_probe_one()
2977 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2978 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2979 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
2980 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
2981 } else if (IS_QLA25XX(ha)) { in qla2x00_probe_one()
2982 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2983 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
2986 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
2987 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
2988 ha->init_cb_size = sizeof(struct mid_init_cb_24xx); in qla2x00_probe_one()
2989 ha->gid_list_info_size = 8; in qla2x00_probe_one()
2990 ha->optrom_size = OPTROM_SIZE_25XX; in qla2x00_probe_one()
2991 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
2992 ha->isp_ops = &qla25xx_isp_ops; in qla2x00_probe_one()
2993 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
2994 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
2995 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
2996 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
2997 } else if (IS_QLA81XX(ha)) { in qla2x00_probe_one()
2998 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
2999 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3002 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3003 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3004 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3005 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3006 ha->optrom_size = OPTROM_SIZE_81XX; in qla2x00_probe_one()
3007 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3008 ha->isp_ops = &qla81xx_isp_ops; in qla2x00_probe_one()
3009 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
3010 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
3011 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3012 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3013 } else if (IS_QLA82XX(ha)) { in qla2x00_probe_one()
3014 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3015 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3018 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3019 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3020 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3021 ha->optrom_size = OPTROM_SIZE_82XX; in qla2x00_probe_one()
3022 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3023 ha->isp_ops = &qla82xx_isp_ops; in qla2x00_probe_one()
3024 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
3025 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
3026 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
3027 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
3028 } else if (IS_QLA8044(ha)) { in qla2x00_probe_one()
3029 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3030 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3033 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3034 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3035 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3036 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
3037 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3038 ha->isp_ops = &qla8044_isp_ops; in qla2x00_probe_one()
3039 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF; in qla2x00_probe_one()
3040 ha->flash_data_off = FARX_ACCESS_FLASH_DATA; in qla2x00_probe_one()
3041 ha->nvram_conf_off = FARX_ACCESS_NVRAM_CONF; in qla2x00_probe_one()
3042 ha->nvram_data_off = FARX_ACCESS_NVRAM_DATA; in qla2x00_probe_one()
3043 } else if (IS_QLA83XX(ha)) { in qla2x00_probe_one()
3044 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3045 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3046 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3049 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3050 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3051 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3052 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3053 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
3054 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3055 ha->isp_ops = &qla83xx_isp_ops; in qla2x00_probe_one()
3056 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
3057 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
3058 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3059 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3060 } else if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3061 ha->max_fibre_devices = MAX_FIBRE_DEVICES_FX00; in qla2x00_probe_one()
3062 ha->mbx_count = MAILBOX_REGISTER_COUNT_FX00; in qla2x00_probe_one()
3063 ha->aen_mbx_count = AEN_MAILBOX_REGISTER_COUNT_FX00; in qla2x00_probe_one()
3066 ha->isp_ops = &qlafx00_isp_ops; in qla2x00_probe_one()
3067 ha->port_down_retry_count = 30; /* default value */ in qla2x00_probe_one()
3068 ha->mr.fw_hbt_cnt = QLAFX00_HEARTBEAT_INTERVAL; in qla2x00_probe_one()
3069 ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL; in qla2x00_probe_one()
3070 ha->mr.fw_critemp_timer_tick = QLAFX00_CRITEMP_INTERVAL; in qla2x00_probe_one()
3071 ha->mr.fw_hbt_en = 1; in qla2x00_probe_one()
3072 ha->mr.host_info_resend = false; in qla2x00_probe_one()
3073 ha->mr.hinfo_resend_timer_tick = QLAFX00_HINFO_RESEND_INTERVAL; in qla2x00_probe_one()
3074 } else if (IS_QLA27XX(ha)) { in qla2x00_probe_one()
3075 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3076 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3077 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3080 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3081 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3082 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3083 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3084 ha->optrom_size = OPTROM_SIZE_83XX; in qla2x00_probe_one()
3085 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3086 ha->isp_ops = &qla27xx_isp_ops; in qla2x00_probe_one()
3087 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_81XX; in qla2x00_probe_one()
3088 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_81XX; in qla2x00_probe_one()
3089 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3090 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3091 } else if (IS_QLA28XX(ha)) { in qla2x00_probe_one()
3092 ha->portnum = PCI_FUNC(ha->pdev->devfn); in qla2x00_probe_one()
3093 ha->max_fibre_devices = MAX_FIBRE_DEVICES_2400; in qla2x00_probe_one()
3094 ha->mbx_count = MAILBOX_REGISTER_COUNT; in qla2x00_probe_one()
3097 ha->tgt.atio_q_length = ATIO_ENTRY_CNT_24XX; in qla2x00_probe_one()
3098 ha->max_loop_id = SNS_LAST_LOOP_ID_2300; in qla2x00_probe_one()
3099 ha->init_cb_size = sizeof(struct mid_init_cb_81xx); in qla2x00_probe_one()
3100 ha->gid_list_info_size = 8; in qla2x00_probe_one()
3101 ha->optrom_size = OPTROM_SIZE_28XX; in qla2x00_probe_one()
3102 ha->nvram_npiv_size = QLA_MAX_VPORTS_QLA25XX; in qla2x00_probe_one()
3103 ha->isp_ops = &qla27xx_isp_ops; in qla2x00_probe_one()
3104 ha->flash_conf_off = FARX_ACCESS_FLASH_CONF_28XX; in qla2x00_probe_one()
3105 ha->flash_data_off = FARX_ACCESS_FLASH_DATA_28XX; in qla2x00_probe_one()
3106 ha->nvram_conf_off = ~0; in qla2x00_probe_one()
3107 ha->nvram_data_off = ~0; in qla2x00_probe_one()
3115 ha->mbx_count, req_length, rsp_length, ha->max_loop_id, in qla2x00_probe_one()
3116 ha->init_cb_size, ha->gid_list_info_size, ha->optrom_size, in qla2x00_probe_one()
3117 ha->nvram_npiv_size, ha->max_fibre_devices); in qla2x00_probe_one()
3121 ha->isp_ops, ha->flash_conf_off, ha->flash_data_off, in qla2x00_probe_one()
3122 ha->nvram_conf_off, ha->nvram_data_off); in qla2x00_probe_one()
3125 ret = ha->isp_ops->iospace_config(ha); in qla2x00_probe_one()
3131 pdev->device, pdev->irq, ha->iobase); in qla2x00_probe_one()
3132 mutex_init(&ha->vport_lock); in qla2x00_probe_one()
3133 mutex_init(&ha->mq_lock); in qla2x00_probe_one()
3134 init_completion(&ha->mbx_cmd_comp); in qla2x00_probe_one()
3135 complete(&ha->mbx_cmd_comp); in qla2x00_probe_one()
3136 init_completion(&ha->mbx_intr_comp); in qla2x00_probe_one()
3137 init_completion(&ha->dcbx_comp); in qla2x00_probe_one()
3138 init_completion(&ha->lb_portup_comp); in qla2x00_probe_one()
3140 set_bit(0, (unsigned long *) ha->vp_idx_map); in qla2x00_probe_one()
3142 qla2x00_config_dma_addressing(ha); in qla2x00_probe_one()
3145 ha->flags.enable_64bit_addressing ? "enable" : in qla2x00_probe_one()
3147 ret = qla2x00_mem_alloc(ha, req_length, rsp_length, &req, &rsp); in qla2x00_probe_one()
3160 base_vha = qla2x00_create_host(sht, ha); in qla2x00_probe_one()
3171 if (IS_QLA2XXX_MIDTYPE(ha)) in qla2x00_probe_one()
3179 ha->mr.fcport.vha = base_vha; in qla2x00_probe_one()
3180 ha->mr.fcport.port_type = FCT_UNKNOWN; in qla2x00_probe_one()
3181 ha->mr.fcport.loop_id = FC_NO_LOOP_ID; in qla2x00_probe_one()
3182 qla2x00_set_fcport_state(&ha->mr.fcport, FCS_UNCONFIGURED); in qla2x00_probe_one()
3183 ha->mr.fcport.supported_classes = FC_COS_UNSPECIFIED; in qla2x00_probe_one()
3184 ha->mr.fcport.scan_state = 1; in qla2x00_probe_one()
3191 if (!IS_FWI2_CAPABLE(ha)) { in qla2x00_probe_one()
3192 if (IS_QLA2100(ha)) in qla2x00_probe_one()
3195 if (!IS_QLA82XX(ha)) in qla2x00_probe_one()
3198 host->max_id = ha->max_fibre_devices; in qla2x00_probe_one()
3201 if (IS_T10_PI_CAPABLE(ha) && ql2xenabledif) in qla2x00_probe_one()
3207 if (!IS_QLAFX00(ha) && !IS_FWI2_CAPABLE(ha) && in qla2x00_probe_one()
3224 INIT_WORK(&ha->heartbeat_work, qla_heartbeat_work_fn); in qla2x00_probe_one()
3227 ret = qla2x00_request_irqs(ha, rsp); in qla2x00_probe_one()
3232 ret = qla2x00_alloc_queues(ha, req, rsp); in qla2x00_probe_one()
3241 if (ha->mqenable) { in qla2x00_probe_one()
3243 host->nr_hw_queues = ha->max_qpairs; in qla2x00_probe_one()
3249 host->nr_hw_queues = ha->max_qpairs; in qla2x00_probe_one()
3259 qlt_probe_one_stage1(base_vha, ha); in qla2x00_probe_one()
3267 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3268 ha->rsp_q_map[0] = rsp; in qla2x00_probe_one()
3269 ha->req_q_map[0] = req; in qla2x00_probe_one()
3270 set_bit(0, ha->req_qid_map); in qla2x00_probe_one()
3271 set_bit(0, ha->rsp_qid_map); in qla2x00_probe_one()
3275 req->req_q_in = &ha->iobase->isp24.req_q_in; in qla2x00_probe_one()
3276 req->req_q_out = &ha->iobase->isp24.req_q_out; in qla2x00_probe_one()
3277 rsp->rsp_q_in = &ha->iobase->isp24.rsp_q_in; in qla2x00_probe_one()
3278 rsp->rsp_q_out = &ha->iobase->isp24.rsp_q_out; in qla2x00_probe_one()
3279 if (ha->mqenable || IS_QLA83XX(ha) || IS_QLA27XX(ha) || in qla2x00_probe_one()
3280 IS_QLA28XX(ha)) { in qla2x00_probe_one()
3281 req->req_q_in = &ha->mqiobase->isp25mq.req_q_in; in qla2x00_probe_one()
3282 req->req_q_out = &ha->mqiobase->isp25mq.req_q_out; in qla2x00_probe_one()
3283 rsp->rsp_q_in = &ha->mqiobase->isp25mq.rsp_q_in; in qla2x00_probe_one()
3284 rsp->rsp_q_out = &ha->mqiobase->isp25mq.rsp_q_out; in qla2x00_probe_one()
3287 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3288 req->req_q_in = &ha->iobase->ispfx00.req_q_in; in qla2x00_probe_one()
3289 req->req_q_out = &ha->iobase->ispfx00.req_q_out; in qla2x00_probe_one()
3290 rsp->rsp_q_in = &ha->iobase->ispfx00.rsp_q_in; in qla2x00_probe_one()
3291 rsp->rsp_q_out = &ha->iobase->ispfx00.rsp_q_out; in qla2x00_probe_one()
3294 if (IS_P3P_TYPE(ha)) { in qla2x00_probe_one()
3295 req->req_q_out = &ha->iobase->isp82.req_q_out[0]; in qla2x00_probe_one()
3296 rsp->rsp_q_in = &ha->iobase->isp82.rsp_q_in[0]; in qla2x00_probe_one()
3297 rsp->rsp_q_out = &ha->iobase->isp82.rsp_q_out[0]; in qla2x00_probe_one()
3302 ha->rsp_q_map, ha->req_q_map, rsp->req, req->rsp); in qla2x00_probe_one()
3310 ha->rsp_q_map, ha->req_q_map, rsp->req, req->rsp); in qla2x00_probe_one()
3315 ha->wq = alloc_workqueue("qla2xxx_wq", WQ_MEM_RECLAIM, 0); in qla2x00_probe_one()
3316 if (unlikely(!ha->wq)) { in qla2x00_probe_one()
3321 if (ha->isp_ops->initialize_adapter(base_vha)) { in qla2x00_probe_one()
3326 if (IS_QLA82XX(ha)) { in qla2x00_probe_one()
3327 qla82xx_idc_lock(ha); in qla2x00_probe_one()
3328 qla82xx_wr_32(ha, QLA82XX_CRB_DEV_STATE, in qla2x00_probe_one()
3330 qla82xx_idc_unlock(ha); in qla2x00_probe_one()
3333 } else if (IS_QLA8044(ha)) { in qla2x00_probe_one()
3334 qla8044_idc_lock(ha); in qla2x00_probe_one()
3338 qla8044_idc_unlock(ha); in qla2x00_probe_one()
3347 if (IS_QLAFX00(ha)) in qla2x00_probe_one()
3358 if (IS_QLA25XX(ha) && !(ha->fw_attributes & BIT_6)) in qla2x00_probe_one()
3359 ha->mqenable = 0; in qla2x00_probe_one()
3361 if (ha->mqenable) { in qla2x00_probe_one()
3371 for (i = 0; i < ha->max_qpairs; i++) in qla2x00_probe_one()
3376 if (ha->flags.running_gold_fw) in qla2x00_probe_one()
3382 ha->dpc_thread = kthread_create(qla2x00_do_dpc, ha, in qla2x00_probe_one()
3384 if (IS_ERR(ha->dpc_thread)) { in qla2x00_probe_one()
3387 ret = PTR_ERR(ha->dpc_thread); in qla2x00_probe_one()
3388 ha->dpc_thread = NULL; in qla2x00_probe_one()
3402 INIT_WORK(&ha->board_disable, qla2x00_disable_board_on_pci_error); in qla2x00_probe_one()
3404 if (IS_QLA8031(ha) || IS_MCTP_CAPABLE(ha)) { in qla2x00_probe_one()
3406 ha->dpc_lp_wq = create_singlethread_workqueue(wq_name); in qla2x00_probe_one()
3407 INIT_WORK(&ha->idc_aen, qla83xx_service_idc_aen); in qla2x00_probe_one()
3410 ha->dpc_hp_wq = create_singlethread_workqueue(wq_name); in qla2x00_probe_one()
3411 INIT_WORK(&ha->nic_core_reset, qla83xx_nic_core_reset_work); in qla2x00_probe_one()
3412 INIT_WORK(&ha->idc_state_handler, in qla2x00_probe_one()
3414 INIT_WORK(&ha->nic_core_unrecoverable, in qla2x00_probe_one()
3419 list_add_tail(&base_vha->list, &ha->vp_list); in qla2x00_probe_one()
3420 base_vha->host->irq = ha->pdev->irq; in qla2x00_probe_one()
3429 ha); in qla2x00_probe_one()
3431 if (IS_T10_PI_CAPABLE(ha) && ql2xenabledif) { in qla2x00_probe_one()
3432 if (ha->fw_attributes & BIT_4) { in qla2x00_probe_one()
3453 if (IS_PI_IPGUARD_CAPABLE(ha) && in qla2x00_probe_one()
3454 (ql2xenabledif > 1 || IS_PI_DIFB_DIX0_CAPABLE(ha))) in qla2x00_probe_one()
3465 ha->isp_ops->enable_intrs(ha); in qla2x00_probe_one()
3467 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3470 host->sg_tablesize = (ha->mr.extended_io_enabled) ? in qla2x00_probe_one()
3480 ha->prev_minidump_failed = 0; in qla2x00_probe_one()
3494 if (IS_QLAFX00(ha)) { in qla2x00_probe_one()
3508 "QLogic %s - %s.\n", ha->model_number, ha->model_desc); in qla2x00_probe_one()
3511 pdev->device, ha->isp_ops->pci_info_str(base_vha, pci_info, in qla2x00_probe_one()
3513 pci_name(pdev), ha->flags.enable_64bit_addressing ? '+' : '-', in qla2x00_probe_one()
3515 ha->isp_ops->fw_version_str(base_vha, fw_str, sizeof(fw_str))); in qla2x00_probe_one()
3517 qlt_add_target(ha, base_vha); in qla2x00_probe_one()
3530 dma_free_coherent(&ha->pdev->dev, base_vha->gnl.size, in qla2x00_probe_one()
3538 if (ha->dpc_thread) { in qla2x00_probe_one()
3539 struct task_struct *t = ha->dpc_thread; in qla2x00_probe_one()
3541 ha->dpc_thread = NULL; in qla2x00_probe_one()
3557 qla2x00_mem_free(ha); in qla2x00_probe_one()
3558 qla2x00_free_req_que(ha, req); in qla2x00_probe_one()
3559 qla2x00_free_rsp_que(ha, rsp); in qla2x00_probe_one()
3560 qla2x00_clear_drv_active(ha); in qla2x00_probe_one()
3563 if (IS_P3P_TYPE(ha)) { in qla2x00_probe_one()
3564 if (!ha->nx_pcibase) in qla2x00_probe_one()
3565 iounmap((device_reg_t *)ha->nx_pcibase); in qla2x00_probe_one()
3567 iounmap((device_reg_t *)ha->nxdb_wr_ptr); in qla2x00_probe_one()
3569 if (ha->iobase) in qla2x00_probe_one()
3570 iounmap(ha->iobase); in qla2x00_probe_one()
3571 if (ha->cregbase) in qla2x00_probe_one()
3572 iounmap(ha->cregbase); in qla2x00_probe_one()
3574 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_probe_one()
3575 kfree(ha); in qla2x00_probe_one()
3586 struct qla_hw_data *ha; in __qla_set_remove_flag() local
3591 ha = base_vha->hw; in __qla_set_remove_flag()
3593 spin_lock_irqsave(&ha->vport_slock, flags); in __qla_set_remove_flag()
3594 list_for_each_entry(vp, &ha->vp_list, list) in __qla_set_remove_flag()
3602 spin_unlock_irqrestore(&ha->vport_slock, flags); in __qla_set_remove_flag()
3609 struct qla_hw_data *ha; in qla2x00_shutdown() local
3612 ha = vha->hw; in qla2x00_shutdown()
3622 cancel_work_sync(&ha->board_disable); in qla2x00_shutdown()
3628 if (IS_QLAFX00(ha)) in qla2x00_shutdown()
3632 if (ha->flags.fce_enabled) { in qla2x00_shutdown()
3634 ha->flags.fce_enabled = 0; in qla2x00_shutdown()
3638 if (ha->eft) in qla2x00_shutdown()
3641 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_shutdown()
3642 IS_QLA28XX(ha)) { in qla2x00_shutdown()
3643 if (ha->flags.fw_started) in qla2x00_shutdown()
3658 if (ha->interrupts_on) { in qla2x00_shutdown()
3660 ha->isp_ops->disable_intrs(ha); in qla2x00_shutdown()
3665 qla2x00_free_fw_dump(ha); in qla2x00_shutdown()
3674 qla2x00_delete_all_vps(struct qla_hw_data *ha, scsi_qla_host_t *base_vha) in qla2x00_delete_all_vps() argument
3679 mutex_lock(&ha->vport_lock); in qla2x00_delete_all_vps()
3680 while (ha->cur_vport_count) { in qla2x00_delete_all_vps()
3681 spin_lock_irqsave(&ha->vport_slock, flags); in qla2x00_delete_all_vps()
3683 BUG_ON(base_vha->list.next == &ha->vp_list); in qla2x00_delete_all_vps()
3688 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla2x00_delete_all_vps()
3689 mutex_unlock(&ha->vport_lock); in qla2x00_delete_all_vps()
3696 mutex_lock(&ha->vport_lock); in qla2x00_delete_all_vps()
3698 mutex_unlock(&ha->vport_lock); in qla2x00_delete_all_vps()
3703 qla2x00_destroy_deferred_work(struct qla_hw_data *ha) in qla2x00_destroy_deferred_work() argument
3706 if (ha->dpc_lp_wq) { in qla2x00_destroy_deferred_work()
3707 cancel_work_sync(&ha->idc_aen); in qla2x00_destroy_deferred_work()
3708 destroy_workqueue(ha->dpc_lp_wq); in qla2x00_destroy_deferred_work()
3709 ha->dpc_lp_wq = NULL; in qla2x00_destroy_deferred_work()
3712 if (ha->dpc_hp_wq) { in qla2x00_destroy_deferred_work()
3713 cancel_work_sync(&ha->nic_core_reset); in qla2x00_destroy_deferred_work()
3714 cancel_work_sync(&ha->idc_state_handler); in qla2x00_destroy_deferred_work()
3715 cancel_work_sync(&ha->nic_core_unrecoverable); in qla2x00_destroy_deferred_work()
3716 destroy_workqueue(ha->dpc_hp_wq); in qla2x00_destroy_deferred_work()
3717 ha->dpc_hp_wq = NULL; in qla2x00_destroy_deferred_work()
3721 if (ha->dpc_thread) { in qla2x00_destroy_deferred_work()
3722 struct task_struct *t = ha->dpc_thread; in qla2x00_destroy_deferred_work()
3728 ha->dpc_thread = NULL; in qla2x00_destroy_deferred_work()
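
qla2x00_destroy_deferred_work() tears things down in a safe order: queued work items are cancelled with cancel_work_sync() before their workqueue is destroyed, and ha->dpc_thread is cleared before the thread is stopped through a saved pointer (the kthread_stop() call itself doesn't reference ha, so it is not listed above). A minimal sketch of that ordering with a reduced demo_dpc_hw structure:

#include <linux/workqueue.h>
#include <linux/kthread.h>

/* Stand-in for the workqueue/kthread fields of qla_hw_data. */
struct demo_dpc_hw {
	struct workqueue_struct *wq;
	struct work_struct deferred_work;
	struct task_struct *dpc_thread;
};

static void demo_destroy_deferred_work(struct demo_dpc_hw *ha)
{
	if (ha->wq) {
		/* Cancel pending work before destroying its workqueue. */
		cancel_work_sync(&ha->deferred_work);
		destroy_workqueue(ha->wq);
		ha->wq = NULL;
	}

	if (ha->dpc_thread) {
		struct task_struct *t = ha->dpc_thread;

		/* Clear the public pointer first so nothing queues new work
		 * to or wakes a thread that is being stopped. */
		ha->dpc_thread = NULL;
		kthread_stop(t);
	}
}
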
3734 qla2x00_unmap_iobases(struct qla_hw_data *ha) in qla2x00_unmap_iobases() argument
3736 if (IS_QLA82XX(ha)) { in qla2x00_unmap_iobases()
3738 iounmap((device_reg_t *)ha->nx_pcibase); in qla2x00_unmap_iobases()
3740 iounmap((device_reg_t *)ha->nxdb_wr_ptr); in qla2x00_unmap_iobases()
3742 if (ha->iobase) in qla2x00_unmap_iobases()
3743 iounmap(ha->iobase); in qla2x00_unmap_iobases()
3745 if (ha->cregbase) in qla2x00_unmap_iobases()
3746 iounmap(ha->cregbase); in qla2x00_unmap_iobases()
3748 if (ha->mqiobase) in qla2x00_unmap_iobases()
3749 iounmap(ha->mqiobase); in qla2x00_unmap_iobases()
3751 if ((IS_QLA83XX(ha) || IS_QLA27XX(ha) || IS_QLA28XX(ha)) && in qla2x00_unmap_iobases()
3752 ha->msixbase) in qla2x00_unmap_iobases()
3753 iounmap(ha->msixbase); in qla2x00_unmap_iobases()
3758 qla2x00_clear_drv_active(struct qla_hw_data *ha) in qla2x00_clear_drv_active() argument
3760 if (IS_QLA8044(ha)) { in qla2x00_clear_drv_active()
3761 qla8044_idc_lock(ha); in qla2x00_clear_drv_active()
3762 qla8044_clear_drv_active(ha); in qla2x00_clear_drv_active()
3763 qla8044_idc_unlock(ha); in qla2x00_clear_drv_active()
3764 } else if (IS_QLA82XX(ha)) { in qla2x00_clear_drv_active()
3765 qla82xx_idc_lock(ha); in qla2x00_clear_drv_active()
3766 qla82xx_clear_drv_active(ha); in qla2x00_clear_drv_active()
3767 qla82xx_idc_unlock(ha); in qla2x00_clear_drv_active()
3775 struct qla_hw_data *ha; in qla2x00_remove_one() local
3778 ha = base_vha->hw; in qla2x00_remove_one()
3782 cancel_work_sync(&ha->board_disable); in qla2x00_remove_one()
3790 dma_free_coherent(&ha->pdev->dev, base_vha->gnl.size, in qla2x00_remove_one()
3794 kfree(ha); in qla2x00_remove_one()
3807 if (IS_QLA25XX(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_remove_one()
3808 IS_QLA28XX(ha)) { in qla2x00_remove_one()
3809 if (ha->flags.fw_started) in qla2x00_remove_one()
3811 } else if (!IS_QLAFX00(ha)) { in qla2x00_remove_one()
3812 if (IS_QLA8031(ha)) { in qla2x00_remove_one()
3827 dma_free_coherent(&ha->pdev->dev, in qla2x00_remove_one()
3836 if (IS_QLAFX00(ha)) in qla2x00_remove_one()
3839 qla2x00_delete_all_vps(ha, base_vha); in qla2x00_remove_one()
3852 if (ha->exlogin_buf) in qla2x00_remove_one()
3853 qla2x00_free_exlogin_buffer(ha); in qla2x00_remove_one()
3856 if (ha->exchoffld_buf) in qla2x00_remove_one()
3857 qla2x00_free_exchoffld_buffer(ha); in qla2x00_remove_one()
3859 qla2x00_destroy_deferred_work(ha); in qla2x00_remove_one()
3861 qlt_remove_target(ha, base_vha); in qla2x00_remove_one()
3871 qla2x00_clear_drv_active(ha); in qla2x00_remove_one()
3875 qla2x00_unmap_iobases(ha); in qla2x00_remove_one()
3877 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_remove_one()
3878 kfree(ha); in qla2x00_remove_one()
3902 struct qla_hw_data *ha = vha->hw; in qla2x00_free_device() local
3914 if (ha->interrupts_on) { in qla2x00_free_device()
3916 ha->isp_ops->disable_intrs(ha); in qla2x00_free_device()
3924 if (ha->wq) { in qla2x00_free_device()
3925 flush_workqueue(ha->wq); in qla2x00_free_device()
3926 destroy_workqueue(ha->wq); in qla2x00_free_device()
3927 ha->wq = NULL; in qla2x00_free_device()
3933 qla2x00_mem_free(ha); in qla2x00_free_device()
3937 qla_edif_sadb_release_free_pool(ha); in qla2x00_free_device()
3938 qla_edif_sadb_release(ha); in qla2x00_free_device()
3940 qla2x00_free_queues(ha); in qla2x00_free_device()
4030 static void qla2x00_set_reserved_loop_ids(struct qla_hw_data *ha) in qla2x00_set_reserved_loop_ids() argument
4034 if (IS_FWI2_CAPABLE(ha)) in qla2x00_set_reserved_loop_ids()
4038 set_bit(i, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
4039 set_bit(MANAGEMENT_SERVER, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
4040 set_bit(BROADCAST, ha->loop_id_map); in qla2x00_set_reserved_loop_ids()
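qla2x00_set_reserved_loop_ids() pre-marks loop IDs owned by firmware and well-known addresses so that later searches of ha->loop_id_map skip them. A sketch of that reserve-then-allocate bitmap idiom, assuming made-up constants (MY_MAP_SIZE, MY_RESERVED_ID) rather than the driver's:

#include <linux/bitmap.h>
#include <linux/errno.h>

#define MY_MAP_SIZE	128	/* hypothetical number of IDs */
#define MY_RESERVED_ID	0	/* hypothetical well-known ID */

static unsigned long *my_map;

static int my_map_init(void)
{
	my_map = bitmap_zalloc(MY_MAP_SIZE, GFP_KERNEL);
	if (!my_map)
		return -ENOMEM;

	/* reserve IDs the hardware owns so find_first_zero_bit() skips them */
	set_bit(MY_RESERVED_ID, my_map);
	return 0;
}

static int my_map_alloc_id(void)
{
	int id = find_first_zero_bit(my_map, MY_MAP_SIZE);

	if (id >= MY_MAP_SIZE)
		return -ENOSPC;
	set_bit(id, my_map);
	return id;
}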
4052 qla2x00_mem_alloc(struct qla_hw_data *ha, uint16_t req_len, uint16_t rsp_len, in qla2x00_mem_alloc() argument
4058 ha->init_cb = dma_alloc_coherent(&ha->pdev->dev, ha->init_cb_size, in qla2x00_mem_alloc()
4059 &ha->init_cb_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4060 if (!ha->init_cb) in qla2x00_mem_alloc()
4063 rc = btree_init32(&ha->host_map); in qla2x00_mem_alloc()
4067 if (qlt_mem_alloc(ha) < 0) in qla2x00_mem_alloc()
4070 ha->gid_list = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4071 qla2x00_gid_list_size(ha), &ha->gid_list_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4072 if (!ha->gid_list) in qla2x00_mem_alloc()
4075 ha->srb_mempool = mempool_create_slab_pool(SRB_MIN_REQ, srb_cachep); in qla2x00_mem_alloc()
4076 if (!ha->srb_mempool) in qla2x00_mem_alloc()
4079 if (IS_P3P_TYPE(ha) || IS_QLA27XX(ha) || (ql2xsecenable && IS_QLA28XX(ha))) { in qla2x00_mem_alloc()
4088 ha->ctx_mempool = mempool_create_slab_pool(SRB_MIN_REQ, in qla2x00_mem_alloc()
4090 if (!ha->ctx_mempool) in qla2x00_mem_alloc()
4092 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0021, in qla2x00_mem_alloc()
4094 ctx_cachep, ha->ctx_mempool); in qla2x00_mem_alloc()
4098 ha->nvram = kzalloc(MAX_NVRAM_SIZE, GFP_KERNEL); in qla2x00_mem_alloc()
4099 if (!ha->nvram) in qla2x00_mem_alloc()
4103 ha->pdev->device); in qla2x00_mem_alloc()
4104 ha->s_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4106 if (!ha->s_dma_pool) in qla2x00_mem_alloc()
4109 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0022, in qla2x00_mem_alloc()
4111 ha->init_cb, ha->gid_list, ha->srb_mempool, ha->s_dma_pool); in qla2x00_mem_alloc()
4113 if (IS_P3P_TYPE(ha) || ql2xenabledif || (IS_QLA28XX(ha) && ql2xsecenable)) { in qla2x00_mem_alloc()
4114 ha->dl_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4116 if (!ha->dl_dma_pool) { in qla2x00_mem_alloc()
4117 ql_log_pci(ql_log_fatal, ha->pdev, 0x0023, in qla2x00_mem_alloc()
4122 ha->fcp_cmnd_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4124 if (!ha->fcp_cmnd_dma_pool) { in qla2x00_mem_alloc()
4125 ql_log_pci(ql_log_fatal, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4135 ha->dif_bundl_pool = dma_pool_create(name, in qla2x00_mem_alloc()
4136 &ha->pdev->dev, DIF_BUNDLING_DMA_POOL_SIZE, 8, 0); in qla2x00_mem_alloc()
4137 if (!ha->dif_bundl_pool) { in qla2x00_mem_alloc()
4138 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4144 INIT_LIST_HEAD(&ha->pool.good.head); in qla2x00_mem_alloc()
4145 INIT_LIST_HEAD(&ha->pool.unusable.head); in qla2x00_mem_alloc()
4146 ha->pool.good.count = 0; in qla2x00_mem_alloc()
4147 ha->pool.unusable.count = 0; in qla2x00_mem_alloc()
4151 ql_dbg_pci(ql_dbg_init, ha->pdev, in qla2x00_mem_alloc()
4156 ha->dif_bundle_kallocs++; in qla2x00_mem_alloc()
4159 ha->dif_bundl_pool, GFP_ATOMIC, in qla2x00_mem_alloc()
4162 ql_dbg_pci(ql_dbg_init, ha->pdev, in qla2x00_mem_alloc()
4167 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4170 ha->dif_bundle_dma_allocs++; in qla2x00_mem_alloc()
4179 &ha->pool.unusable.head); in qla2x00_mem_alloc()
4180 ha->pool.unusable.count++; in qla2x00_mem_alloc()
4183 &ha->pool.good.head); in qla2x00_mem_alloc()
4184 ha->pool.good.count++; in qla2x00_mem_alloc()
4190 &ha->pool.good.head, list) { in qla2x00_mem_alloc()
4192 dma_pool_free(ha->dif_bundl_pool, in qla2x00_mem_alloc()
4194 ha->dif_bundle_dma_allocs--; in qla2x00_mem_alloc()
4196 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4199 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0024, in qla2x00_mem_alloc()
4201 __func__, ha->pool.good.count, in qla2x00_mem_alloc()
4202 ha->pool.unusable.count); in qla2x00_mem_alloc()
4205 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0025, in qla2x00_mem_alloc()
4207 ha->dl_dma_pool, ha->fcp_cmnd_dma_pool, in qla2x00_mem_alloc()
4208 ha->dif_bundl_pool); in qla2x00_mem_alloc()
4212 if (IS_QLA2100(ha) || IS_QLA2200(ha)) { in qla2x00_mem_alloc()
4214 ha->sns_cmd = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4215 sizeof(struct sns_cmd_pkt), &ha->sns_cmd_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4216 if (!ha->sns_cmd) in qla2x00_mem_alloc()
4218 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0026, in qla2x00_mem_alloc()
4219 "sns_cmd: %p.\n", ha->sns_cmd); in qla2x00_mem_alloc()
4222 ha->ms_iocb = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4223 &ha->ms_iocb_dma); in qla2x00_mem_alloc()
4224 if (!ha->ms_iocb) in qla2x00_mem_alloc()
4227 ha->ct_sns = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4228 sizeof(struct ct_sns_pkt), &ha->ct_sns_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4229 if (!ha->ct_sns) in qla2x00_mem_alloc()
4231 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0027, in qla2x00_mem_alloc()
4233 ha->ms_iocb, ha->ct_sns); in qla2x00_mem_alloc()
4239 ql_log_pci(ql_log_fatal, ha->pdev, 0x0028, in qla2x00_mem_alloc()
4244 (*req)->ring = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4248 ql_log_pci(ql_log_fatal, ha->pdev, 0x0029, in qla2x00_mem_alloc()
4255 ql_log_pci(ql_log_fatal, ha->pdev, 0x002a, in qla2x00_mem_alloc()
4259 (*rsp)->hw = ha; in qla2x00_mem_alloc()
4261 (*rsp)->ring = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4265 ql_log_pci(ql_log_fatal, ha->pdev, 0x002b, in qla2x00_mem_alloc()
4271 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002c, in qla2x00_mem_alloc()
4277 if (ha->nvram_npiv_size) { in qla2x00_mem_alloc()
4278 ha->npiv_info = kcalloc(ha->nvram_npiv_size, in qla2x00_mem_alloc()
4281 if (!ha->npiv_info) { in qla2x00_mem_alloc()
4282 ql_log_pci(ql_log_fatal, ha->pdev, 0x002d, in qla2x00_mem_alloc()
4287 ha->npiv_info = NULL; in qla2x00_mem_alloc()
4290 if (IS_CNA_CAPABLE(ha) || IS_QLA2031(ha) || IS_QLA27XX(ha) || in qla2x00_mem_alloc()
4291 IS_QLA28XX(ha)) { in qla2x00_mem_alloc()
4292 ha->ex_init_cb = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4293 &ha->ex_init_cb_dma); in qla2x00_mem_alloc()
4294 if (!ha->ex_init_cb) in qla2x00_mem_alloc()
4296 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002e, in qla2x00_mem_alloc()
4297 "ex_init_cb=%p.\n", ha->ex_init_cb); in qla2x00_mem_alloc()
4301 if (IS_QLA27XX(ha) || IS_QLA28XX(ha)) { in qla2x00_mem_alloc()
4302 ha->sf_init_cb = dma_pool_zalloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4303 &ha->sf_init_cb_dma); in qla2x00_mem_alloc()
4304 if (!ha->sf_init_cb) in qla2x00_mem_alloc()
4306 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0199, in qla2x00_mem_alloc()
4307 "sf_init_cb=%p.\n", ha->sf_init_cb); in qla2x00_mem_alloc()
4310 INIT_LIST_HEAD(&ha->gbl_dsd_list); in qla2x00_mem_alloc()
4313 if (!IS_FWI2_CAPABLE(ha)) { in qla2x00_mem_alloc()
4314 ha->async_pd = dma_pool_alloc(ha->s_dma_pool, GFP_KERNEL, in qla2x00_mem_alloc()
4315 &ha->async_pd_dma); in qla2x00_mem_alloc()
4316 if (!ha->async_pd) in qla2x00_mem_alloc()
4318 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x002f, in qla2x00_mem_alloc()
4319 "async_pd=%p.\n", ha->async_pd); in qla2x00_mem_alloc()
4322 INIT_LIST_HEAD(&ha->vp_list); in qla2x00_mem_alloc()
4325 ha->loop_id_map = kcalloc(BITS_TO_LONGS(LOOPID_MAP_SIZE), in qla2x00_mem_alloc()
4328 if (!ha->loop_id_map) in qla2x00_mem_alloc()
4331 qla2x00_set_reserved_loop_ids(ha); in qla2x00_mem_alloc()
4332 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x0123, in qla2x00_mem_alloc()
4333 "loop_id_map=%p.\n", ha->loop_id_map); in qla2x00_mem_alloc()
4336 ha->sfp_data = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4337 SFP_DEV_SIZE, &ha->sfp_data_dma, GFP_KERNEL); in qla2x00_mem_alloc()
4338 if (!ha->sfp_data) { in qla2x00_mem_alloc()
4339 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011b, in qla2x00_mem_alloc()
4344 ha->flt = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4345 sizeof(struct qla_flt_header) + FLT_REGIONS_SIZE, &ha->flt_dma, in qla2x00_mem_alloc()
4347 if (!ha->flt) { in qla2x00_mem_alloc()
4348 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011b, in qla2x00_mem_alloc()
4354 ha->purex_dma_pool = dma_pool_create(name, &ha->pdev->dev, in qla2x00_mem_alloc()
4357 if (!ha->purex_dma_pool) { in qla2x00_mem_alloc()
4358 ql_dbg_pci(ql_dbg_init, ha->pdev, 0x011b, in qla2x00_mem_alloc()
4363 ha->elsrej.size = sizeof(struct fc_els_ls_rjt) + 16; in qla2x00_mem_alloc()
4364 ha->elsrej.c = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_mem_alloc()
4365 ha->elsrej.size, &ha->elsrej.cdma, GFP_KERNEL); in qla2x00_mem_alloc()
4367 if (!ha->elsrej.c) { in qla2x00_mem_alloc()
4368 ql_dbg_pci(ql_dbg_init, ha->pdev, 0xffff, in qla2x00_mem_alloc()
4372 ha->elsrej.c->er_cmd = ELS_LS_RJT; in qla2x00_mem_alloc()
4373 ha->elsrej.c->er_reason = ELS_RJT_LOGIC; in qla2x00_mem_alloc()
4374 ha->elsrej.c->er_explan = ELS_EXPL_UNAB_DATA; in qla2x00_mem_alloc()
4378 dma_pool_destroy(ha->purex_dma_pool); in qla2x00_mem_alloc()
4380 dma_free_coherent(&ha->pdev->dev, SFP_DEV_SIZE, in qla2x00_mem_alloc()
4381 ha->flt, ha->flt_dma); in qla2x00_mem_alloc()
4384 dma_free_coherent(&ha->pdev->dev, SFP_DEV_SIZE, in qla2x00_mem_alloc()
4385 ha->sfp_data, ha->sfp_data_dma); in qla2x00_mem_alloc()
4387 kfree(ha->loop_id_map); in qla2x00_mem_alloc()
4389 dma_pool_free(ha->s_dma_pool, ha->async_pd, ha->async_pd_dma); in qla2x00_mem_alloc()
4391 dma_pool_free(ha->s_dma_pool, ha->sf_init_cb, ha->sf_init_cb_dma); in qla2x00_mem_alloc()
4393 dma_pool_free(ha->s_dma_pool, ha->ex_init_cb, ha->ex_init_cb_dma); in qla2x00_mem_alloc()
4395 kfree(ha->npiv_info); in qla2x00_mem_alloc()
4397 dma_free_coherent(&ha->pdev->dev, ((*rsp)->length + 1) * in qla2x00_mem_alloc()
4405 dma_free_coherent(&ha->pdev->dev, ((*req)->length + 1) * in qla2x00_mem_alloc()
4413 dma_free_coherent(&ha->pdev->dev, sizeof(struct ct_sns_pkt), in qla2x00_mem_alloc()
4414 ha->ct_sns, ha->ct_sns_dma); in qla2x00_mem_alloc()
4415 ha->ct_sns = NULL; in qla2x00_mem_alloc()
4416 ha->ct_sns_dma = 0; in qla2x00_mem_alloc()
4418 dma_pool_free(ha->s_dma_pool, ha->ms_iocb, ha->ms_iocb_dma); in qla2x00_mem_alloc()
4419 ha->ms_iocb = NULL; in qla2x00_mem_alloc()
4420 ha->ms_iocb_dma = 0; in qla2x00_mem_alloc()
4422 if (ha->sns_cmd) in qla2x00_mem_alloc()
4423 dma_free_coherent(&ha->pdev->dev, sizeof(struct sns_cmd_pkt), in qla2x00_mem_alloc()
4424 ha->sns_cmd, ha->sns_cmd_dma); in qla2x00_mem_alloc()
4429 list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head, in qla2x00_mem_alloc()
4432 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_alloc()
4434 ha->dif_bundle_dma_allocs--; in qla2x00_mem_alloc()
4436 ha->dif_bundle_kallocs--; in qla2x00_mem_alloc()
4437 ha->pool.unusable.count--; in qla2x00_mem_alloc()
4439 dma_pool_destroy(ha->dif_bundl_pool); in qla2x00_mem_alloc()
4440 ha->dif_bundl_pool = NULL; in qla2x00_mem_alloc()
4444 if (IS_QLA82XX(ha) || ql2xenabledif) { in qla2x00_mem_alloc()
4445 dma_pool_destroy(ha->fcp_cmnd_dma_pool); in qla2x00_mem_alloc()
4446 ha->fcp_cmnd_dma_pool = NULL; in qla2x00_mem_alloc()
4449 if (IS_QLA82XX(ha) || ql2xenabledif) { in qla2x00_mem_alloc()
4450 dma_pool_destroy(ha->dl_dma_pool); in qla2x00_mem_alloc()
4451 ha->dl_dma_pool = NULL; in qla2x00_mem_alloc()
4454 dma_pool_destroy(ha->s_dma_pool); in qla2x00_mem_alloc()
4455 ha->s_dma_pool = NULL; in qla2x00_mem_alloc()
4457 kfree(ha->nvram); in qla2x00_mem_alloc()
4458 ha->nvram = NULL; in qla2x00_mem_alloc()
4460 mempool_destroy(ha->ctx_mempool); in qla2x00_mem_alloc()
4461 ha->ctx_mempool = NULL; in qla2x00_mem_alloc()
4463 mempool_destroy(ha->srb_mempool); in qla2x00_mem_alloc()
4464 ha->srb_mempool = NULL; in qla2x00_mem_alloc()
4466 dma_free_coherent(&ha->pdev->dev, qla2x00_gid_list_size(ha), in qla2x00_mem_alloc()
4467 ha->gid_list, in qla2x00_mem_alloc()
4468 ha->gid_list_dma); in qla2x00_mem_alloc()
4469 ha->gid_list = NULL; in qla2x00_mem_alloc()
4470 ha->gid_list_dma = 0; in qla2x00_mem_alloc()
4472 qlt_mem_free(ha); in qla2x00_mem_alloc()
4474 btree_destroy32(&ha->host_map); in qla2x00_mem_alloc()
4476 dma_free_coherent(&ha->pdev->dev, ha->init_cb_size, ha->init_cb, in qla2x00_mem_alloc()
4477 ha->init_cb_dma); in qla2x00_mem_alloc()
4478 ha->init_cb = NULL; in qla2x00_mem_alloc()
4479 ha->init_cb_dma = 0; in qla2x00_mem_alloc()
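qla2x00_mem_alloc() follows the kernel's "allocate forward, unwind backward" shape: each allocation that fails jumps to an error label that releases everything allocated before it, in reverse order, and the final labels reset the pointers and DMA handles. A compressed sketch of that structure, assuming only three allocations (the real function has many more) and hypothetical sizes and names:

#include <linux/pci.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/mempool.h>
#include <linux/slab.h>

struct my_hw {
	struct pci_dev *pdev;
	void *init_cb;
	dma_addr_t init_cb_dma;
	mempool_t *srb_mempool;
	struct dma_pool *s_dma_pool;
};

static int my_mem_alloc(struct my_hw *hw, struct kmem_cache *srb_cachep)
{
	hw->init_cb = dma_alloc_coherent(&hw->pdev->dev, 4096,
					 &hw->init_cb_dma, GFP_KERNEL);
	if (!hw->init_cb)
		goto fail;

	hw->srb_mempool = mempool_create_slab_pool(16, srb_cachep);
	if (!hw->srb_mempool)
		goto fail_free_init_cb;

	hw->s_dma_pool = dma_pool_create("my_pool", &hw->pdev->dev, 512, 8, 0);
	if (!hw->s_dma_pool)
		goto fail_free_srb_mempool;

	return 0;

	/* unwind in strict reverse order of allocation */
fail_free_srb_mempool:
	mempool_destroy(hw->srb_mempool);
	hw->srb_mempool = NULL;
fail_free_init_cb:
	dma_free_coherent(&hw->pdev->dev, 4096, hw->init_cb, hw->init_cb_dma);
	hw->init_cb = NULL;
	hw->init_cb_dma = 0;
fail:
	return -ENOMEM;
}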
4492 struct qla_hw_data *ha = vha->hw; in qla2x00_set_exlogins_buffer() local
4498 if (!IS_EXLOGIN_OFFLD_CAPABLE(ha)) in qla2x00_set_exlogins_buffer()
4505 ql_log_pci(ql_log_fatal, ha->pdev, 0xd029, in qla2x00_set_exlogins_buffer()
4513 if (temp != ha->exlogin_size) { in qla2x00_set_exlogins_buffer()
4514 qla2x00_free_exlogin_buffer(ha); in qla2x00_set_exlogins_buffer()
4515 ha->exlogin_size = temp; in qla2x00_set_exlogins_buffer()
4522 "EXLOGIN: requested size=0x%x\n", ha->exlogin_size); in qla2x00_set_exlogins_buffer()
4525 ha->exlogin_buf = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_set_exlogins_buffer()
4526 ha->exlogin_size, &ha->exlogin_buf_dma, GFP_KERNEL); in qla2x00_set_exlogins_buffer()
4527 if (!ha->exlogin_buf) { in qla2x00_set_exlogins_buffer()
4528 ql_log_pci(ql_log_fatal, ha->pdev, 0xd02a, in qla2x00_set_exlogins_buffer()
4535 rval = qla_set_exlogin_mem_cfg(vha, ha->exlogin_buf_dma); in qla2x00_set_exlogins_buffer()
4539 qla2x00_free_exlogin_buffer(ha); in qla2x00_set_exlogins_buffer()
4552 qla2x00_free_exlogin_buffer(struct qla_hw_data *ha) in qla2x00_free_exlogin_buffer() argument
4554 if (ha->exlogin_buf) { in qla2x00_free_exlogin_buffer()
4555 dma_free_coherent(&ha->pdev->dev, ha->exlogin_size, in qla2x00_free_exlogin_buffer()
4556 ha->exlogin_buf, ha->exlogin_buf_dma); in qla2x00_free_exlogin_buffer()
4557 ha->exlogin_buf = NULL; in qla2x00_free_exlogin_buffer()
4558 ha->exlogin_size = 0; in qla2x00_free_exlogin_buffer()
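The exlogin buffer code above re-allocates its DMA-coherent buffer only when the size the firmware needs has changed: free the old buffer, record the new size, allocate at that size, and report the new DMA address to firmware. A sketch of that resize-on-demand pattern, with hypothetical field names and no firmware call:

#include <linux/pci.h>
#include <linux/dma-mapping.h>

struct my_hw {
	struct pci_dev *pdev;
	void *buf;
	dma_addr_t buf_dma;
	u32 buf_size;
};

/* Re-allocate the coherent buffer only when the required size changed. */
static int my_resize_buf(struct my_hw *hw, u32 new_size)
{
	if (hw->buf && new_size == hw->buf_size)
		return 0;			/* nothing to do */

	if (hw->buf) {
		dma_free_coherent(&hw->pdev->dev, hw->buf_size,
				  hw->buf, hw->buf_dma);
		hw->buf = NULL;
	}

	hw->buf = dma_alloc_coherent(&hw->pdev->dev, new_size,
				     &hw->buf_dma, GFP_KERNEL);
	if (!hw->buf) {
		hw->buf_size = 0;
		return -ENOMEM;
	}
	hw->buf_size = new_size;
	return 0;
}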
4607 struct qla_hw_data *ha = vha->hw; in qla2x00_set_exchoffld_buffer() local
4609 if (!ha->flags.exchoffld_enabled) in qla2x00_set_exchoffld_buffer()
4612 if (!IS_EXCHG_OFFLD_CAPABLE(ha)) in qla2x00_set_exchoffld_buffer()
4618 ql_log_pci(ql_log_fatal, ha->pdev, 0xd012, in qla2x00_set_exchoffld_buffer()
4629 if (totsz != ha->exchoffld_size) { in qla2x00_set_exchoffld_buffer()
4630 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4632 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4633 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4637 ha->exchoffld_size = totsz; in qla2x00_set_exchoffld_buffer()
4645 ha->exchoffld_size); in qla2x00_set_exchoffld_buffer()
4648 ha->exchoffld_buf = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_set_exchoffld_buffer()
4649 ha->exchoffld_size, &ha->exchoffld_buf_dma, GFP_KERNEL); in qla2x00_set_exchoffld_buffer()
4650 if (!ha->exchoffld_buf) { in qla2x00_set_exchoffld_buffer()
4651 ql_log_pci(ql_log_fatal, ha->pdev, 0xd013, in qla2x00_set_exchoffld_buffer()
4654 if (ha->max_exchg > in qla2x00_set_exchoffld_buffer()
4656 ha->max_exchg -= REDUCE_EXCHANGES_CNT; in qla2x00_set_exchoffld_buffer()
4657 } else if (ha->max_exchg > in qla2x00_set_exchoffld_buffer()
4659 ha->max_exchg -= 512; in qla2x00_set_exchoffld_buffer()
4661 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4662 ql_log_pci(ql_log_fatal, ha->pdev, 0xd013, in qla2x00_set_exchoffld_buffer()
4665 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4669 } else if (!ha->exchoffld_buf || (actual_cnt <= FW_DEF_EXCHANGES_CNT)) { in qla2x00_set_exchoffld_buffer()
4671 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4672 ha->exchoffld_size = 0; in qla2x00_set_exchoffld_buffer()
4673 ha->flags.exchoffld_enabled = 0; in qla2x00_set_exchoffld_buffer()
4676 ha->exchoffld_size, actual_cnt, size, totsz); in qla2x00_set_exchoffld_buffer()
4685 qla2x00_free_exchoffld_buffer(ha); in qla2x00_set_exchoffld_buffer()
4688 struct init_cb_81xx *icb = (struct init_cb_81xx *)ha->init_cb; in qla2x00_set_exchoffld_buffer()
4706 qla2x00_free_exchoffld_buffer(struct qla_hw_data *ha) in qla2x00_free_exchoffld_buffer() argument
4708 if (ha->exchoffld_buf) { in qla2x00_free_exchoffld_buffer()
4709 dma_free_coherent(&ha->pdev->dev, ha->exchoffld_size, in qla2x00_free_exchoffld_buffer()
4710 ha->exchoffld_buf, ha->exchoffld_buf_dma); in qla2x00_free_exchoffld_buffer()
4711 ha->exchoffld_buf = NULL; in qla2x00_free_exchoffld_buffer()
4712 ha->exchoffld_size = 0; in qla2x00_free_exchoffld_buffer()
4724 qla2x00_free_fw_dump(struct qla_hw_data *ha) in qla2x00_free_fw_dump() argument
4726 struct fwdt *fwdt = ha->fwdt; in qla2x00_free_fw_dump()
4729 if (ha->fce) in qla2x00_free_fw_dump()
4730 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_fw_dump()
4731 FCE_SIZE, ha->fce, ha->fce_dma); in qla2x00_free_fw_dump()
4733 if (ha->eft) in qla2x00_free_fw_dump()
4734 dma_free_coherent(&ha->pdev->dev, in qla2x00_free_fw_dump()
4735 EFT_SIZE, ha->eft, ha->eft_dma); in qla2x00_free_fw_dump()
4737 vfree(ha->fw_dump); in qla2x00_free_fw_dump()
4739 ha->fce = NULL; in qla2x00_free_fw_dump()
4740 ha->fce_dma = 0; in qla2x00_free_fw_dump()
4741 ha->flags.fce_enabled = 0; in qla2x00_free_fw_dump()
4742 ha->eft = NULL; in qla2x00_free_fw_dump()
4743 ha->eft_dma = 0; in qla2x00_free_fw_dump()
4744 ha->fw_dumped = false; in qla2x00_free_fw_dump()
4745 ha->fw_dump_cap_flags = 0; in qla2x00_free_fw_dump()
4746 ha->fw_dump_reading = 0; in qla2x00_free_fw_dump()
4747 ha->fw_dump = NULL; in qla2x00_free_fw_dump()
4748 ha->fw_dump_len = 0; in qla2x00_free_fw_dump()
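qla2x00_free_fw_dump() pairs each allocator with its matching release: the trace buffers came from dma_alloc_coherent() and go back through dma_free_coherent(), while the firmware dump itself was vmalloc'ed and is released with vfree(); every pointer, handle and flag is then reset so a later allocation pass starts clean. A small sketch of that pairing, assuming hypothetical field names and a placeholder buffer size:

#include <linux/pci.h>
#include <linux/dma-mapping.h>
#include <linux/vmalloc.h>
#include <linux/sizes.h>

struct my_hw {
	struct pci_dev *pdev;
	void *trace_buf;		/* from dma_alloc_coherent() */
	dma_addr_t trace_buf_dma;
	void *fw_dump;			/* from vmalloc() */
	bool fw_dumped;
};

static void my_free_fw_dump(struct my_hw *hw)
{
	if (hw->trace_buf)
		dma_free_coherent(&hw->pdev->dev, SZ_64K,
				  hw->trace_buf, hw->trace_buf_dma);
	vfree(hw->fw_dump);		/* vfree(NULL) is a no-op */

	/* reset state so the next allocation pass starts clean */
	hw->trace_buf = NULL;
	hw->trace_buf_dma = 0;
	hw->fw_dump = NULL;
	hw->fw_dumped = false;
}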
4765 qla2x00_mem_free(struct qla_hw_data *ha) in qla2x00_mem_free() argument
4767 qla2x00_free_fw_dump(ha); in qla2x00_mem_free()
4769 if (ha->mctp_dump) in qla2x00_mem_free()
4770 dma_free_coherent(&ha->pdev->dev, MCTP_DUMP_SIZE, ha->mctp_dump, in qla2x00_mem_free()
4771 ha->mctp_dump_dma); in qla2x00_mem_free()
4772 ha->mctp_dump = NULL; in qla2x00_mem_free()
4774 mempool_destroy(ha->srb_mempool); in qla2x00_mem_free()
4775 ha->srb_mempool = NULL; in qla2x00_mem_free()
4777 if (ha->dcbx_tlv) in qla2x00_mem_free()
4778 dma_free_coherent(&ha->pdev->dev, DCBX_TLV_DATA_SIZE, in qla2x00_mem_free()
4779 ha->dcbx_tlv, ha->dcbx_tlv_dma); in qla2x00_mem_free()
4780 ha->dcbx_tlv = NULL; in qla2x00_mem_free()
4782 if (ha->xgmac_data) in qla2x00_mem_free()
4783 dma_free_coherent(&ha->pdev->dev, XGMAC_DATA_SIZE, in qla2x00_mem_free()
4784 ha->xgmac_data, ha->xgmac_data_dma); in qla2x00_mem_free()
4785 ha->xgmac_data = NULL; in qla2x00_mem_free()
4787 if (ha->sns_cmd) in qla2x00_mem_free()
4788 dma_free_coherent(&ha->pdev->dev, sizeof(struct sns_cmd_pkt), in qla2x00_mem_free()
4789 ha->sns_cmd, ha->sns_cmd_dma); in qla2x00_mem_free()
4790 ha->sns_cmd = NULL; in qla2x00_mem_free()
4791 ha->sns_cmd_dma = 0; in qla2x00_mem_free()
4793 if (ha->ct_sns) in qla2x00_mem_free()
4794 dma_free_coherent(&ha->pdev->dev, sizeof(struct ct_sns_pkt), in qla2x00_mem_free()
4795 ha->ct_sns, ha->ct_sns_dma); in qla2x00_mem_free()
4796 ha->ct_sns = NULL; in qla2x00_mem_free()
4797 ha->ct_sns_dma = 0; in qla2x00_mem_free()
4799 if (ha->sfp_data) in qla2x00_mem_free()
4800 dma_free_coherent(&ha->pdev->dev, SFP_DEV_SIZE, ha->sfp_data, in qla2x00_mem_free()
4801 ha->sfp_data_dma); in qla2x00_mem_free()
4802 ha->sfp_data = NULL; in qla2x00_mem_free()
4804 if (ha->flt) in qla2x00_mem_free()
4805 dma_free_coherent(&ha->pdev->dev, in qla2x00_mem_free()
4807 ha->flt, ha->flt_dma); in qla2x00_mem_free()
4808 ha->flt = NULL; in qla2x00_mem_free()
4809 ha->flt_dma = 0; in qla2x00_mem_free()
4811 if (ha->ms_iocb) in qla2x00_mem_free()
4812 dma_pool_free(ha->s_dma_pool, ha->ms_iocb, ha->ms_iocb_dma); in qla2x00_mem_free()
4813 ha->ms_iocb = NULL; in qla2x00_mem_free()
4814 ha->ms_iocb_dma = 0; in qla2x00_mem_free()
4816 if (ha->sf_init_cb) in qla2x00_mem_free()
4817 dma_pool_free(ha->s_dma_pool, in qla2x00_mem_free()
4818 ha->sf_init_cb, ha->sf_init_cb_dma); in qla2x00_mem_free()
4820 if (ha->ex_init_cb) in qla2x00_mem_free()
4821 dma_pool_free(ha->s_dma_pool, in qla2x00_mem_free()
4822 ha->ex_init_cb, ha->ex_init_cb_dma); in qla2x00_mem_free()
4823 ha->ex_init_cb = NULL; in qla2x00_mem_free()
4824 ha->ex_init_cb_dma = 0; in qla2x00_mem_free()
4826 if (ha->async_pd) in qla2x00_mem_free()
4827 dma_pool_free(ha->s_dma_pool, ha->async_pd, ha->async_pd_dma); in qla2x00_mem_free()
4828 ha->async_pd = NULL; in qla2x00_mem_free()
4829 ha->async_pd_dma = 0; in qla2x00_mem_free()
4831 dma_pool_destroy(ha->s_dma_pool); in qla2x00_mem_free()
4832 ha->s_dma_pool = NULL; in qla2x00_mem_free()
4834 if (ha->gid_list) in qla2x00_mem_free()
4835 dma_free_coherent(&ha->pdev->dev, qla2x00_gid_list_size(ha), in qla2x00_mem_free()
4836 ha->gid_list, ha->gid_list_dma); in qla2x00_mem_free()
4837 ha->gid_list = NULL; in qla2x00_mem_free()
4838 ha->gid_list_dma = 0; in qla2x00_mem_free()
4840 if (IS_QLA82XX(ha)) { in qla2x00_mem_free()
4841 if (!list_empty(&ha->gbl_dsd_list)) { in qla2x00_mem_free()
4846 tdsd_ptr, &ha->gbl_dsd_list, list) { in qla2x00_mem_free()
4847 dma_pool_free(ha->dl_dma_pool, in qla2x00_mem_free()
4855 dma_pool_destroy(ha->dl_dma_pool); in qla2x00_mem_free()
4856 ha->dl_dma_pool = NULL; in qla2x00_mem_free()
4858 dma_pool_destroy(ha->fcp_cmnd_dma_pool); in qla2x00_mem_free()
4859 ha->fcp_cmnd_dma_pool = NULL; in qla2x00_mem_free()
4861 mempool_destroy(ha->ctx_mempool); in qla2x00_mem_free()
4862 ha->ctx_mempool = NULL; in qla2x00_mem_free()
4864 if (ql2xenabledif && ha->dif_bundl_pool) { in qla2x00_mem_free()
4867 list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head, in qla2x00_mem_free()
4870 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_free()
4872 ha->dif_bundle_dma_allocs--; in qla2x00_mem_free()
4874 ha->dif_bundle_kallocs--; in qla2x00_mem_free()
4875 ha->pool.unusable.count--; in qla2x00_mem_free()
4877 list_for_each_entry_safe(dsd, nxt, &ha->pool.good.head, list) { in qla2x00_mem_free()
4879 dma_pool_free(ha->dif_bundl_pool, dsd->dsd_addr, in qla2x00_mem_free()
4881 ha->dif_bundle_dma_allocs--; in qla2x00_mem_free()
4883 ha->dif_bundle_kallocs--; in qla2x00_mem_free()
4887 dma_pool_destroy(ha->dif_bundl_pool); in qla2x00_mem_free()
4888 ha->dif_bundl_pool = NULL; in qla2x00_mem_free()
4890 qlt_mem_free(ha); in qla2x00_mem_free()
4891 qla_remove_hostmap(ha); in qla2x00_mem_free()
4893 if (ha->init_cb) in qla2x00_mem_free()
4894 dma_free_coherent(&ha->pdev->dev, ha->init_cb_size, in qla2x00_mem_free()
4895 ha->init_cb, ha->init_cb_dma); in qla2x00_mem_free()
4897 dma_pool_destroy(ha->purex_dma_pool); in qla2x00_mem_free()
4898 ha->purex_dma_pool = NULL; in qla2x00_mem_free()
4900 if (ha->elsrej.c) { in qla2x00_mem_free()
4901 dma_free_coherent(&ha->pdev->dev, ha->elsrej.size, in qla2x00_mem_free()
4902 ha->elsrej.c, ha->elsrej.cdma); in qla2x00_mem_free()
4903 ha->elsrej.c = NULL; in qla2x00_mem_free()
4906 ha->init_cb = NULL; in qla2x00_mem_free()
4907 ha->init_cb_dma = 0; in qla2x00_mem_free()
4909 vfree(ha->optrom_buffer); in qla2x00_mem_free()
4910 ha->optrom_buffer = NULL; in qla2x00_mem_free()
4911 kfree(ha->nvram); in qla2x00_mem_free()
4912 ha->nvram = NULL; in qla2x00_mem_free()
4913 kfree(ha->npiv_info); in qla2x00_mem_free()
4914 ha->npiv_info = NULL; in qla2x00_mem_free()
4915 kfree(ha->swl); in qla2x00_mem_free()
4916 ha->swl = NULL; in qla2x00_mem_free()
4917 kfree(ha->loop_id_map); in qla2x00_mem_free()
4918 ha->sf_init_cb = NULL; in qla2x00_mem_free()
4919 ha->sf_init_cb_dma = 0; in qla2x00_mem_free()
4920 ha->loop_id_map = NULL; in qla2x00_mem_free()
4924 struct qla_hw_data *ha) in qla2x00_create_host() argument
4931 ql_log_pci(ql_log_fatal, ha->pdev, 0x0107, in qla2x00_create_host()
4942 vha->hw = ha; in qla2x00_create_host()
4972 (ha->max_loop_id + 1); in qla2x00_create_host()
4973 vha->gnl.l = dma_alloc_coherent(&ha->pdev->dev, in qla2x00_create_host()
4983 vha->scan.size = ha->max_fibre_devices * sizeof(struct fab_scan_rp); in qla2x00_create_host()
4988 dma_free_coherent(&ha->pdev->dev, vha->gnl.size, in qla2x00_create_host()
5000 dev_name(&(ha->pdev->dev))); in qla2x00_create_host()
5576 struct qla_hw_data *ha = base_vha->hw; in qla83xx_schedule_work() local
5580 if (ha->dpc_lp_wq) in qla83xx_schedule_work()
5581 queue_work(ha->dpc_lp_wq, &ha->idc_aen); in qla83xx_schedule_work()
5585 if (!ha->flags.nic_core_reset_hdlr_active) { in qla83xx_schedule_work()
5586 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5587 queue_work(ha->dpc_hp_wq, &ha->nic_core_reset); in qla83xx_schedule_work()
5594 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5595 queue_work(ha->dpc_hp_wq, &ha->idc_state_handler); in qla83xx_schedule_work()
5598 if (ha->dpc_hp_wq) in qla83xx_schedule_work()
5599 queue_work(ha->dpc_hp_wq, &ha->nic_core_unrecoverable); in qla83xx_schedule_work()
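qla83xx_schedule_work() routes each event onto one of two dedicated workqueues (dpc_lp_wq for low-priority AEN handling, dpc_hp_wq for reset and IDC state work), always checking that the queue still exists before queueing. A minimal sketch of dispatching events onto separate workqueues, with invented event codes and queue names:

#include <linux/workqueue.h>

enum my_event { MY_EVT_AEN, MY_EVT_RESET };

struct my_hw {
	struct workqueue_struct *lp_wq;	/* low-priority events */
	struct workqueue_struct *hp_wq;	/* high-priority events */
	struct work_struct aen_work;
	struct work_struct reset_work;
};

static void my_schedule_work(struct my_hw *hw, enum my_event evt)
{
	switch (evt) {
	case MY_EVT_AEN:
		if (hw->lp_wq)		/* queue may already be torn down */
			queue_work(hw->lp_wq, &hw->aen_work);
		break;
	case MY_EVT_RESET:
		if (hw->hp_wq)
			queue_work(hw->hp_wq, &hw->reset_work);
		break;
	}
}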
5613 struct qla_hw_data *ha = in qla83xx_nic_core_unrecoverable_work() local
5615 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_nic_core_unrecoverable_work()
5621 if (ha->flags.nic_core_reset_owner) { in qla83xx_nic_core_unrecoverable_work()
5622 ha->flags.nic_core_reset_owner = 0; in qla83xx_nic_core_unrecoverable_work()
5635 struct qla_hw_data *ha = in qla83xx_idc_state_handler_work() local
5637 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_idc_state_handler_work()
5681 struct qla_hw_data *ha = in qla83xx_nic_core_reset_work() local
5683 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_nic_core_reset_work()
5686 if (IS_QLA2031(ha)) { in qla83xx_nic_core_reset_work()
5693 if (!ha->flags.nic_core_reset_hdlr_active) { in qla83xx_nic_core_reset_work()
5706 ha->flags.nic_core_reset_hdlr_active = 1; in qla83xx_nic_core_reset_work()
5712 ha->flags.nic_core_reset_hdlr_active = 0; in qla83xx_nic_core_reset_work()
5720 struct qla_hw_data *ha = in qla83xx_service_idc_aen() local
5722 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla83xx_service_idc_aen()
5758 struct qla_hw_data *ha = base_vha->hw; in qla83xx_force_lock_recovery() local
5770 data = (IDC_LOCK_RECOVERY_STAGE1) | (ha->portnum << 2); in qla83xx_force_lock_recovery()
5783 if (((data & idc_lck_rcvry_owner_mask) >> 2) == ha->portnum) { in qla83xx_force_lock_recovery()
5856 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_lock() local
5867 ha->portnum); in qla83xx_idc_lock()
5949 struct qla_hw_data *ha = vha->hw; in qla24xx_process_purex_rdp() local
5980 rsp_els = dma_alloc_coherent(&ha->pdev->dev, sizeof(*rsp_els), in qla24xx_process_purex_rdp()
5988 rsp_payload = dma_alloc_coherent(&ha->pdev->dev, sizeof(*rsp_payload), in qla24xx_process_purex_rdp()
5996 sfp = dma_alloc_coherent(&ha->pdev->dev, SFP_RTDI_LEN, in qla24xx_process_purex_rdp()
5999 stat = dma_alloc_coherent(&ha->pdev->dev, sizeof(*stat), in qla24xx_process_purex_rdp()
6097 qla25xx_fdmi_port_speed_capability(ha)); in qla24xx_process_purex_rdp()
6099 qla25xx_fdmi_port_speed_currently(ha)); in qla24xx_process_purex_rdp()
6155 if (ha->flags.plogi_template_valid) { in qla24xx_process_purex_rdp()
6157 be16_to_cpu(ha->plogi_els_payld.fl_csp.sp_bb_cred); in qla24xx_process_purex_rdp()
6337 dma_free_coherent(&ha->pdev->dev, sizeof(*stat), in qla24xx_process_purex_rdp()
6340 dma_free_coherent(&ha->pdev->dev, SFP_RTDI_LEN, in qla24xx_process_purex_rdp()
6343 dma_free_coherent(&ha->pdev->dev, sizeof(*rsp_payload), in qla24xx_process_purex_rdp()
6346 dma_free_coherent(&ha->pdev->dev, sizeof(*rsp_els), in qla24xx_process_purex_rdp()
6387 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_unlock() local
6398 if (data == ha->portnum) { in qla83xx_idc_unlock()
6446 struct qla_hw_data *ha = vha->hw; in __qla83xx_set_drv_presence() local
6451 drv_presence |= (1 << ha->portnum); in __qla83xx_set_drv_presence()
6475 struct qla_hw_data *ha = vha->hw; in __qla83xx_clear_drv_presence() local
6480 drv_presence &= ~(1 << ha->portnum); in __qla83xx_clear_drv_presence()
6503 struct qla_hw_data *ha = vha->hw; in qla83xx_need_reset_handler() local
6508 ack_timeout = jiffies + (ha->fcoe_reset_timeout * HZ); in qla83xx_need_reset_handler()
6576 struct qla_hw_data *ha = base_vha->hw; in qla83xx_idc_state_handler() local
6582 dev_init_timeout = jiffies + (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6601 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6604 ha->flags.nic_core_reset_owner = 0; in qla83xx_idc_state_handler()
6607 ha->portnum); in qla83xx_idc_state_handler()
6610 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6626 if (!ql2xdontresethba && ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6636 (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6646 if (ha->flags.quiesce_owner) in qla83xx_idc_state_handler()
6653 (ha->fcoe_dev_init_timeout * HZ); in qla83xx_idc_state_handler()
6656 if (ha->flags.nic_core_reset_owner) in qla83xx_idc_state_handler()
6659 ha->flags.nic_core_reset_owner = 0; in qla83xx_idc_state_handler()
6689 struct qla_hw_data *ha = container_of(work, struct qla_hw_data, in qla2x00_disable_board_on_pci_error() local
6691 struct pci_dev *pdev = ha->pdev; in qla2x00_disable_board_on_pci_error()
6692 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla2x00_disable_board_on_pci_error()
6713 qla2x00_delete_all_vps(ha, base_vha); in qla2x00_disable_board_on_pci_error()
6726 qla2x00_destroy_deferred_work(ha); in qla2x00_disable_board_on_pci_error()
6742 qla2x00_mem_free(ha); in qla2x00_disable_board_on_pci_error()
6744 qla2x00_free_queues(ha); in qla2x00_disable_board_on_pci_error()
6746 qla2x00_unmap_iobases(ha); in qla2x00_disable_board_on_pci_error()
6748 pci_release_selected_regions(ha->pdev, ha->bars); in qla2x00_disable_board_on_pci_error()
6774 struct qla_hw_data *ha; in qla2x00_do_dpc() local
6778 ha = (struct qla_hw_data *)data; in qla2x00_do_dpc()
6779 base_vha = pci_get_drvdata(ha->pdev); in qla2x00_do_dpc()
6793 if (!base_vha->flags.init_done || ha->flags.mbox_busy) in qla2x00_do_dpc()
6796 if (ha->flags.eeh_busy) { in qla2x00_do_dpc()
6798 "eeh_busy=%d.\n", ha->flags.eeh_busy); in qla2x00_do_dpc()
6802 ha->dpc_active = 1; in qla2x00_do_dpc()
6811 if (IS_P3P_TYPE(ha)) { in qla2x00_do_dpc()
6812 if (IS_QLA8044(ha)) { in qla2x00_do_dpc()
6815 qla8044_idc_lock(ha); in qla2x00_do_dpc()
6819 qla8044_idc_unlock(ha); in qla2x00_do_dpc()
6829 qla82xx_idc_lock(ha); in qla2x00_do_dpc()
6830 qla82xx_wr_32(ha, QLA82XX_CRB_DEV_STATE, in qla2x00_do_dpc()
6832 qla82xx_idc_unlock(ha); in qla2x00_do_dpc()
6861 } else if (IS_QLAFX00(ha)) { in qla2x00_do_dpc()
6925 !ha->flags.fw_started) in qla2x00_do_dpc()
6930 !ha->flags.fw_started) in qla2x00_do_dpc()
6942 if (ha->isp_ops->abort_isp(base_vha)) { in qla2x00_do_dpc()
6968 if (IS_QLAFX00(ha)) in qla2x00_do_dpc()
6974 if (IS_P3P_TYPE(ha)) { in qla2x00_do_dpc()
6975 if (IS_QLA82XX(ha)) in qla2x00_do_dpc()
6977 if (IS_QLA8044(ha)) in qla2x00_do_dpc()
6981 if (!ha->flags.quiesce_owner) { in qla2x00_do_dpc()
6983 if (IS_QLA82XX(ha)) { in qla2x00_do_dpc()
6984 qla82xx_idc_lock(ha); in qla2x00_do_dpc()
6987 qla82xx_idc_unlock(ha); in qla2x00_do_dpc()
6988 } else if (IS_QLA8044(ha)) { in qla2x00_do_dpc()
6989 qla8044_idc_lock(ha); in qla2x00_do_dpc()
6992 qla8044_idc_unlock(ha); in qla2x00_do_dpc()
7051 if (IS_QLAFX00(ha)) in qla2x00_do_dpc()
7061 if (!ha->interrupts_on) in qla2x00_do_dpc()
7062 ha->isp_ops->enable_intrs(ha); in qla2x00_do_dpc()
7066 if (ha->beacon_blink_led == 1) in qla2x00_do_dpc()
7067 ha->isp_ops->beacon_blink(base_vha); in qla2x00_do_dpc()
7073 if (ha->flags.eeh_busy || in qla2x00_do_dpc()
7074 ha->flags.pci_channel_io_perm_failure) in qla2x00_do_dpc()
7079 mutex_lock(&ha->mq_lock); in qla2x00_do_dpc()
7083 mutex_unlock(&ha->mq_lock); in qla2x00_do_dpc()
7088 u16 threshold = ha->nvme_last_rptd_aen + ha->last_zio_threshold; in qla2x00_do_dpc()
7090 if (threshold > ha->orig_fw_xcb_count) in qla2x00_do_dpc()
7091 threshold = ha->orig_fw_xcb_count; in qla2x00_do_dpc()
7103 if (!IS_QLAFX00(ha)) in qla2x00_do_dpc()
7111 ha->dpc_active = 0; in qla2x00_do_dpc()
7123 ha->dpc_active = 0; in qla2x00_do_dpc()
7134 struct qla_hw_data *ha = vha->hw; in qla2xxx_wake_dpc() local
7135 struct task_struct *t = ha->dpc_thread; in qla2xxx_wake_dpc()
7169 struct qla_hw_data *ha = vha->hw; in qla_do_heartbeat() local
7178 cmpl_cnt = ha->base_qpair->cmd_completion_cnt; in qla_do_heartbeat()
7179 if (cmpl_cnt == ha->base_qpair->prev_completion_cnt && in qla_do_heartbeat()
7180 cmpl_cnt != ha->base_qpair->cmd_cnt) { in qla_do_heartbeat()
7184 ha->base_qpair->prev_completion_cnt = cmpl_cnt; in qla_do_heartbeat()
7186 for (i = 0; i < ha->max_qpairs; i++) { in qla_do_heartbeat()
7187 if (ha->queue_pair_map[i]) { in qla_do_heartbeat()
7188 cmpl_cnt = ha->queue_pair_map[i]->cmd_completion_cnt; in qla_do_heartbeat()
7189 if (cmpl_cnt == ha->queue_pair_map[i]->prev_completion_cnt && in qla_do_heartbeat()
7190 cmpl_cnt != ha->queue_pair_map[i]->cmd_cnt) { in qla_do_heartbeat()
7194 ha->queue_pair_map[i]->prev_completion_cnt = cmpl_cnt; in qla_do_heartbeat()
7204 struct qla_hw_data *ha = vha->hw; in qla_heart_beat() local
7213 queue_work(ha->wq, &ha->heartbeat_work); in qla_heart_beat()
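The heartbeat logic in qla_do_heartbeat() compares per-queue counters between runs: if the completion count has not moved since the previous check while commands are still outstanding (the completion count differs from the command count), the queue looks stalled and the heartbeat work escalates. A sketch of that stall check, assuming hypothetical counter names:

#include <linux/types.h>

struct my_qpair {
	u64 cmd_cnt;			/* commands submitted */
	u64 cmd_completion_cnt;		/* commands completed */
	u64 prev_completion_cnt;	/* snapshot from the last check */
};

/* Returns true if the queue made no progress since the last call
 * even though work is still outstanding. */
static bool my_qpair_looks_stuck(struct my_qpair *qp)
{
	u64 cmpl = qp->cmd_completion_cnt;
	bool stuck = (cmpl == qp->prev_completion_cnt) &&
		     (cmpl != qp->cmd_cnt);

	qp->prev_completion_cnt = cmpl;
	return stuck;
}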
7233 struct qla_hw_data *ha = vha->hw; in qla2x00_timer() local
7238 if (ha->flags.eeh_busy) { in qla2x00_timer()
7241 ha->flags.eeh_busy); in qla2x00_timer()
7250 if (!pci_channel_offline(ha->pdev)) { in qla2x00_timer()
7251 pci_read_config_word(ha->pdev, PCI_VENDOR_ID, &w); in qla2x00_timer()
7256 if (!vha->vp_idx && IS_P3P_TYPE(ha)) { in qla2x00_timer()
7259 if (IS_QLA82XX(ha)) in qla2x00_timer()
7261 else if (IS_QLA8044(ha)) in qla2x00_timer()
7265 if (!vha->vp_idx && IS_QLAFX00(ha)) in qla2x00_timer()
7290 if (!IS_QLA2100(ha) && vha->link_down_timeout) in qla2x00_timer()
7299 spin_lock_irqsave(&ha->hardware_lock, in qla2x00_timer()
7301 req = ha->req_q_map[0]; in qla2x00_timer()
7318 if (IS_QLA82XX(ha)) in qla2x00_timer()
7326 spin_unlock_irqrestore(&ha->hardware_lock, in qla2x00_timer()
7338 if (IS_QLA82XX(ha)) in qla2x00_timer()
7351 if (!vha->vp_idx && (ha->beacon_blink_led == 1)) { in qla2x00_timer()
7353 if (!IS_P3P_TYPE(ha)) { in qla2x00_timer()
7380 index = atomic_read(&ha->nvme_active_aen_cnt); in qla2x00_timer()
7382 (index != ha->nvme_last_rptd_aen) && in qla2x00_timer()
7383 ha->zio_mode == QLA_ZIO_MODE_6 && in qla2x00_timer()
7384 !ha->flags.host_shutting_down) { in qla2x00_timer()
7385 ha->nvme_last_rptd_aen = atomic_read(&ha->nvme_active_aen_cnt); in qla2x00_timer()
7388 ha->nvme_last_rptd_aen); in qla2x00_timer()
7394 atomic_read(&ha->zio_threshold) != ha->last_zio_threshold && in qla2x00_timer()
7395 IS_ZIO_THRESHOLD_CAPABLE(ha)) { in qla2x00_timer()
7398 ha->last_zio_threshold); in qla2x00_timer()
7399 ha->last_zio_threshold = atomic_read(&ha->zio_threshold); in qla2x00_timer()
7493 struct qla_hw_data *ha = vha->hw; in qla2x00_request_firmware() local
7496 if (IS_QLA2100(ha)) { in qla2x00_request_firmware()
7498 } else if (IS_QLA2200(ha)) { in qla2x00_request_firmware()
7500 } else if (IS_QLA2300(ha) || IS_QLA2312(ha) || IS_QLA6312(ha)) { in qla2x00_request_firmware()
7502 } else if (IS_QLA2322(ha) || IS_QLA6322(ha)) { in qla2x00_request_firmware()
7504 } else if (IS_QLA24XX_TYPE(ha)) { in qla2x00_request_firmware()
7506 } else if (IS_QLA25XX(ha)) { in qla2x00_request_firmware()
7508 } else if (IS_QLA81XX(ha)) { in qla2x00_request_firmware()
7510 } else if (IS_QLA82XX(ha)) { in qla2x00_request_firmware()
7512 } else if (IS_QLA2031(ha)) { in qla2x00_request_firmware()
7514 } else if (IS_QLA8031(ha)) { in qla2x00_request_firmware()
7516 } else if (IS_QLA27XX(ha)) { in qla2x00_request_firmware()
7518 } else if (IS_QLA28XX(ha)) { in qla2x00_request_firmware()
7531 if (request_firmware(&blob->fw, blob->name, &ha->pdev->dev)) { in qla2x00_request_firmware()
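qla2x00_request_firmware() selects a firmware image name per ISP generation and hands it to the kernel firmware loader. A minimal sketch of the request_firmware()/release_firmware() round trip it relies on, with a made-up image name standing in for the per-chip names:

#include <linux/firmware.h>
#include <linux/pci.h>

static int my_load_fw(struct pci_dev *pdev)
{
	const struct firmware *fw;
	int rc;

	/* the name is a placeholder; the driver picks it per chip type */
	rc = request_firmware(&fw, "example_fw.bin", &pdev->dev);
	if (rc)
		return rc;	/* image not found or loader failure */

	/* fw->data / fw->size stay valid until release_firmware() */
	dev_info(&pdev->dev, "loaded %zu bytes of firmware\n", fw->size);

	release_firmware(fw);
	return 0;
}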
7556 struct qla_hw_data *ha = vha->hw; in qla_pci_error_cleanup() local
7557 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in qla_pci_error_cleanup()
7566 ha->chip_reset++; in qla_pci_error_cleanup()
7568 ha->base_qpair->chip_reset = ha->chip_reset; in qla_pci_error_cleanup()
7569 for (i = 0; i < ha->max_qpairs; i++) { in qla_pci_error_cleanup()
7570 if (ha->queue_pair_map[i]) in qla_pci_error_cleanup()
7571 ha->queue_pair_map[i]->chip_reset = in qla_pci_error_cleanup()
7572 ha->base_qpair->chip_reset; in qla_pci_error_cleanup()
7580 mutex_lock(&ha->mq_lock); in qla_pci_error_cleanup()
7581 ha->base_qpair->online = 0; in qla_pci_error_cleanup()
7585 mutex_unlock(&ha->mq_lock); in qla_pci_error_cleanup()
7589 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7590 list_for_each_entry_safe(vp, tvp, &ha->vp_list, list) { in qla_pci_error_cleanup()
7592 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7594 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7597 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7603 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7604 list_for_each_entry_safe(vp, tvp, &ha->vp_list, list) { in qla_pci_error_cleanup()
7606 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7609 spin_lock_irqsave(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7612 spin_unlock_irqrestore(&ha->vport_slock, flags); in qla_pci_error_cleanup()
7620 struct qla_hw_data *ha = vha->hw; in qla2xxx_pci_error_detected() local
7625 ha->pci_error_state = QLA_PCI_ERR_DETECTED; in qla2xxx_pci_error_detected()
7636 ha->flags.eeh_busy = 0; in qla2xxx_pci_error_detected()
7648 ha->flags.pci_channel_io_perm_failure = 1; in qla2xxx_pci_error_detected()
7669 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_mmio_enabled() local
7670 struct device_reg_2xxx __iomem *reg = &ha->iobase->isp; in qla2xxx_pci_mmio_enabled()
7671 struct device_reg_24xx __iomem *reg24 = &ha->iobase->isp24; in qla2xxx_pci_mmio_enabled()
7676 ha->pci_error_state = QLA_PCI_MMIO_ENABLED; in qla2xxx_pci_mmio_enabled()
7677 if (IS_QLA82XX(ha)) in qla2xxx_pci_mmio_enabled()
7680 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2xxx_pci_mmio_enabled()
7681 if (IS_QLA2100(ha) || IS_QLA2200(ha)){ in qla2xxx_pci_mmio_enabled()
7685 } else if (IS_QLA23XX(ha)) { in qla2xxx_pci_mmio_enabled()
7689 } else if (IS_FWI2_CAPABLE(ha)) { in qla2xxx_pci_mmio_enabled()
7694 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2xxx_pci_mmio_enabled()
7712 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_slot_reset() local
7719 ha->pci_error_state = QLA_PCI_SLOT_RESET; in qla2xxx_pci_slot_reset()
7733 if (ha->mem_only) in qla2xxx_pci_slot_reset()
7745 if (ha->isp_ops->pci_config(base_vha)) in qla2xxx_pci_slot_reset()
7748 mutex_lock(&ha->mq_lock); in qla2xxx_pci_slot_reset()
7751 mutex_unlock(&ha->mq_lock); in qla2xxx_pci_slot_reset()
7753 ha->flags.eeh_busy = 0; in qla2xxx_pci_slot_reset()
7756 ha->isp_ops->abort_isp(base_vha); in qla2xxx_pci_slot_reset()
7759 if (qla2x00_isp_reg_stat(ha)) { in qla2xxx_pci_slot_reset()
7760 ha->flags.eeh_busy = 1; in qla2xxx_pci_slot_reset()
7779 struct qla_hw_data *ha = base_vha->hw; in qla2xxx_pci_resume() local
7791 ha->pci_error_state = QLA_PCI_RESUME; in qla2xxx_pci_resume()
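The four callbacks above (error_detected, mmio_enabled, slot_reset, resume), together with reset_prepare/reset_done further down, are the standard PCI/AER error-recovery entry points a driver exports through struct pci_error_handlers. A skeletal sketch of how such a table is wired up; the handler bodies are reduced to stubs and the names are placeholders, not the driver's:

#include <linux/pci.h>

static pci_ers_result_t my_error_detected(struct pci_dev *pdev,
					  pci_channel_state_t state)
{
	/* stop issuing I/O; decide whether a slot reset is needed */
	return state == pci_channel_io_perm_failure ?
		PCI_ERS_RESULT_DISCONNECT : PCI_ERS_RESULT_NEED_RESET;
}

static pci_ers_result_t my_slot_reset(struct pci_dev *pdev)
{
	/* re-initialize the device after the link reset */
	return PCI_ERS_RESULT_RECOVERED;
}

static void my_resume(struct pci_dev *pdev)
{
	/* resume normal I/O */
}

static const struct pci_error_handlers my_err_handler = {
	.error_detected	= my_error_detected,
	.slot_reset	= my_slot_reset,
	.resume		= my_resume,
};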
7798 struct qla_hw_data *ha = vha->hw; in qla_pci_set_eeh_busy() local
7799 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla_pci_set_eeh_busy()
7803 if (ha->flags.eeh_busy) in qla_pci_set_eeh_busy()
7807 if (!ha->flags.eeh_busy) { in qla_pci_set_eeh_busy()
7808 ha->flags.eeh_busy = 1; in qla_pci_set_eeh_busy()
7823 struct qla_hw_data *ha = vha->hw; in qla_schedule_eeh_work() local
7824 struct scsi_qla_host *base_vha = pci_get_drvdata(ha->pdev); in qla_schedule_eeh_work()
7826 if (ha->flags.eeh_busy) in qla_schedule_eeh_work()
7837 struct qla_hw_data *ha = base_vha->hw; in qla_pci_reset_prepare() local
7849 ha->flags.eeh_busy = 1; in qla_pci_reset_prepare()
7850 mutex_lock(&ha->mq_lock); in qla_pci_reset_prepare()
7853 mutex_unlock(&ha->mq_lock); in qla_pci_reset_prepare()
7864 struct qla_hw_data *ha = base_vha->hw; in qla_pci_reset_done() local
7873 ha->flags.eeh_busy = 0; in qla_pci_reset_done()
7874 mutex_lock(&ha->mq_lock); in qla_pci_reset_done()
7877 mutex_unlock(&ha->mq_lock); in qla_pci_reset_done()
7880 ha->isp_ops->abort_isp(base_vha); in qla_pci_reset_done()