Lines matching refs:acb (one entry per matching source line in the arcmsr driver, shown with its source line number and the enclosing function)

110 static int arcmsr_iop_message_xfer(struct AdapterControlBlock *acb,
112 static int arcmsr_iop_confirm(struct AdapterControlBlock *acb);
124 static void arcmsr_iop_init(struct AdapterControlBlock *acb);
125 static void arcmsr_free_ccb_pool(struct AdapterControlBlock *acb);
126 static u32 arcmsr_disable_outbound_ints(struct AdapterControlBlock *acb);
127 static void arcmsr_enable_outbound_ints(struct AdapterControlBlock *acb,
129 static void arcmsr_stop_adapter_bgrb(struct AdapterControlBlock *acb);
130 static void arcmsr_hbaA_flush_cache(struct AdapterControlBlock *acb);
131 static void arcmsr_hbaB_flush_cache(struct AdapterControlBlock *acb);
134 static bool arcmsr_get_firmware_spec(struct AdapterControlBlock *acb);
135 static void arcmsr_start_adapter_bgrb(struct AdapterControlBlock *acb);
137 static void arcmsr_hbaD_message_isr(struct AdapterControlBlock *acb);
138 static void arcmsr_hbaE_message_isr(struct AdapterControlBlock *acb);
139 static void arcmsr_hbaE_postqueue_isr(struct AdapterControlBlock *acb);
140 static void arcmsr_hbaF_postqueue_isr(struct AdapterControlBlock *acb);
141 static void arcmsr_hardware_reset(struct AdapterControlBlock *acb);
143 static irqreturn_t arcmsr_interrupt(struct AdapterControlBlock *acb);
145 static void arcmsr_wait_firmware_ready(struct AdapterControlBlock *acb);
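
Most of these prototypes are the per-controller variants (hbaA through hbaF) of one operation, plus the dispatchers that pick a variant at run time by switching on acb->adapter_type. A minimal sketch of that dispatch shape, assuming the ACB_ADAPTER_TYPE_* constants from the driver header; it mirrors arcmsr_flush_adapter_cache() further down and is illustrative only, not the driver's code:

static void arcmsr_flush_cache_sketch(struct AdapterControlBlock *acb)
{
	switch (acb->adapter_type) {
	case ACB_ADAPTER_TYPE_A:
		arcmsr_hbaA_flush_cache(acb);
		break;
	case ACB_ADAPTER_TYPE_B:
		arcmsr_hbaB_flush_cache(acb);
		break;
	default:	/* types C, D and E are dispatched the same way */
		break;
	}
}
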
240 static void arcmsr_free_io_queue(struct AdapterControlBlock *acb) in arcmsr_free_io_queue() argument
242 switch (acb->adapter_type) { in arcmsr_free_io_queue()
247 dma_free_coherent(&acb->pdev->dev, acb->ioqueue_size, in arcmsr_free_io_queue()
248 acb->dma_coherent2, acb->dma_coherent_handle2); in arcmsr_free_io_queue()
253 static bool arcmsr_remap_pciregion(struct AdapterControlBlock *acb) in arcmsr_remap_pciregion() argument
255 struct pci_dev *pdev = acb->pdev; in arcmsr_remap_pciregion()
256 switch (acb->adapter_type){ in arcmsr_remap_pciregion()
258 acb->pmuA = ioremap(pci_resource_start(pdev,0), pci_resource_len(pdev,0)); in arcmsr_remap_pciregion()
259 if (!acb->pmuA) { in arcmsr_remap_pciregion()
260 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
269 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
275 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
278 acb->mem_base0 = mem_base0; in arcmsr_remap_pciregion()
279 acb->mem_base1 = mem_base1; in arcmsr_remap_pciregion()
283 acb->pmuC = ioremap(pci_resource_start(pdev, 1), pci_resource_len(pdev, 1)); in arcmsr_remap_pciregion()
284 if (!acb->pmuC) { in arcmsr_remap_pciregion()
285 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
288 if (readl(&acb->pmuC->outbound_doorbell) & ARCMSR_HBCMU_IOP2DRV_MESSAGE_CMD_DONE) { in arcmsr_remap_pciregion()
289 …writel(ARCMSR_HBCMU_IOP2DRV_MESSAGE_CMD_DONE_DOORBELL_CLEAR, &acb->pmuC->outbound_doorbell_clear);… in arcmsr_remap_pciregion()
303 acb->host->host_no); in arcmsr_remap_pciregion()
306 acb->mem_base0 = mem_base0; in arcmsr_remap_pciregion()
310 acb->pmuE = ioremap(pci_resource_start(pdev, 1), in arcmsr_remap_pciregion()
312 if (!acb->pmuE) { in arcmsr_remap_pciregion()
314 acb->host->host_no); in arcmsr_remap_pciregion()
317 writel(0, &acb->pmuE->host_int_status); /*clear interrupt*/ in arcmsr_remap_pciregion()
318 writel(ARCMSR_HBEMU_DOORBELL_SYNC, &acb->pmuE->iobound_doorbell); /* synchronize doorbell to 0 */ in arcmsr_remap_pciregion()
319 acb->in_doorbell = 0; in arcmsr_remap_pciregion()
320 acb->out_doorbell = 0; in arcmsr_remap_pciregion()
324 acb->pmuF = ioremap(pci_resource_start(pdev, 0), pci_resource_len(pdev, 0)); in arcmsr_remap_pciregion()
325 if (!acb->pmuF) { in arcmsr_remap_pciregion()
327 acb->host->host_no); in arcmsr_remap_pciregion()
330 writel(0, &acb->pmuF->host_int_status); /* clear interrupt */ in arcmsr_remap_pciregion()
331 writel(ARCMSR_HBFMU_DOORBELL_SYNC, &acb->pmuF->iobound_doorbell); in arcmsr_remap_pciregion()
332 acb->in_doorbell = 0; in arcmsr_remap_pciregion()
333 acb->out_doorbell = 0; in arcmsr_remap_pciregion()
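
Every branch of arcmsr_remap_pciregion() above follows the same pattern: ioremap() the relevant PCI BAR, print a KERN_NOTICE and bail out if the mapping fails, and, for types E and F, clear host_int_status and zero the in/out doorbell shadow copies. A condensed sketch of the type-A branch only (the helper name is illustrative; the other branches map different BARs):

static bool arcmsr_remap_bar0_sketch(struct AdapterControlBlock *acb)
{
	struct pci_dev *pdev = acb->pdev;

	acb->pmuA = ioremap(pci_resource_start(pdev, 0),
			    pci_resource_len(pdev, 0));
	if (!acb->pmuA) {
		printk(KERN_NOTICE "arcmsr%d: memory mapping region fail\n",
		       acb->host->host_no);
		return false;
	}
	return true;
}
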
340 static void arcmsr_unmap_pciregion(struct AdapterControlBlock *acb) in arcmsr_unmap_pciregion() argument
342 switch (acb->adapter_type) { in arcmsr_unmap_pciregion()
344 iounmap(acb->pmuA); in arcmsr_unmap_pciregion()
347 iounmap(acb->mem_base0); in arcmsr_unmap_pciregion()
348 iounmap(acb->mem_base1); in arcmsr_unmap_pciregion()
351 iounmap(acb->pmuC); in arcmsr_unmap_pciregion()
354 iounmap(acb->mem_base0); in arcmsr_unmap_pciregion()
357 iounmap(acb->pmuE); in arcmsr_unmap_pciregion()
360 iounmap(acb->pmuF); in arcmsr_unmap_pciregion()
368 struct AdapterControlBlock *acb = dev_id; in arcmsr_do_interrupt() local
370 handle_state = arcmsr_interrupt(acb); in arcmsr_do_interrupt()
397 static uint8_t arcmsr_hbaA_wait_msgint_ready(struct AdapterControlBlock *acb) in arcmsr_hbaA_wait_msgint_ready() argument
399 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_wait_msgint_ready()
415 static uint8_t arcmsr_hbaB_wait_msgint_ready(struct AdapterControlBlock *acb) in arcmsr_hbaB_wait_msgint_ready() argument
417 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_wait_msgint_ready()
488 static void arcmsr_hbaA_flush_cache(struct AdapterControlBlock *acb) in arcmsr_hbaA_flush_cache() argument
490 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_flush_cache()
494 if (arcmsr_hbaA_wait_msgint_ready(acb)) in arcmsr_hbaA_flush_cache()
499 timeout, retry count down = %d \n", acb->host->host_no, retry_count); in arcmsr_hbaA_flush_cache()
504 static void arcmsr_hbaB_flush_cache(struct AdapterControlBlock *acb) in arcmsr_hbaB_flush_cache() argument
506 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_flush_cache()
510 if (arcmsr_hbaB_wait_msgint_ready(acb)) in arcmsr_hbaB_flush_cache()
515 timeout,retry count down = %d \n", acb->host->host_no, retry_count); in arcmsr_hbaB_flush_cache()
573 static void arcmsr_flush_adapter_cache(struct AdapterControlBlock *acb) in arcmsr_flush_adapter_cache() argument
575 switch (acb->adapter_type) { in arcmsr_flush_adapter_cache()
578 arcmsr_hbaA_flush_cache(acb); in arcmsr_flush_adapter_cache()
581 arcmsr_hbaB_flush_cache(acb); in arcmsr_flush_adapter_cache()
584 arcmsr_hbaC_flush_cache(acb); in arcmsr_flush_adapter_cache()
587 arcmsr_hbaD_flush_cache(acb); in arcmsr_flush_adapter_cache()
591 arcmsr_hbaE_flush_cache(acb); in arcmsr_flush_adapter_cache()
596 static void arcmsr_hbaB_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaB_assign_regAddr() argument
598 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_assign_regAddr()
600 if (acb->pdev->device == PCI_DEVICE_ID_ARECA_1203) { in arcmsr_hbaB_assign_regAddr()
616 static void arcmsr_hbaD_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaD_assign_regAddr() argument
618 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_assign_regAddr()
648 static void arcmsr_hbaF_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaF_assign_regAddr() argument
653 memset(acb->dma_coherent2, 0xff, acb->completeQ_size); in arcmsr_hbaF_assign_regAddr()
654 acb->message_wbuffer = (uint32_t *)round_up((unsigned long)acb->dma_coherent2 + in arcmsr_hbaF_assign_regAddr()
655 acb->completeQ_size, 4); in arcmsr_hbaF_assign_regAddr()
656 acb->message_rbuffer = ((void *)acb->message_wbuffer) + 0x100; in arcmsr_hbaF_assign_regAddr()
657 acb->msgcode_rwbuffer = ((void *)acb->message_wbuffer) + 0x200; in arcmsr_hbaF_assign_regAddr()
658 memset((void *)acb->message_wbuffer, 0, MESG_RW_BUFFER_SIZE); in arcmsr_hbaF_assign_regAddr()
659 host_buffer_dma = round_up(acb->dma_coherent_handle2 + acb->completeQ_size, 4); in arcmsr_hbaF_assign_regAddr()
660 pmuF = acb->pmuF; in arcmsr_hbaF_assign_regAddr()
669 static bool arcmsr_alloc_io_queue(struct AdapterControlBlock *acb) in arcmsr_alloc_io_queue() argument
674 struct pci_dev *pdev = acb->pdev; in arcmsr_alloc_io_queue()
676 switch (acb->adapter_type) { in arcmsr_alloc_io_queue()
678 acb->ioqueue_size = roundup(sizeof(struct MessageUnit_B), 32); in arcmsr_alloc_io_queue()
679 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
682 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
685 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
686 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
687 acb->pmuB = (struct MessageUnit_B *)dma_coherent; in arcmsr_alloc_io_queue()
688 arcmsr_hbaB_assign_regAddr(acb); in arcmsr_alloc_io_queue()
692 acb->ioqueue_size = roundup(sizeof(struct MessageUnit_D), 32); in arcmsr_alloc_io_queue()
693 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
696 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
699 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
700 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
701 acb->pmuD = (struct MessageUnit_D *)dma_coherent; in arcmsr_alloc_io_queue()
702 arcmsr_hbaD_assign_regAddr(acb); in arcmsr_alloc_io_queue()
708 acb->ioqueue_size = roundup(completeQ_size, 32); in arcmsr_alloc_io_queue()
709 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
712 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
715 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
716 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
717 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
718 acb->completionQ_entry = acb->ioqueue_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_io_queue()
719 acb->doneq_index = 0; in arcmsr_alloc_io_queue()
726 arcmsr_wait_firmware_ready(acb); in arcmsr_alloc_io_queue()
727 QueueDepth = depthTbl[readl(&acb->pmuF->outbound_msgaddr1) & 7]; in arcmsr_alloc_io_queue()
728 acb->completeQ_size = sizeof(struct deliver_completeQ) * QueueDepth + 128; in arcmsr_alloc_io_queue()
729 acb->ioqueue_size = roundup(acb->completeQ_size + MESG_RW_BUFFER_SIZE, 32); in arcmsr_alloc_io_queue()
730 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
733 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
736 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
737 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
738 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
739 acb->completionQ_entry = acb->completeQ_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_io_queue()
740 acb->doneq_index = 0; in arcmsr_alloc_io_queue()
741 arcmsr_hbaF_assign_regAddr(acb); in arcmsr_alloc_io_queue()
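
For type F the completion-queue size depends on the queue depth reported by the firmware, so the sizing is done before dma_alloc_coherent(). The scattered fragments above, gathered into one sketch (depthTbl and MESG_RW_BUFFER_SIZE are defined elsewhere in the driver, and the helper name is illustrative):

static void arcmsr_hbaF_size_queues_sketch(struct AdapterControlBlock *acb)
{
	uint32_t QueueDepth;

	arcmsr_wait_firmware_ready(acb);
	QueueDepth = depthTbl[readl(&acb->pmuF->outbound_msgaddr1) & 7];
	acb->completeQ_size = sizeof(struct deliver_completeQ) * QueueDepth + 128;
	acb->ioqueue_size = roundup(acb->completeQ_size + MESG_RW_BUFFER_SIZE, 32);
}
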
750 static int arcmsr_alloc_ccb_pool(struct AdapterControlBlock *acb) in arcmsr_alloc_ccb_pool() argument
752 struct pci_dev *pdev = acb->pdev; in arcmsr_alloc_ccb_pool()
765 acb->devstate[i][j] = ARECA_RAID_GONE; in arcmsr_alloc_ccb_pool()
769 firm_config_version = acb->firm_cfg_version; in arcmsr_alloc_ccb_pool()
774 acb->host->max_sectors = max_xfer_len/512; in arcmsr_alloc_ccb_pool()
775 acb->host->sg_tablesize = max_sg_entrys; in arcmsr_alloc_ccb_pool()
777 acb->uncache_size = roundup_ccbsize * acb->maxFreeCCB; in arcmsr_alloc_ccb_pool()
778 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) in arcmsr_alloc_ccb_pool()
779 acb->uncache_size += acb->ioqueue_size; in arcmsr_alloc_ccb_pool()
780 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->uncache_size, &dma_coherent_handle, GFP_KERNEL); in arcmsr_alloc_ccb_pool()
782 printk(KERN_NOTICE "arcmsr%d: dma_alloc_coherent got error\n", acb->host->host_no); in arcmsr_alloc_ccb_pool()
785 acb->dma_coherent = dma_coherent; in arcmsr_alloc_ccb_pool()
786 acb->dma_coherent_handle = dma_coherent_handle; in arcmsr_alloc_ccb_pool()
787 memset(dma_coherent, 0, acb->uncache_size); in arcmsr_alloc_ccb_pool()
788 acb->ccbsize = roundup_ccbsize; in arcmsr_alloc_ccb_pool()
791 acb->vir2phy_offset = (unsigned long)dma_coherent - (unsigned long)dma_coherent_handle; in arcmsr_alloc_ccb_pool()
792 for(i = 0; i < acb->maxFreeCCB; i++){ in arcmsr_alloc_ccb_pool()
794 switch (acb->adapter_type) { in arcmsr_alloc_ccb_pool()
806 acb->pccb_pool[i] = ccb_tmp; in arcmsr_alloc_ccb_pool()
807 ccb_tmp->acb = acb; in arcmsr_alloc_ccb_pool()
812 acb->maxFreeCCB = i; in arcmsr_alloc_ccb_pool()
813 acb->host->can_queue = i; in arcmsr_alloc_ccb_pool()
817 list_add_tail(&ccb_tmp->list, &acb->ccb_free_list); in arcmsr_alloc_ccb_pool()
821 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) { in arcmsr_alloc_ccb_pool()
822 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_ccb_pool()
823 acb->dma_coherent2 = ccb_tmp; in arcmsr_alloc_ccb_pool()
825 switch (acb->adapter_type) { in arcmsr_alloc_ccb_pool()
827 acb->pmuB = (struct MessageUnit_B *)acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
828 arcmsr_hbaB_assign_regAddr(acb); in arcmsr_alloc_ccb_pool()
831 acb->pmuD = (struct MessageUnit_D *)acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
832 arcmsr_hbaD_assign_regAddr(acb); in arcmsr_alloc_ccb_pool()
835 acb->pCompletionQ = acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
836 acb->completionQ_entry = acb->ioqueue_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_ccb_pool()
837 acb->doneq_index = 0; in arcmsr_alloc_ccb_pool()
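
arcmsr_alloc_ccb_pool() records vir2phy_offset, the difference between the pool's kernel virtual address and its DMA handle. The postqueue ISRs and the abort path below add that offset to a completed CDB's bus address (after restoring the high bits from cdb_phyadd_hipart) to get back to the driver's CDB and, from there, to the owning CCB. A sketch of that translation; recovering the CCB with container_of() is an assumption about the CCB layout that this listing does not show:

static struct CommandControlBlock *
arcmsr_phy2ccb_sketch(struct AdapterControlBlock *acb, unsigned long ccb_cdb_phy)
{
	struct ARCMSR_CDB *pARCMSR_CDB;

	if (acb->cdb_phyadd_hipart)
		ccb_cdb_phy |= acb->cdb_phyadd_hipart;	/* restore upper address bits */
	pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy);
	/* assumed layout: the ARCMSR_CDB is embedded in its CommandControlBlock */
	return container_of(pARCMSR_CDB, struct CommandControlBlock, arcmsr_cdb);
}
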
845 struct AdapterControlBlock *acb = container_of(work, in arcmsr_message_isr_bh_fn() local
847 char *acb_dev_map = (char *)acb->device_map; in arcmsr_message_isr_bh_fn()
854 switch (acb->adapter_type) { in arcmsr_message_isr_bh_fn()
856 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_message_isr_bh_fn()
863 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_message_isr_bh_fn()
870 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_message_isr_bh_fn()
877 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_message_isr_bh_fn()
884 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_message_isr_bh_fn()
891 signature = (uint32_t __iomem *)(&acb->msgcode_rwbuffer[0]); in arcmsr_message_isr_bh_fn()
892 devicemap = (char __iomem *)(&acb->msgcode_rwbuffer[21]); in arcmsr_message_isr_bh_fn()
908 scsi_add_device(acb->host, in arcmsr_message_isr_bh_fn()
912 psdev = scsi_device_lookup(acb->host, in arcmsr_message_isr_bh_fn()
926 acb->acb_flags &= ~ACB_F_MSG_GET_CONFIG; in arcmsr_message_isr_bh_fn()
930 arcmsr_request_irq(struct pci_dev *pdev, struct AdapterControlBlock *acb) in arcmsr_request_irq() argument
940 pr_info("arcmsr%d: msi-x enabled\n", acb->host->host_no); in arcmsr_request_irq()
958 acb->vector_count = nvec; in arcmsr_request_irq()
961 flags, "arcmsr", acb)) { in arcmsr_request_irq()
963 acb->host->host_no, pci_irq_vector(pdev, i)); in arcmsr_request_irq()
971 free_irq(pci_irq_vector(pdev, i), acb); in arcmsr_request_irq()
992 static int arcmsr_set_dma_mask(struct AdapterControlBlock *acb) in arcmsr_set_dma_mask() argument
994 struct pci_dev *pcidev = acb->pdev; in arcmsr_set_dma_mask()
997 if (((acb->adapter_type == ACB_ADAPTER_TYPE_A) && !dma_mask_64) || in arcmsr_set_dma_mask()
1020 struct AdapterControlBlock *acb; in arcmsr_probe() local
1034 acb = (struct AdapterControlBlock *) host->hostdata; in arcmsr_probe()
1035 memset(acb,0,sizeof(struct AdapterControlBlock)); in arcmsr_probe()
1036 acb->pdev = pdev; in arcmsr_probe()
1037 acb->adapter_type = id->driver_data; in arcmsr_probe()
1038 if (arcmsr_set_dma_mask(acb)) in arcmsr_probe()
1040 acb->host = host; in arcmsr_probe()
1058 spin_lock_init(&acb->eh_lock); in arcmsr_probe()
1059 spin_lock_init(&acb->ccblist_lock); in arcmsr_probe()
1060 spin_lock_init(&acb->postq_lock); in arcmsr_probe()
1061 spin_lock_init(&acb->doneq_lock); in arcmsr_probe()
1062 spin_lock_init(&acb->rqbuffer_lock); in arcmsr_probe()
1063 spin_lock_init(&acb->wqbuffer_lock); in arcmsr_probe()
1064 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_probe()
1067 acb->acb_flags &= ~ACB_F_SCSISTOPADAPTER; in arcmsr_probe()
1068 INIT_LIST_HEAD(&acb->ccb_free_list); in arcmsr_probe()
1069 error = arcmsr_remap_pciregion(acb); in arcmsr_probe()
1073 error = arcmsr_alloc_io_queue(acb); in arcmsr_probe()
1076 error = arcmsr_get_firmware_spec(acb); in arcmsr_probe()
1080 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) in arcmsr_probe()
1081 arcmsr_free_io_queue(acb); in arcmsr_probe()
1082 error = arcmsr_alloc_ccb_pool(acb); in arcmsr_probe()
1090 if (arcmsr_request_irq(pdev, acb) == FAILED) in arcmsr_probe()
1092 arcmsr_iop_init(acb); in arcmsr_probe()
1093 arcmsr_init_get_devmap_timer(acb); in arcmsr_probe()
1095 arcmsr_init_set_datetime_timer(acb); in arcmsr_probe()
1096 if(arcmsr_alloc_sysfs_attr(acb)) in arcmsr_probe()
1102 del_timer_sync(&acb->refresh_timer); in arcmsr_probe()
1103 del_timer_sync(&acb->eternal_timer); in arcmsr_probe()
1104 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_probe()
1105 arcmsr_stop_adapter_bgrb(acb); in arcmsr_probe()
1106 arcmsr_flush_adapter_cache(acb); in arcmsr_probe()
1107 arcmsr_free_irq(pdev, acb); in arcmsr_probe()
1111 arcmsr_free_ccb_pool(acb); in arcmsr_probe()
1114 arcmsr_free_io_queue(acb); in arcmsr_probe()
1116 arcmsr_unmap_pciregion(acb); in arcmsr_probe()
1127 struct AdapterControlBlock *acb) in arcmsr_free_irq() argument
1131 for (i = 0; i < acb->vector_count; i++) in arcmsr_free_irq()
1132 free_irq(pci_irq_vector(pdev, i), acb); in arcmsr_free_irq()
1140 struct AdapterControlBlock *acb = in arcmsr_suspend() local
1143 arcmsr_disable_outbound_ints(acb); in arcmsr_suspend()
1144 arcmsr_free_irq(pdev, acb); in arcmsr_suspend()
1145 del_timer_sync(&acb->eternal_timer); in arcmsr_suspend()
1147 del_timer_sync(&acb->refresh_timer); in arcmsr_suspend()
1148 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_suspend()
1149 arcmsr_stop_adapter_bgrb(acb); in arcmsr_suspend()
1150 arcmsr_flush_adapter_cache(acb); in arcmsr_suspend()
1158 struct AdapterControlBlock *acb = in arcmsr_resume() local
1161 if (arcmsr_set_dma_mask(acb)) in arcmsr_resume()
1163 if (arcmsr_request_irq(pdev, acb) == FAILED) in arcmsr_resume()
1165 switch (acb->adapter_type) { in arcmsr_resume()
1167 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_resume()
1178 writel(0, &acb->pmuE->host_int_status); in arcmsr_resume()
1179 writel(ARCMSR_HBEMU_DOORBELL_SYNC, &acb->pmuE->iobound_doorbell); in arcmsr_resume()
1180 acb->in_doorbell = 0; in arcmsr_resume()
1181 acb->out_doorbell = 0; in arcmsr_resume()
1182 acb->doneq_index = 0; in arcmsr_resume()
1185 writel(0, &acb->pmuF->host_int_status); in arcmsr_resume()
1186 writel(ARCMSR_HBFMU_DOORBELL_SYNC, &acb->pmuF->iobound_doorbell); in arcmsr_resume()
1187 acb->in_doorbell = 0; in arcmsr_resume()
1188 acb->out_doorbell = 0; in arcmsr_resume()
1189 acb->doneq_index = 0; in arcmsr_resume()
1190 arcmsr_hbaF_assign_regAddr(acb); in arcmsr_resume()
1193 arcmsr_iop_init(acb); in arcmsr_resume()
1194 arcmsr_init_get_devmap_timer(acb); in arcmsr_resume()
1196 arcmsr_init_set_datetime_timer(acb); in arcmsr_resume()
1199 arcmsr_stop_adapter_bgrb(acb); in arcmsr_resume()
1200 arcmsr_flush_adapter_cache(acb); in arcmsr_resume()
1203 arcmsr_free_ccb_pool(acb); in arcmsr_resume()
1204 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_resume()
1205 arcmsr_free_io_queue(acb); in arcmsr_resume()
1206 arcmsr_unmap_pciregion(acb); in arcmsr_resume()
1211 static uint8_t arcmsr_hbaA_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_hbaA_abort_allcmd() argument
1213 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_abort_allcmd()
1215 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_abort_allcmd()
1218 , acb->host->host_no); in arcmsr_hbaA_abort_allcmd()
1224 static uint8_t arcmsr_hbaB_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_hbaB_abort_allcmd() argument
1226 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_abort_allcmd()
1229 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_abort_allcmd()
1232 , acb->host->host_no); in arcmsr_hbaB_abort_allcmd()
1279 static uint8_t arcmsr_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_abort_allcmd() argument
1282 switch (acb->adapter_type) { in arcmsr_abort_allcmd()
1284 rtnval = arcmsr_hbaA_abort_allcmd(acb); in arcmsr_abort_allcmd()
1287 rtnval = arcmsr_hbaB_abort_allcmd(acb); in arcmsr_abort_allcmd()
1290 rtnval = arcmsr_hbaC_abort_allcmd(acb); in arcmsr_abort_allcmd()
1293 rtnval = arcmsr_hbaD_abort_allcmd(acb); in arcmsr_abort_allcmd()
1297 rtnval = arcmsr_hbaE_abort_allcmd(acb); in arcmsr_abort_allcmd()
1312 struct AdapterControlBlock *acb = ccb->acb; in arcmsr_ccb_complete() local
1315 atomic_dec(&acb->ccboutstandingcount); in arcmsr_ccb_complete()
1318 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_ccb_complete()
1319 list_add_tail(&ccb->list, &acb->ccb_free_list); in arcmsr_ccb_complete()
1320 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_ccb_complete()
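
Completion hands the CCB back to the free list under ccblist_lock, pairing with arcmsr_get_freeccb() further down; condensed into one sketch (name illustrative):

static void arcmsr_return_ccb_sketch(struct AdapterControlBlock *acb,
				     struct CommandControlBlock *ccb)
{
	unsigned long flags;

	atomic_dec(&acb->ccboutstandingcount);
	spin_lock_irqsave(&acb->ccblist_lock, flags);
	list_add_tail(&ccb->list, &acb->ccb_free_list);
	spin_unlock_irqrestore(&acb->ccblist_lock, flags);
}
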
1344 static u32 arcmsr_disable_outbound_ints(struct AdapterControlBlock *acb) in arcmsr_disable_outbound_ints() argument
1347 switch (acb->adapter_type) { in arcmsr_disable_outbound_ints()
1349 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_disable_outbound_ints()
1356 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_disable_outbound_ints()
1362 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_disable_outbound_ints()
1369 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_disable_outbound_ints()
1376 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_disable_outbound_ints()
1386 static void arcmsr_report_ccb_state(struct AdapterControlBlock *acb, in arcmsr_report_ccb_state() argument
1393 if (acb->devstate[id][lun] == ARECA_RAID_GONE) in arcmsr_report_ccb_state()
1394 acb->devstate[id][lun] = ARECA_RAID_GOOD; in arcmsr_report_ccb_state()
1400 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1409 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1416 acb->devstate[id][lun] = ARECA_RAID_GOOD; in arcmsr_report_ccb_state()
1426 , acb->host->host_no in arcmsr_report_ccb_state()
1430 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1438 static void arcmsr_drain_donequeue(struct AdapterControlBlock *acb, struct CommandControlBlock *pCC… in arcmsr_drain_donequeue() argument
1440 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_drain_donequeue()
1447 acb->host->host_no, pCCB); in arcmsr_drain_donequeue()
1455 , acb->host->host_no in arcmsr_drain_donequeue()
1456 , acb in arcmsr_drain_donequeue()
1458 , pCCB->acb in arcmsr_drain_donequeue()
1460 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_drain_donequeue()
1463 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_drain_donequeue()
1466 static void arcmsr_done4abort_postqueue(struct AdapterControlBlock *acb) in arcmsr_done4abort_postqueue() argument
1475 switch (acb->adapter_type) { in arcmsr_done4abort_postqueue()
1478 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_done4abort_postqueue()
1481 acb->outbound_int_enable; in arcmsr_done4abort_postqueue()
1485 && (i++ < acb->maxOutstanding)) { in arcmsr_done4abort_postqueue()
1487 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1488 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1489 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1492 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1498 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_done4abort_postqueue()
1506 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1507 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1508 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1511 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1520 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_done4abort_postqueue()
1521 …while ((readl(&reg->host_int_status) & ARCMSR_HBCMU_OUTBOUND_POSTQUEUE_ISR) && (i++ < acb->maxOuts… in arcmsr_done4abort_postqueue()
1525 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1526 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1527 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1530 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1535 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_done4abort_postqueue()
1540 residual = atomic_read(&acb->ccboutstandingcount); in arcmsr_done4abort_postqueue()
1542 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1554 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1558 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1559 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1561 (acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1567 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1571 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1580 arcmsr_hbaE_postqueue_isr(acb); in arcmsr_done4abort_postqueue()
1583 arcmsr_hbaF_postqueue_isr(acb); in arcmsr_done4abort_postqueue()
1588 static void arcmsr_remove_scsi_devices(struct AdapterControlBlock *acb) in arcmsr_remove_scsi_devices() argument
1590 char *acb_dev_map = (char *)acb->device_map; in arcmsr_remove_scsi_devices()
1596 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_remove_scsi_devices()
1597 ccb = acb->pccb_pool[i]; in arcmsr_remove_scsi_devices()
1609 psdev = scsi_device_lookup(acb->host, in arcmsr_remove_scsi_devices()
1624 static void arcmsr_free_pcidev(struct AdapterControlBlock *acb) in arcmsr_free_pcidev() argument
1629 host = acb->host; in arcmsr_free_pcidev()
1630 arcmsr_free_sysfs_attr(acb); in arcmsr_free_pcidev()
1632 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_free_pcidev()
1633 del_timer_sync(&acb->eternal_timer); in arcmsr_free_pcidev()
1635 del_timer_sync(&acb->refresh_timer); in arcmsr_free_pcidev()
1636 pdev = acb->pdev; in arcmsr_free_pcidev()
1637 arcmsr_free_irq(pdev, acb); in arcmsr_free_pcidev()
1638 arcmsr_free_ccb_pool(acb); in arcmsr_free_pcidev()
1639 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_free_pcidev()
1640 arcmsr_free_io_queue(acb); in arcmsr_free_pcidev()
1641 arcmsr_unmap_pciregion(acb); in arcmsr_free_pcidev()
1650 struct AdapterControlBlock *acb = in arcmsr_remove() local
1657 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_remove()
1658 acb->acb_flags |= ACB_F_ADAPTER_REMOVED; in arcmsr_remove()
1659 arcmsr_remove_scsi_devices(acb); in arcmsr_remove()
1660 arcmsr_free_pcidev(acb); in arcmsr_remove()
1663 arcmsr_free_sysfs_attr(acb); in arcmsr_remove()
1665 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_remove()
1666 del_timer_sync(&acb->eternal_timer); in arcmsr_remove()
1668 del_timer_sync(&acb->refresh_timer); in arcmsr_remove()
1669 arcmsr_disable_outbound_ints(acb); in arcmsr_remove()
1670 arcmsr_stop_adapter_bgrb(acb); in arcmsr_remove()
1671 arcmsr_flush_adapter_cache(acb); in arcmsr_remove()
1672 acb->acb_flags |= ACB_F_SCSISTOPADAPTER; in arcmsr_remove()
1673 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_remove()
1675 for (poll_count = 0; poll_count < acb->maxOutstanding; poll_count++){ in arcmsr_remove()
1676 if (!atomic_read(&acb->ccboutstandingcount)) in arcmsr_remove()
1678 arcmsr_interrupt(acb);/* FIXME: need spinlock */ in arcmsr_remove()
1682 if (atomic_read(&acb->ccboutstandingcount)) { in arcmsr_remove()
1685 arcmsr_abort_allcmd(acb); in arcmsr_remove()
1686 arcmsr_done4abort_postqueue(acb); in arcmsr_remove()
1687 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_remove()
1688 struct CommandControlBlock *ccb = acb->pccb_pool[i]; in arcmsr_remove()
1696 arcmsr_free_irq(pdev, acb); in arcmsr_remove()
1697 arcmsr_free_ccb_pool(acb); in arcmsr_remove()
1698 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_remove()
1699 arcmsr_free_io_queue(acb); in arcmsr_remove()
1700 arcmsr_unmap_pciregion(acb); in arcmsr_remove()
1709 struct AdapterControlBlock *acb = in arcmsr_shutdown() local
1711 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_shutdown()
1713 del_timer_sync(&acb->eternal_timer); in arcmsr_shutdown()
1715 del_timer_sync(&acb->refresh_timer); in arcmsr_shutdown()
1716 arcmsr_disable_outbound_ints(acb); in arcmsr_shutdown()
1717 arcmsr_free_irq(pdev, acb); in arcmsr_shutdown()
1718 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_shutdown()
1719 arcmsr_stop_adapter_bgrb(acb); in arcmsr_shutdown()
1720 arcmsr_flush_adapter_cache(acb); in arcmsr_shutdown()
1737 static void arcmsr_enable_outbound_ints(struct AdapterControlBlock *acb, in arcmsr_enable_outbound_ints() argument
1741 switch (acb->adapter_type) { in arcmsr_enable_outbound_ints()
1744 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_enable_outbound_ints()
1749 acb->outbound_int_enable = ~(intmask_org & mask) & 0x000000ff; in arcmsr_enable_outbound_ints()
1754 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_enable_outbound_ints()
1760 acb->outbound_int_enable = (intmask_org | mask) & 0x0000000f; in arcmsr_enable_outbound_ints()
1764 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_enable_outbound_ints()
1767 acb->outbound_int_enable = ~(intmask_org & mask) & 0x0000000f; in arcmsr_enable_outbound_ints()
1771 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_enable_outbound_ints()
1779 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_enable_outbound_ints()
1788 static int arcmsr_build_ccb(struct AdapterControlBlock *acb, in arcmsr_build_ccb() argument
1808 if (unlikely(nseg > acb->host->sg_tablesize || nseg < 0)) in arcmsr_build_ccb()
1843 static void arcmsr_post_ccb(struct AdapterControlBlock *acb, struct CommandControlBlock *ccb) in arcmsr_post_ccb() argument
1847 atomic_inc(&acb->ccboutstandingcount); in arcmsr_post_ccb()
1849 switch (acb->adapter_type) { in arcmsr_post_ccb()
1851 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_post_ccb()
1862 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_post_ccb()
1880 struct MessageUnit_C __iomem *phbcmu = acb->pmuC; in arcmsr_post_ccb()
1890 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_post_ccb()
1896 spin_lock_irqsave(&acb->postq_lock, flags); in arcmsr_post_ccb()
1909 spin_unlock_irqrestore(&acb->postq_lock, flags); in arcmsr_post_ccb()
1913 struct MessageUnit_E __iomem *pmu = acb->pmuE; in arcmsr_post_ccb()
1923 struct MessageUnit_F __iomem *pmu = acb->pmuF; in arcmsr_post_ccb()
1942 static void arcmsr_hbaA_stop_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaA_stop_bgrb() argument
1944 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_stop_bgrb()
1945 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_hbaA_stop_bgrb()
1947 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_stop_bgrb()
1950 , acb->host->host_no); in arcmsr_hbaA_stop_bgrb()
1954 static void arcmsr_hbaB_stop_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaB_stop_bgrb() argument
1956 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_stop_bgrb()
1957 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_hbaB_stop_bgrb()
1960 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_stop_bgrb()
1963 , acb->host->host_no); in arcmsr_hbaB_stop_bgrb()
2006 static void arcmsr_stop_adapter_bgrb(struct AdapterControlBlock *acb) in arcmsr_stop_adapter_bgrb() argument
2008 switch (acb->adapter_type) { in arcmsr_stop_adapter_bgrb()
2010 arcmsr_hbaA_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2013 arcmsr_hbaB_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2016 arcmsr_hbaC_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2019 arcmsr_hbaD_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2023 arcmsr_hbaE_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2028 static void arcmsr_free_ccb_pool(struct AdapterControlBlock *acb) in arcmsr_free_ccb_pool() argument
2030 dma_free_coherent(&acb->pdev->dev, acb->uncache_size, acb->dma_coherent, acb->dma_coherent_handle); in arcmsr_free_ccb_pool()
2033 static void arcmsr_iop_message_read(struct AdapterControlBlock *acb) in arcmsr_iop_message_read() argument
2035 switch (acb->adapter_type) { in arcmsr_iop_message_read()
2037 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_message_read()
2042 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_message_read()
2047 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_message_read()
2053 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_message_read()
2060 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_message_read()
2061 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_iop_message_read()
2062 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_message_read()
2068 static void arcmsr_iop_message_wrote(struct AdapterControlBlock *acb) in arcmsr_iop_message_wrote() argument
2070 switch (acb->adapter_type) { in arcmsr_iop_message_wrote()
2072 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_message_wrote()
2082 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_message_wrote()
2091 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_message_wrote()
2100 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_message_wrote()
2107 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_message_wrote()
2108 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_WRITE_OK; in arcmsr_iop_message_wrote()
2109 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_message_wrote()
2115 struct QBUFFER __iomem *arcmsr_get_iop_rqbuffer(struct AdapterControlBlock *acb) in arcmsr_get_iop_rqbuffer() argument
2118 switch (acb->adapter_type) { in arcmsr_get_iop_rqbuffer()
2121 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_get_iop_rqbuffer()
2126 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_get_iop_rqbuffer()
2131 struct MessageUnit_C __iomem *phbcmu = acb->pmuC; in arcmsr_get_iop_rqbuffer()
2136 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_get_iop_rqbuffer()
2141 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_get_iop_rqbuffer()
2146 qbuffer = (struct QBUFFER __iomem *)acb->message_rbuffer; in arcmsr_get_iop_rqbuffer()
2153 static struct QBUFFER __iomem *arcmsr_get_iop_wqbuffer(struct AdapterControlBlock *acb) in arcmsr_get_iop_wqbuffer() argument
2156 switch (acb->adapter_type) { in arcmsr_get_iop_wqbuffer()
2159 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_get_iop_wqbuffer()
2164 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_get_iop_wqbuffer()
2169 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_get_iop_wqbuffer()
2174 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_get_iop_wqbuffer()
2179 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_get_iop_wqbuffer()
2184 pqbuffer = (struct QBUFFER __iomem *)acb->message_wbuffer; in arcmsr_get_iop_wqbuffer()
2191 arcmsr_Read_iop_rqbuffer_in_DWORD(struct AdapterControlBlock *acb, in arcmsr_Read_iop_rqbuffer_in_DWORD() argument
2217 pQbuffer = &acb->rqbuffer[acb->rqbuf_putIndex]; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2219 acb->rqbuf_putIndex++; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2221 acb->rqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2227 arcmsr_iop_message_read(acb); in arcmsr_Read_iop_rqbuffer_in_DWORD()
2232 arcmsr_Read_iop_rqbuffer_data(struct AdapterControlBlock *acb, in arcmsr_Read_iop_rqbuffer_data() argument
2239 if (acb->adapter_type > ACB_ADAPTER_TYPE_B) in arcmsr_Read_iop_rqbuffer_data()
2240 return arcmsr_Read_iop_rqbuffer_in_DWORD(acb, prbuffer); in arcmsr_Read_iop_rqbuffer_data()
2244 pQbuffer = &acb->rqbuffer[acb->rqbuf_putIndex]; in arcmsr_Read_iop_rqbuffer_data()
2246 acb->rqbuf_putIndex++; in arcmsr_Read_iop_rqbuffer_data()
2247 acb->rqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_Read_iop_rqbuffer_data()
2251 arcmsr_iop_message_read(acb); in arcmsr_Read_iop_rqbuffer_data()
2255 static void arcmsr_iop2drv_data_wrote_handle(struct AdapterControlBlock *acb) in arcmsr_iop2drv_data_wrote_handle() argument
2261 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop2drv_data_wrote_handle()
2262 prbuffer = arcmsr_get_iop_rqbuffer(acb); in arcmsr_iop2drv_data_wrote_handle()
2263 buf_empty_len = (acb->rqbuf_putIndex - acb->rqbuf_getIndex - 1) & in arcmsr_iop2drv_data_wrote_handle()
2266 if (arcmsr_Read_iop_rqbuffer_data(acb, prbuffer) == 0) in arcmsr_iop2drv_data_wrote_handle()
2267 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop2drv_data_wrote_handle()
2269 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop2drv_data_wrote_handle()
2270 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop2drv_data_wrote_handle()
2273 static void arcmsr_write_ioctldata2iop_in_DWORD(struct AdapterControlBlock *acb) in arcmsr_write_ioctldata2iop_in_DWORD() argument
2281 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_READED) { in arcmsr_write_ioctldata2iop_in_DWORD()
2287 acb->acb_flags &= (~ACB_F_MESSAGE_WQBUFFER_READED); in arcmsr_write_ioctldata2iop_in_DWORD()
2288 pwbuffer = arcmsr_get_iop_wqbuffer(acb); in arcmsr_write_ioctldata2iop_in_DWORD()
2290 while ((acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_write_ioctldata2iop_in_DWORD()
2292 pQbuffer = &acb->wqbuffer[acb->wqbuf_getIndex]; in arcmsr_write_ioctldata2iop_in_DWORD()
2294 acb->wqbuf_getIndex++; in arcmsr_write_ioctldata2iop_in_DWORD()
2295 acb->wqbuf_getIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_write_ioctldata2iop_in_DWORD()
2313 arcmsr_iop_message_wrote(acb); in arcmsr_write_ioctldata2iop_in_DWORD()
2318 arcmsr_write_ioctldata2iop(struct AdapterControlBlock *acb) in arcmsr_write_ioctldata2iop() argument
2325 if (acb->adapter_type > ACB_ADAPTER_TYPE_B) { in arcmsr_write_ioctldata2iop()
2326 arcmsr_write_ioctldata2iop_in_DWORD(acb); in arcmsr_write_ioctldata2iop()
2329 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_READED) { in arcmsr_write_ioctldata2iop()
2330 acb->acb_flags &= (~ACB_F_MESSAGE_WQBUFFER_READED); in arcmsr_write_ioctldata2iop()
2331 pwbuffer = arcmsr_get_iop_wqbuffer(acb); in arcmsr_write_ioctldata2iop()
2333 while ((acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_write_ioctldata2iop()
2335 pQbuffer = &acb->wqbuffer[acb->wqbuf_getIndex]; in arcmsr_write_ioctldata2iop()
2337 acb->wqbuf_getIndex++; in arcmsr_write_ioctldata2iop()
2338 acb->wqbuf_getIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_write_ioctldata2iop()
2343 arcmsr_iop_message_wrote(acb); in arcmsr_write_ioctldata2iop()
2347 static void arcmsr_iop2drv_data_read_handle(struct AdapterControlBlock *acb) in arcmsr_iop2drv_data_read_handle() argument
2351 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop2drv_data_read_handle()
2352 acb->acb_flags |= ACB_F_MESSAGE_WQBUFFER_READED; in arcmsr_iop2drv_data_read_handle()
2353 if (acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_iop2drv_data_read_handle()
2354 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop2drv_data_read_handle()
2355 if (acb->wqbuf_getIndex == acb->wqbuf_putIndex) in arcmsr_iop2drv_data_read_handle()
2356 acb->acb_flags |= ACB_F_MESSAGE_WQBUFFER_CLEARED; in arcmsr_iop2drv_data_read_handle()
2357 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop2drv_data_read_handle()
2360 static void arcmsr_hbaA_doorbell_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_doorbell_isr() argument
2363 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_doorbell_isr()
2368 arcmsr_iop2drv_data_wrote_handle(acb); in arcmsr_hbaA_doorbell_isr()
2370 arcmsr_iop2drv_data_read_handle(acb); in arcmsr_hbaA_doorbell_isr()
2456 static void arcmsr_hbaA_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_postqueue_isr() argument
2459 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_postqueue_isr()
2467 if (acb->cdb_phyadd_hipart) in arcmsr_hbaA_postqueue_isr()
2468 cdb_phy_addr = cdb_phy_addr | acb->cdb_phyadd_hipart; in arcmsr_hbaA_postqueue_isr()
2469 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + cdb_phy_addr); in arcmsr_hbaA_postqueue_isr()
2472 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_hbaA_postqueue_isr()
2475 static void arcmsr_hbaB_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_postqueue_isr() argument
2479 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_postqueue_isr()
2488 if (acb->cdb_phyadd_hipart) in arcmsr_hbaB_postqueue_isr()
2489 cdb_phy_addr = cdb_phy_addr | acb->cdb_phyadd_hipart; in arcmsr_hbaB_postqueue_isr()
2490 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + cdb_phy_addr); in arcmsr_hbaB_postqueue_isr()
2493 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_hbaB_postqueue_isr()
2501 static void arcmsr_hbaC_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaC_postqueue_isr() argument
2510 phbcmu = acb->pmuC; in arcmsr_hbaC_postqueue_isr()
2517 if (acb->cdb_phyadd_hipart) in arcmsr_hbaC_postqueue_isr()
2518 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaC_postqueue_isr()
2519 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset in arcmsr_hbaC_postqueue_isr()
2526 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaC_postqueue_isr()
2536 static void arcmsr_hbaD_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaD_postqueue_isr() argument
2546 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaD_postqueue_isr()
2547 pmu = acb->pmuD; in arcmsr_hbaD_postqueue_isr()
2561 if (acb->cdb_phyadd_hipart) in arcmsr_hbaD_postqueue_isr()
2562 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaD_postqueue_isr()
2563 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset in arcmsr_hbaD_postqueue_isr()
2569 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaD_postqueue_isr()
2577 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_postqueue_isr()
2580 static void arcmsr_hbaE_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaE_postqueue_isr() argument
2589 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaE_postqueue_isr()
2590 doneq_index = acb->doneq_index; in arcmsr_hbaE_postqueue_isr()
2591 pmu = acb->pmuE; in arcmsr_hbaE_postqueue_isr()
2593 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaE_postqueue_isr()
2594 ccb = acb->pccb_pool[cmdSMID]; in arcmsr_hbaE_postqueue_isr()
2595 error = (acb->pCompletionQ[doneq_index].cmdFlag in arcmsr_hbaE_postqueue_isr()
2597 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaE_postqueue_isr()
2599 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaE_postqueue_isr()
2602 acb->doneq_index = doneq_index; in arcmsr_hbaE_postqueue_isr()
2604 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_postqueue_isr()
2607 static void arcmsr_hbaF_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaF_postqueue_isr() argument
2616 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaF_postqueue_isr()
2617 doneq_index = acb->doneq_index; in arcmsr_hbaF_postqueue_isr()
2618 phbcmu = acb->pmuF; in arcmsr_hbaF_postqueue_isr()
2620 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaF_postqueue_isr()
2623 ccb = acb->pccb_pool[cmdSMID]; in arcmsr_hbaF_postqueue_isr()
2624 error = (acb->pCompletionQ[doneq_index].cmdFlag & in arcmsr_hbaF_postqueue_isr()
2626 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaF_postqueue_isr()
2627 acb->pCompletionQ[doneq_index].cmdSMID = 0xffff; in arcmsr_hbaF_postqueue_isr()
2629 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaF_postqueue_isr()
2632 acb->doneq_index = doneq_index; in arcmsr_hbaF_postqueue_isr()
2634 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaF_postqueue_isr()
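
The type-E and type-F ISRs above drain a host-memory completion ring under doneq_lock: read cmdSMID, look up the CCB in pccb_pool, hand it to arcmsr_drain_donequeue(), then advance doneq_index with wrap-around. A condensed sketch of the type-F variant; the error-flag mask name is assumed from the driver header:

static void arcmsr_hbaF_drain_sketch(struct AdapterControlBlock *acb)
{
	uint16_t cmdSMID, doneq_index;
	struct CommandControlBlock *ccb;
	unsigned long flags;
	bool error;

	spin_lock_irqsave(&acb->doneq_lock, flags);
	doneq_index = acb->doneq_index;
	while ((cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID) != 0xffff) {
		ccb = acb->pccb_pool[cmdSMID];
		error = (acb->pCompletionQ[doneq_index].cmdFlag &
			 ARCMSR_CCBREPLY_FLAG_ERROR_MODE1) ? true : false;
		arcmsr_drain_donequeue(acb, ccb, error);
		acb->pCompletionQ[doneq_index].cmdSMID = 0xffff; /* mark slot consumed */
		if (++doneq_index >= acb->completionQ_entry)
			doneq_index = 0;
	}
	acb->doneq_index = doneq_index;
	spin_unlock_irqrestore(&acb->doneq_lock, flags);
}
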
2645 static void arcmsr_hbaA_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_message_isr() argument
2647 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_message_isr()
2650 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaA_message_isr()
2651 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaA_message_isr()
2653 static void arcmsr_hbaB_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_message_isr() argument
2655 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_message_isr()
2659 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaB_message_isr()
2660 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaB_message_isr()
2671 static void arcmsr_hbaC_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaC_message_isr() argument
2673 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_hbaC_message_isr()
2676 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaC_message_isr()
2677 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaC_message_isr()
2680 static void arcmsr_hbaD_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaD_message_isr() argument
2682 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_message_isr()
2686 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaD_message_isr()
2687 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaD_message_isr()
2690 static void arcmsr_hbaE_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaE_message_isr() argument
2692 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_hbaE_message_isr()
2695 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaE_message_isr()
2696 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaE_message_isr()
2699 static int arcmsr_hbaA_handle_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_handle_isr() argument
2702 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_handle_isr()
2704 acb->outbound_int_enable; in arcmsr_hbaA_handle_isr()
2710 arcmsr_hbaA_doorbell_isr(acb); in arcmsr_hbaA_handle_isr()
2712 arcmsr_hbaA_postqueue_isr(acb); in arcmsr_hbaA_handle_isr()
2714 arcmsr_hbaA_message_isr(acb); in arcmsr_hbaA_handle_isr()
2716 acb->outbound_int_enable; in arcmsr_hbaA_handle_isr()
2723 static int arcmsr_hbaB_handle_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_handle_isr() argument
2726 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_handle_isr()
2728 acb->outbound_int_enable; in arcmsr_hbaB_handle_isr()
2735 arcmsr_iop2drv_data_wrote_handle(acb); in arcmsr_hbaB_handle_isr()
2737 arcmsr_iop2drv_data_read_handle(acb); in arcmsr_hbaB_handle_isr()
2739 arcmsr_hbaB_postqueue_isr(acb); in arcmsr_hbaB_handle_isr()
2741 arcmsr_hbaB_message_isr(acb); in arcmsr_hbaB_handle_isr()
2743 acb->outbound_int_enable; in arcmsr_hbaB_handle_isr()
2852 static irqreturn_t arcmsr_interrupt(struct AdapterControlBlock *acb) in arcmsr_interrupt() argument
2854 switch (acb->adapter_type) { in arcmsr_interrupt()
2856 return arcmsr_hbaA_handle_isr(acb); in arcmsr_interrupt()
2858 return arcmsr_hbaB_handle_isr(acb); in arcmsr_interrupt()
2860 return arcmsr_hbaC_handle_isr(acb); in arcmsr_interrupt()
2862 return arcmsr_hbaD_handle_isr(acb); in arcmsr_interrupt()
2864 return arcmsr_hbaE_handle_isr(acb); in arcmsr_interrupt()
2866 return arcmsr_hbaF_handle_isr(acb); in arcmsr_interrupt()
2872 static void arcmsr_iop_parking(struct AdapterControlBlock *acb) in arcmsr_iop_parking() argument
2874 if (acb) { in arcmsr_iop_parking()
2876 if (acb->acb_flags & ACB_F_MSG_START_BGRB) { in arcmsr_iop_parking()
2878 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_iop_parking()
2879 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_parking()
2880 arcmsr_stop_adapter_bgrb(acb); in arcmsr_iop_parking()
2881 arcmsr_flush_adapter_cache(acb); in arcmsr_iop_parking()
2882 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_parking()
2888 void arcmsr_clear_iop2drv_rqueue_buffer(struct AdapterControlBlock *acb) in arcmsr_clear_iop2drv_rqueue_buffer() argument
2892 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_clear_iop2drv_rqueue_buffer()
2894 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_clear_iop2drv_rqueue_buffer()
2895 acb->acb_flags &= ~ACB_F_IOPDATA_OVERFLOW; in arcmsr_clear_iop2drv_rqueue_buffer()
2896 acb->rqbuf_getIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2897 acb->rqbuf_putIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2898 arcmsr_iop_message_read(acb); in arcmsr_clear_iop2drv_rqueue_buffer()
2900 } else if (acb->rqbuf_getIndex != in arcmsr_clear_iop2drv_rqueue_buffer()
2901 acb->rqbuf_putIndex) { in arcmsr_clear_iop2drv_rqueue_buffer()
2902 acb->rqbuf_getIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2903 acb->rqbuf_putIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2911 static int arcmsr_iop_message_xfer(struct AdapterControlBlock *acb, in arcmsr_iop_message_xfer() argument
2951 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
2952 if (acb->rqbuf_getIndex != acb->rqbuf_putIndex) { in arcmsr_iop_message_xfer()
2953 unsigned int tail = acb->rqbuf_getIndex; in arcmsr_iop_message_xfer()
2954 unsigned int head = acb->rqbuf_putIndex; in arcmsr_iop_message_xfer()
2962 memcpy(ptmpQbuffer, acb->rqbuffer + tail, allxfer_len); in arcmsr_iop_message_xfer()
2964 memcpy(ptmpQbuffer, acb->rqbuffer + tail, cnt_to_end); in arcmsr_iop_message_xfer()
2965 memcpy(ptmpQbuffer + cnt_to_end, acb->rqbuffer, allxfer_len - cnt_to_end); in arcmsr_iop_message_xfer()
2967 acb->rqbuf_getIndex = (acb->rqbuf_getIndex + allxfer_len) % ARCMSR_MAX_QBUFFER; in arcmsr_iop_message_xfer()
2971 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_iop_message_xfer()
2973 acb->acb_flags &= ~ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop_message_xfer()
2974 prbuffer = arcmsr_get_iop_rqbuffer(acb); in arcmsr_iop_message_xfer()
2975 if (arcmsr_Read_iop_rqbuffer_data(acb, prbuffer) == 0) in arcmsr_iop_message_xfer()
2976 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop_message_xfer()
2978 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
2981 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3010 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3011 if (acb->wqbuf_putIndex != acb->wqbuf_getIndex) { in arcmsr_iop_message_xfer()
3014 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop_message_xfer()
3023 pQbuffer = &acb->wqbuffer[acb->wqbuf_putIndex]; in arcmsr_iop_message_xfer()
3024 cnt2end = ARCMSR_MAX_QBUFFER - acb->wqbuf_putIndex; in arcmsr_iop_message_xfer()
3029 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3030 pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3033 acb->wqbuf_putIndex += user_len; in arcmsr_iop_message_xfer()
3034 acb->wqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_iop_message_xfer()
3035 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_CLEARED) { in arcmsr_iop_message_xfer()
3036 acb->acb_flags &= in arcmsr_iop_message_xfer()
3038 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop_message_xfer()
3041 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3043 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3052 uint8_t *pQbuffer = acb->rqbuffer; in arcmsr_iop_message_xfer()
3054 arcmsr_clear_iop2drv_rqueue_buffer(acb); in arcmsr_iop_message_xfer()
3055 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3056 acb->acb_flags |= ACB_F_MESSAGE_RQBUFFER_CLEARED; in arcmsr_iop_message_xfer()
3057 acb->rqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3058 acb->rqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3060 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3061 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3070 uint8_t *pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3071 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3072 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_iop_message_xfer()
3074 acb->wqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3075 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3077 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3078 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3088 arcmsr_clear_iop2drv_rqueue_buffer(acb); in arcmsr_iop_message_xfer()
3089 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3090 acb->acb_flags |= ACB_F_MESSAGE_RQBUFFER_CLEARED; in arcmsr_iop_message_xfer()
3091 acb->rqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3092 acb->rqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3093 pQbuffer = acb->rqbuffer; in arcmsr_iop_message_xfer()
3095 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3096 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3097 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_iop_message_xfer()
3099 acb->wqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3100 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3101 pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3103 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3104 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3113 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3123 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3134 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3140 arcmsr_iop_parking(acb); in arcmsr_iop_message_xfer()
3144 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3150 arcmsr_flush_adapter_cache(acb); in arcmsr_iop_message_xfer()
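
The ioctl read path in arcmsr_iop_message_xfer() copies out of the circular acb->rqbuffer in at most two memcpy() calls, splitting at the wrap point and then advancing rqbuf_getIndex modulo ARCMSR_MAX_QBUFFER. A sketch of that copy, to be called with rqbuffer_lock held; allxfer_len is assumed to already be clamped to the bytes available and to the caller's buffer, and the helper name is illustrative:

static void arcmsr_copy_rqbuffer_sketch(struct AdapterControlBlock *acb,
					uint8_t *ptmpQbuffer,
					unsigned int allxfer_len)
{
	unsigned int tail = acb->rqbuf_getIndex;
	unsigned int cnt_to_end = ARCMSR_MAX_QBUFFER - tail;

	if (allxfer_len <= cnt_to_end) {
		memcpy(ptmpQbuffer, acb->rqbuffer + tail, allxfer_len);
	} else {
		memcpy(ptmpQbuffer, acb->rqbuffer + tail, cnt_to_end);
		memcpy(ptmpQbuffer + cnt_to_end, acb->rqbuffer,
		       allxfer_len - cnt_to_end);
	}
	acb->rqbuf_getIndex = (acb->rqbuf_getIndex + allxfer_len) %
			      ARCMSR_MAX_QBUFFER;
}
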
3165 static struct CommandControlBlock *arcmsr_get_freeccb(struct AdapterControlBlock *acb) in arcmsr_get_freeccb() argument
3171 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3172 head = &acb->ccb_free_list; in arcmsr_get_freeccb()
3177 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3180 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3184 static void arcmsr_handle_virtual_command(struct AdapterControlBlock *acb, in arcmsr_handle_virtual_command() argument
3224 if (arcmsr_iop_message_xfer(acb, cmd)) in arcmsr_handle_virtual_command()
3237 struct AdapterControlBlock *acb = (struct AdapterControlBlock *) host->hostdata; in arcmsr_queue_command_lck() local
3241 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) { in arcmsr_queue_command_lck()
3250 arcmsr_handle_virtual_command(acb, cmd); in arcmsr_queue_command_lck()
3253 ccb = arcmsr_get_freeccb(acb); in arcmsr_queue_command_lck()
3256 if (arcmsr_build_ccb( acb, ccb, cmd ) == FAILED) { in arcmsr_queue_command_lck()
3261 arcmsr_post_ccb(acb, ccb); in arcmsr_queue_command_lck()
3320 static bool arcmsr_hbaA_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaA_get_config() argument
3322 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_get_config()
3324 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaA_get_config()
3326 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_get_config()
3328 miscellaneous data' timeout \n", acb->host->host_no); in arcmsr_hbaA_get_config()
3331 arcmsr_get_adapter_config(acb, reg->message_rwbuffer); in arcmsr_hbaA_get_config()
3334 static bool arcmsr_hbaB_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaB_get_config() argument
3336 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_get_config()
3338 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaB_get_config()
3340 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_get_config()
3341 printk(KERN_ERR "arcmsr%d: can't set driver mode.\n", acb->host->host_no); in arcmsr_hbaB_get_config()
3345 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_get_config()
3347 miscellaneous data' timeout \n", acb->host->host_no); in arcmsr_hbaB_get_config()
3350 arcmsr_get_adapter_config(acb, reg->message_rwbuffer); in arcmsr_hbaB_get_config()
3377 static bool arcmsr_hbaD_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaD_get_config() argument
3379 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_get_config()
3381 if (readl(acb->pmuD->outbound_doorbell) & in arcmsr_hbaD_get_config()
3384 acb->pmuD->outbound_doorbell);/*clear interrupt*/ in arcmsr_hbaD_get_config()
3386 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaD_get_config()
3390 if (!arcmsr_hbaD_wait_msgint_ready(acb)) { in arcmsr_hbaD_get_config()
3392 "miscellaneous data timeout\n", acb->host->host_no); in arcmsr_hbaD_get_config()
3395 arcmsr_get_adapter_config(acb, reg->msgcode_rwbuffer); in arcmsr_hbaD_get_config()
3450 static bool arcmsr_get_firmware_spec(struct AdapterControlBlock *acb) in arcmsr_get_firmware_spec() argument
3454 switch (acb->adapter_type) { in arcmsr_get_firmware_spec()
3456 rtn = arcmsr_hbaA_get_config(acb); in arcmsr_get_firmware_spec()
3459 rtn = arcmsr_hbaB_get_config(acb); in arcmsr_get_firmware_spec()
3462 rtn = arcmsr_hbaC_get_config(acb); in arcmsr_get_firmware_spec()
3465 rtn = arcmsr_hbaD_get_config(acb); in arcmsr_get_firmware_spec()
3468 rtn = arcmsr_hbaE_get_config(acb); in arcmsr_get_firmware_spec()
3471 rtn = arcmsr_hbaF_get_config(acb); in arcmsr_get_firmware_spec()
3476 acb->maxOutstanding = acb->firm_numbers_queue - 1; in arcmsr_get_firmware_spec()
3477 if (acb->host->can_queue >= acb->firm_numbers_queue) in arcmsr_get_firmware_spec()
3478 acb->host->can_queue = acb->maxOutstanding; in arcmsr_get_firmware_spec()
3480 acb->maxOutstanding = acb->host->can_queue; in arcmsr_get_firmware_spec()
3481 acb->maxFreeCCB = acb->host->can_queue; in arcmsr_get_firmware_spec()
3482 if (acb->maxFreeCCB < ARCMSR_MAX_FREECCB_NUM) in arcmsr_get_firmware_spec()
3483 acb->maxFreeCCB += 64; in arcmsr_get_firmware_spec()
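The tail of arcmsr_get_firmware_spec() derives the queue depths from the firmware-reported queue size: maxOutstanding starts as firm_numbers_queue - 1, host->can_queue is clamped down to it when the host default exceeds the firmware limit (otherwise maxOutstanding is lowered to can_queue), and maxFreeCCB gets 64 extra entries when it falls below ARCMSR_MAX_FREECCB_NUM. A runnable sketch of just that arithmetic; the firmware value and ARCMSR_MAX_FREECCB_NUM are supplied as parameters since their real values come from the adapter and the driver headers.

#include <stdint.h>
#include <stdio.h>

struct depths { uint32_t max_outstanding, can_queue, max_free_ccb; };

/* Mirrors the clamping logic excerpted from arcmsr_get_firmware_spec(). */
static struct depths derive_depths(uint32_t firm_numbers_queue,
				   uint32_t host_can_queue,
				   uint32_t arcmsr_max_freeccb_num)
{
	struct depths d;

	d.max_outstanding = firm_numbers_queue - 1;
	if (host_can_queue >= firm_numbers_queue)
		host_can_queue = d.max_outstanding;
	else
		d.max_outstanding = host_can_queue;
	d.can_queue = host_can_queue;
	d.max_free_ccb = host_can_queue;
	if (d.max_free_ccb < arcmsr_max_freeccb_num)
		d.max_free_ccb += 64;
	return d;
}

int main(void)
{
	/* Example inputs only: firmware reports a 256-entry queue, host default 128. */
	struct depths d = derive_depths(256, 128, 1024);

	printf("maxOutstanding=%u can_queue=%u maxFreeCCB=%u\n",
	       d.max_outstanding, d.can_queue, d.max_free_ccb);
	return 0;
}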
3487 static int arcmsr_hbaA_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaA_polling_ccbdone() argument
3490 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_polling_ccbdone()
3500 outbound_intstatus = readl(&reg->outbound_intstatus) & acb->outbound_int_enable; in arcmsr_hbaA_polling_ccbdone()
3517 if (acb->cdb_phyadd_hipart) in arcmsr_hbaA_polling_ccbdone()
3518 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaA_polling_ccbdone()
3519 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaA_polling_ccbdone()
3522 if ((ccb->acb != acb) || (ccb->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaA_polling_ccbdone()
3526 , acb->host->host_no in arcmsr_hbaA_polling_ccbdone()
3537 , acb->host->host_no in arcmsr_hbaA_polling_ccbdone()
3539 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaA_polling_ccbdone()
3543 arcmsr_report_ccb_state(acb, ccb, error); in arcmsr_hbaA_polling_ccbdone()
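Each of the *_polling_ccbdone() excerpts (the type B, C, and D variants below do the same) recovers the completed CCB the same way: the 32-bit physical address taken from the completion entry is OR'd with acb->cdb_phyadd_hipart, the high 32 bits cached at init time, and the result is added to acb->vir2phy_offset to get back a driver-visible address. A standalone illustration of that reconstruction, with made-up example values since the real offsets come from the DMA mapping.

#include <stdint.h>
#include <stdio.h>

/* Rebuild the 64-bit CCB address from its 32-bit completion-queue form,
 * mirroring the ccb_cdb_phy / vir2phy_offset lines in the polling paths.
 * Returned as a plain integer here; in the driver it becomes a pointer. */
static uint64_t cdb_virt(uint32_t ccb_cdb_phy_lo, uint64_t cdb_phyadd_hipart,
			 uint64_t vir2phy_offset)
{
	uint64_t ccb_cdb_phy = ccb_cdb_phy_lo;

	if (cdb_phyadd_hipart)
		ccb_cdb_phy |= cdb_phyadd_hipart;   /* restore bits 63:32 */
	return vir2phy_offset + ccb_cdb_phy;
}

int main(void)
{
	/* Example values only: pool physically above 4 GB, virtual alias
	 * offset 0x10000 from the physical address. */
	uint64_t hipart = (uint64_t)0x1 << 32;

	printf("virt = %#llx\n",
	       (unsigned long long)cdb_virt(0x20000040, hipart, 0x10000));
	return 0;
}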
3548 static int arcmsr_hbaB_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaB_polling_ccbdone() argument
3551 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_polling_ccbdone()
3586 if (acb->cdb_phyadd_hipart) in arcmsr_hbaB_polling_ccbdone()
3587 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaB_polling_ccbdone()
3588 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaB_polling_ccbdone()
3591 if ((ccb->acb != acb) || (ccb->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaB_polling_ccbdone()
3595 ,acb->host->host_no in arcmsr_hbaB_polling_ccbdone()
3606 , acb->host->host_no in arcmsr_hbaB_polling_ccbdone()
3608 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaB_polling_ccbdone()
3612 arcmsr_report_ccb_state(acb, ccb, error); in arcmsr_hbaB_polling_ccbdone()
3617 static int arcmsr_hbaC_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaC_polling_ccbdone() argument
3620 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_hbaC_polling_ccbdone()
3647 if (acb->cdb_phyadd_hipart) in arcmsr_hbaC_polling_ccbdone()
3648 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaC_polling_ccbdone()
3649 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaC_polling_ccbdone()
3653 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaC_polling_ccbdone()
3657 , acb->host->host_no in arcmsr_hbaC_polling_ccbdone()
3668 , acb->host->host_no in arcmsr_hbaC_polling_ccbdone()
3670 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaC_polling_ccbdone()
3674 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaC_polling_ccbdone()
3679 static int arcmsr_hbaD_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaD_polling_ccbdone() argument
3688 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_hbaD_polling_ccbdone()
3693 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3697 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3716 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3719 if (acb->cdb_phyadd_hipart) in arcmsr_hbaD_polling_ccbdone()
3720 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaD_polling_ccbdone()
3721 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + in arcmsr_hbaD_polling_ccbdone()
3726 if ((pCCB->acb != acb) || in arcmsr_hbaD_polling_ccbdone()
3732 , acb->host->host_no in arcmsr_hbaD_polling_ccbdone()
3743 , acb->host->host_no in arcmsr_hbaD_polling_ccbdone()
3745 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaD_polling_ccbdone()
3750 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaD_polling_ccbdone()
3755 static int arcmsr_hbaE_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaE_polling_ccbdone() argument
3764 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_hbaE_polling_ccbdone()
3769 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3770 doneq_index = acb->doneq_index; in arcmsr_hbaE_polling_ccbdone()
3773 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3786 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaE_polling_ccbdone()
3788 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaE_polling_ccbdone()
3790 acb->doneq_index = doneq_index; in arcmsr_hbaE_polling_ccbdone()
3791 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3792 pCCB = acb->pccb_pool[cmdSMID]; in arcmsr_hbaE_polling_ccbdone()
3795 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaE_polling_ccbdone()
3800 , acb->host->host_no in arcmsr_hbaE_polling_ccbdone()
3811 , acb->host->host_no in arcmsr_hbaE_polling_ccbdone()
3813 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaE_polling_ccbdone()
3816 error = (acb->pCompletionQ[doneq_index].cmdFlag & in arcmsr_hbaE_polling_ccbdone()
3818 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaE_polling_ccbdone()
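The type E path is different: completions arrive in a host-memory completion queue, so the excerpt advances acb->doneq_index under doneq_lock, wraps it when it reaches acb->completionQ_entry (the wrap target itself is elided from these matched lines), and uses the cmdSMID stored in the entry to index acb->pccb_pool directly. A small runnable model of that ring index plus SMID lookup; the queue depth and SMID values are arbitrary.

#include <stdint.h>
#include <stdio.h>

#define COMPLETION_Q_ENTRY 4   /* illustrative depth; the real one is firmware-sized */

struct completion_entry { uint16_t cmdSMID; };

/* Consume one completion: read the SMID, bump the index, wrap at the
 * queue size. The SMID is what indexes acb->pccb_pool[] in the driver. */
static uint16_t pop_completion(const struct completion_entry *q,
			       uint16_t *doneq_index)
{
	uint16_t smid = q[*doneq_index].cmdSMID;

	(*doneq_index)++;
	if (*doneq_index >= COMPLETION_Q_ENTRY)
		*doneq_index = 0;
	return smid;
}

int main(void)
{
	struct completion_entry q[COMPLETION_Q_ENTRY] = {
		{ .cmdSMID = 7 }, { .cmdSMID = 3 }, { .cmdSMID = 12 }, { .cmdSMID = 0 },
	};
	uint16_t doneq_index = 0;

	for (int i = 0; i < 6; i++) {	/* walk past the end to show the wrap */
		uint16_t smid = pop_completion(q, &doneq_index);

		printf("entry %d: SMID %u, doneq_index now %u\n", i, smid, doneq_index);
	}
	return 0;
}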
3824 static int arcmsr_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_polling_ccbdone() argument
3828 switch (acb->adapter_type) { in arcmsr_polling_ccbdone()
3831 rtn = arcmsr_hbaA_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3834 rtn = arcmsr_hbaB_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3837 rtn = arcmsr_hbaC_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3840 rtn = arcmsr_hbaD_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3844 rtn = arcmsr_hbaE_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3942 static int arcmsr_iop_confirm(struct AdapterControlBlock *acb) in arcmsr_iop_confirm() argument
3953 switch (acb->adapter_type) { in arcmsr_iop_confirm()
3956 dma_coherent_handle = acb->dma_coherent_handle2; in arcmsr_iop_confirm()
3960 dma_coherent_handle = acb->dma_coherent_handle + in arcmsr_iop_confirm()
3964 dma_coherent_handle = acb->dma_coherent_handle; in arcmsr_iop_confirm()
3969 acb->cdb_phyaddr_hi32 = cdb_phyaddr_hi32; in arcmsr_iop_confirm()
3970 acb->cdb_phyadd_hipart = ((uint64_t)cdb_phyaddr_hi32) << 32; in arcmsr_iop_confirm()
3976 switch (acb->adapter_type) { in arcmsr_iop_confirm()
3980 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_confirm()
3986 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
3989 acb->host->host_no); in arcmsr_iop_confirm()
3999 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_confirm()
4003 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4005 acb->host->host_no); in arcmsr_iop_confirm()
4021 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4023 timeout \n",acb->host->host_no); in arcmsr_iop_confirm()
4027 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4029 acb->host->host_no); in arcmsr_iop_confirm()
4035 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_confirm()
4038 acb->adapter_index, cdb_phyaddr_hi32); in arcmsr_iop_confirm()
4043 if (!arcmsr_hbaC_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4045 timeout \n", acb->host->host_no); in arcmsr_iop_confirm()
4052 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_confirm()
4063 if (!arcmsr_hbaD_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4065 acb->host->host_no); in arcmsr_iop_confirm()
4071 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_confirm()
4076 writel(acb->ccbsize, &reg->msgcode_rwbuffer[4]); in arcmsr_iop_confirm()
4077 writel(lower_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[5]); in arcmsr_iop_confirm()
4078 writel(upper_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[6]); in arcmsr_iop_confirm()
4079 writel(acb->ioqueue_size, &reg->msgcode_rwbuffer[7]); in arcmsr_iop_confirm()
4081 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_iop_confirm()
4082 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_confirm()
4083 if (!arcmsr_hbaE_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4085 acb->host->host_no); in arcmsr_iop_confirm()
4091 struct MessageUnit_F __iomem *reg = acb->pmuF; in arcmsr_iop_confirm()
4093 acb->msgcode_rwbuffer[0] = ARCMSR_SIGNATURE_SET_CONFIG; in arcmsr_iop_confirm()
4094 acb->msgcode_rwbuffer[1] = ARCMSR_SIGNATURE_1886; in arcmsr_iop_confirm()
4095 acb->msgcode_rwbuffer[2] = cdb_phyaddr; in arcmsr_iop_confirm()
4096 acb->msgcode_rwbuffer[3] = cdb_phyaddr_hi32; in arcmsr_iop_confirm()
4097 acb->msgcode_rwbuffer[4] = acb->ccbsize; in arcmsr_iop_confirm()
4098 acb->msgcode_rwbuffer[5] = lower_32_bits(acb->dma_coherent_handle2); in arcmsr_iop_confirm()
4099 acb->msgcode_rwbuffer[6] = upper_32_bits(acb->dma_coherent_handle2); in arcmsr_iop_confirm()
4100 acb->msgcode_rwbuffer[7] = acb->completeQ_size; in arcmsr_iop_confirm()
4102 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_iop_confirm()
4103 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_confirm()
4104 if (!arcmsr_hbaE_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4106 acb->host->host_no); in arcmsr_iop_confirm()
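arcmsr_iop_confirm() is where the driver tells the IOP where its structures live: the chosen DMA handle is split into a low 32-bit cdb_phyaddr and a high half (cdb_phyaddr_hi32, also cached shifted up as cdb_phyadd_hipart for the polling paths), and those words, the CCB size, and the queue addresses are written into the per-chip message buffer before the doorbell is rung. A sketch of the address split alone, using plain shifts and masks in place of the kernel's lower_32_bits()/upper_32_bits().

#include <stdint.h>
#include <stdio.h>

struct cdb_addr {
	uint32_t cdb_phyaddr;        /* low 32 bits handed to the IOP   */
	uint32_t cdb_phyaddr_hi32;   /* high 32 bits handed to the IOP  */
	uint64_t cdb_phyadd_hipart;  /* hi32 << 32, cached for polling  */
};

static struct cdb_addr split_dma_handle(uint64_t dma_coherent_handle)
{
	struct cdb_addr a;

	a.cdb_phyaddr       = (uint32_t)(dma_coherent_handle & 0xffffffffULL);
	a.cdb_phyaddr_hi32  = (uint32_t)(dma_coherent_handle >> 32);
	a.cdb_phyadd_hipart = (uint64_t)a.cdb_phyaddr_hi32 << 32;
	return a;
}

int main(void)
{
	/* Example handle only; the real one comes from dma_alloc_coherent(). */
	struct cdb_addr a = split_dma_handle(0x000000012345f000ULL);

	printf("lo=%#x hi=%#x hipart=%#llx\n",
	       a.cdb_phyaddr, a.cdb_phyaddr_hi32,
	       (unsigned long long)a.cdb_phyadd_hipart);
	return 0;
}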
4115 static void arcmsr_wait_firmware_ready(struct AdapterControlBlock *acb) in arcmsr_wait_firmware_ready() argument
4118 switch (acb->adapter_type) { in arcmsr_wait_firmware_ready()
4121 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_wait_firmware_ready()
4123 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4131 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_wait_firmware_ready()
4133 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4141 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_wait_firmware_ready()
4143 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4150 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_wait_firmware_ready()
4152 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4161 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_wait_firmware_ready()
4163 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4174 struct AdapterControlBlock *acb = from_timer(acb, t, eternal_timer); in arcmsr_request_device_map() local
4175 if (acb->acb_flags & (ACB_F_MSG_GET_CONFIG | ACB_F_BUS_RESET | ACB_F_ABORT)) { in arcmsr_request_device_map()
4176 mod_timer(&acb->eternal_timer, jiffies + msecs_to_jiffies(6 * HZ)); in arcmsr_request_device_map()
4178 acb->fw_flag = FW_NORMAL; in arcmsr_request_device_map()
4179 switch (acb->adapter_type) { in arcmsr_request_device_map()
4181 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_request_device_map()
4186 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_request_device_map()
4191 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_request_device_map()
4197 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_request_device_map()
4202 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_request_device_map()
4204 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_request_device_map()
4205 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_request_device_map()
4209 struct MessageUnit_F __iomem *reg = acb->pmuF; in arcmsr_request_device_map()
4216 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_request_device_map()
4217 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_request_device_map()
4223 acb->acb_flags |= ACB_F_MSG_GET_CONFIG; in arcmsr_request_device_map()
4225 mod_timer(&acb->eternal_timer, jiffies + msecs_to_jiffies(6 * HZ)); in arcmsr_request_device_map()
4229 static void arcmsr_hbaA_start_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaA_start_bgrb() argument
4231 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_start_bgrb()
4232 acb->acb_flags |= ACB_F_MSG_START_BGRB; in arcmsr_hbaA_start_bgrb()
4234 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_start_bgrb()
4236 rebuild' timeout \n", acb->host->host_no); in arcmsr_hbaA_start_bgrb()
4240 static void arcmsr_hbaB_start_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaB_start_bgrb() argument
4242 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_start_bgrb()
4243 acb->acb_flags |= ACB_F_MSG_START_BGRB; in arcmsr_hbaB_start_bgrb()
4245 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_start_bgrb()
4247 rebuild' timeout \n",acb->host->host_no); in arcmsr_hbaB_start_bgrb()
4290 static void arcmsr_start_adapter_bgrb(struct AdapterControlBlock *acb) in arcmsr_start_adapter_bgrb() argument
4292 switch (acb->adapter_type) { in arcmsr_start_adapter_bgrb()
4294 arcmsr_hbaA_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4297 arcmsr_hbaB_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4300 arcmsr_hbaC_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4303 arcmsr_hbaD_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4307 arcmsr_hbaE_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4312 static void arcmsr_clear_doorbell_queue_buffer(struct AdapterControlBlock *acb) in arcmsr_clear_doorbell_queue_buffer() argument
4314 switch (acb->adapter_type) { in arcmsr_clear_doorbell_queue_buffer()
4316 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_clear_doorbell_queue_buffer()
4327 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_clear_doorbell_queue_buffer()
4344 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_clear_doorbell_queue_buffer()
4365 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_clear_doorbell_queue_buffer()
4388 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_clear_doorbell_queue_buffer()
4391 acb->in_doorbell = readl(&reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4393 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_clear_doorbell_queue_buffer()
4394 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4397 tmp = acb->in_doorbell; in arcmsr_clear_doorbell_queue_buffer()
4398 acb->in_doorbell = readl(&reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4399 if((tmp ^ acb->in_doorbell) & ARCMSR_HBEMU_IOP2DRV_DATA_WRITE_OK) { in arcmsr_clear_doorbell_queue_buffer()
4401 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_clear_doorbell_queue_buffer()
4402 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
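For the type E/F units the doorbell is a toggle register rather than a FIFO: the excerpt snapshots in_doorbell, flips ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK in out_doorbell to acknowledge, and detects a fresh IOP write by XOR-ing the old and new in_doorbell values and testing ARCMSR_HBEMU_IOP2DRV_DATA_WRITE_OK. A runnable demonstration of that XOR edge detection; the bit values below are made up and only stand in for the ARCMSR_HBEMU_* masks defined in the driver headers.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative masks only; the real ARCMSR_HBEMU_* values live in arcmsr.h. */
#define IOP2DRV_DATA_WRITE_OK  0x00000002u
#define DRV2IOP_DATA_READ_OK   0x00000004u

/* True when the IOP toggled its DATA_WRITE_OK bit since the last snapshot,
 * i.e. new data was posted; mirrors the (tmp ^ acb->in_doorbell) test. */
static bool iop_wrote_data(uint32_t prev_in_doorbell, uint32_t in_doorbell)
{
	return (prev_in_doorbell ^ in_doorbell) & IOP2DRV_DATA_WRITE_OK;
}

int main(void)
{
	uint32_t in_doorbell = 0x0, out_doorbell = 0x0;
	uint32_t tmp = in_doorbell;             /* snapshot before re-reading   */

	in_doorbell ^= IOP2DRV_DATA_WRITE_OK;   /* the IOP flips its bit        */
	if (iop_wrote_data(tmp, in_doorbell)) {
		out_doorbell ^= DRV2IOP_DATA_READ_OK;   /* driver acks by toggling */
		printf("ack written: out_doorbell=%#x\n", out_doorbell);
	}
	return 0;
}

The same toggle-and-write pattern on out_doorbell shows up in arcmsr_iop_confirm() and arcmsr_request_device_map() above whenever the driver posts a message to a type E/F unit.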
4411 static void arcmsr_enable_eoi_mode(struct AdapterControlBlock *acb) in arcmsr_enable_eoi_mode() argument
4413 switch (acb->adapter_type) { in arcmsr_enable_eoi_mode()
4418 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_enable_eoi_mode()
4420 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_enable_eoi_mode()
4432 static void arcmsr_hardware_reset(struct AdapterControlBlock *acb) in arcmsr_hardware_reset() argument
4436 struct MessageUnit_A __iomem *pmuA = acb->pmuA; in arcmsr_hardware_reset()
4437 struct MessageUnit_C __iomem *pmuC = acb->pmuC; in arcmsr_hardware_reset()
4438 struct MessageUnit_D *pmuD = acb->pmuD; in arcmsr_hardware_reset()
4441 printk(KERN_NOTICE "arcmsr%d: executing hw bus reset .....\n", acb->host->host_no); in arcmsr_hardware_reset()
4443 pci_read_config_byte(acb->pdev, i, &value[i]); in arcmsr_hardware_reset()
4446 if (acb->dev_id == 0x1680) { in arcmsr_hardware_reset()
4448 } else if (acb->dev_id == 0x1880) { in arcmsr_hardware_reset()
4459 } else if (acb->dev_id == 0x1884) { in arcmsr_hardware_reset()
4460 struct MessageUnit_E __iomem *pmuE = acb->pmuE; in arcmsr_hardware_reset()
4472 } else if (acb->dev_id == 0x1214) { in arcmsr_hardware_reset()
4475 pci_write_config_byte(acb->pdev, 0x84, 0x20); in arcmsr_hardware_reset()
4480 pci_write_config_byte(acb->pdev, i, value[i]); in arcmsr_hardware_reset()
4486 static bool arcmsr_reset_in_progress(struct AdapterControlBlock *acb) in arcmsr_reset_in_progress() argument
4490 switch(acb->adapter_type) { in arcmsr_reset_in_progress()
4492 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_reset_in_progress()
4498 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_reset_in_progress()
4504 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_reset_in_progress()
4509 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_reset_in_progress()
4516 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_reset_in_progress()
4525 static void arcmsr_iop_init(struct AdapterControlBlock *acb) in arcmsr_iop_init() argument
4529 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_init()
4530 arcmsr_wait_firmware_ready(acb); in arcmsr_iop_init()
4531 arcmsr_iop_confirm(acb); in arcmsr_iop_init()
4533 arcmsr_start_adapter_bgrb(acb); in arcmsr_iop_init()
4535 arcmsr_clear_doorbell_queue_buffer(acb); in arcmsr_iop_init()
4536 arcmsr_enable_eoi_mode(acb); in arcmsr_iop_init()
4538 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_init()
4539 acb->acb_flags |= ACB_F_IOP_INITED; in arcmsr_iop_init()
4542 static uint8_t arcmsr_iop_reset(struct AdapterControlBlock *acb) in arcmsr_iop_reset() argument
4550 if (atomic_read(&acb->ccboutstandingcount) != 0) { in arcmsr_iop_reset()
4552 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_reset()
4554 rtnval = arcmsr_abort_allcmd(acb); in arcmsr_iop_reset()
4556 arcmsr_done4abort_postqueue(acb); in arcmsr_iop_reset()
4557 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_iop_reset()
4558 ccb = acb->pccb_pool[i]; in arcmsr_iop_reset()
4563 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_iop_reset()
4564 list_add_tail(&ccb->list, &acb->ccb_free_list); in arcmsr_iop_reset()
4565 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_iop_reset()
4568 atomic_set(&acb->ccboutstandingcount, 0); in arcmsr_iop_reset()
4570 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_reset()
4578 struct AdapterControlBlock *acb; in arcmsr_bus_reset() local
4581 acb = (struct AdapterControlBlock *) cmd->device->host->hostdata; in arcmsr_bus_reset()
4582 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_bus_reset()
4585 " num_aborts = %d \n", acb->num_resets, acb->num_aborts); in arcmsr_bus_reset()
4586 acb->num_resets++; in arcmsr_bus_reset()
4588 if (acb->acb_flags & ACB_F_BUS_RESET) { in arcmsr_bus_reset()
4591 timeout = wait_event_timeout(wait_q, (acb->acb_flags in arcmsr_bus_reset()
4596 acb->acb_flags |= ACB_F_BUS_RESET; in arcmsr_bus_reset()
4597 if (!arcmsr_iop_reset(acb)) { in arcmsr_bus_reset()
4598 arcmsr_hardware_reset(acb); in arcmsr_bus_reset()
4599 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_bus_reset()
4602 if (arcmsr_reset_in_progress(acb)) { in arcmsr_bus_reset()
4604 acb->fw_flag = FW_DEADLOCK; in arcmsr_bus_reset()
4607 acb->host->host_no); in arcmsr_bus_reset()
4613 arcmsr_iop_init(acb); in arcmsr_bus_reset()
4614 acb->fw_flag = FW_NORMAL; in arcmsr_bus_reset()
4615 mod_timer(&acb->eternal_timer, jiffies + in arcmsr_bus_reset()
4617 acb->acb_flags &= ~ACB_F_BUS_RESET; in arcmsr_bus_reset()
4621 acb->acb_flags &= ~ACB_F_BUS_RESET; in arcmsr_bus_reset()
4622 acb->fw_flag = FW_NORMAL; in arcmsr_bus_reset()
4623 mod_timer(&acb->eternal_timer, jiffies + in arcmsr_bus_reset()
4630 static int arcmsr_abort_one_cmd(struct AdapterControlBlock *acb, in arcmsr_abort_one_cmd() argument
4634 rtn = arcmsr_polling_ccbdone(acb, ccb); in arcmsr_abort_one_cmd()
4640 struct AdapterControlBlock *acb = in arcmsr_abort() local
4646 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_abort()
4650 acb->host->host_no, cmd->device->id, (u32)cmd->device->lun); in arcmsr_abort()
4651 acb->acb_flags |= ACB_F_ABORT; in arcmsr_abort()
4652 acb->num_aborts++; in arcmsr_abort()
4659 if (!atomic_read(&acb->ccboutstandingcount)) { in arcmsr_abort()
4660 acb->acb_flags &= ~ACB_F_ABORT; in arcmsr_abort()
4664 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_abort()
4665 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_abort()
4666 struct CommandControlBlock *ccb = acb->pccb_pool[i]; in arcmsr_abort()
4669 rtn = arcmsr_abort_one_cmd(acb, ccb); in arcmsr_abort()
4673 acb->acb_flags &= ~ACB_F_ABORT; in arcmsr_abort()
4674 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_abort()
4680 struct AdapterControlBlock *acb = in arcmsr_info() local
4685 switch (acb->pdev->device) { in arcmsr_info()