Lines Matching refs:acb
106 static int arcmsr_iop_message_xfer(struct AdapterControlBlock *acb,
108 static int arcmsr_iop_confirm(struct AdapterControlBlock *acb);
120 static void arcmsr_iop_init(struct AdapterControlBlock *acb);
121 static void arcmsr_free_ccb_pool(struct AdapterControlBlock *acb);
122 static u32 arcmsr_disable_outbound_ints(struct AdapterControlBlock *acb);
123 static void arcmsr_enable_outbound_ints(struct AdapterControlBlock *acb,
125 static void arcmsr_stop_adapter_bgrb(struct AdapterControlBlock *acb);
126 static void arcmsr_hbaA_flush_cache(struct AdapterControlBlock *acb);
127 static void arcmsr_hbaB_flush_cache(struct AdapterControlBlock *acb);
130 static bool arcmsr_get_firmware_spec(struct AdapterControlBlock *acb);
131 static void arcmsr_start_adapter_bgrb(struct AdapterControlBlock *acb);
133 static void arcmsr_hbaD_message_isr(struct AdapterControlBlock *acb);
134 static void arcmsr_hbaE_message_isr(struct AdapterControlBlock *acb);
135 static void arcmsr_hbaE_postqueue_isr(struct AdapterControlBlock *acb);
136 static void arcmsr_hbaF_postqueue_isr(struct AdapterControlBlock *acb);
137 static void arcmsr_hardware_reset(struct AdapterControlBlock *acb);
139 static irqreturn_t arcmsr_interrupt(struct AdapterControlBlock *acb);
141 static void arcmsr_wait_firmware_ready(struct AdapterControlBlock *acb);
233 static void arcmsr_free_io_queue(struct AdapterControlBlock *acb) in arcmsr_free_io_queue() argument
235 switch (acb->adapter_type) { in arcmsr_free_io_queue()
240 dma_free_coherent(&acb->pdev->dev, acb->ioqueue_size, in arcmsr_free_io_queue()
241 acb->dma_coherent2, acb->dma_coherent_handle2); in arcmsr_free_io_queue()
246 static bool arcmsr_remap_pciregion(struct AdapterControlBlock *acb) in arcmsr_remap_pciregion() argument
248 struct pci_dev *pdev = acb->pdev; in arcmsr_remap_pciregion()
249 switch (acb->adapter_type){ in arcmsr_remap_pciregion()
251 acb->pmuA = ioremap(pci_resource_start(pdev,0), pci_resource_len(pdev,0)); in arcmsr_remap_pciregion()
252 if (!acb->pmuA) { in arcmsr_remap_pciregion()
253 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
262 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
268 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
271 acb->mem_base0 = mem_base0; in arcmsr_remap_pciregion()
272 acb->mem_base1 = mem_base1; in arcmsr_remap_pciregion()
276 acb->pmuC = ioremap(pci_resource_start(pdev, 1), pci_resource_len(pdev, 1)); in arcmsr_remap_pciregion()
277 if (!acb->pmuC) { in arcmsr_remap_pciregion()
278 printk(KERN_NOTICE "arcmsr%d: memory mapping region fail \n", acb->host->host_no); in arcmsr_remap_pciregion()
281 if (readl(&acb->pmuC->outbound_doorbell) & ARCMSR_HBCMU_IOP2DRV_MESSAGE_CMD_DONE) { in arcmsr_remap_pciregion()
282 …writel(ARCMSR_HBCMU_IOP2DRV_MESSAGE_CMD_DONE_DOORBELL_CLEAR, &acb->pmuC->outbound_doorbell_clear);… in arcmsr_remap_pciregion()
296 acb->host->host_no); in arcmsr_remap_pciregion()
299 acb->mem_base0 = mem_base0; in arcmsr_remap_pciregion()
303 acb->pmuE = ioremap(pci_resource_start(pdev, 1), in arcmsr_remap_pciregion()
305 if (!acb->pmuE) { in arcmsr_remap_pciregion()
307 acb->host->host_no); in arcmsr_remap_pciregion()
310 writel(0, &acb->pmuE->host_int_status); /*clear interrupt*/ in arcmsr_remap_pciregion()
311 writel(ARCMSR_HBEMU_DOORBELL_SYNC, &acb->pmuE->iobound_doorbell); /* synchronize doorbell to 0 */ in arcmsr_remap_pciregion()
312 acb->in_doorbell = 0; in arcmsr_remap_pciregion()
313 acb->out_doorbell = 0; in arcmsr_remap_pciregion()
317 acb->pmuF = ioremap(pci_resource_start(pdev, 0), pci_resource_len(pdev, 0)); in arcmsr_remap_pciregion()
318 if (!acb->pmuF) { in arcmsr_remap_pciregion()
320 acb->host->host_no); in arcmsr_remap_pciregion()
323 writel(0, &acb->pmuF->host_int_status); /* clear interrupt */ in arcmsr_remap_pciregion()
324 writel(ARCMSR_HBFMU_DOORBELL_SYNC, &acb->pmuF->iobound_doorbell); in arcmsr_remap_pciregion()
325 acb->in_doorbell = 0; in arcmsr_remap_pciregion()
326 acb->out_doorbell = 0; in arcmsr_remap_pciregion()
333 static void arcmsr_unmap_pciregion(struct AdapterControlBlock *acb) in arcmsr_unmap_pciregion() argument
335 switch (acb->adapter_type) { in arcmsr_unmap_pciregion()
337 iounmap(acb->pmuA); in arcmsr_unmap_pciregion()
340 iounmap(acb->mem_base0); in arcmsr_unmap_pciregion()
341 iounmap(acb->mem_base1); in arcmsr_unmap_pciregion()
344 iounmap(acb->pmuC); in arcmsr_unmap_pciregion()
347 iounmap(acb->mem_base0); in arcmsr_unmap_pciregion()
350 iounmap(acb->pmuE); in arcmsr_unmap_pciregion()
353 iounmap(acb->pmuF); in arcmsr_unmap_pciregion()
361 struct AdapterControlBlock *acb = dev_id; in arcmsr_do_interrupt() local
363 handle_state = arcmsr_interrupt(acb); in arcmsr_do_interrupt()
390 static uint8_t arcmsr_hbaA_wait_msgint_ready(struct AdapterControlBlock *acb) in arcmsr_hbaA_wait_msgint_ready() argument
392 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_wait_msgint_ready()
408 static uint8_t arcmsr_hbaB_wait_msgint_ready(struct AdapterControlBlock *acb) in arcmsr_hbaB_wait_msgint_ready() argument
410 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_wait_msgint_ready()
481 static void arcmsr_hbaA_flush_cache(struct AdapterControlBlock *acb) in arcmsr_hbaA_flush_cache() argument
483 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_flush_cache()
487 if (arcmsr_hbaA_wait_msgint_ready(acb)) in arcmsr_hbaA_flush_cache()
492 timeout, retry count down = %d \n", acb->host->host_no, retry_count); in arcmsr_hbaA_flush_cache()
497 static void arcmsr_hbaB_flush_cache(struct AdapterControlBlock *acb) in arcmsr_hbaB_flush_cache() argument
499 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_flush_cache()
503 if (arcmsr_hbaB_wait_msgint_ready(acb)) in arcmsr_hbaB_flush_cache()
508 timeout,retry count down = %d \n", acb->host->host_no, retry_count); in arcmsr_hbaB_flush_cache()
566 static void arcmsr_flush_adapter_cache(struct AdapterControlBlock *acb) in arcmsr_flush_adapter_cache() argument
568 switch (acb->adapter_type) { in arcmsr_flush_adapter_cache()
571 arcmsr_hbaA_flush_cache(acb); in arcmsr_flush_adapter_cache()
574 arcmsr_hbaB_flush_cache(acb); in arcmsr_flush_adapter_cache()
577 arcmsr_hbaC_flush_cache(acb); in arcmsr_flush_adapter_cache()
580 arcmsr_hbaD_flush_cache(acb); in arcmsr_flush_adapter_cache()
584 arcmsr_hbaE_flush_cache(acb); in arcmsr_flush_adapter_cache()
589 static void arcmsr_hbaB_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaB_assign_regAddr() argument
591 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_assign_regAddr()
593 if (acb->pdev->device == PCI_DEVICE_ID_ARECA_1203) { in arcmsr_hbaB_assign_regAddr()
609 static void arcmsr_hbaD_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaD_assign_regAddr() argument
611 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_assign_regAddr()
641 static void arcmsr_hbaF_assign_regAddr(struct AdapterControlBlock *acb) in arcmsr_hbaF_assign_regAddr() argument
646 memset(acb->dma_coherent2, 0xff, acb->completeQ_size); in arcmsr_hbaF_assign_regAddr()
647 acb->message_wbuffer = (uint32_t *)round_up((unsigned long)acb->dma_coherent2 + in arcmsr_hbaF_assign_regAddr()
648 acb->completeQ_size, 4); in arcmsr_hbaF_assign_regAddr()
649 acb->message_rbuffer = ((void *)acb->message_wbuffer) + 0x100; in arcmsr_hbaF_assign_regAddr()
650 acb->msgcode_rwbuffer = ((void *)acb->message_wbuffer) + 0x200; in arcmsr_hbaF_assign_regAddr()
651 memset((void *)acb->message_wbuffer, 0, MESG_RW_BUFFER_SIZE); in arcmsr_hbaF_assign_regAddr()
652 host_buffer_dma = round_up(acb->dma_coherent_handle2 + acb->completeQ_size, 4); in arcmsr_hbaF_assign_regAddr()
653 pmuF = acb->pmuF; in arcmsr_hbaF_assign_regAddr()
662 static bool arcmsr_alloc_io_queue(struct AdapterControlBlock *acb) in arcmsr_alloc_io_queue() argument
667 struct pci_dev *pdev = acb->pdev; in arcmsr_alloc_io_queue()
669 switch (acb->adapter_type) { in arcmsr_alloc_io_queue()
671 acb->ioqueue_size = roundup(sizeof(struct MessageUnit_B), 32); in arcmsr_alloc_io_queue()
672 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
675 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
678 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
679 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
680 acb->pmuB = (struct MessageUnit_B *)dma_coherent; in arcmsr_alloc_io_queue()
681 arcmsr_hbaB_assign_regAddr(acb); in arcmsr_alloc_io_queue()
685 acb->ioqueue_size = roundup(sizeof(struct MessageUnit_D), 32); in arcmsr_alloc_io_queue()
686 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
689 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
692 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
693 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
694 acb->pmuD = (struct MessageUnit_D *)dma_coherent; in arcmsr_alloc_io_queue()
695 arcmsr_hbaD_assign_regAddr(acb); in arcmsr_alloc_io_queue()
701 acb->ioqueue_size = roundup(completeQ_size, 32); in arcmsr_alloc_io_queue()
702 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
705 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
708 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
709 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
710 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
711 acb->completionQ_entry = acb->ioqueue_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_io_queue()
712 acb->doneq_index = 0; in arcmsr_alloc_io_queue()
719 arcmsr_wait_firmware_ready(acb); in arcmsr_alloc_io_queue()
720 QueueDepth = depthTbl[readl(&acb->pmuF->outbound_msgaddr1) & 7]; in arcmsr_alloc_io_queue()
721 acb->completeQ_size = sizeof(struct deliver_completeQ) * QueueDepth + 128; in arcmsr_alloc_io_queue()
722 acb->ioqueue_size = roundup(acb->completeQ_size + MESG_RW_BUFFER_SIZE, 32); in arcmsr_alloc_io_queue()
723 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
726 pr_notice("arcmsr%d: DMA allocation failed\n", acb->host->host_no); in arcmsr_alloc_io_queue()
729 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_io_queue()
730 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
731 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
732 acb->completionQ_entry = acb->completeQ_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_io_queue()
733 acb->doneq_index = 0; in arcmsr_alloc_io_queue()
734 arcmsr_hbaF_assign_regAddr(acb); in arcmsr_alloc_io_queue()
743 static int arcmsr_alloc_ccb_pool(struct AdapterControlBlock *acb) in arcmsr_alloc_ccb_pool() argument
745 struct pci_dev *pdev = acb->pdev; in arcmsr_alloc_ccb_pool()
758 acb->devstate[i][j] = ARECA_RAID_GONE; in arcmsr_alloc_ccb_pool()
762 firm_config_version = acb->firm_cfg_version; in arcmsr_alloc_ccb_pool()
767 acb->host->max_sectors = max_xfer_len/512; in arcmsr_alloc_ccb_pool()
768 acb->host->sg_tablesize = max_sg_entrys; in arcmsr_alloc_ccb_pool()
770 acb->uncache_size = roundup_ccbsize * acb->maxFreeCCB; in arcmsr_alloc_ccb_pool()
771 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) in arcmsr_alloc_ccb_pool()
772 acb->uncache_size += acb->ioqueue_size; in arcmsr_alloc_ccb_pool()
773 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->uncache_size, &dma_coherent_handle, GFP_KERNEL); in arcmsr_alloc_ccb_pool()
775 printk(KERN_NOTICE "arcmsr%d: dma_alloc_coherent got error\n", acb->host->host_no); in arcmsr_alloc_ccb_pool()
778 acb->dma_coherent = dma_coherent; in arcmsr_alloc_ccb_pool()
779 acb->dma_coherent_handle = dma_coherent_handle; in arcmsr_alloc_ccb_pool()
780 memset(dma_coherent, 0, acb->uncache_size); in arcmsr_alloc_ccb_pool()
781 acb->ccbsize = roundup_ccbsize; in arcmsr_alloc_ccb_pool()
784 acb->vir2phy_offset = (unsigned long)dma_coherent - (unsigned long)dma_coherent_handle; in arcmsr_alloc_ccb_pool()
785 for(i = 0; i < acb->maxFreeCCB; i++){ in arcmsr_alloc_ccb_pool()
787 switch (acb->adapter_type) { in arcmsr_alloc_ccb_pool()
799 acb->pccb_pool[i] = ccb_tmp; in arcmsr_alloc_ccb_pool()
800 ccb_tmp->acb = acb; in arcmsr_alloc_ccb_pool()
805 acb->maxFreeCCB = i; in arcmsr_alloc_ccb_pool()
806 acb->host->can_queue = i; in arcmsr_alloc_ccb_pool()
810 list_add_tail(&ccb_tmp->list, &acb->ccb_free_list); in arcmsr_alloc_ccb_pool()
814 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) { in arcmsr_alloc_ccb_pool()
815 acb->dma_coherent_handle2 = dma_coherent_handle; in arcmsr_alloc_ccb_pool()
816 acb->dma_coherent2 = ccb_tmp; in arcmsr_alloc_ccb_pool()
818 switch (acb->adapter_type) { in arcmsr_alloc_ccb_pool()
820 acb->pmuB = (struct MessageUnit_B *)acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
821 arcmsr_hbaB_assign_regAddr(acb); in arcmsr_alloc_ccb_pool()
824 acb->pmuD = (struct MessageUnit_D *)acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
825 arcmsr_hbaD_assign_regAddr(acb); in arcmsr_alloc_ccb_pool()
828 acb->pCompletionQ = acb->dma_coherent2; in arcmsr_alloc_ccb_pool()
829 acb->completionQ_entry = acb->ioqueue_size / sizeof(struct deliver_completeQ); in arcmsr_alloc_ccb_pool()
830 acb->doneq_index = 0; in arcmsr_alloc_ccb_pool()
838 struct AdapterControlBlock *acb = container_of(work, in arcmsr_message_isr_bh_fn() local
840 char *acb_dev_map = (char *)acb->device_map; in arcmsr_message_isr_bh_fn()
847 switch (acb->adapter_type) { in arcmsr_message_isr_bh_fn()
849 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_message_isr_bh_fn()
856 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_message_isr_bh_fn()
863 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_message_isr_bh_fn()
870 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_message_isr_bh_fn()
877 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_message_isr_bh_fn()
884 signature = (uint32_t __iomem *)(&acb->msgcode_rwbuffer[0]); in arcmsr_message_isr_bh_fn()
885 devicemap = (char __iomem *)(&acb->msgcode_rwbuffer[21]); in arcmsr_message_isr_bh_fn()
901 scsi_add_device(acb->host, in arcmsr_message_isr_bh_fn()
905 psdev = scsi_device_lookup(acb->host, in arcmsr_message_isr_bh_fn()
919 acb->acb_flags &= ~ACB_F_MSG_GET_CONFIG; in arcmsr_message_isr_bh_fn()
923 arcmsr_request_irq(struct pci_dev *pdev, struct AdapterControlBlock *acb) in arcmsr_request_irq() argument
933 pr_info("arcmsr%d: msi-x enabled\n", acb->host->host_no); in arcmsr_request_irq()
951 acb->vector_count = nvec; in arcmsr_request_irq()
954 flags, "arcmsr", acb)) { in arcmsr_request_irq()
956 acb->host->host_no, pci_irq_vector(pdev, i)); in arcmsr_request_irq()
964 free_irq(pci_irq_vector(pdev, i), acb); in arcmsr_request_irq()
985 static int arcmsr_set_dma_mask(struct AdapterControlBlock *acb) in arcmsr_set_dma_mask() argument
987 struct pci_dev *pcidev = acb->pdev; in arcmsr_set_dma_mask()
990 if (((acb->adapter_type == ACB_ADAPTER_TYPE_A) && !dma_mask_64) || in arcmsr_set_dma_mask()
1013 struct AdapterControlBlock *acb; in arcmsr_probe() local
1027 acb = (struct AdapterControlBlock *) host->hostdata; in arcmsr_probe()
1028 memset(acb,0,sizeof(struct AdapterControlBlock)); in arcmsr_probe()
1029 acb->pdev = pdev; in arcmsr_probe()
1030 acb->adapter_type = id->driver_data; in arcmsr_probe()
1031 if (arcmsr_set_dma_mask(acb)) in arcmsr_probe()
1033 acb->host = host; in arcmsr_probe()
1051 spin_lock_init(&acb->eh_lock); in arcmsr_probe()
1052 spin_lock_init(&acb->ccblist_lock); in arcmsr_probe()
1053 spin_lock_init(&acb->postq_lock); in arcmsr_probe()
1054 spin_lock_init(&acb->doneq_lock); in arcmsr_probe()
1055 spin_lock_init(&acb->rqbuffer_lock); in arcmsr_probe()
1056 spin_lock_init(&acb->wqbuffer_lock); in arcmsr_probe()
1057 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_probe()
1060 acb->acb_flags &= ~ACB_F_SCSISTOPADAPTER; in arcmsr_probe()
1061 INIT_LIST_HEAD(&acb->ccb_free_list); in arcmsr_probe()
1062 error = arcmsr_remap_pciregion(acb); in arcmsr_probe()
1066 error = arcmsr_alloc_io_queue(acb); in arcmsr_probe()
1069 error = arcmsr_get_firmware_spec(acb); in arcmsr_probe()
1073 if (acb->adapter_type != ACB_ADAPTER_TYPE_F) in arcmsr_probe()
1074 arcmsr_free_io_queue(acb); in arcmsr_probe()
1075 error = arcmsr_alloc_ccb_pool(acb); in arcmsr_probe()
1083 if (arcmsr_request_irq(pdev, acb) == FAILED) in arcmsr_probe()
1085 arcmsr_iop_init(acb); in arcmsr_probe()
1086 arcmsr_init_get_devmap_timer(acb); in arcmsr_probe()
1088 arcmsr_init_set_datetime_timer(acb); in arcmsr_probe()
1089 if(arcmsr_alloc_sysfs_attr(acb)) in arcmsr_probe()
1095 del_timer_sync(&acb->refresh_timer); in arcmsr_probe()
1096 del_timer_sync(&acb->eternal_timer); in arcmsr_probe()
1097 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_probe()
1098 arcmsr_stop_adapter_bgrb(acb); in arcmsr_probe()
1099 arcmsr_flush_adapter_cache(acb); in arcmsr_probe()
1100 arcmsr_free_irq(pdev, acb); in arcmsr_probe()
1104 arcmsr_free_ccb_pool(acb); in arcmsr_probe()
1107 arcmsr_free_io_queue(acb); in arcmsr_probe()
1109 arcmsr_unmap_pciregion(acb); in arcmsr_probe()
1120 struct AdapterControlBlock *acb) in arcmsr_free_irq() argument
1124 for (i = 0; i < acb->vector_count; i++) in arcmsr_free_irq()
1125 free_irq(pci_irq_vector(pdev, i), acb); in arcmsr_free_irq()
1132 struct AdapterControlBlock *acb = in arcmsr_suspend() local
1135 arcmsr_disable_outbound_ints(acb); in arcmsr_suspend()
1136 arcmsr_free_irq(pdev, acb); in arcmsr_suspend()
1137 del_timer_sync(&acb->eternal_timer); in arcmsr_suspend()
1139 del_timer_sync(&acb->refresh_timer); in arcmsr_suspend()
1140 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_suspend()
1141 arcmsr_stop_adapter_bgrb(acb); in arcmsr_suspend()
1142 arcmsr_flush_adapter_cache(acb); in arcmsr_suspend()
1153 struct AdapterControlBlock *acb = in arcmsr_resume() local
1163 if (arcmsr_set_dma_mask(acb)) in arcmsr_resume()
1166 if (arcmsr_request_irq(pdev, acb) == FAILED) in arcmsr_resume()
1168 switch (acb->adapter_type) { in arcmsr_resume()
1170 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_resume()
1181 writel(0, &acb->pmuE->host_int_status); in arcmsr_resume()
1182 writel(ARCMSR_HBEMU_DOORBELL_SYNC, &acb->pmuE->iobound_doorbell); in arcmsr_resume()
1183 acb->in_doorbell = 0; in arcmsr_resume()
1184 acb->out_doorbell = 0; in arcmsr_resume()
1185 acb->doneq_index = 0; in arcmsr_resume()
1188 writel(0, &acb->pmuF->host_int_status); in arcmsr_resume()
1189 writel(ARCMSR_HBFMU_DOORBELL_SYNC, &acb->pmuF->iobound_doorbell); in arcmsr_resume()
1190 acb->in_doorbell = 0; in arcmsr_resume()
1191 acb->out_doorbell = 0; in arcmsr_resume()
1192 acb->doneq_index = 0; in arcmsr_resume()
1193 arcmsr_hbaF_assign_regAddr(acb); in arcmsr_resume()
1196 arcmsr_iop_init(acb); in arcmsr_resume()
1197 arcmsr_init_get_devmap_timer(acb); in arcmsr_resume()
1199 arcmsr_init_set_datetime_timer(acb); in arcmsr_resume()
1202 arcmsr_stop_adapter_bgrb(acb); in arcmsr_resume()
1203 arcmsr_flush_adapter_cache(acb); in arcmsr_resume()
1206 arcmsr_free_ccb_pool(acb); in arcmsr_resume()
1207 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_resume()
1208 arcmsr_free_io_queue(acb); in arcmsr_resume()
1209 arcmsr_unmap_pciregion(acb); in arcmsr_resume()
1216 static uint8_t arcmsr_hbaA_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_hbaA_abort_allcmd() argument
1218 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_abort_allcmd()
1220 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_abort_allcmd()
1223 , acb->host->host_no); in arcmsr_hbaA_abort_allcmd()
1229 static uint8_t arcmsr_hbaB_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_hbaB_abort_allcmd() argument
1231 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_abort_allcmd()
1234 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_abort_allcmd()
1237 , acb->host->host_no); in arcmsr_hbaB_abort_allcmd()
1284 static uint8_t arcmsr_abort_allcmd(struct AdapterControlBlock *acb) in arcmsr_abort_allcmd() argument
1287 switch (acb->adapter_type) { in arcmsr_abort_allcmd()
1289 rtnval = arcmsr_hbaA_abort_allcmd(acb); in arcmsr_abort_allcmd()
1292 rtnval = arcmsr_hbaB_abort_allcmd(acb); in arcmsr_abort_allcmd()
1295 rtnval = arcmsr_hbaC_abort_allcmd(acb); in arcmsr_abort_allcmd()
1298 rtnval = arcmsr_hbaD_abort_allcmd(acb); in arcmsr_abort_allcmd()
1302 rtnval = arcmsr_hbaE_abort_allcmd(acb); in arcmsr_abort_allcmd()
1317 struct AdapterControlBlock *acb = ccb->acb; in arcmsr_ccb_complete() local
1320 atomic_dec(&acb->ccboutstandingcount); in arcmsr_ccb_complete()
1323 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_ccb_complete()
1324 list_add_tail(&ccb->list, &acb->ccb_free_list); in arcmsr_ccb_complete()
1325 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_ccb_complete()
1347 static u32 arcmsr_disable_outbound_ints(struct AdapterControlBlock *acb) in arcmsr_disable_outbound_ints() argument
1350 switch (acb->adapter_type) { in arcmsr_disable_outbound_ints()
1352 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_disable_outbound_ints()
1359 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_disable_outbound_ints()
1365 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_disable_outbound_ints()
1372 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_disable_outbound_ints()
1379 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_disable_outbound_ints()
1389 static void arcmsr_report_ccb_state(struct AdapterControlBlock *acb, in arcmsr_report_ccb_state() argument
1396 if (acb->devstate[id][lun] == ARECA_RAID_GONE) in arcmsr_report_ccb_state()
1397 acb->devstate[id][lun] = ARECA_RAID_GOOD; in arcmsr_report_ccb_state()
1403 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1412 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1419 acb->devstate[id][lun] = ARECA_RAID_GOOD; in arcmsr_report_ccb_state()
1429 , acb->host->host_no in arcmsr_report_ccb_state()
1433 acb->devstate[id][lun] = ARECA_RAID_GONE; in arcmsr_report_ccb_state()
1441 static void arcmsr_drain_donequeue(struct AdapterControlBlock *acb, struct CommandControlBlock *pCC… in arcmsr_drain_donequeue() argument
1443 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_drain_donequeue()
1450 acb->host->host_no, pCCB); in arcmsr_drain_donequeue()
1458 , acb->host->host_no in arcmsr_drain_donequeue()
1459 , acb in arcmsr_drain_donequeue()
1461 , pCCB->acb in arcmsr_drain_donequeue()
1463 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_drain_donequeue()
1466 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_drain_donequeue()
1469 static void arcmsr_done4abort_postqueue(struct AdapterControlBlock *acb) in arcmsr_done4abort_postqueue() argument
1478 switch (acb->adapter_type) { in arcmsr_done4abort_postqueue()
1481 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_done4abort_postqueue()
1484 acb->outbound_int_enable; in arcmsr_done4abort_postqueue()
1488 && (i++ < acb->maxOutstanding)) { in arcmsr_done4abort_postqueue()
1490 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1491 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1492 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1495 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1501 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_done4abort_postqueue()
1509 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1510 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1511 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1514 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1523 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_done4abort_postqueue()
1524 …while ((readl(&reg->host_int_status) & ARCMSR_HBCMU_OUTBOUND_POSTQUEUE_ISR) && (i++ < acb->maxOuts… in arcmsr_done4abort_postqueue()
1528 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1529 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1530 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1533 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1538 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_done4abort_postqueue()
1543 residual = atomic_read(&acb->ccboutstandingcount); in arcmsr_done4abort_postqueue()
1545 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1557 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1561 if (acb->cdb_phyadd_hipart) in arcmsr_done4abort_postqueue()
1562 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_done4abort_postqueue()
1564 (acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_done4abort_postqueue()
1570 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_done4abort_postqueue()
1574 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_done4abort_postqueue()
1583 arcmsr_hbaE_postqueue_isr(acb); in arcmsr_done4abort_postqueue()
1586 arcmsr_hbaF_postqueue_isr(acb); in arcmsr_done4abort_postqueue()
1591 static void arcmsr_remove_scsi_devices(struct AdapterControlBlock *acb) in arcmsr_remove_scsi_devices() argument
1593 char *acb_dev_map = (char *)acb->device_map; in arcmsr_remove_scsi_devices()
1599 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_remove_scsi_devices()
1600 ccb = acb->pccb_pool[i]; in arcmsr_remove_scsi_devices()
1612 psdev = scsi_device_lookup(acb->host, in arcmsr_remove_scsi_devices()
1627 static void arcmsr_free_pcidev(struct AdapterControlBlock *acb) in arcmsr_free_pcidev() argument
1632 host = acb->host; in arcmsr_free_pcidev()
1633 arcmsr_free_sysfs_attr(acb); in arcmsr_free_pcidev()
1635 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_free_pcidev()
1636 del_timer_sync(&acb->eternal_timer); in arcmsr_free_pcidev()
1638 del_timer_sync(&acb->refresh_timer); in arcmsr_free_pcidev()
1639 pdev = acb->pdev; in arcmsr_free_pcidev()
1640 arcmsr_free_irq(pdev, acb); in arcmsr_free_pcidev()
1641 arcmsr_free_ccb_pool(acb); in arcmsr_free_pcidev()
1642 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_free_pcidev()
1643 arcmsr_free_io_queue(acb); in arcmsr_free_pcidev()
1644 arcmsr_unmap_pciregion(acb); in arcmsr_free_pcidev()
1653 struct AdapterControlBlock *acb = in arcmsr_remove() local
1660 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_remove()
1661 acb->acb_flags |= ACB_F_ADAPTER_REMOVED; in arcmsr_remove()
1662 arcmsr_remove_scsi_devices(acb); in arcmsr_remove()
1663 arcmsr_free_pcidev(acb); in arcmsr_remove()
1666 arcmsr_free_sysfs_attr(acb); in arcmsr_remove()
1668 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_remove()
1669 del_timer_sync(&acb->eternal_timer); in arcmsr_remove()
1671 del_timer_sync(&acb->refresh_timer); in arcmsr_remove()
1672 arcmsr_disable_outbound_ints(acb); in arcmsr_remove()
1673 arcmsr_stop_adapter_bgrb(acb); in arcmsr_remove()
1674 arcmsr_flush_adapter_cache(acb); in arcmsr_remove()
1675 acb->acb_flags |= ACB_F_SCSISTOPADAPTER; in arcmsr_remove()
1676 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_remove()
1678 for (poll_count = 0; poll_count < acb->maxOutstanding; poll_count++){ in arcmsr_remove()
1679 if (!atomic_read(&acb->ccboutstandingcount)) in arcmsr_remove()
1681 arcmsr_interrupt(acb);/* FIXME: need spinlock */ in arcmsr_remove()
1685 if (atomic_read(&acb->ccboutstandingcount)) { in arcmsr_remove()
1688 arcmsr_abort_allcmd(acb); in arcmsr_remove()
1689 arcmsr_done4abort_postqueue(acb); in arcmsr_remove()
1690 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_remove()
1691 struct CommandControlBlock *ccb = acb->pccb_pool[i]; in arcmsr_remove()
1699 arcmsr_free_irq(pdev, acb); in arcmsr_remove()
1700 arcmsr_free_ccb_pool(acb); in arcmsr_remove()
1701 if (acb->adapter_type == ACB_ADAPTER_TYPE_F) in arcmsr_remove()
1702 arcmsr_free_io_queue(acb); in arcmsr_remove()
1703 arcmsr_unmap_pciregion(acb); in arcmsr_remove()
1712 struct AdapterControlBlock *acb = in arcmsr_shutdown() local
1714 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_shutdown()
1716 del_timer_sync(&acb->eternal_timer); in arcmsr_shutdown()
1718 del_timer_sync(&acb->refresh_timer); in arcmsr_shutdown()
1719 arcmsr_disable_outbound_ints(acb); in arcmsr_shutdown()
1720 arcmsr_free_irq(pdev, acb); in arcmsr_shutdown()
1721 flush_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_shutdown()
1722 arcmsr_stop_adapter_bgrb(acb); in arcmsr_shutdown()
1723 arcmsr_flush_adapter_cache(acb); in arcmsr_shutdown()
1740 static void arcmsr_enable_outbound_ints(struct AdapterControlBlock *acb, in arcmsr_enable_outbound_ints() argument
1744 switch (acb->adapter_type) { in arcmsr_enable_outbound_ints()
1747 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_enable_outbound_ints()
1752 acb->outbound_int_enable = ~(intmask_org & mask) & 0x000000ff; in arcmsr_enable_outbound_ints()
1757 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_enable_outbound_ints()
1763 acb->outbound_int_enable = (intmask_org | mask) & 0x0000000f; in arcmsr_enable_outbound_ints()
1767 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_enable_outbound_ints()
1770 acb->outbound_int_enable = ~(intmask_org & mask) & 0x0000000f; in arcmsr_enable_outbound_ints()
1774 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_enable_outbound_ints()
1782 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_enable_outbound_ints()
1791 static int arcmsr_build_ccb(struct AdapterControlBlock *acb, in arcmsr_build_ccb() argument
1811 if (unlikely(nseg > acb->host->sg_tablesize || nseg < 0)) in arcmsr_build_ccb()
1846 static void arcmsr_post_ccb(struct AdapterControlBlock *acb, struct CommandControlBlock *ccb) in arcmsr_post_ccb() argument
1850 atomic_inc(&acb->ccboutstandingcount); in arcmsr_post_ccb()
1852 switch (acb->adapter_type) { in arcmsr_post_ccb()
1854 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_post_ccb()
1865 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_post_ccb()
1883 struct MessageUnit_C __iomem *phbcmu = acb->pmuC; in arcmsr_post_ccb()
1893 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_post_ccb()
1899 spin_lock_irqsave(&acb->postq_lock, flags); in arcmsr_post_ccb()
1912 spin_unlock_irqrestore(&acb->postq_lock, flags); in arcmsr_post_ccb()
1916 struct MessageUnit_E __iomem *pmu = acb->pmuE; in arcmsr_post_ccb()
1926 struct MessageUnit_F __iomem *pmu = acb->pmuF; in arcmsr_post_ccb()
1945 static void arcmsr_hbaA_stop_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaA_stop_bgrb() argument
1947 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_stop_bgrb()
1948 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_hbaA_stop_bgrb()
1950 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_stop_bgrb()
1953 , acb->host->host_no); in arcmsr_hbaA_stop_bgrb()
1957 static void arcmsr_hbaB_stop_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaB_stop_bgrb() argument
1959 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_stop_bgrb()
1960 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_hbaB_stop_bgrb()
1963 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_stop_bgrb()
1966 , acb->host->host_no); in arcmsr_hbaB_stop_bgrb()
2009 static void arcmsr_stop_adapter_bgrb(struct AdapterControlBlock *acb) in arcmsr_stop_adapter_bgrb() argument
2011 switch (acb->adapter_type) { in arcmsr_stop_adapter_bgrb()
2013 arcmsr_hbaA_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2016 arcmsr_hbaB_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2019 arcmsr_hbaC_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2022 arcmsr_hbaD_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2026 arcmsr_hbaE_stop_bgrb(acb); in arcmsr_stop_adapter_bgrb()
2031 static void arcmsr_free_ccb_pool(struct AdapterControlBlock *acb) in arcmsr_free_ccb_pool() argument
2033 dma_free_coherent(&acb->pdev->dev, acb->uncache_size, acb->dma_coherent, acb->dma_coherent_handle); in arcmsr_free_ccb_pool()
2036 static void arcmsr_iop_message_read(struct AdapterControlBlock *acb) in arcmsr_iop_message_read() argument
2038 switch (acb->adapter_type) { in arcmsr_iop_message_read()
2040 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_message_read()
2045 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_message_read()
2050 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_message_read()
2056 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_message_read()
2063 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_message_read()
2064 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_iop_message_read()
2065 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_message_read()
2071 static void arcmsr_iop_message_wrote(struct AdapterControlBlock *acb) in arcmsr_iop_message_wrote() argument
2073 switch (acb->adapter_type) { in arcmsr_iop_message_wrote()
2075 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_message_wrote()
2085 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_message_wrote()
2094 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_message_wrote()
2103 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_message_wrote()
2110 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_message_wrote()
2111 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_WRITE_OK; in arcmsr_iop_message_wrote()
2112 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_message_wrote()
2118 struct QBUFFER __iomem *arcmsr_get_iop_rqbuffer(struct AdapterControlBlock *acb) in arcmsr_get_iop_rqbuffer() argument
2121 switch (acb->adapter_type) { in arcmsr_get_iop_rqbuffer()
2124 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_get_iop_rqbuffer()
2129 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_get_iop_rqbuffer()
2134 struct MessageUnit_C __iomem *phbcmu = acb->pmuC; in arcmsr_get_iop_rqbuffer()
2139 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_get_iop_rqbuffer()
2144 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_get_iop_rqbuffer()
2149 qbuffer = (struct QBUFFER __iomem *)acb->message_rbuffer; in arcmsr_get_iop_rqbuffer()
2156 static struct QBUFFER __iomem *arcmsr_get_iop_wqbuffer(struct AdapterControlBlock *acb) in arcmsr_get_iop_wqbuffer() argument
2159 switch (acb->adapter_type) { in arcmsr_get_iop_wqbuffer()
2162 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_get_iop_wqbuffer()
2167 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_get_iop_wqbuffer()
2172 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_get_iop_wqbuffer()
2177 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_get_iop_wqbuffer()
2182 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_get_iop_wqbuffer()
2187 pqbuffer = (struct QBUFFER __iomem *)acb->message_wbuffer; in arcmsr_get_iop_wqbuffer()
2194 arcmsr_Read_iop_rqbuffer_in_DWORD(struct AdapterControlBlock *acb, in arcmsr_Read_iop_rqbuffer_in_DWORD() argument
2220 pQbuffer = &acb->rqbuffer[acb->rqbuf_putIndex]; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2222 acb->rqbuf_putIndex++; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2224 acb->rqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_Read_iop_rqbuffer_in_DWORD()
2230 arcmsr_iop_message_read(acb); in arcmsr_Read_iop_rqbuffer_in_DWORD()
2235 arcmsr_Read_iop_rqbuffer_data(struct AdapterControlBlock *acb, in arcmsr_Read_iop_rqbuffer_data() argument
2242 if (acb->adapter_type > ACB_ADAPTER_TYPE_B) in arcmsr_Read_iop_rqbuffer_data()
2243 return arcmsr_Read_iop_rqbuffer_in_DWORD(acb, prbuffer); in arcmsr_Read_iop_rqbuffer_data()
2247 pQbuffer = &acb->rqbuffer[acb->rqbuf_putIndex]; in arcmsr_Read_iop_rqbuffer_data()
2249 acb->rqbuf_putIndex++; in arcmsr_Read_iop_rqbuffer_data()
2250 acb->rqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_Read_iop_rqbuffer_data()
2254 arcmsr_iop_message_read(acb); in arcmsr_Read_iop_rqbuffer_data()
2258 static void arcmsr_iop2drv_data_wrote_handle(struct AdapterControlBlock *acb) in arcmsr_iop2drv_data_wrote_handle() argument
2264 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop2drv_data_wrote_handle()
2265 prbuffer = arcmsr_get_iop_rqbuffer(acb); in arcmsr_iop2drv_data_wrote_handle()
2266 buf_empty_len = (acb->rqbuf_putIndex - acb->rqbuf_getIndex - 1) & in arcmsr_iop2drv_data_wrote_handle()
2269 if (arcmsr_Read_iop_rqbuffer_data(acb, prbuffer) == 0) in arcmsr_iop2drv_data_wrote_handle()
2270 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop2drv_data_wrote_handle()
2272 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop2drv_data_wrote_handle()
2273 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop2drv_data_wrote_handle()
2276 static void arcmsr_write_ioctldata2iop_in_DWORD(struct AdapterControlBlock *acb) in arcmsr_write_ioctldata2iop_in_DWORD() argument
2284 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_READED) { in arcmsr_write_ioctldata2iop_in_DWORD()
2290 acb->acb_flags &= (~ACB_F_MESSAGE_WQBUFFER_READED); in arcmsr_write_ioctldata2iop_in_DWORD()
2291 pwbuffer = arcmsr_get_iop_wqbuffer(acb); in arcmsr_write_ioctldata2iop_in_DWORD()
2293 while ((acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_write_ioctldata2iop_in_DWORD()
2295 pQbuffer = &acb->wqbuffer[acb->wqbuf_getIndex]; in arcmsr_write_ioctldata2iop_in_DWORD()
2297 acb->wqbuf_getIndex++; in arcmsr_write_ioctldata2iop_in_DWORD()
2298 acb->wqbuf_getIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_write_ioctldata2iop_in_DWORD()
2316 arcmsr_iop_message_wrote(acb); in arcmsr_write_ioctldata2iop_in_DWORD()
2321 arcmsr_write_ioctldata2iop(struct AdapterControlBlock *acb) in arcmsr_write_ioctldata2iop() argument
2328 if (acb->adapter_type > ACB_ADAPTER_TYPE_B) { in arcmsr_write_ioctldata2iop()
2329 arcmsr_write_ioctldata2iop_in_DWORD(acb); in arcmsr_write_ioctldata2iop()
2332 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_READED) { in arcmsr_write_ioctldata2iop()
2333 acb->acb_flags &= (~ACB_F_MESSAGE_WQBUFFER_READED); in arcmsr_write_ioctldata2iop()
2334 pwbuffer = arcmsr_get_iop_wqbuffer(acb); in arcmsr_write_ioctldata2iop()
2336 while ((acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_write_ioctldata2iop()
2338 pQbuffer = &acb->wqbuffer[acb->wqbuf_getIndex]; in arcmsr_write_ioctldata2iop()
2340 acb->wqbuf_getIndex++; in arcmsr_write_ioctldata2iop()
2341 acb->wqbuf_getIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_write_ioctldata2iop()
2346 arcmsr_iop_message_wrote(acb); in arcmsr_write_ioctldata2iop()
2350 static void arcmsr_iop2drv_data_read_handle(struct AdapterControlBlock *acb) in arcmsr_iop2drv_data_read_handle() argument
2354 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop2drv_data_read_handle()
2355 acb->acb_flags |= ACB_F_MESSAGE_WQBUFFER_READED; in arcmsr_iop2drv_data_read_handle()
2356 if (acb->wqbuf_getIndex != acb->wqbuf_putIndex) in arcmsr_iop2drv_data_read_handle()
2357 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop2drv_data_read_handle()
2358 if (acb->wqbuf_getIndex == acb->wqbuf_putIndex) in arcmsr_iop2drv_data_read_handle()
2359 acb->acb_flags |= ACB_F_MESSAGE_WQBUFFER_CLEARED; in arcmsr_iop2drv_data_read_handle()
2360 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop2drv_data_read_handle()
2363 static void arcmsr_hbaA_doorbell_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_doorbell_isr() argument
2366 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_doorbell_isr()
2371 arcmsr_iop2drv_data_wrote_handle(acb); in arcmsr_hbaA_doorbell_isr()
2373 arcmsr_iop2drv_data_read_handle(acb); in arcmsr_hbaA_doorbell_isr()
2459 static void arcmsr_hbaA_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_postqueue_isr() argument
2462 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_postqueue_isr()
2470 if (acb->cdb_phyadd_hipart) in arcmsr_hbaA_postqueue_isr()
2471 cdb_phy_addr = cdb_phy_addr | acb->cdb_phyadd_hipart; in arcmsr_hbaA_postqueue_isr()
2472 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + cdb_phy_addr); in arcmsr_hbaA_postqueue_isr()
2475 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_hbaA_postqueue_isr()
2478 static void arcmsr_hbaB_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_postqueue_isr() argument
2482 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_postqueue_isr()
2491 if (acb->cdb_phyadd_hipart) in arcmsr_hbaB_postqueue_isr()
2492 cdb_phy_addr = cdb_phy_addr | acb->cdb_phyadd_hipart; in arcmsr_hbaB_postqueue_isr()
2493 pARCMSR_CDB = (struct ARCMSR_CDB *)(acb->vir2phy_offset + cdb_phy_addr); in arcmsr_hbaB_postqueue_isr()
2496 arcmsr_drain_donequeue(acb, pCCB, error); in arcmsr_hbaB_postqueue_isr()
2504 static void arcmsr_hbaC_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaC_postqueue_isr() argument
2513 phbcmu = acb->pmuC; in arcmsr_hbaC_postqueue_isr()
2520 if (acb->cdb_phyadd_hipart) in arcmsr_hbaC_postqueue_isr()
2521 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaC_postqueue_isr()
2522 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset in arcmsr_hbaC_postqueue_isr()
2529 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaC_postqueue_isr()
2539 static void arcmsr_hbaD_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaD_postqueue_isr() argument
2549 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaD_postqueue_isr()
2550 pmu = acb->pmuD; in arcmsr_hbaD_postqueue_isr()
2564 if (acb->cdb_phyadd_hipart) in arcmsr_hbaD_postqueue_isr()
2565 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaD_postqueue_isr()
2566 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset in arcmsr_hbaD_postqueue_isr()
2572 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaD_postqueue_isr()
2580 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_postqueue_isr()
2583 static void arcmsr_hbaE_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaE_postqueue_isr() argument
2592 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaE_postqueue_isr()
2593 doneq_index = acb->doneq_index; in arcmsr_hbaE_postqueue_isr()
2594 pmu = acb->pmuE; in arcmsr_hbaE_postqueue_isr()
2596 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaE_postqueue_isr()
2597 ccb = acb->pccb_pool[cmdSMID]; in arcmsr_hbaE_postqueue_isr()
2598 error = (acb->pCompletionQ[doneq_index].cmdFlag in arcmsr_hbaE_postqueue_isr()
2600 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaE_postqueue_isr()
2602 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaE_postqueue_isr()
2605 acb->doneq_index = doneq_index; in arcmsr_hbaE_postqueue_isr()
2607 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_postqueue_isr()
2610 static void arcmsr_hbaF_postqueue_isr(struct AdapterControlBlock *acb) in arcmsr_hbaF_postqueue_isr() argument
2619 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaF_postqueue_isr()
2620 doneq_index = acb->doneq_index; in arcmsr_hbaF_postqueue_isr()
2621 phbcmu = acb->pmuF; in arcmsr_hbaF_postqueue_isr()
2623 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaF_postqueue_isr()
2626 ccb = acb->pccb_pool[cmdSMID]; in arcmsr_hbaF_postqueue_isr()
2627 error = (acb->pCompletionQ[doneq_index].cmdFlag & in arcmsr_hbaF_postqueue_isr()
2629 arcmsr_drain_donequeue(acb, ccb, error); in arcmsr_hbaF_postqueue_isr()
2630 acb->pCompletionQ[doneq_index].cmdSMID = 0xffff; in arcmsr_hbaF_postqueue_isr()
2632 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaF_postqueue_isr()
2635 acb->doneq_index = doneq_index; in arcmsr_hbaF_postqueue_isr()
2637 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaF_postqueue_isr()
2648 static void arcmsr_hbaA_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_message_isr() argument
2650 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_message_isr()
2653 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaA_message_isr()
2654 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaA_message_isr()
2656 static void arcmsr_hbaB_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_message_isr() argument
2658 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_message_isr()
2662 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaB_message_isr()
2663 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaB_message_isr()
2674 static void arcmsr_hbaC_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaC_message_isr() argument
2676 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_hbaC_message_isr()
2679 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaC_message_isr()
2680 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaC_message_isr()
2683 static void arcmsr_hbaD_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaD_message_isr() argument
2685 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_message_isr()
2689 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaD_message_isr()
2690 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaD_message_isr()
2693 static void arcmsr_hbaE_message_isr(struct AdapterControlBlock *acb) in arcmsr_hbaE_message_isr() argument
2695 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_hbaE_message_isr()
2698 if (acb->acb_flags & ACB_F_MSG_GET_CONFIG) in arcmsr_hbaE_message_isr()
2699 schedule_work(&acb->arcmsr_do_message_isr_bh); in arcmsr_hbaE_message_isr()
2702 static int arcmsr_hbaA_handle_isr(struct AdapterControlBlock *acb) in arcmsr_hbaA_handle_isr() argument
2705 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_handle_isr()
2707 acb->outbound_int_enable; in arcmsr_hbaA_handle_isr()
2713 arcmsr_hbaA_doorbell_isr(acb); in arcmsr_hbaA_handle_isr()
2715 arcmsr_hbaA_postqueue_isr(acb); in arcmsr_hbaA_handle_isr()
2717 arcmsr_hbaA_message_isr(acb); in arcmsr_hbaA_handle_isr()
2719 acb->outbound_int_enable; in arcmsr_hbaA_handle_isr()
2726 static int arcmsr_hbaB_handle_isr(struct AdapterControlBlock *acb) in arcmsr_hbaB_handle_isr() argument
2729 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_handle_isr()
2731 acb->outbound_int_enable; in arcmsr_hbaB_handle_isr()
2738 arcmsr_iop2drv_data_wrote_handle(acb); in arcmsr_hbaB_handle_isr()
2740 arcmsr_iop2drv_data_read_handle(acb); in arcmsr_hbaB_handle_isr()
2742 arcmsr_hbaB_postqueue_isr(acb); in arcmsr_hbaB_handle_isr()
2744 arcmsr_hbaB_message_isr(acb); in arcmsr_hbaB_handle_isr()
2746 acb->outbound_int_enable; in arcmsr_hbaB_handle_isr()
2855 static irqreturn_t arcmsr_interrupt(struct AdapterControlBlock *acb) in arcmsr_interrupt() argument
2857 switch (acb->adapter_type) { in arcmsr_interrupt()
2859 return arcmsr_hbaA_handle_isr(acb); in arcmsr_interrupt()
2861 return arcmsr_hbaB_handle_isr(acb); in arcmsr_interrupt()
2863 return arcmsr_hbaC_handle_isr(acb); in arcmsr_interrupt()
2865 return arcmsr_hbaD_handle_isr(acb); in arcmsr_interrupt()
2867 return arcmsr_hbaE_handle_isr(acb); in arcmsr_interrupt()
2869 return arcmsr_hbaF_handle_isr(acb); in arcmsr_interrupt()
2875 static void arcmsr_iop_parking(struct AdapterControlBlock *acb) in arcmsr_iop_parking() argument
2877 if (acb) { in arcmsr_iop_parking()
2879 if (acb->acb_flags & ACB_F_MSG_START_BGRB) { in arcmsr_iop_parking()
2881 acb->acb_flags &= ~ACB_F_MSG_START_BGRB; in arcmsr_iop_parking()
2882 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_parking()
2883 arcmsr_stop_adapter_bgrb(acb); in arcmsr_iop_parking()
2884 arcmsr_flush_adapter_cache(acb); in arcmsr_iop_parking()
2885 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_parking()
2891 void arcmsr_clear_iop2drv_rqueue_buffer(struct AdapterControlBlock *acb) in arcmsr_clear_iop2drv_rqueue_buffer() argument
2895 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_clear_iop2drv_rqueue_buffer()
2897 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_clear_iop2drv_rqueue_buffer()
2898 acb->acb_flags &= ~ACB_F_IOPDATA_OVERFLOW; in arcmsr_clear_iop2drv_rqueue_buffer()
2899 acb->rqbuf_getIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2900 acb->rqbuf_putIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2901 arcmsr_iop_message_read(acb); in arcmsr_clear_iop2drv_rqueue_buffer()
2903 } else if (acb->rqbuf_getIndex != in arcmsr_clear_iop2drv_rqueue_buffer()
2904 acb->rqbuf_putIndex) { in arcmsr_clear_iop2drv_rqueue_buffer()
2905 acb->rqbuf_getIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2906 acb->rqbuf_putIndex = 0; in arcmsr_clear_iop2drv_rqueue_buffer()
2914 static int arcmsr_iop_message_xfer(struct AdapterControlBlock *acb, in arcmsr_iop_message_xfer() argument
2954 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
2955 if (acb->rqbuf_getIndex != acb->rqbuf_putIndex) { in arcmsr_iop_message_xfer()
2956 unsigned int tail = acb->rqbuf_getIndex; in arcmsr_iop_message_xfer()
2957 unsigned int head = acb->rqbuf_putIndex; in arcmsr_iop_message_xfer()
2965 memcpy(ptmpQbuffer, acb->rqbuffer + tail, allxfer_len); in arcmsr_iop_message_xfer()
2967 memcpy(ptmpQbuffer, acb->rqbuffer + tail, cnt_to_end); in arcmsr_iop_message_xfer()
2968 memcpy(ptmpQbuffer + cnt_to_end, acb->rqbuffer, allxfer_len - cnt_to_end); in arcmsr_iop_message_xfer()
2970 acb->rqbuf_getIndex = (acb->rqbuf_getIndex + allxfer_len) % ARCMSR_MAX_QBUFFER; in arcmsr_iop_message_xfer()
2974 if (acb->acb_flags & ACB_F_IOPDATA_OVERFLOW) { in arcmsr_iop_message_xfer()
2976 acb->acb_flags &= ~ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop_message_xfer()
2977 prbuffer = arcmsr_get_iop_rqbuffer(acb); in arcmsr_iop_message_xfer()
2978 if (arcmsr_Read_iop_rqbuffer_data(acb, prbuffer) == 0) in arcmsr_iop_message_xfer()
2979 acb->acb_flags |= ACB_F_IOPDATA_OVERFLOW; in arcmsr_iop_message_xfer()
2981 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
2984 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3013 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3014 if (acb->wqbuf_putIndex != acb->wqbuf_getIndex) { in arcmsr_iop_message_xfer()
3017 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop_message_xfer()
3026 pQbuffer = &acb->wqbuffer[acb->wqbuf_putIndex]; in arcmsr_iop_message_xfer()
3027 cnt2end = ARCMSR_MAX_QBUFFER - acb->wqbuf_putIndex; in arcmsr_iop_message_xfer()
3032 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3033 pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3036 acb->wqbuf_putIndex += user_len; in arcmsr_iop_message_xfer()
3037 acb->wqbuf_putIndex %= ARCMSR_MAX_QBUFFER; in arcmsr_iop_message_xfer()
3038 if (acb->acb_flags & ACB_F_MESSAGE_WQBUFFER_CLEARED) { in arcmsr_iop_message_xfer()
3039 acb->acb_flags &= in arcmsr_iop_message_xfer()
3041 arcmsr_write_ioctldata2iop(acb); in arcmsr_iop_message_xfer()
3044 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3046 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3055 uint8_t *pQbuffer = acb->rqbuffer; in arcmsr_iop_message_xfer()
3057 arcmsr_clear_iop2drv_rqueue_buffer(acb); in arcmsr_iop_message_xfer()
3058 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3059 acb->acb_flags |= ACB_F_MESSAGE_RQBUFFER_CLEARED; in arcmsr_iop_message_xfer()
3060 acb->rqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3061 acb->rqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3063 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3064 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3073 uint8_t *pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3074 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3075 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_iop_message_xfer()
3077 acb->wqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3078 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3080 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3081 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3091 arcmsr_clear_iop2drv_rqueue_buffer(acb); in arcmsr_iop_message_xfer()
3092 spin_lock_irqsave(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3093 acb->acb_flags |= ACB_F_MESSAGE_RQBUFFER_CLEARED; in arcmsr_iop_message_xfer()
3094 acb->rqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3095 acb->rqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3096 pQbuffer = acb->rqbuffer; in arcmsr_iop_message_xfer()
3098 spin_unlock_irqrestore(&acb->rqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3099 spin_lock_irqsave(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3100 acb->acb_flags |= (ACB_F_MESSAGE_WQBUFFER_CLEARED | in arcmsr_iop_message_xfer()
3102 acb->wqbuf_getIndex = 0; in arcmsr_iop_message_xfer()
3103 acb->wqbuf_putIndex = 0; in arcmsr_iop_message_xfer()
3104 pQbuffer = acb->wqbuffer; in arcmsr_iop_message_xfer()
3106 spin_unlock_irqrestore(&acb->wqbuffer_lock, flags); in arcmsr_iop_message_xfer()
3107 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3116 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3126 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3137 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3143 arcmsr_iop_parking(acb); in arcmsr_iop_message_xfer()
3147 if (acb->fw_flag == FW_DEADLOCK) in arcmsr_iop_message_xfer()
3153 arcmsr_flush_adapter_cache(acb); in arcmsr_iop_message_xfer()
3168 static struct CommandControlBlock *arcmsr_get_freeccb(struct AdapterControlBlock *acb) in arcmsr_get_freeccb() argument
3170 struct list_head *head = &acb->ccb_free_list; in arcmsr_get_freeccb()
3173 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3178 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3181 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_get_freeccb()
3185 static void arcmsr_handle_virtual_command(struct AdapterControlBlock *acb, in arcmsr_handle_virtual_command() argument
3225 if (arcmsr_iop_message_xfer(acb, cmd)) in arcmsr_handle_virtual_command()
3239 struct AdapterControlBlock *acb = (struct AdapterControlBlock *) host->hostdata; in arcmsr_queue_command_lck() local
3243 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) { in arcmsr_queue_command_lck()
3253 arcmsr_handle_virtual_command(acb, cmd); in arcmsr_queue_command_lck()
3256 ccb = arcmsr_get_freeccb(acb); in arcmsr_queue_command_lck()
3259 if (arcmsr_build_ccb( acb, ccb, cmd ) == FAILED) { in arcmsr_queue_command_lck()
3264 arcmsr_post_ccb(acb, ccb); in arcmsr_queue_command_lck()
3313 static bool arcmsr_hbaA_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaA_get_config() argument
3315 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_get_config()
3317 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaA_get_config()
3319 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_get_config()
3321 miscellaneous data' timeout \n", acb->host->host_no); in arcmsr_hbaA_get_config()
3324 arcmsr_get_adapter_config(acb, reg->message_rwbuffer); in arcmsr_hbaA_get_config()
3327 static bool arcmsr_hbaB_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaB_get_config() argument
3329 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_get_config()
3331 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaB_get_config()
3333 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_get_config()
3334 printk(KERN_ERR "arcmsr%d: can't set driver mode.\n", acb->host->host_no); in arcmsr_hbaB_get_config()
3338 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_get_config()
3340 miscellaneous data' timeout \n", acb->host->host_no); in arcmsr_hbaB_get_config()
3343 arcmsr_get_adapter_config(acb, reg->message_rwbuffer); in arcmsr_hbaB_get_config()
3370 static bool arcmsr_hbaD_get_config(struct AdapterControlBlock *acb) in arcmsr_hbaD_get_config() argument
3372 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_hbaD_get_config()
3374 if (readl(acb->pmuD->outbound_doorbell) & in arcmsr_hbaD_get_config()
3377 acb->pmuD->outbound_doorbell);/*clear interrupt*/ in arcmsr_hbaD_get_config()
3379 arcmsr_wait_firmware_ready(acb); in arcmsr_hbaD_get_config()
3383 if (!arcmsr_hbaD_wait_msgint_ready(acb)) { in arcmsr_hbaD_get_config()
3385 "miscellaneous data timeout\n", acb->host->host_no); in arcmsr_hbaD_get_config()
3388 arcmsr_get_adapter_config(acb, reg->msgcode_rwbuffer); in arcmsr_hbaD_get_config()
3443 static bool arcmsr_get_firmware_spec(struct AdapterControlBlock *acb) in arcmsr_get_firmware_spec() argument
3447 switch (acb->adapter_type) { in arcmsr_get_firmware_spec()
3449 rtn = arcmsr_hbaA_get_config(acb); in arcmsr_get_firmware_spec()
3452 rtn = arcmsr_hbaB_get_config(acb); in arcmsr_get_firmware_spec()
3455 rtn = arcmsr_hbaC_get_config(acb); in arcmsr_get_firmware_spec()
3458 rtn = arcmsr_hbaD_get_config(acb); in arcmsr_get_firmware_spec()
3461 rtn = arcmsr_hbaE_get_config(acb); in arcmsr_get_firmware_spec()
3464 rtn = arcmsr_hbaF_get_config(acb); in arcmsr_get_firmware_spec()
3469 acb->maxOutstanding = acb->firm_numbers_queue - 1; in arcmsr_get_firmware_spec()
3470 if (acb->host->can_queue >= acb->firm_numbers_queue) in arcmsr_get_firmware_spec()
3471 acb->host->can_queue = acb->maxOutstanding; in arcmsr_get_firmware_spec()
3473 acb->maxOutstanding = acb->host->can_queue; in arcmsr_get_firmware_spec()
3474 acb->maxFreeCCB = acb->host->can_queue; in arcmsr_get_firmware_spec()
3475 if (acb->maxFreeCCB < ARCMSR_MAX_FREECCB_NUM) in arcmsr_get_firmware_spec()
3476 acb->maxFreeCCB += 64; in arcmsr_get_firmware_spec()
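
The tail of arcmsr_get_firmware_spec() above sizes the host queue from the firmware's reported depth. Restated as a sketch, assuming firm_numbers_queue was filled in by the per-adapter get_config call:

/* Sketch of the queue-depth clamping at the end of arcmsr_get_firmware_spec(). */
static void clamp_queue_depth_sketch(struct AdapterControlBlock *acb)
{
	acb->maxOutstanding = acb->firm_numbers_queue - 1;	/* keep one slot back */
	if (acb->host->can_queue >= acb->firm_numbers_queue)
		acb->host->can_queue = acb->maxOutstanding;	/* firmware is the limit */
	else
		acb->maxOutstanding = acb->host->can_queue;	/* host setting is the limit */
	acb->maxFreeCCB = acb->host->can_queue;
	if (acb->maxFreeCCB < ARCMSR_MAX_FREECCB_NUM)
		acb->maxFreeCCB += 64;				/* pad the CCB pool, as listed */
}
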
3480 static int arcmsr_hbaA_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaA_polling_ccbdone() argument
3483 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_polling_ccbdone()
3493 outbound_intstatus = readl(&reg->outbound_intstatus) & acb->outbound_int_enable; in arcmsr_hbaA_polling_ccbdone()
3510 if (acb->cdb_phyadd_hipart) in arcmsr_hbaA_polling_ccbdone()
3511 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaA_polling_ccbdone()
3512 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaA_polling_ccbdone()
3515 if ((ccb->acb != acb) || (ccb->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaA_polling_ccbdone()
3519 , acb->host->host_no in arcmsr_hbaA_polling_ccbdone()
3530 , acb->host->host_no in arcmsr_hbaA_polling_ccbdone()
3532 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaA_polling_ccbdone()
3536 arcmsr_report_ccb_state(acb, ccb, error); in arcmsr_hbaA_polling_ccbdone()
3541 static int arcmsr_hbaB_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaB_polling_ccbdone() argument
3544 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_polling_ccbdone()
3579 if (acb->cdb_phyadd_hipart) in arcmsr_hbaB_polling_ccbdone()
3580 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaB_polling_ccbdone()
3581 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaB_polling_ccbdone()
3584 if ((ccb->acb != acb) || (ccb->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaB_polling_ccbdone()
3588 ,acb->host->host_no in arcmsr_hbaB_polling_ccbdone()
3599 , acb->host->host_no in arcmsr_hbaB_polling_ccbdone()
3601 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaB_polling_ccbdone()
3605 arcmsr_report_ccb_state(acb, ccb, error); in arcmsr_hbaB_polling_ccbdone()
3610 static int arcmsr_hbaC_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaC_polling_ccbdone() argument
3613 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_hbaC_polling_ccbdone()
3640 if (acb->cdb_phyadd_hipart) in arcmsr_hbaC_polling_ccbdone()
3641 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaC_polling_ccbdone()
3642 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + ccb_cdb_phy); in arcmsr_hbaC_polling_ccbdone()
3646 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaC_polling_ccbdone()
3650 , acb->host->host_no in arcmsr_hbaC_polling_ccbdone()
3661 , acb->host->host_no in arcmsr_hbaC_polling_ccbdone()
3663 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaC_polling_ccbdone()
3667 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaC_polling_ccbdone()
3672 static int arcmsr_hbaD_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaD_polling_ccbdone() argument
3681 struct MessageUnit_D *pmu = acb->pmuD; in arcmsr_hbaD_polling_ccbdone()
3686 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3690 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3709 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaD_polling_ccbdone()
3712 if (acb->cdb_phyadd_hipart) in arcmsr_hbaD_polling_ccbdone()
3713 ccb_cdb_phy = ccb_cdb_phy | acb->cdb_phyadd_hipart; in arcmsr_hbaD_polling_ccbdone()
3714 arcmsr_cdb = (struct ARCMSR_CDB *)(acb->vir2phy_offset + in arcmsr_hbaD_polling_ccbdone()
3719 if ((pCCB->acb != acb) || in arcmsr_hbaD_polling_ccbdone()
3725 , acb->host->host_no in arcmsr_hbaD_polling_ccbdone()
3736 , acb->host->host_no in arcmsr_hbaD_polling_ccbdone()
3738 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaD_polling_ccbdone()
3743 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaD_polling_ccbdone()
3748 static int arcmsr_hbaE_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_hbaE_polling_ccbdone() argument
3757 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_hbaE_polling_ccbdone()
3762 spin_lock_irqsave(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3763 doneq_index = acb->doneq_index; in arcmsr_hbaE_polling_ccbdone()
3766 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3779 cmdSMID = acb->pCompletionQ[doneq_index].cmdSMID; in arcmsr_hbaE_polling_ccbdone()
3781 if (doneq_index >= acb->completionQ_entry) in arcmsr_hbaE_polling_ccbdone()
3783 acb->doneq_index = doneq_index; in arcmsr_hbaE_polling_ccbdone()
3784 spin_unlock_irqrestore(&acb->doneq_lock, flags); in arcmsr_hbaE_polling_ccbdone()
3785 pCCB = acb->pccb_pool[cmdSMID]; in arcmsr_hbaE_polling_ccbdone()
3788 if ((pCCB->acb != acb) || (pCCB->startdone != ARCMSR_CCB_START)) { in arcmsr_hbaE_polling_ccbdone()
3793 , acb->host->host_no in arcmsr_hbaE_polling_ccbdone()
3804 , acb->host->host_no in arcmsr_hbaE_polling_ccbdone()
3806 , atomic_read(&acb->ccboutstandingcount)); in arcmsr_hbaE_polling_ccbdone()
3809 error = (acb->pCompletionQ[doneq_index].cmdFlag & in arcmsr_hbaE_polling_ccbdone()
3811 arcmsr_report_ccb_state(acb, pCCB, error); in arcmsr_hbaE_polling_ccbdone()
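
The arcmsr_hbaE_polling_ccbdone() references above consume the completion queue by SMID: read and advance doneq_index under doneq_lock, wrap at completionQ_entry, then look the CCB up in pccb_pool. A sketch of one such pop; the integer widths and the omitted producer-index/empty check are assumptions:

/* Sketch: one HBA E style completion-queue pop.  The check that an entry
 * is actually pending (producer index comparison) is omitted. */
static struct CommandControlBlock *pop_completion_sketch(struct AdapterControlBlock *acb)
{
	unsigned long flags;
	u32 idx, smid;

	spin_lock_irqsave(&acb->doneq_lock, flags);
	idx = acb->doneq_index;
	smid = acb->pCompletionQ[idx].cmdSMID;
	if (++idx >= acb->completionQ_entry)
		idx = 0;				/* wrap the ring */
	acb->doneq_index = idx;
	spin_unlock_irqrestore(&acb->doneq_lock, flags);

	return acb->pccb_pool[smid];			/* SMID indexes the CCB pool */
}
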
3817 static int arcmsr_polling_ccbdone(struct AdapterControlBlock *acb, in arcmsr_polling_ccbdone() argument
3821 switch (acb->adapter_type) { in arcmsr_polling_ccbdone()
3824 rtn = arcmsr_hbaA_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3827 rtn = arcmsr_hbaB_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3830 rtn = arcmsr_hbaC_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3833 rtn = arcmsr_hbaD_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3837 rtn = arcmsr_hbaE_polling_ccbdone(acb, poll_ccb); in arcmsr_polling_ccbdone()
3935 static int arcmsr_iop_confirm(struct AdapterControlBlock *acb) in arcmsr_iop_confirm() argument
3946 switch (acb->adapter_type) { in arcmsr_iop_confirm()
3949 dma_coherent_handle = acb->dma_coherent_handle2; in arcmsr_iop_confirm()
3953 dma_coherent_handle = acb->dma_coherent_handle + in arcmsr_iop_confirm()
3957 dma_coherent_handle = acb->dma_coherent_handle; in arcmsr_iop_confirm()
3962 acb->cdb_phyaddr_hi32 = cdb_phyaddr_hi32; in arcmsr_iop_confirm()
3963 acb->cdb_phyadd_hipart = ((uint64_t)cdb_phyaddr_hi32) << 32; in arcmsr_iop_confirm()
3969 switch (acb->adapter_type) { in arcmsr_iop_confirm()
3973 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_iop_confirm()
3979 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
3982 acb->host->host_no); in arcmsr_iop_confirm()
3992 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_iop_confirm()
3996 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
3998 acb->host->host_no); in arcmsr_iop_confirm()
4014 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4016 timeout \n",acb->host->host_no); in arcmsr_iop_confirm()
4020 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4022 acb->host->host_no); in arcmsr_iop_confirm()
4028 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_iop_confirm()
4031 acb->adapter_index, cdb_phyaddr_hi32); in arcmsr_iop_confirm()
4036 if (!arcmsr_hbaC_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4038 timeout \n", acb->host->host_no); in arcmsr_iop_confirm()
4045 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_iop_confirm()
4056 if (!arcmsr_hbaD_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4058 acb->host->host_no); in arcmsr_iop_confirm()
4064 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_iop_confirm()
4069 writel(acb->ccbsize, &reg->msgcode_rwbuffer[4]); in arcmsr_iop_confirm()
4070 writel(lower_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[5]); in arcmsr_iop_confirm()
4071 writel(upper_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[6]); in arcmsr_iop_confirm()
4072 writel(acb->ioqueue_size, &reg->msgcode_rwbuffer[7]); in arcmsr_iop_confirm()
4074 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_iop_confirm()
4075 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_confirm()
4076 if (!arcmsr_hbaE_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4078 acb->host->host_no); in arcmsr_iop_confirm()
4084 struct MessageUnit_F __iomem *reg = acb->pmuF; in arcmsr_iop_confirm()
4086 acb->msgcode_rwbuffer[0] = ARCMSR_SIGNATURE_SET_CONFIG; in arcmsr_iop_confirm()
4087 acb->msgcode_rwbuffer[1] = ARCMSR_SIGNATURE_1886; in arcmsr_iop_confirm()
4088 acb->msgcode_rwbuffer[2] = cdb_phyaddr; in arcmsr_iop_confirm()
4089 acb->msgcode_rwbuffer[3] = cdb_phyaddr_hi32; in arcmsr_iop_confirm()
4090 acb->msgcode_rwbuffer[4] = acb->ccbsize; in arcmsr_iop_confirm()
4091 acb->msgcode_rwbuffer[5] = lower_32_bits(acb->dma_coherent_handle2); in arcmsr_iop_confirm()
4092 acb->msgcode_rwbuffer[6] = upper_32_bits(acb->dma_coherent_handle2); in arcmsr_iop_confirm()
4093 acb->msgcode_rwbuffer[7] = acb->completeQ_size; in arcmsr_iop_confirm()
4095 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_iop_confirm()
4096 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_iop_confirm()
4097 if (!arcmsr_hbaE_wait_msgint_ready(acb)) { in arcmsr_iop_confirm()
4099 acb->host->host_no); in arcmsr_iop_confirm()
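
For the E/F message units, arcmsr_iop_confirm() above hands the firmware the CDB base address, CCB size, and queue geometry through msgcode_rwbuffer, then flips ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE in out_doorbell and waits for the acknowledgement. A condensed sketch of the HBA E variant; the adapter-signature word and the error-path printk are abbreviated:

/* Sketch of the HBA E "set config" handshake shown above. */
static bool set_config_sketch(struct AdapterControlBlock *acb,
			      u32 cdb_phyaddr, u32 cdb_phyaddr_hi32)
{
	struct MessageUnit_E __iomem *reg = acb->pmuE;

	writel(ARCMSR_SIGNATURE_SET_CONFIG, &reg->msgcode_rwbuffer[0]);
	/* adapter-signature word at index 1 omitted in this sketch */
	writel(cdb_phyaddr, &reg->msgcode_rwbuffer[2]);
	writel(cdb_phyaddr_hi32, &reg->msgcode_rwbuffer[3]);
	writel(acb->ccbsize, &reg->msgcode_rwbuffer[4]);
	writel(lower_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[5]);
	writel(upper_32_bits(acb->dma_coherent_handle2), &reg->msgcode_rwbuffer[6]);
	writel(acb->ioqueue_size, &reg->msgcode_rwbuffer[7]);

	/* Flip the "message pending" bit so the IOP notices the buffer. */
	acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE;
	writel(acb->out_doorbell, &reg->iobound_doorbell);

	return arcmsr_hbaE_wait_msgint_ready(acb);	/* false on timeout */
}

The F variant listed above stores the same fields in the in-memory acb->msgcode_rwbuffer instead of MMIO, but rings the doorbell the same way.
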
4108 static void arcmsr_wait_firmware_ready(struct AdapterControlBlock *acb) in arcmsr_wait_firmware_ready() argument
4111 switch (acb->adapter_type) { in arcmsr_wait_firmware_ready()
4114 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_wait_firmware_ready()
4116 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4124 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_wait_firmware_ready()
4126 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4134 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_wait_firmware_ready()
4136 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4143 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_wait_firmware_ready()
4145 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4154 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_wait_firmware_ready()
4156 if (!(acb->acb_flags & ACB_F_IOP_INITED)) in arcmsr_wait_firmware_ready()
4167 struct AdapterControlBlock *acb = from_timer(acb, t, eternal_timer); in arcmsr_request_device_map() local
4168 if (acb->acb_flags & (ACB_F_MSG_GET_CONFIG | ACB_F_BUS_RESET | ACB_F_ABORT)) { in arcmsr_request_device_map()
4169 mod_timer(&acb->eternal_timer, jiffies + msecs_to_jiffies(6 * HZ)); in arcmsr_request_device_map()
4171 acb->fw_flag = FW_NORMAL; in arcmsr_request_device_map()
4172 switch (acb->adapter_type) { in arcmsr_request_device_map()
4174 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_request_device_map()
4179 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_request_device_map()
4184 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_request_device_map()
4190 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_request_device_map()
4195 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_request_device_map()
4197 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_request_device_map()
4198 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_request_device_map()
4202 struct MessageUnit_F __iomem *reg = acb->pmuF; in arcmsr_request_device_map()
4209 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_MESSAGE_CMD_DONE; in arcmsr_request_device_map()
4210 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_request_device_map()
4216 acb->acb_flags |= ACB_F_MSG_GET_CONFIG; in arcmsr_request_device_map()
4218 mod_timer(&acb->eternal_timer, jiffies + msecs_to_jiffies(6 * HZ)); in arcmsr_request_device_map()
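
arcmsr_request_device_map() above is the periodic device-map poll: if a reset, an abort, or a previous GET_CONFIG is still outstanding it only re-arms itself; otherwise it issues the per-adapter GET_CONFIG message, sets ACB_F_MSG_GET_CONFIG, and re-arms. A sketch of that self re-arming shape; issue_get_config() is a placeholder for the adapter-type switch listed above:

/* Placeholder for the per-adapter GET_CONFIG doorbell/message write,
 * i.e. the switch (acb->adapter_type) body listed above. */
static void issue_get_config(struct AdapterControlBlock *acb)
{
}

/* Sketch of the self re-arming timer callback. */
static void device_map_timer_sketch(struct timer_list *t)
{
	struct AdapterControlBlock *acb = from_timer(acb, t, eternal_timer);

	if (acb->acb_flags & (ACB_F_MSG_GET_CONFIG | ACB_F_BUS_RESET | ACB_F_ABORT))
		goto rearm;			/* busy: just poll again later */

	acb->fw_flag = FW_NORMAL;
	issue_get_config(acb);
	acb->acb_flags |= ACB_F_MSG_GET_CONFIG;	/* cleared once the reply is handled */
rearm:
	mod_timer(&acb->eternal_timer, jiffies + msecs_to_jiffies(6 * HZ));
}
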
4222 static void arcmsr_hbaA_start_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaA_start_bgrb() argument
4224 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_hbaA_start_bgrb()
4225 acb->acb_flags |= ACB_F_MSG_START_BGRB; in arcmsr_hbaA_start_bgrb()
4227 if (!arcmsr_hbaA_wait_msgint_ready(acb)) { in arcmsr_hbaA_start_bgrb()
4229 rebuild' timeout \n", acb->host->host_no); in arcmsr_hbaA_start_bgrb()
4233 static void arcmsr_hbaB_start_bgrb(struct AdapterControlBlock *acb) in arcmsr_hbaB_start_bgrb() argument
4235 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_hbaB_start_bgrb()
4236 acb->acb_flags |= ACB_F_MSG_START_BGRB; in arcmsr_hbaB_start_bgrb()
4238 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_hbaB_start_bgrb()
4240 rebuild' timeout \n",acb->host->host_no); in arcmsr_hbaB_start_bgrb()
4283 static void arcmsr_start_adapter_bgrb(struct AdapterControlBlock *acb) in arcmsr_start_adapter_bgrb() argument
4285 switch (acb->adapter_type) { in arcmsr_start_adapter_bgrb()
4287 arcmsr_hbaA_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4290 arcmsr_hbaB_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4293 arcmsr_hbaC_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4296 arcmsr_hbaD_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4300 arcmsr_hbaE_start_bgrb(acb); in arcmsr_start_adapter_bgrb()
4305 static void arcmsr_clear_doorbell_queue_buffer(struct AdapterControlBlock *acb) in arcmsr_clear_doorbell_queue_buffer() argument
4307 switch (acb->adapter_type) { in arcmsr_clear_doorbell_queue_buffer()
4309 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_clear_doorbell_queue_buffer()
4320 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_clear_doorbell_queue_buffer()
4337 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_clear_doorbell_queue_buffer()
4358 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_clear_doorbell_queue_buffer()
4381 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_clear_doorbell_queue_buffer()
4384 acb->in_doorbell = readl(®->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4386 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_clear_doorbell_queue_buffer()
4387 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4390 tmp = acb->in_doorbell; in arcmsr_clear_doorbell_queue_buffer()
4391 acb->in_doorbell = readl(®->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
4392 if((tmp ^ acb->in_doorbell) & ARCMSR_HBEMU_IOP2DRV_DATA_WRITE_OK) { in arcmsr_clear_doorbell_queue_buffer()
4394 acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK; in arcmsr_clear_doorbell_queue_buffer()
4395 writel(acb->out_doorbell, &reg->iobound_doorbell); in arcmsr_clear_doorbell_queue_buffer()
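
For HBA E, arcmsr_clear_doorbell_queue_buffer() above acknowledges any pending inbound data by toggling ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK and keeps re-reading iobound_doorbell until the IOP2DRV_DATA_WRITE_OK bit stops changing. A sketch of that ack-until-quiet loop; the retry bound and delay are assumptions, and the interrupt-status clearing between iterations is omitted:

/* Sketch: drain the HBA E doorbell by acking until it goes quiet.
 * Retry bound and msleep() interval are assumed, not from the driver. */
static void drain_doorbell_sketch(struct AdapterControlBlock *acb)
{
	struct MessageUnit_E __iomem *reg = acb->pmuE;
	u32 prev;
	int i;

	acb->in_doorbell = readl(&reg->iobound_doorbell);
	acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK;	/* ack anything pending */
	writel(acb->out_doorbell, &reg->iobound_doorbell);

	for (i = 0; i < 200; i++) {
		msleep(20);
		prev = acb->in_doorbell;
		acb->in_doorbell = readl(&reg->iobound_doorbell);
		if (!((prev ^ acb->in_doorbell) & ARCMSR_HBEMU_IOP2DRV_DATA_WRITE_OK))
			break;					/* doorbell is quiet */
		acb->out_doorbell ^= ARCMSR_HBEMU_DRV2IOP_DATA_READ_OK;
		writel(acb->out_doorbell, &reg->iobound_doorbell);
	}
}
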
4404 static void arcmsr_enable_eoi_mode(struct AdapterControlBlock *acb) in arcmsr_enable_eoi_mode() argument
4406 switch (acb->adapter_type) { in arcmsr_enable_eoi_mode()
4411 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_enable_eoi_mode()
4413 if (!arcmsr_hbaB_wait_msgint_ready(acb)) { in arcmsr_enable_eoi_mode()
4425 static void arcmsr_hardware_reset(struct AdapterControlBlock *acb) in arcmsr_hardware_reset() argument
4429 struct MessageUnit_A __iomem *pmuA = acb->pmuA; in arcmsr_hardware_reset()
4430 struct MessageUnit_C __iomem *pmuC = acb->pmuC; in arcmsr_hardware_reset()
4431 struct MessageUnit_D *pmuD = acb->pmuD; in arcmsr_hardware_reset()
4434 printk(KERN_NOTICE "arcmsr%d: executing hw bus reset .....\n", acb->host->host_no); in arcmsr_hardware_reset()
4436 pci_read_config_byte(acb->pdev, i, &value[i]); in arcmsr_hardware_reset()
4439 if (acb->dev_id == 0x1680) { in arcmsr_hardware_reset()
4441 } else if (acb->dev_id == 0x1880) { in arcmsr_hardware_reset()
4452 } else if (acb->dev_id == 0x1884) { in arcmsr_hardware_reset()
4453 struct MessageUnit_E __iomem *pmuE = acb->pmuE; in arcmsr_hardware_reset()
4465 } else if (acb->dev_id == 0x1214) { in arcmsr_hardware_reset()
4468 pci_write_config_byte(acb->pdev, 0x84, 0x20); in arcmsr_hardware_reset()
4473 pci_write_config_byte(acb->pdev, i, value[i]); in arcmsr_hardware_reset()
4479 static bool arcmsr_reset_in_progress(struct AdapterControlBlock *acb) in arcmsr_reset_in_progress() argument
4483 switch(acb->adapter_type) { in arcmsr_reset_in_progress()
4485 struct MessageUnit_A __iomem *reg = acb->pmuA; in arcmsr_reset_in_progress()
4491 struct MessageUnit_B *reg = acb->pmuB; in arcmsr_reset_in_progress()
4497 struct MessageUnit_C __iomem *reg = acb->pmuC; in arcmsr_reset_in_progress()
4502 struct MessageUnit_D *reg = acb->pmuD; in arcmsr_reset_in_progress()
4509 struct MessageUnit_E __iomem *reg = acb->pmuE; in arcmsr_reset_in_progress()
4518 static void arcmsr_iop_init(struct AdapterControlBlock *acb) in arcmsr_iop_init() argument
4522 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_init()
4523 arcmsr_wait_firmware_ready(acb); in arcmsr_iop_init()
4524 arcmsr_iop_confirm(acb); in arcmsr_iop_init()
4526 arcmsr_start_adapter_bgrb(acb); in arcmsr_iop_init()
4528 arcmsr_clear_doorbell_queue_buffer(acb); in arcmsr_iop_init()
4529 arcmsr_enable_eoi_mode(acb); in arcmsr_iop_init()
4531 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_init()
4532 acb->acb_flags |= ACB_F_IOP_INITED; in arcmsr_iop_init()
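
arcmsr_iop_init() above pins down the bring-up order. Reassembled with comments; the helpers are exactly the ones referenced in this listing:

/* Sketch of the bring-up ordering in arcmsr_iop_init(). */
static void iop_init_sketch(struct AdapterControlBlock *acb)
{
	u32 intmask_org;

	intmask_org = arcmsr_disable_outbound_ints(acb);	/* quiesce adapter interrupts */
	arcmsr_wait_firmware_ready(acb);			/* firmware must be up first */
	arcmsr_iop_confirm(acb);				/* hand over CCB/queue addresses */
	arcmsr_start_adapter_bgrb(acb);				/* start background rebuild */
	arcmsr_clear_doorbell_queue_buffer(acb);		/* flush stale doorbell traffic */
	arcmsr_enable_eoi_mode(acb);				/* per-adapter; see the switch above */
	arcmsr_enable_outbound_ints(acb, intmask_org);		/* restore the interrupt mask */
	acb->acb_flags |= ACB_F_IOP_INITED;
}
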
4535 static uint8_t arcmsr_iop_reset(struct AdapterControlBlock *acb) in arcmsr_iop_reset() argument
4543 if (atomic_read(&acb->ccboutstandingcount) != 0) { in arcmsr_iop_reset()
4545 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_iop_reset()
4547 rtnval = arcmsr_abort_allcmd(acb); in arcmsr_iop_reset()
4549 arcmsr_done4abort_postqueue(acb); in arcmsr_iop_reset()
4550 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_iop_reset()
4551 ccb = acb->pccb_pool[i]; in arcmsr_iop_reset()
4556 spin_lock_irqsave(&acb->ccblist_lock, flags); in arcmsr_iop_reset()
4557 list_add_tail(&ccb->list, &acb->ccb_free_list); in arcmsr_iop_reset()
4558 spin_unlock_irqrestore(&acb->ccblist_lock, flags); in arcmsr_iop_reset()
4561 atomic_set(&acb->ccboutstandingcount, 0); in arcmsr_iop_reset()
4563 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_iop_reset()
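
When commands are still outstanding, arcmsr_iop_reset() above aborts everything, drains the post queue, and walks pccb_pool returning CCBs to the free list before zeroing ccboutstandingcount. A sketch of that reclaim step; the state value written to startdone and the omitted per-command completion (DMA unmap, SCSI done) are assumptions:

/* Sketch: return still-outstanding CCBs to the free list after an abort.
 * Per-command completion is omitted; ARCMSR_CCB_ABORTED is assumed. */
static void reclaim_ccbs_sketch(struct AdapterControlBlock *acb)
{
	struct CommandControlBlock *ccb;
	unsigned long flags;
	int i;

	for (i = 0; i < acb->maxFreeCCB; i++) {
		ccb = acb->pccb_pool[i];
		if (ccb->startdone != ARCMSR_CCB_START)
			continue;			/* only CCBs the IOP still owns */
		ccb->startdone = ARCMSR_CCB_ABORTED;
		spin_lock_irqsave(&acb->ccblist_lock, flags);
		list_add_tail(&ccb->list, &acb->ccb_free_list);
		spin_unlock_irqrestore(&acb->ccblist_lock, flags);
	}
	atomic_set(&acb->ccboutstandingcount, 0);
}
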
4571 struct AdapterControlBlock *acb; in arcmsr_bus_reset() local
4574 acb = (struct AdapterControlBlock *) cmd->device->host->hostdata; in arcmsr_bus_reset()
4575 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_bus_reset()
4578 " num_aborts = %d \n", acb->num_resets, acb->num_aborts); in arcmsr_bus_reset()
4579 acb->num_resets++; in arcmsr_bus_reset()
4581 if (acb->acb_flags & ACB_F_BUS_RESET) { in arcmsr_bus_reset()
4584 timeout = wait_event_timeout(wait_q, (acb->acb_flags in arcmsr_bus_reset()
4589 acb->acb_flags |= ACB_F_BUS_RESET; in arcmsr_bus_reset()
4590 if (!arcmsr_iop_reset(acb)) { in arcmsr_bus_reset()
4591 arcmsr_hardware_reset(acb); in arcmsr_bus_reset()
4592 acb->acb_flags &= ~ACB_F_IOP_INITED; in arcmsr_bus_reset()
4595 if (arcmsr_reset_in_progress(acb)) { in arcmsr_bus_reset()
4597 acb->fw_flag = FW_DEADLOCK; in arcmsr_bus_reset()
4600 acb->host->host_no); in arcmsr_bus_reset()
4606 arcmsr_iop_init(acb); in arcmsr_bus_reset()
4607 acb->fw_flag = FW_NORMAL; in arcmsr_bus_reset()
4608 mod_timer(&acb->eternal_timer, jiffies + in arcmsr_bus_reset()
4610 acb->acb_flags &= ~ACB_F_BUS_RESET; in arcmsr_bus_reset()
4614 acb->acb_flags &= ~ACB_F_BUS_RESET; in arcmsr_bus_reset()
4615 acb->fw_flag = FW_NORMAL; in arcmsr_bus_reset()
4616 mod_timer(&acb->eternal_timer, jiffies + in arcmsr_bus_reset()
4623 static int arcmsr_abort_one_cmd(struct AdapterControlBlock *acb, in arcmsr_abort_one_cmd() argument
4627 rtn = arcmsr_polling_ccbdone(acb, ccb); in arcmsr_abort_one_cmd()
4633 struct AdapterControlBlock *acb = in arcmsr_abort() local
4639 if (acb->acb_flags & ACB_F_ADAPTER_REMOVED) in arcmsr_abort()
4643 acb->host->host_no, cmd->device->id, (u32)cmd->device->lun); in arcmsr_abort()
4644 acb->acb_flags |= ACB_F_ABORT; in arcmsr_abort()
4645 acb->num_aborts++; in arcmsr_abort()
4652 if (!atomic_read(&acb->ccboutstandingcount)) { in arcmsr_abort()
4653 acb->acb_flags &= ~ACB_F_ABORT; in arcmsr_abort()
4657 intmask_org = arcmsr_disable_outbound_ints(acb); in arcmsr_abort()
4658 for (i = 0; i < acb->maxFreeCCB; i++) { in arcmsr_abort()
4659 struct CommandControlBlock *ccb = acb->pccb_pool[i]; in arcmsr_abort()
4662 rtn = arcmsr_abort_one_cmd(acb, ccb); in arcmsr_abort()
4666 acb->acb_flags &= ~ACB_F_ABORT; in arcmsr_abort()
4667 arcmsr_enable_outbound_ints(acb, intmask_org); in arcmsr_abort()
4673 struct AdapterControlBlock *acb = in arcmsr_info() local
4678 switch (acb->pdev->device) { in arcmsr_info()