Lines Matching refs: ioat_dma
120 ioat_init_channel(struct ioatdma_device *ioat_dma,
122 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma);
123 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma);
124 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma);
300 static int ioat_dma_self_test(struct ioatdma_device *ioat_dma) in ioat_dma_self_test() argument
305 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_self_test()
306 struct device *dev = &ioat_dma->pdev->dev; in ioat_dma_self_test()
351 tx = ioat_dma->dma_dev.device_prep_dma_memcpy(dma_chan, dma_dest, in ioat_dma_self_test()
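
The group above traces ioat_dma_self_test() (lines 300-351): it takes the device's dma_dev and pdev->dev handles and issues a test copy through device_prep_dma_memcpy(). A minimal sketch of that submit-and-wait pattern using only generic dmaengine calls; the helper name is illustrative, and polling via dma_sync_wait() stands in for the completion callback the driver actually waits on:

#include <linux/dmaengine.h>

/* Illustrative memcpy self-test core: prep, submit, kick, wait.
 * Assumes src/dest buffers are already DMA-mapped by the caller.
 */
static int memcpy_test_sketch(struct dma_device *dma, struct dma_chan *chan,
                              dma_addr_t dma_dest, dma_addr_t dma_src,
                              size_t len)
{
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = dma->device_prep_dma_memcpy(chan, dma_dest, dma_src, len,
                                         DMA_PREP_INTERRUPT);
        if (!tx)
                return -ENOMEM;         /* no free descriptor */

        cookie = tx->tx_submit(tx);     /* queue on the channel */
        if (dma_submit_error(cookie))
                return -EIO;

        dma_async_issue_pending(chan);  /* start the engine */

        /* Busy-wait polling stands in for the driver's completion wait. */
        if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
                return -ETIMEDOUT;

        return 0;
}
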
403 int ioat_dma_setup_interrupts(struct ioatdma_device *ioat_dma) in ioat_dma_setup_interrupts() argument
406 struct pci_dev *pdev = ioat_dma->pdev; in ioat_dma_setup_interrupts()
424 msixcnt = ioat_dma->dma_dev.chancnt; in ioat_dma_setup_interrupts()
426 ioat_dma->msix_entries[i].entry = i; in ioat_dma_setup_interrupts()
428 err = pci_enable_msix_exact(pdev, ioat_dma->msix_entries, msixcnt); in ioat_dma_setup_interrupts()
433 msix = &ioat_dma->msix_entries[i]; in ioat_dma_setup_interrupts()
434 ioat_chan = ioat_chan_by_index(ioat_dma, i); in ioat_dma_setup_interrupts()
440 msix = &ioat_dma->msix_entries[j]; in ioat_dma_setup_interrupts()
441 ioat_chan = ioat_chan_by_index(ioat_dma, j); in ioat_dma_setup_interrupts()
448 ioat_dma->irq_mode = IOAT_MSIX; in ioat_dma_setup_interrupts()
457 "ioat-msi", ioat_dma); in ioat_dma_setup_interrupts()
462 ioat_dma->irq_mode = IOAT_MSI; in ioat_dma_setup_interrupts()
467 IRQF_SHARED, "ioat-intx", ioat_dma); in ioat_dma_setup_interrupts()
471 ioat_dma->irq_mode = IOAT_INTX; in ioat_dma_setup_interrupts()
474 ioat_intr_quirk(ioat_dma); in ioat_dma_setup_interrupts()
476 writeb(intrctrl, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET); in ioat_dma_setup_interrupts()
481 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET); in ioat_dma_setup_interrupts()
482 ioat_dma->irq_mode = IOAT_NOIRQ; in ioat_dma_setup_interrupts()
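
Lines 403-482 trace the interrupt-setup fallback ladder: try one MSI-X vector per channel, then a single MSI, then a shared legacy INTx line, recording the winning mode in irq_mode (IOAT_MSIX, IOAT_MSI, IOAT_INTX, or IOAT_NOIRQ when everything fails). A compressed sketch of that ladder; the shared handler/data arguments are placeholders (the driver registers a per-channel handler for MSI-X), and the vector unwind at lines 440-441 is folded into a plain error return:

#include <linux/pci.h>
#include <linux/interrupt.h>

/* Hypothetical fallback ladder: MSI-X per channel, else MSI, else INTx. */
static int irq_setup_sketch(struct pci_dev *pdev, struct msix_entry *entries,
                            int nvec, irq_handler_t handler, void *data)
{
        int i, err;

        for (i = 0; i < nvec; i++)
                entries[i].entry = i;           /* one vector per channel */

        if (!pci_enable_msix_exact(pdev, entries, nvec)) {
                for (i = 0; i < nvec; i++) {
                        err = devm_request_irq(&pdev->dev, entries[i].vector,
                                               handler, 0, "ioat-msix", data);
                        if (err)
                                return err;     /* driver also frees vectors 0..i-1 */
                }
                return 0;                       /* IOAT_MSIX */
        }

        if (!pci_enable_msi(pdev))              /* fall back to single MSI */
                return devm_request_irq(&pdev->dev, pdev->irq, handler, 0,
                                        "ioat-msi", data);

        /* last resort: legacy shared INTx line */
        return devm_request_irq(&pdev->dev, pdev->irq, handler, IRQF_SHARED,
                                "ioat-intx", data);
}
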
487 static void ioat_disable_interrupts(struct ioatdma_device *ioat_dma) in ioat_disable_interrupts() argument
490 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET); in ioat_disable_interrupts()
493 static int ioat_probe(struct ioatdma_device *ioat_dma) in ioat_probe() argument
496 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_probe()
497 struct pci_dev *pdev = ioat_dma->pdev; in ioat_probe()
500 ioat_dma->completion_pool = dma_pool_create("completion_pool", dev, in ioat_probe()
505 if (!ioat_dma->completion_pool) { in ioat_probe()
510 ioat_enumerate_channels(ioat_dma); in ioat_probe()
520 err = ioat_dma_setup_interrupts(ioat_dma); in ioat_probe()
524 err = ioat3_dma_self_test(ioat_dma); in ioat_probe()
531 ioat_disable_interrupts(ioat_dma); in ioat_probe()
533 dma_pool_destroy(ioat_dma->completion_pool); in ioat_probe()
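
ioat_probe() (lines 493-533) builds the shared completion pool, enumerates channels, wires up interrupts, and runs the self-test, unwinding in reverse order when a later step fails. A sketch of that flow, assuming the driver's ioatdma_device from dma.h and the helpers listed above; the pool element size and alignment are placeholders:

#include <linux/dmapool.h>
#include <linux/cache.h>

static int probe_sketch(struct ioatdma_device *ioat_dma)
{
        struct device *dev = &ioat_dma->pdev->dev;
        int err;

        /* Shared pool for per-channel completion writeback areas. */
        ioat_dma->completion_pool = dma_pool_create("completion_pool", dev,
                                                    sizeof(u64),
                                                    SMP_CACHE_BYTES,
                                                    SMP_CACHE_BYTES);
        if (!ioat_dma->completion_pool)
                return -ENOMEM;

        ioat_enumerate_channels(ioat_dma);

        err = ioat_dma_setup_interrupts(ioat_dma);
        if (err)
                goto err_pool;

        err = ioat3_dma_self_test(ioat_dma);
        if (err)
                goto err_irq;

        return 0;

err_irq:
        ioat_disable_interrupts(ioat_dma);      /* undo interrupt setup */
err_pool:
        dma_pool_destroy(ioat_dma->completion_pool);
        return err;
}

ioat_register() at lines 538-544 repeats the same two-step unwind when dma_async_device_register() fails.
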
538 static int ioat_register(struct ioatdma_device *ioat_dma) in ioat_register() argument
540 int err = dma_async_device_register(&ioat_dma->dma_dev); in ioat_register()
543 ioat_disable_interrupts(ioat_dma); in ioat_register()
544 dma_pool_destroy(ioat_dma->completion_pool); in ioat_register()
550 static void ioat_dma_remove(struct ioatdma_device *ioat_dma) in ioat_dma_remove() argument
552 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_dma_remove()
554 ioat_disable_interrupts(ioat_dma); in ioat_dma_remove()
556 ioat_kobject_del(ioat_dma); in ioat_dma_remove()
565 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma) in ioat_enumerate_channels() argument
568 struct device *dev = &ioat_dma->pdev->dev; in ioat_enumerate_channels()
569 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_enumerate_channels()
574 dma->chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET); in ioat_enumerate_channels()
576 if (dma->chancnt > ARRAY_SIZE(ioat_dma->idx)) { in ioat_enumerate_channels()
578 dma->chancnt, ARRAY_SIZE(ioat_dma->idx)); in ioat_enumerate_channels()
579 dma->chancnt = ARRAY_SIZE(ioat_dma->idx); in ioat_enumerate_channels()
581 xfercap_log = readb(ioat_dma->reg_base + IOAT_XFERCAP_OFFSET); in ioat_enumerate_channels()
592 ioat_init_channel(ioat_dma, ioat_chan, i); in ioat_enumerate_channels()
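
Channel enumeration (lines 565-592) reads CHANCNT and XFERCAP from MMIO, then clamps the hardware-reported count to the idx[] array the driver uses to index channels. The clamp, roughly (the warning text is paraphrased):

        u32 chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);

        if (chancnt > ARRAY_SIZE(ioat_dma->idx)) {
                dev_warn(dev, "channel count %u exceeds supported max %zu\n",
                         chancnt, ARRAY_SIZE(ioat_dma->idx));
                chancnt = ARRAY_SIZE(ioat_dma->idx);    /* never index past idx[] */
        }
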
610 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_free_chan_resources() local
628 if (ioat_dma->version >= IOAT_VER_3_4) in ioat_free_chan_resources()
665 dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion, in ioat_free_chan_resources()
697 dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool, in ioat_alloc_chan_resources()
724 if (ioat_chan->ioat_dma->version >= IOAT_VER_3_4) { in ioat_alloc_chan_resources()
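
Lines 610-724 pair the per-channel completion buffer's lifetime with the pool created in ioat_probe(): dma_pool_zalloc() in ioat_alloc_chan_resources(), dma_pool_free() in ioat_free_chan_resources(). The allocation side, roughly (the GFP flag here is an assumption):

        /* writeback area the hardware updates on descriptor completion */
        ioat_chan->completion =
                dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
                                GFP_KERNEL, &ioat_chan->completion_dma);
        if (!ioat_chan->completion)
                return -ENOMEM;
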
766 ioat_init_channel(struct ioatdma_device *ioat_dma, in ioat_init_channel() argument
769 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_init_channel()
771 ioat_chan->ioat_dma = ioat_dma; in ioat_init_channel()
772 ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1)); in ioat_init_channel()
777 ioat_dma->idx[idx] = ioat_chan; in ioat_init_channel()
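
ioat_init_channel() (lines 766-777) back-links the channel to its device and computes the channel's MMIO window: the global register bank occupies the first 0x80 bytes of the BAR, and each channel's bank follows at an 0x80-byte stride, hence:

        ioat_chan->ioat_dma = ioat_dma;
        /* channel 0 at +0x80, channel 1 at +0x100, ... */
        ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
        ioat_dma->idx[idx] = ioat_chan;         /* for ioat_chan_by_index() */
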
783 static int ioat_xor_val_self_test(struct ioatdma_device *ioat_dma) in ioat_xor_val_self_test() argument
800 struct device *dev = &ioat_dma->pdev->dev; in ioat_xor_val_self_test()
801 struct dma_device *dma = &ioat_dma->dma_dev; in ioat_xor_val_self_test()
1048 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma) in ioat3_dma_self_test() argument
1052 rc = ioat_dma_self_test(ioat_dma); in ioat3_dma_self_test()
1056 rc = ioat_xor_val_self_test(ioat_dma); in ioat3_dma_self_test()
1061 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma) in ioat_intr_quirk() argument
1068 dma = &ioat_dma->dma_dev; in ioat_intr_quirk()
1074 if (ioat_dma->cap & IOAT_CAP_DWBES) { in ioat_intr_quirk()
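
ioat_intr_quirk() (lines 1061-1074) walks dma_dev's channel list and, when the device reports descriptor write-back error status (IOAT_CAP_DWBES), widens each channel's CHANERR mask so those errors are delivered via write-back instead of error interrupts. A sketch of the walk; the mask constants are from registers.h as best recalled and should be treated as assumptions:

        struct dma_chan *c;
        struct ioatdma_chan *ioat_chan;
        u32 errmask;

        if (ioat_dma->cap & IOAT_CAP_DWBES) {
                list_for_each_entry(c, &dma->channels, device_node) {
                        ioat_chan = to_ioat_chan(c);
                        errmask = readl(ioat_chan->reg_base +
                                        IOAT_CHANERR_MASK_OFFSET);
                        errmask |= IOAT_CHANERR_XOR_P_OR_CRC_ERR |
                                   IOAT_CHANERR_XOR_Q_ERR;
                        writel(errmask, ioat_chan->reg_base +
                               IOAT_CHANERR_MASK_OFFSET);
                }
        }
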
1087 static int ioat3_dma_probe(struct ioatdma_device *ioat_dma, int dca) in ioat3_dma_probe() argument
1089 struct pci_dev *pdev = ioat_dma->pdev; in ioat3_dma_probe()
1097 dma = &ioat_dma->dma_dev; in ioat3_dma_probe()
1106 ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET); in ioat3_dma_probe()
1109 ioat_dma->cap &= in ioat3_dma_probe()
1113 if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR|IOAT_CAP_PQ))) in ioat3_dma_probe()
1114 ioat_dma->cap &= ~(IOAT_CAP_XOR|IOAT_CAP_PQ); in ioat3_dma_probe()
1116 if (ioat_dma->cap & IOAT_CAP_XOR) { in ioat3_dma_probe()
1126 if (ioat_dma->cap & IOAT_CAP_PQ) { in ioat3_dma_probe()
1133 if (ioat_dma->cap & IOAT_CAP_RAID16SS) in ioat3_dma_probe()
1138 if (!(ioat_dma->cap & IOAT_CAP_XOR)) { in ioat3_dma_probe()
1144 if (ioat_dma->cap & IOAT_CAP_RAID16SS) in ioat3_dma_probe()
1154 if (ioat_dma->cap & IOAT_CAP_RAID16SS) { in ioat3_dma_probe()
1162 ioat_dma->sed_hw_pool[i] = dmam_pool_create(pool_name, in ioat3_dma_probe()
1165 if (!ioat_dma->sed_hw_pool[i]) in ioat3_dma_probe()
1171 if (!(ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ))) in ioat3_dma_probe()
1174 err = ioat_probe(ioat_dma); in ioat3_dma_probe()
1184 err = ioat_register(ioat_dma); in ioat3_dma_probe()
1188 ioat_kobject_add(ioat_dma, &ioat_ktype); in ioat3_dma_probe()
1191 ioat_dma->dca = ioat_dca_init(pdev, ioat_dma->reg_base); in ioat3_dma_probe()
1204 if (ioat_dma->cap & IOAT_CAP_DPS) in ioat3_dma_probe()
1206 ioat_dma->reg_base + IOAT_PREFETCH_LIMIT_OFFSET); in ioat3_dma_probe()
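
ioat3_dma_probe() (lines 1087-1206) is capability-driven: it reads DMA_CAP once, strips XOR/PQ when DCA is enabled (lines 1113-1114), and only advertises the dmaengine capabilities the surviving bits allow. The gating pattern, roughly; the dma_cap_set() calls are the generic dmaengine idiom, and the driver additionally wires prep callbacks and limits, elided here:

        ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET);

        /* DCA and the RAID engines are mutually exclusive here */
        if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ)))
                ioat_dma->cap &= ~(IOAT_CAP_XOR | IOAT_CAP_PQ);

        if (ioat_dma->cap & IOAT_CAP_XOR) {
                dma_cap_set(DMA_XOR, dma->cap_mask);
                dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
        }

The RAID16SS branch at lines 1154-1165 then carves per-size descriptor pools with dmam_pool_create(), and failure anywhere funnels into ioat_probe()/ioat_register() error paths.
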
1213 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev); in ioat_shutdown() local
1217 if (!ioat_dma) in ioat_shutdown()
1221 ioat_chan = ioat_dma->idx[i]; in ioat_shutdown()
1240 ioat_disable_interrupts(ioat_dma); in ioat_shutdown()
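
ioat_shutdown() (lines 1213-1240) walks the idx[] array rather than the dmaengine channel list, quiesces each channel, then disables interrupts last. The shape of the walk; the array bound IOAT_MAX_CHANS and the skip of unpopulated slots are assumptions about dma.h, and the per-channel quiesce step is elided:

        for (i = 0; i < IOAT_MAX_CHANS; i++) {
                ioat_chan = ioat_dma->idx[i];
                if (!ioat_chan)
                        continue;
                /* per-channel quiesce: stop DMA, cancel timers (elided) */
        }

        ioat_disable_interrupts(ioat_dma);
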
1243 static void ioat_resume(struct ioatdma_device *ioat_dma) in ioat_resume() argument
1250 ioat_chan = ioat_dma->idx[i]; in ioat_resume()
1300 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev); in ioat_pcie_error_resume() local
1305 ioat_resume(ioat_dma); in ioat_pcie_error_resume()