Lines matching refs: hdma_dev
90 struct hisi_dma_dev *hdma_dev; member
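
The first hit is the back-pointer each channel keeps to its parent device. A minimal sketch of the two structures as they can be reconstructed from the hits below; the field order, the DMA-handle fields and the desc field are assumptions, the remaining names all appear verbatim in this listing:

struct hisi_dma_chan {
        struct virt_dma_chan vc;
        struct hisi_dma_dev *hdma_dev;  /* the listed member: back-pointer to the parent device */
        struct hisi_dma_sqe *sq;        /* submission queue ring */
        struct hisi_dma_cqe *cq;        /* completion queue ring */
        dma_addr_t sq_dma;              /* assumed: DMA handle of the SQ ring */
        dma_addr_t cq_dma;              /* assumed: DMA handle of the CQ ring */
        struct hisi_dma_desc *desc;     /* assumed: descriptor currently on the hardware */
        u32 sq_tail;
        u32 cq_head;
        u32 qp_num;
};

struct hisi_dma_dev {
        struct pci_dev *pdev;
        void __iomem *base;
        struct dma_device dma_dev;
        u32 chan_num;
        u32 chan_depth;
        struct hisi_dma_chan chan[];    /* flexible array, sized with struct_size() in probe */
};
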
141 static void hisi_dma_pause_dma(struct hisi_dma_dev *hdma_dev, u32 index, in hisi_dma_pause_dma() argument
144 void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL0 + index * in hisi_dma_pause_dma()
150 static void hisi_dma_enable_dma(struct hisi_dma_dev *hdma_dev, u32 index, in hisi_dma_enable_dma() argument
153 void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL0 + index * in hisi_dma_enable_dma()
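
Both helpers above compute a per-queue register address as base + HISI_DMA_CTRL0 + index * stride and flip a single control bit. A sketch assuming the per-queue stride macro (called HISI_DMA_OFFSET here), the two bit positions, and a read-modify-write helper hisi_dma_update_bit(), none of which are visible in the listed fragments:

/* assumed read-modify-write helper */
static void hisi_dma_update_bit(void __iomem *addr, u32 pos, bool set)
{
        u32 tmp = readl_relaxed(addr);

        writel_relaxed(set ? tmp | BIT(pos) : tmp & ~BIT(pos), addr);
}

static void hisi_dma_pause_dma(struct hisi_dma_dev *hdma_dev, u32 index,
                               bool pause)
{
        void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL0 + index *
                             HISI_DMA_OFFSET;

        hisi_dma_update_bit(addr, HISI_DMA_CTRL0_QUEUE_PAUSE_S, pause);
}

static void hisi_dma_enable_dma(struct hisi_dma_dev *hdma_dev, u32 index,
                                bool enable)
{
        void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL0 + index *
                             HISI_DMA_OFFSET;

        hisi_dma_update_bit(addr, HISI_DMA_CTRL0_QUEUE_EN_S, enable);
}
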
159 static void hisi_dma_mask_irq(struct hisi_dma_dev *hdma_dev, u32 qp_index) in hisi_dma_mask_irq() argument
161 hisi_dma_chan_write(hdma_dev->base, HISI_DMA_INT_MSK, qp_index, in hisi_dma_mask_irq()
165 static void hisi_dma_unmask_irq(struct hisi_dma_dev *hdma_dev, u32 qp_index) in hisi_dma_unmask_irq() argument
167 void __iomem *base = hdma_dev->base; in hisi_dma_unmask_irq()
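
Masking is a single per-channel write to HISI_DMA_INT_MSK; unmasking caches hdma_dev->base in a local because it touches more than one register, typically clearing stale status before opening the mask. A sketch; the hisi_dma_chan_write() body, the status register name and the mask value are assumptions:

/* assumed per-queue write helper: channel register blocks repeat every HISI_DMA_OFFSET bytes */
static void hisi_dma_chan_write(void __iomem *base, u32 reg, u32 index, u32 val)
{
        writel_relaxed(val, base + reg + index * HISI_DMA_OFFSET);
}

static void hisi_dma_mask_irq(struct hisi_dma_dev *hdma_dev, u32 qp_index)
{
        hisi_dma_chan_write(hdma_dev->base, HISI_DMA_INT_MSK, qp_index,
                            HISI_DMA_INT_STS_MASK);
}

static void hisi_dma_unmask_irq(struct hisi_dma_dev *hdma_dev, u32 qp_index)
{
        void __iomem *base = hdma_dev->base;

        /* clear any pending status, then open the mask */
        hisi_dma_chan_write(base, HISI_DMA_INT_STS, qp_index,
                            HISI_DMA_INT_STS_MASK);
        hisi_dma_chan_write(base, HISI_DMA_INT_MSK, qp_index, 0);
}
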
174 static void hisi_dma_do_reset(struct hisi_dma_dev *hdma_dev, u32 index) in hisi_dma_do_reset() argument
176 void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL1 + index * in hisi_dma_do_reset()
182 static void hisi_dma_reset_qp_point(struct hisi_dma_dev *hdma_dev, u32 index) in hisi_dma_reset_qp_point() argument
184 hisi_dma_chan_write(hdma_dev->base, HISI_DMA_SQ_TAIL_PTR, index, 0); in hisi_dma_reset_qp_point()
185 hisi_dma_chan_write(hdma_dev->base, HISI_DMA_CQ_HEAD_PTR, index, 0); in hisi_dma_reset_qp_point()
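
hisi_dma_do_reset() pokes a reset bit in the queue's HISI_DMA_CTRL1 register, and hisi_dma_reset_qp_point() rewinds both ring pointers to zero. Sketch; the reset bit name and the hisi_dma_update_bit() helper are assumptions, the two pointer writes are the listed lines verbatim:

static void hisi_dma_do_reset(struct hisi_dma_dev *hdma_dev, u32 index)
{
        void __iomem *addr = hdma_dev->base + HISI_DMA_CTRL1 + index *
                             HISI_DMA_OFFSET;

        hisi_dma_update_bit(addr, HISI_DMA_CTRL1_QUEUE_RESET_S, 1);
}

static void hisi_dma_reset_qp_point(struct hisi_dma_dev *hdma_dev, u32 index)
{
        hisi_dma_chan_write(hdma_dev->base, HISI_DMA_SQ_TAIL_PTR, index, 0);
        hisi_dma_chan_write(hdma_dev->base, HISI_DMA_CQ_HEAD_PTR, index, 0);
}
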
191 struct hisi_dma_dev *hdma_dev = chan->hdma_dev; in hisi_dma_reset_or_disable_hw_chan() local
195 hisi_dma_pause_dma(hdma_dev, index, true); in hisi_dma_reset_or_disable_hw_chan()
196 hisi_dma_enable_dma(hdma_dev, index, false); in hisi_dma_reset_or_disable_hw_chan()
197 hisi_dma_mask_irq(hdma_dev, index); in hisi_dma_reset_or_disable_hw_chan()
199 ret = readl_relaxed_poll_timeout(hdma_dev->base + in hisi_dma_reset_or_disable_hw_chan()
203 dev_err(&hdma_dev->pdev->dev, "disable channel timeout!\n"); in hisi_dma_reset_or_disable_hw_chan()
207 hisi_dma_do_reset(hdma_dev, index); in hisi_dma_reset_or_disable_hw_chan()
208 hisi_dma_reset_qp_point(hdma_dev, index); in hisi_dma_reset_or_disable_hw_chan()
209 hisi_dma_pause_dma(hdma_dev, index, false); in hisi_dma_reset_or_disable_hw_chan()
212 hisi_dma_enable_dma(hdma_dev, index, true); in hisi_dma_reset_or_disable_hw_chan()
213 hisi_dma_unmask_irq(hdma_dev, index); in hisi_dma_reset_or_disable_hw_chan()
216 ret = readl_relaxed_poll_timeout(hdma_dev->base + in hisi_dma_reset_or_disable_hw_chan()
220 dev_err(&hdma_dev->pdev->dev, "reset channel timeout!\n"); in hisi_dma_reset_or_disable_hw_chan()
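
Taken together, the hits above show the teardown/reinit sequence: pause and disable the queue, mask its interrupt, poll the per-queue FSM status until the engine quiesces, issue the reset, rewind the ring pointers, un-pause, and, when reinitialising rather than disabling, re-enable and unmask before a second poll confirms the queue settled. A condensed sketch; the FSM register name, field mask, state values and poll timeouts are assumptions:

static void hisi_dma_reset_or_disable_hw_chan(struct hisi_dma_chan *chan,
                                              bool disable)
{
        struct hisi_dma_dev *hdma_dev = chan->hdma_dev;
        u32 index = chan->qp_num, tmp;
        int ret;

        hisi_dma_pause_dma(hdma_dev, index, true);
        hisi_dma_enable_dma(hdma_dev, index, false);
        hisi_dma_mask_irq(hdma_dev, index);

        ret = readl_relaxed_poll_timeout(hdma_dev->base +
                HISI_DMA_Q_FSM_STS + index * HISI_DMA_OFFSET, tmp,
                FIELD_GET(HISI_DMA_FSM_STS_MASK, tmp) != RUN, 10, 1000);
        if (ret)
                dev_err(&hdma_dev->pdev->dev, "disable channel timeout!\n");

        hisi_dma_do_reset(hdma_dev, index);
        hisi_dma_reset_qp_point(hdma_dev, index);
        hisi_dma_pause_dma(hdma_dev, index, false);

        if (!disable) {
                hisi_dma_enable_dma(hdma_dev, index, true);
                hisi_dma_unmask_irq(hdma_dev, index);
        }

        ret = readl_relaxed_poll_timeout(hdma_dev->base +
                HISI_DMA_Q_FSM_STS + index * HISI_DMA_OFFSET, tmp,
                FIELD_GET(HISI_DMA_FSM_STS_MASK, tmp) == IDLE, 10, 1000);
        if (ret)
                dev_err(&hdma_dev->pdev->dev, "reset channel timeout!\n");
}
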
228 struct hisi_dma_dev *hdma_dev = chan->hdma_dev; in hisi_dma_free_chan_resources() local
233 memset(chan->sq, 0, sizeof(struct hisi_dma_sqe) * hdma_dev->chan_depth); in hisi_dma_free_chan_resources()
234 memset(chan->cq, 0, sizeof(struct hisi_dma_cqe) * hdma_dev->chan_depth); in hisi_dma_free_chan_resources()
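
On channel release the driver wipes both rings in place rather than freeing them (they are managed allocations), with hdma_dev->chan_depth fixing how many SQEs/CQEs each ring holds. A minimal sketch of the release path around the two listed memset() calls; the to_hisi_dma_chan() conversion and the surrounding reset/vchan calls are assumptions:

static void hisi_dma_free_chan_resources(struct dma_chan *c)
{
        struct hisi_dma_chan *chan = to_hisi_dma_chan(c);
        struct hisi_dma_dev *hdma_dev = chan->hdma_dev;

        hisi_dma_reset_or_disable_hw_chan(chan, false);
        vchan_free_chan_resources(&chan->vc);

        memset(chan->sq, 0, sizeof(struct hisi_dma_sqe) * hdma_dev->chan_depth);
        memset(chan->cq, 0, sizeof(struct hisi_dma_cqe) * hdma_dev->chan_depth);
        chan->sq_tail = 0;
        chan->cq_head = 0;
}
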
273 struct hisi_dma_dev *hdma_dev = chan->hdma_dev; in hisi_dma_start_transfer() local
296 chan->sq_tail = (chan->sq_tail + 1) % hdma_dev->chan_depth; in hisi_dma_start_transfer()
299 hisi_dma_chan_write(hdma_dev->base, HISI_DMA_SQ_TAIL_PTR, chan->qp_num, in hisi_dma_start_transfer()
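
The submit path advances the software SQ tail modulo the ring depth and then writes the new tail to the per-queue HISI_DMA_SQ_TAIL_PTR doorbell, which is what actually starts the copy. A trimmed sketch of that step; the descriptor bookkeeping and the write barrier are assumptions:

static void hisi_dma_start_transfer(struct hisi_dma_chan *chan)
{
        struct hisi_dma_dev *hdma_dev = chan->hdma_dev;
        struct hisi_dma_sqe *sqe = chan->sq + chan->sq_tail;

        /* ... pick the next issued descriptor and fill *sqe ... */

        /* make sure the SQE is visible before ringing the doorbell */
        wmb();

        /* advance the software tail, wrapping at the ring depth */
        chan->sq_tail = (chan->sq_tail + 1) % hdma_dev->chan_depth;

        /* ring the doorbell: hardware starts fetching from the SQ */
        hisi_dma_chan_write(hdma_dev->base, HISI_DMA_SQ_TAIL_PTR, chan->qp_num,
                            chan->sq_tail);
}
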
324 hisi_dma_pause_dma(chan->hdma_dev, chan->qp_num, true); in hisi_dma_terminate_all()
335 hisi_dma_pause_dma(chan->hdma_dev, chan->qp_num, false); in hisi_dma_terminate_all()
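
terminate_all brackets the descriptor cleanup with the pause/unpause pair shown above: pause first so the hardware stops fetching, drop the pending descriptors, then release the pause so the queue is usable again. Sketch; the to_hisi_dma_chan() conversion and the vchan bookkeeping in between are assumptions:

static int hisi_dma_terminate_all(struct dma_chan *c)
{
        struct hisi_dma_chan *chan = to_hisi_dma_chan(c);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&chan->vc.lock, flags);

        hisi_dma_pause_dma(chan->hdma_dev, chan->qp_num, true);
        vchan_get_all_descriptors(&chan->vc, &head);

        spin_unlock_irqrestore(&chan->vc.lock, flags);

        vchan_dma_desc_free_list(&chan->vc, &head);
        hisi_dma_pause_dma(chan->hdma_dev, chan->qp_num, false);

        return 0;
}
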
347 static int hisi_dma_alloc_qps_mem(struct hisi_dma_dev *hdma_dev) in hisi_dma_alloc_qps_mem() argument
349 size_t sq_size = sizeof(struct hisi_dma_sqe) * hdma_dev->chan_depth; in hisi_dma_alloc_qps_mem()
350 size_t cq_size = sizeof(struct hisi_dma_cqe) * hdma_dev->chan_depth; in hisi_dma_alloc_qps_mem()
351 struct device *dev = &hdma_dev->pdev->dev; in hisi_dma_alloc_qps_mem()
355 for (i = 0; i < hdma_dev->chan_num; i++) { in hisi_dma_alloc_qps_mem()
356 chan = &hdma_dev->chan[i]; in hisi_dma_alloc_qps_mem()
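
Ring memory is sized from chan_depth and allocated once per channel; with a managed allocator the error path can simply return. Sketch assuming dmam_alloc_coherent() and the sq_dma/cq_dma handle fields from the structure sketch above:

static int hisi_dma_alloc_qps_mem(struct hisi_dma_dev *hdma_dev)
{
        size_t sq_size = sizeof(struct hisi_dma_sqe) * hdma_dev->chan_depth;
        size_t cq_size = sizeof(struct hisi_dma_cqe) * hdma_dev->chan_depth;
        struct device *dev = &hdma_dev->pdev->dev;
        struct hisi_dma_chan *chan;
        int i;

        for (i = 0; i < hdma_dev->chan_num; i++) {
                chan = &hdma_dev->chan[i];

                chan->sq = dmam_alloc_coherent(dev, sq_size, &chan->sq_dma,
                                               GFP_KERNEL);
                if (!chan->sq)
                        return -ENOMEM;

                chan->cq = dmam_alloc_coherent(dev, cq_size, &chan->cq_dma,
                                               GFP_KERNEL);
                if (!chan->cq)
                        return -ENOMEM;
        }

        return 0;
}
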
371 static void hisi_dma_init_hw_qp(struct hisi_dma_dev *hdma_dev, u32 index) in hisi_dma_init_hw_qp() argument
373 struct hisi_dma_chan *chan = &hdma_dev->chan[index]; in hisi_dma_init_hw_qp()
374 u32 hw_depth = hdma_dev->chan_depth - 1; in hisi_dma_init_hw_qp()
375 void __iomem *base = hdma_dev->base; in hisi_dma_init_hw_qp()
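
Per-queue hardware init programs the SQ/CQ base addresses and the ring size; note the depth written to hardware is chan_depth - 1. Sketch; every register name below except the base/depth arithmetic shown above is an assumption:

static void hisi_dma_init_hw_qp(struct hisi_dma_dev *hdma_dev, u32 index)
{
        struct hisi_dma_chan *chan = &hdma_dev->chan[index];
        u32 hw_depth = hdma_dev->chan_depth - 1;
        void __iomem *base = hdma_dev->base;

        /* program SQ/CQ base addresses (register names assumed) */
        hisi_dma_chan_write(base, HISI_DMA_SQ_BASE_L, index,
                            lower_32_bits(chan->sq_dma));
        hisi_dma_chan_write(base, HISI_DMA_SQ_BASE_H, index,
                            upper_32_bits(chan->sq_dma));
        hisi_dma_chan_write(base, HISI_DMA_CQ_BASE_L, index,
                            lower_32_bits(chan->cq_dma));
        hisi_dma_chan_write(base, HISI_DMA_CQ_BASE_H, index,
                            upper_32_bits(chan->cq_dma));

        /* ring sizes are programmed as depth - 1 */
        hisi_dma_chan_write(base, HISI_DMA_SQ_DEPTH, index, hw_depth);
        hisi_dma_chan_write(base, HISI_DMA_CQ_DEPTH, index, hw_depth);
}
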
396 static void hisi_dma_enable_qp(struct hisi_dma_dev *hdma_dev, u32 qp_index) in hisi_dma_enable_qp() argument
398 hisi_dma_init_hw_qp(hdma_dev, qp_index); in hisi_dma_enable_qp()
399 hisi_dma_unmask_irq(hdma_dev, qp_index); in hisi_dma_enable_qp()
400 hisi_dma_enable_dma(hdma_dev, qp_index, true); in hisi_dma_enable_qp()
403 static void hisi_dma_disable_qp(struct hisi_dma_dev *hdma_dev, u32 qp_index) in hisi_dma_disable_qp() argument
405 hisi_dma_reset_or_disable_hw_chan(&hdma_dev->chan[qp_index], true); in hisi_dma_disable_qp()
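
Enabling a queue is the three listed calls in order (hardware init, unmask, enable); disabling funnels into the shared reset helper with disable set. Reassembled from the listed lines as a sketch:

static void hisi_dma_enable_qp(struct hisi_dma_dev *hdma_dev, u32 qp_index)
{
        hisi_dma_init_hw_qp(hdma_dev, qp_index);
        hisi_dma_unmask_irq(hdma_dev, qp_index);
        hisi_dma_enable_dma(hdma_dev, qp_index, true);
}

static void hisi_dma_disable_qp(struct hisi_dma_dev *hdma_dev, u32 qp_index)
{
        hisi_dma_reset_or_disable_hw_chan(&hdma_dev->chan[qp_index], true);
}
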
408 static void hisi_dma_enable_qps(struct hisi_dma_dev *hdma_dev) in hisi_dma_enable_qps() argument
412 for (i = 0; i < hdma_dev->chan_num; i++) { in hisi_dma_enable_qps()
413 hdma_dev->chan[i].qp_num = i; in hisi_dma_enable_qps()
414 hdma_dev->chan[i].hdma_dev = hdma_dev; in hisi_dma_enable_qps()
415 hdma_dev->chan[i].vc.desc_free = hisi_dma_desc_free; in hisi_dma_enable_qps()
416 vchan_init(&hdma_dev->chan[i].vc, &hdma_dev->dma_dev); in hisi_dma_enable_qps()
417 hisi_dma_enable_qp(hdma_dev, i); in hisi_dma_enable_qps()
421 static void hisi_dma_disable_qps(struct hisi_dma_dev *hdma_dev) in hisi_dma_disable_qps() argument
425 for (i = 0; i < hdma_dev->chan_num; i++) { in hisi_dma_disable_qps()
426 hisi_dma_disable_qp(hdma_dev, i); in hisi_dma_disable_qps()
427 tasklet_kill(&hdma_dev->chan[i].vc.task); in hisi_dma_disable_qps()
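
hisi_dma_enable_qps() wires every channel to its parent (the back-pointer from the first hit), registers it with the virt-dma core and brings the queue up; hisi_dma_disable_qps() undoes that and kills the per-channel tasklet. Sketch assembled from the listed lines, with only the loop-variable declarations added:

static void hisi_dma_enable_qps(struct hisi_dma_dev *hdma_dev)
{
        int i;

        for (i = 0; i < hdma_dev->chan_num; i++) {
                hdma_dev->chan[i].qp_num = i;
                hdma_dev->chan[i].hdma_dev = hdma_dev;
                hdma_dev->chan[i].vc.desc_free = hisi_dma_desc_free;
                vchan_init(&hdma_dev->chan[i].vc, &hdma_dev->dma_dev);
                hisi_dma_enable_qp(hdma_dev, i);
        }
}

static void hisi_dma_disable_qps(struct hisi_dma_dev *hdma_dev)
{
        int i;

        for (i = 0; i < hdma_dev->chan_num; i++) {
                hisi_dma_disable_qp(hdma_dev, i);
                tasklet_kill(&hdma_dev->chan[i].vc.task);
        }
}
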
434 struct hisi_dma_dev *hdma_dev = chan->hdma_dev; in hisi_dma_irq() local
444 chan->cq_head = (chan->cq_head + 1) % hdma_dev->chan_depth; in hisi_dma_irq()
445 hisi_dma_chan_write(hdma_dev->base, HISI_DMA_CQ_HEAD_PTR, in hisi_dma_irq()
451 dev_err(&hdma_dev->pdev->dev, "task error!\n"); in hisi_dma_irq()
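
In the completion interrupt the driver advances cq_head modulo the ring depth and writes it back through HISI_DMA_CQ_HEAD_PTR so the hardware can reuse the slot; a failed CQE is reported with the listed "task error!" message. A condensed sketch; the CQE status check (hisi_dma_cqe_ok() here) is a hypothetical helper, and the locking and descriptor handling are assumptions:

static irqreturn_t hisi_dma_irq(int irq, void *data)
{
        struct hisi_dma_chan *chan = data;
        struct hisi_dma_dev *hdma_dev = chan->hdma_dev;
        struct hisi_dma_cqe *cqe = chan->cq + chan->cq_head;

        spin_lock(&chan->vc.lock);

        if (chan->desc) {
                /* consume one CQE and hand the slot back to hardware */
                chan->cq_head = (chan->cq_head + 1) % hdma_dev->chan_depth;
                hisi_dma_chan_write(hdma_dev->base, HISI_DMA_CQ_HEAD_PTR,
                                    chan->qp_num, chan->cq_head);

                if (hisi_dma_cqe_ok(cqe))       /* assumed status-check helper */
                        vchan_cookie_complete(&chan->desc->vd);
                else
                        dev_err(&hdma_dev->pdev->dev, "task error!\n");

                chan->desc = NULL;
        }

        spin_unlock(&chan->vc.lock);

        return IRQ_HANDLED;
}
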
460 static int hisi_dma_request_qps_irq(struct hisi_dma_dev *hdma_dev) in hisi_dma_request_qps_irq() argument
462 struct pci_dev *pdev = hdma_dev->pdev; in hisi_dma_request_qps_irq()
465 for (i = 0; i < hdma_dev->chan_num; i++) { in hisi_dma_request_qps_irq()
468 &hdma_dev->chan[i]); in hisi_dma_request_qps_irq()
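
One interrupt vector per queue: the loop requests chan_num IRQs and passes the channel itself as dev_id (the listed &hdma_dev->chan[i]) so the handler gets its struct hisi_dma_chan directly. Sketch; the use of devm_request_irq(), pci_irq_vector(), the flags and the "hisi_dma" name string are assumptions:

static int hisi_dma_request_qps_irq(struct hisi_dma_dev *hdma_dev)
{
        struct pci_dev *pdev = hdma_dev->pdev;
        int i, ret;

        for (i = 0; i < hdma_dev->chan_num; i++) {
                ret = devm_request_irq(&pdev->dev, pci_irq_vector(pdev, i),
                                       hisi_dma_irq, IRQF_SHARED, "hisi_dma",
                                       &hdma_dev->chan[i]);
                if (ret)
                        return ret;
        }

        return 0;
}
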
477 static int hisi_dma_enable_hw_channels(struct hisi_dma_dev *hdma_dev) in hisi_dma_enable_hw_channels() argument
481 ret = hisi_dma_alloc_qps_mem(hdma_dev); in hisi_dma_enable_hw_channels()
483 dev_err(&hdma_dev->pdev->dev, "fail to allocate qp memory!\n"); in hisi_dma_enable_hw_channels()
487 ret = hisi_dma_request_qps_irq(hdma_dev); in hisi_dma_enable_hw_channels()
489 dev_err(&hdma_dev->pdev->dev, "fail to request qp irq!\n"); in hisi_dma_enable_hw_channels()
493 hisi_dma_enable_qps(hdma_dev); in hisi_dma_enable_hw_channels()
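
hisi_dma_enable_hw_channels() is the straight-line composition of the three previous steps, each error path logging through the parent PCI device. Sketch reassembled from the listed lines, with only the return statements added:

static int hisi_dma_enable_hw_channels(struct hisi_dma_dev *hdma_dev)
{
        int ret;

        ret = hisi_dma_alloc_qps_mem(hdma_dev);
        if (ret) {
                dev_err(&hdma_dev->pdev->dev, "fail to allocate qp memory!\n");
                return ret;
        }

        ret = hisi_dma_request_qps_irq(hdma_dev);
        if (ret) {
                dev_err(&hdma_dev->pdev->dev, "fail to request qp irq!\n");
                return ret;
        }

        hisi_dma_enable_qps(hdma_dev);

        return 0;
}
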
503 static void hisi_dma_set_mode(struct hisi_dma_dev *hdma_dev, in hisi_dma_set_mode() argument
506 writel_relaxed(mode == RC ? 1 : 0, hdma_dev->base + HISI_DMA_MODE); in hisi_dma_set_mode()
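
Mode selection is a single global (not per-queue) register write: 1 for RC mode, 0 otherwise. The listed line is essentially the whole function; only the type name of the second parameter is assumed:

static void hisi_dma_set_mode(struct hisi_dma_dev *hdma_dev,
                              enum hisi_dma_mode mode)
{
        writel_relaxed(mode == RC ? 1 : 0, hdma_dev->base + HISI_DMA_MODE);
}
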
512 struct hisi_dma_dev *hdma_dev; in hisi_dma_probe() local
536 hdma_dev = devm_kzalloc(dev, struct_size(hdma_dev, chan, HISI_DMA_CHAN_NUM), GFP_KERNEL); in hisi_dma_probe()
537 if (!hdma_dev) in hisi_dma_probe()
540 hdma_dev->base = pcim_iomap_table(pdev)[PCI_BAR_2]; in hisi_dma_probe()
541 hdma_dev->pdev = pdev; in hisi_dma_probe()
542 hdma_dev->chan_num = HISI_DMA_CHAN_NUM; in hisi_dma_probe()
543 hdma_dev->chan_depth = HISI_DMA_Q_DEPTH_VAL; in hisi_dma_probe()
545 pci_set_drvdata(pdev, hdma_dev); in hisi_dma_probe()
559 dma_dev = &hdma_dev->dma_dev; in hisi_dma_probe()
571 hisi_dma_set_mode(hdma_dev, RC); in hisi_dma_probe()
573 ret = hisi_dma_enable_hw_channels(hdma_dev); in hisi_dma_probe()
580 hdma_dev); in hisi_dma_probe()
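
The probe hits show the device structure being sized with struct_size() for HISI_DMA_CHAN_NUM channels, mapped through BAR 2, stashed in drvdata, put into RC mode and its channels brought up; the final fragment passes hdma_dev into some cleanup or callback registration whose name is not shown. A condensed sketch of that flow; everything between the listed lines (PCI enable, BAR/IRQ setup, dma_device capability wiring) is elided, and the cleanup registration and the managed dmaengine registration at the end are assumptions:

static int hisi_dma_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
        struct device *dev = &pdev->dev;
        struct hisi_dma_dev *hdma_dev;
        struct dma_device *dma_dev;
        int ret;

        /* ... pcim_enable_device(), BAR request/iomap, DMA mask, MSI vectors ... */

        hdma_dev = devm_kzalloc(dev, struct_size(hdma_dev, chan, HISI_DMA_CHAN_NUM),
                                GFP_KERNEL);
        if (!hdma_dev)
                return -ENOMEM;

        hdma_dev->base = pcim_iomap_table(pdev)[PCI_BAR_2];
        hdma_dev->pdev = pdev;
        hdma_dev->chan_num = HISI_DMA_CHAN_NUM;
        hdma_dev->chan_depth = HISI_DMA_Q_DEPTH_VAL;

        pci_set_drvdata(pdev, hdma_dev);

        dma_dev = &hdma_dev->dma_dev;
        /* ... fill in dma_dev capabilities and callbacks ... */

        hisi_dma_set_mode(hdma_dev, RC);

        ret = hisi_dma_enable_hw_channels(hdma_dev);
        if (ret)
                return ret;

        /*
         * The last listed fragment passes hdma_dev into a cleanup registration,
         * e.g. devm_add_action_or_reset(dev, <uninit callback>, hdma_dev);
         * the callback name is not visible in this listing.
         */

        return dmaenginem_async_device_register(dma_dev);
}
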