Lines matching refs: fsl_chan

Each entry below gives the source line number, the matched line, and the enclosing function; entries flagged "argument" or "local" mark where fsl_chan enters scope as a parameter or a local variable.

45 static void fsl_edma_enable_request(struct fsl_edma_chan *fsl_chan)  in fsl_edma_enable_request()  argument
47 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_enable_request()
48 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
50 if (fsl_chan->edma->drvdata->version == v1) { in fsl_edma_enable_request()
51 edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei); in fsl_edma_enable_request()
52 edma_writeb(fsl_chan->edma, ch, regs->serq); in fsl_edma_enable_request()
62 void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma_disable_request() argument
64 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_disable_request()
65 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
67 if (fsl_chan->edma->drvdata->version == v1) { in fsl_edma_disable_request()
68 edma_writeb(fsl_chan->edma, ch, regs->cerq); in fsl_edma_disable_request()
69 edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei); in fsl_edma_disable_request()
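
The enable and disable paths above are deliberate mirrors: enable arms the channel's error interrupt (SEEI) before its hardware request line (SERQ), and disable reverses the order, clearing the request (CERQ) before the error interrupt (CEEI). A condensed sketch of the v1 branch only, reassembled from the matched lines; the combined helper name is hypothetical, and edma_writeb()/EDMA_* come from the driver's fsl-edma-common.h:

static void sketch_toggle_request(struct fsl_edma_chan *fsl_chan, bool enable)
{
	struct edma_regs *regs = &fsl_chan->edma->regs;
	u32 ch = fsl_chan->vchan.chan.chan_id;

	if (enable) {
		/* Arm error reporting first, then the request line. */
		edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei);
		edma_writeb(fsl_chan->edma, ch, regs->serq);
	} else {
		/* Stop new requests first, then disarm error reporting. */
		edma_writeb(fsl_chan->edma, ch, regs->cerq);
		edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei);
	}
}
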
80 static void mux_configure8(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure8() argument
93 static void mux_configure32(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure32() argument
106 void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan, in fsl_edma_chan_mux() argument
109 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
113 u32 dmamux_nr = fsl_chan->edma->drvdata->dmamuxs; in fsl_edma_chan_mux()
115 chans_per_mux = fsl_chan->edma->n_chans / dmamux_nr; in fsl_edma_chan_mux()
116 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
118 if (fsl_chan->edma->drvdata->mux_swap) in fsl_edma_chan_mux()
121 muxaddr = fsl_chan->edma->muxbase[ch / chans_per_mux]; in fsl_edma_chan_mux()
124 if (fsl_chan->edma->drvdata->version == v3) in fsl_edma_chan_mux()
125 mux_configure32(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
127 mux_configure8(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
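
fsl_edma_chan_mux() spreads n_chans evenly over the SoC's DMAMUX blocks: channel ch programs slot ch % chans_per_mux of mux block ch / chans_per_mux, using a 32-bit mux register on v3 hardware and an 8-bit one otherwise. A standalone illustration of that index math (the channel and mux counts are made-up example values, not from any particular SoC):

#include <stdio.h>

int main(void)
{
	unsigned int n_chans = 32, dmamux_nr = 2;	/* example values */
	unsigned int chans_per_mux = n_chans / dmamux_nr;
	unsigned int ch;

	for (ch = 0; ch < n_chans; ch += 6)
		printf("ch %2u -> muxbase[%u], offset %2u\n",
		       ch, ch / chans_per_mux, ch % chans_per_mux);
	return 0;
}
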
162 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_terminate_all() local
166 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
167 fsl_edma_disable_request(fsl_chan); in fsl_edma_terminate_all()
168 fsl_chan->edesc = NULL; in fsl_edma_terminate_all()
169 fsl_chan->idle = true; in fsl_edma_terminate_all()
170 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
171 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
172 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
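
Those lines follow the canonical virt-dma teardown shape: quiesce the hardware and detach the active descriptor under the channel lock, harvest every queued descriptor onto a private list, then free that list only after dropping the lock. Reassembled into a full function from the matched lines; the declarations and the trailing return 0 are inferred from the standard device_terminate_all signature:

static int fsl_edma_terminate_all(struct dma_chan *chan)
{
	struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
	fsl_edma_disable_request(fsl_chan);	/* quiesce the channel */
	fsl_chan->edesc = NULL;			/* forget the active descriptor */
	fsl_chan->idle = true;
	vchan_get_all_descriptors(&fsl_chan->vchan, &head);
	spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
	/* Free the harvested descriptors outside the spinlock. */
	vchan_dma_desc_free_list(&fsl_chan->vchan, &head);
	return 0;
}
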
179 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_pause() local
182 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
183 if (fsl_chan->edesc) { in fsl_edma_pause()
184 fsl_edma_disable_request(fsl_chan); in fsl_edma_pause()
185 fsl_chan->status = DMA_PAUSED; in fsl_edma_pause()
186 fsl_chan->idle = true; in fsl_edma_pause()
188 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
195 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_resume() local
198 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
199 if (fsl_chan->edesc) { in fsl_edma_resume()
200 fsl_edma_enable_request(fsl_chan); in fsl_edma_resume()
201 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_resume()
202 fsl_chan->idle = false; in fsl_edma_resume()
204 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
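
Pause and resume are another mirrored pair, and both are guarded by if (fsl_chan->edesc) so they are no-ops when nothing is in flight. Pause reassembled from the matched lines (resume is identical except that it calls fsl_edma_enable_request() and sets DMA_IN_PROGRESS and idle = false); the declarations and return 0 are inferred from the device_pause signature:

static int fsl_edma_pause(struct dma_chan *chan)
{
	struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
	if (fsl_chan->edesc) {
		fsl_edma_disable_request(fsl_chan);
		fsl_chan->status = DMA_PAUSED;
		fsl_chan->idle = true;
	}
	spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
	return 0;
}
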
209 static void fsl_edma_unprep_slave_dma(struct fsl_edma_chan *fsl_chan) in fsl_edma_unprep_slave_dma() argument
211 if (fsl_chan->dma_dir != DMA_NONE) in fsl_edma_unprep_slave_dma()
212 dma_unmap_resource(fsl_chan->vchan.chan.device->dev, in fsl_edma_unprep_slave_dma()
213 fsl_chan->dma_dev_addr, in fsl_edma_unprep_slave_dma()
214 fsl_chan->dma_dev_size, in fsl_edma_unprep_slave_dma()
215 fsl_chan->dma_dir, 0); in fsl_edma_unprep_slave_dma()
216 fsl_chan->dma_dir = DMA_NONE; in fsl_edma_unprep_slave_dma()
219 static bool fsl_edma_prep_slave_dma(struct fsl_edma_chan *fsl_chan, in fsl_edma_prep_slave_dma() argument
222 struct device *dev = fsl_chan->vchan.chan.device->dev; in fsl_edma_prep_slave_dma()
230 addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_dma()
231 size = fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_dma()
235 addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_dma()
236 size = fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_dma()
244 if (fsl_chan->dma_dir == dma_dir) in fsl_edma_prep_slave_dma()
247 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_prep_slave_dma()
249 fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0); in fsl_edma_prep_slave_dma()
250 if (dma_mapping_error(dev, fsl_chan->dma_dev_addr)) in fsl_edma_prep_slave_dma()
252 fsl_chan->dma_dev_size = size; in fsl_edma_prep_slave_dma()
253 fsl_chan->dma_dir = dma_dir; in fsl_edma_prep_slave_dma()
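
fsl_edma_prep_slave_dma() maps the peripheral's FIFO with dma_map_resource() and caches the mapping in the channel, so repeated prep calls in the same direction skip the map entirely. A reassembled sketch; the switch arms and the dma_data_direction choices (the FIFO is written by the engine on TX, hence DMA_FROM_DEVICE) are inferred around the matched lines:

static bool fsl_edma_prep_slave_dma(struct fsl_edma_chan *fsl_chan,
				    enum dma_transfer_direction dir)
{
	struct device *dev = fsl_chan->vchan.chan.device->dev;
	enum dma_data_direction dma_dir;
	phys_addr_t addr = 0;
	u32 size = 0;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma_dir = DMA_FROM_DEVICE;	/* engine writes the FIFO */
		addr = fsl_chan->cfg.dst_addr;
		size = fsl_chan->cfg.dst_maxburst;
		break;
	case DMA_DEV_TO_MEM:
		dma_dir = DMA_TO_DEVICE;	/* engine reads the FIFO */
		addr = fsl_chan->cfg.src_addr;
		size = fsl_chan->cfg.src_maxburst;
		break;
	default:
		dma_dir = DMA_NONE;
		break;
	}

	/* Already mapped for this direction: reuse the cached mapping. */
	if (fsl_chan->dma_dir == dma_dir)
		return true;

	fsl_edma_unprep_slave_dma(fsl_chan);

	fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0);
	if (dma_mapping_error(dev, fsl_chan->dma_dev_addr))
		return false;
	fsl_chan->dma_dev_size = size;
	fsl_chan->dma_dir = dma_dir;
	return true;
}
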
261 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_slave_config() local
263 memcpy(&fsl_chan->cfg, cfg, sizeof(*cfg)); in fsl_edma_slave_config()
264 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_slave_config()
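
fsl_edma_slave_config() simply snapshots the client's dma_slave_config into fsl_chan->cfg and invalidates any cached FIFO mapping, since a new config may carry different addresses or widths. From the client side, a peripheral driver fills that struct before preparing transfers; a minimal hypothetical example (the helper name and its parameters are illustrative):

#include <linux/dmaengine.h>

/* Configure a TX channel for a 4-byte FIFO register with bursts of
 * 8 words; the fifo address and chan handle come from the caller. */
static int sketch_configure_tx(struct dma_chan *chan, phys_addr_t fifo)
{
	struct dma_slave_config cfg = {
		.dst_addr	= fifo,
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 8,
	};

	return dmaengine_slave_config(chan, &cfg);
}

With these values, the prep routines below compute nbytes = dst_addr_width * dst_maxburst = 32 bytes per minor loop.
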
270 static size_t fsl_edma_desc_residue(struct fsl_edma_chan *fsl_chan, in fsl_edma_desc_residue() argument
273 struct fsl_edma_desc *edesc = fsl_chan->edesc; in fsl_edma_desc_residue()
274 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_desc_residue()
275 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_desc_residue()
282 for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++) in fsl_edma_desc_residue()
290 cur_addr = edma_readl(fsl_chan->edma, &regs->tcd[ch].saddr); in fsl_edma_desc_residue()
292 cur_addr = edma_readl(fsl_chan->edma, &regs->tcd[ch].daddr); in fsl_edma_desc_residue()
295 for (i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
316 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_tx_status() local
326 return fsl_chan->status; in fsl_edma_tx_status()
328 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
329 vdesc = vchan_find_desc(&fsl_chan->vchan, cookie); in fsl_edma_tx_status()
330 if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie) in fsl_edma_tx_status()
332 fsl_edma_desc_residue(fsl_chan, vdesc, true); in fsl_edma_tx_status()
335 fsl_edma_desc_residue(fsl_chan, vdesc, false); in fsl_edma_tx_status()
339 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
341 return fsl_chan->status; in fsl_edma_tx_status()
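
fsl_edma_desc_residue() first totals the length of every TCD in the descriptor, then, if the transfer is in progress, reads the engine's current source or destination address and subtracts what has already been moved. The walk below is a simplified, self-contained restatement of that logic; struct seg and the helper are hypothetical stand-ins for the driver's hardware TCD fields:

#include <stddef.h>

struct seg {
	unsigned long start;	/* bus address the segment starts at */
	size_t len;		/* bytes this segment transfers */
};

static size_t sketch_residue(const struct seg *segs, int nsegs,
			     unsigned long cur_addr)
{
	size_t remaining = 0;
	int i;

	for (i = 0; i < nsegs; i++)
		remaining += segs[i].len;	/* total programmed length */

	for (i = 0; i < nsegs; i++) {
		if (cur_addr >= segs[i].start &&
		    cur_addr < segs[i].start + segs[i].len) {
			/* Engine is inside this segment: subtract only
			 * the part already behind cur_addr. */
			remaining -= cur_addr - segs[i].start;
			break;
		}
		remaining -= segs[i].len;	/* segment fully done */
	}
	return remaining;
}

fsl_edma_tx_status() then reports that residue through the dma_tx_state it was handed: computed against the live TCD registers when the cookie matches the running descriptor, or from the still-queued descriptor found by vchan_find_desc() otherwise (lines 329-335).
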
345 static void fsl_edma_set_tcd_regs(struct fsl_edma_chan *fsl_chan, in fsl_edma_set_tcd_regs() argument
348 struct fsl_edma_engine *edma = fsl_chan->edma; in fsl_edma_set_tcd_regs()
349 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_set_tcd_regs()
350 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_set_tcd_regs()
421 static struct fsl_edma_desc *fsl_edma_alloc_desc(struct fsl_edma_chan *fsl_chan, in fsl_edma_alloc_desc() argument
431 fsl_desc->echan = fsl_chan; in fsl_edma_alloc_desc()
434 fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool, in fsl_edma_alloc_desc()
443 dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd, in fsl_edma_alloc_desc()
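
fsl_edma_alloc_desc() pairs one software descriptor with sg_len hardware TCDs drawn from the channel's dma_pool, unwinding the pool allocations if any one of them fails. Reassembled around the matched lines (431, 434 and 443); the kzalloc/struct_size call and the n_tcds assignment are inferred from the surrounding driver code:

static struct fsl_edma_desc *fsl_edma_alloc_desc(struct fsl_edma_chan *fsl_chan,
						 int sg_len)
{
	struct fsl_edma_desc *fsl_desc;
	int i;

	/* Software descriptor with a flexible array of TCD slots. */
	fsl_desc = kzalloc(struct_size(fsl_desc, tcd, sg_len), GFP_NOWAIT);
	if (!fsl_desc)
		return NULL;

	fsl_desc->echan = fsl_chan;
	fsl_desc->n_tcds = sg_len;
	for (i = 0; i < sg_len; i++) {
		fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool,
					GFP_NOWAIT, &fsl_desc->tcd[i].ptcd);
		if (!fsl_desc->tcd[i].vtcd)
			goto err;
	}
	return fsl_desc;

err:
	/* Free only the pool entries that were actually allocated. */
	while (--i >= 0)
		dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd,
			      fsl_desc->tcd[i].ptcd);
	kfree(fsl_desc);
	return NULL;
}
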
454 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_dma_cyclic() local
464 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_dma_cyclic()
468 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_dma_cyclic()
476 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
477 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_dma_cyclic()
478 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_dma_cyclic()
479 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_dma_cyclic()
481 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
482 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_dma_cyclic()
483 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_dma_cyclic()
484 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_dma_cyclic()
498 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
499 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_dma_cyclic()
502 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
505 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_dma_cyclic()
509 fsl_chan->attr, soff, nbytes, 0, iter, in fsl_edma_prep_dma_cyclic()
514 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_dma_cyclic()
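
For the cyclic case, each TCD covers one period of the buffer and the last TCD's scatter/gather link points back at the first, so the engine loops until the channel is paused or terminated. That ring linkage is not among the matched lines, but the index arithmetic behind it is plain modular chaining:

#include <stdio.h>

int main(void)
{
	unsigned int n = 4, i;	/* e.g. four periods in the ring */

	for (i = 0; i < n; i++)
		printf("tcd[%u] links to tcd[%u]\n", i, (i + 1) % n);
	return 0;
}
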
523 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_slave_sg() local
533 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_slave_sg()
536 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_slave_sg()
543 fsl_chan->attr = in fsl_edma_prep_slave_sg()
544 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_slave_sg()
545 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_slave_sg()
546 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
548 fsl_chan->attr = in fsl_edma_prep_slave_sg()
549 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_slave_sg()
550 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_slave_sg()
551 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_sg()
560 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
561 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_slave_sg()
564 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
567 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_slave_sg()
574 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
580 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
586 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_slave_sg()
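
In both prep routines, soff and doff encode which side of the transfer moves: the memory side strides by the configured bus width while the peripheral side stays parked on its FIFO register (offset 0). A hypothetical helper making that convention explicit, using the kernel's u16/u32 types:

static void sketch_pick_offsets(enum dma_transfer_direction dir,
				u32 addr_width, u16 *soff, u16 *doff)
{
	if (dir == DMA_MEM_TO_DEV) {
		*soff = addr_width;	/* walk forward through memory */
		*doff = 0;		/* FIFO address never moves */
	} else {
		*soff = 0;
		*doff = addr_width;
	}
}
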
590 void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan) in fsl_edma_xfer_desc() argument
594 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc()
596 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc()
599 fsl_chan->edesc = to_fsl_edma_desc(vdesc); in fsl_edma_xfer_desc()
600 fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd); in fsl_edma_xfer_desc()
601 fsl_edma_enable_request(fsl_chan); in fsl_edma_xfer_desc()
602 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_xfer_desc()
603 fsl_chan->idle = false; in fsl_edma_xfer_desc()
609 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_issue_pending() local
612 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
614 if (unlikely(fsl_chan->pm_state != RUNNING)) { in fsl_edma_issue_pending()
615 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
620 if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc) in fsl_edma_issue_pending()
621 fsl_edma_xfer_desc(fsl_chan); in fsl_edma_issue_pending()
623 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
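
fsl_edma_xfer_desc() is the hand-off point between the virt-dma queue and the hardware: it must run with the vchan lock held, pulls the next issued descriptor, programs its first TCD, and fires the request. Reassembled from the matched lines; only the early return for an empty queue is inferred:

void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan)
{
	struct virt_dma_desc *vdesc;

	lockdep_assert_held(&fsl_chan->vchan.lock);

	vdesc = vchan_next_desc(&fsl_chan->vchan);
	if (!vdesc)
		return;		/* nothing issued yet */
	fsl_chan->edesc = to_fsl_edma_desc(vdesc);
	fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd);
	fsl_edma_enable_request(fsl_chan);
	fsl_chan->status = DMA_IN_PROGRESS;
	fsl_chan->idle = false;
}

fsl_edma_issue_pending() drives it under the same lock, bailing out early when pm_state is not RUNNING (line 614) and only kicking a transfer when vchan_issue_pending() queued work and no descriptor is already active (line 620).
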
629 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_alloc_chan_resources() local
631 fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev, in fsl_edma_alloc_chan_resources()
640 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_free_chan_resources() local
644 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
645 fsl_edma_disable_request(fsl_chan); in fsl_edma_free_chan_resources()
646 fsl_edma_chan_mux(fsl_chan, 0, false); in fsl_edma_free_chan_resources()
647 fsl_chan->edesc = NULL; in fsl_edma_free_chan_resources()
648 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
649 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_free_chan_resources()
650 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
652 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
653 dma_pool_destroy(fsl_chan->tcd_pool); in fsl_edma_free_chan_resources()
654 fsl_chan->tcd_pool = NULL; in fsl_edma_free_chan_resources()
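
The resource lifetime pairs up across these last two functions: the TCD dma_pool is created when a client claims the channel and destroyed, after the channel is drained, when the client releases it. A sketch of the allocation side; the element size and 32-byte alignment (the eDMA's TCD alignment requirement) are inferred, since line 631 shows only the start of the call:

static int fsl_edma_alloc_chan_resources(struct dma_chan *chan)
{
	struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan);

	fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev,
					     sizeof(struct fsl_edma_hw_tcd),
					     32, 0);
	if (!fsl_chan->tcd_pool)
		return -ENOMEM;
	return 0;
}

On the free side, note the ordering: the channel is disabled and unmuxed and its descriptors harvested under the lock, the FIFO mapping is dropped, and only then is the pool destroyed and the pointer cleared so a later re-allocation starts clean.
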