Lines matching refs:atchan
These are the source lines in the Atmel AHB DMA controller driver (drivers/dma/at_hdmac.c) that reference the per-channel state, struct at_dma_chan *atchan, grouped by the function they appear in; the number at the start of each hit is the line number in that file.
80 static struct at_desc *atc_first_active(struct at_dma_chan *atchan) in atc_first_active() argument
82 return list_first_entry(&atchan->active_list, in atc_first_active()
86 static struct at_desc *atc_first_queued(struct at_dma_chan *atchan) in atc_first_queued() argument
88 return list_first_entry(&atchan->queue, in atc_first_queued()
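
The two helpers above only peek at the head of the per-channel lists. A minimal sketch of the surrounding data structures, using hypothetical reduced struct definitions that contain only the fields visible in this listing:

	#include <linux/list.h>
	#include <linux/spinlock.h>

	/* Reduced, hypothetical versions of the driver structs: only the
	 * fields that show up in the hits are included. */
	struct at_desc {
		struct list_head	desc_node;	/* node in active/queue/free lists */
		struct list_head	tx_list;	/* chained child descriptors */
	};

	struct at_dma_chan {
		spinlock_t		lock;		/* protects the three lists below */
		struct list_head	active_list;	/* descriptors handed to hardware */
		struct list_head	queue;		/* submitted, not yet started */
		struct list_head	free_list;	/* recycled descriptors */
	};

	/* Callers are expected to hold atchan->lock and to have checked that
	 * the list is non-empty, since list_first_entry() assumes that. */
	static struct at_desc *atc_first_active(struct at_dma_chan *atchan)
	{
		return list_first_entry(&atchan->active_list, struct at_desc, desc_node);
	}

	static struct at_desc *atc_first_queued(struct at_dma_chan *atchan)
	{
		return list_first_entry(&atchan->queue, struct at_desc, desc_node);
	}
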
126 static struct at_desc *atc_desc_get(struct at_dma_chan *atchan) in atc_desc_get() argument
133 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_get()
134 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_desc_get()
141 dev_dbg(chan2dev(&atchan->chan_common), in atc_desc_get()
144 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_get()
145 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_get()
150 ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT); in atc_desc_get()
160 static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc) in atc_desc_put() argument
166 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_put()
168 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
171 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_desc_put()
172 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
174 list_add(&desc->desc_node, &atchan->free_list); in atc_desc_put()
175 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_put()
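
atc_desc_get()/atc_desc_put() implement a small descriptor pool on free_list. A sketch of the put side, assuming only the fields shown above; the allocation fallback through atc_alloc_descriptor() in the get path is left out:

	static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc)
	{
		unsigned long flags;

		if (!desc)
			return;

		spin_lock_irqsave(&atchan->lock, flags);
		/* Return the chained children first, then the descriptor itself. */
		list_splice_init(&desc->tx_list, &atchan->free_list);
		list_add(&desc->desc_node, &atchan->free_list);
		spin_unlock_irqrestore(&atchan->lock, flags);
	}
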
209 static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first) in atc_dostart() argument
211 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_dostart()
214 if (atc_chan_is_enabled(atchan)) { in atc_dostart()
215 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
217 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
219 channel_readl(atchan, SADDR), in atc_dostart()
220 channel_readl(atchan, DADDR), in atc_dostart()
221 channel_readl(atchan, CTRLA), in atc_dostart()
222 channel_readl(atchan, CTRLB), in atc_dostart()
223 channel_readl(atchan, DSCR)); in atc_dostart()
229 vdbg_dump_regs(atchan); in atc_dostart()
231 channel_writel(atchan, SADDR, 0); in atc_dostart()
232 channel_writel(atchan, DADDR, 0); in atc_dostart()
233 channel_writel(atchan, CTRLA, 0); in atc_dostart()
234 channel_writel(atchan, CTRLB, 0); in atc_dostart()
235 channel_writel(atchan, DSCR, first->txd.phys); in atc_dostart()
236 channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) | in atc_dostart()
238 channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) | in atc_dostart()
242 dma_writel(atdma, CHER, atchan->mask); in atc_dostart()
244 vdbg_dump_regs(atchan); in atc_dostart()
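
atc_dostart() programs the channel and kicks it. A condensed sketch of the sequence visible above: refuse to reprogram a busy channel, zero the per-channel shadow registers, point DSCR at the first linked-list item, then enable the channel through the controller-wide CHER register. channel_writel(), dma_writel() and the register names are the driver-private accessors that appear in the hits; the SPIP/DPIP picture-in-picture setup is omitted here.

	static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first)
	{
		struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

		if (atc_chan_is_enabled(atchan)) {
			dev_err(chan2dev(&atchan->chan_common),
				"BUG: attempting to start a non-idle channel\n");
			return;		/* never touch a running channel */
		}

		/* The transfer parameters live in the linked-list items, so the
		 * registers are cleared and DSCR points at the first item. */
		channel_writel(atchan, SADDR, 0);
		channel_writel(atchan, DADDR, 0);
		channel_writel(atchan, CTRLA, 0);
		channel_writel(atchan, CTRLB, 0);
		channel_writel(atchan, DSCR, first->txd.phys);

		dma_writel(atdma, CHER, atchan->mask);
	}
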
252 static struct at_desc *atc_get_desc_by_cookie(struct at_dma_chan *atchan, in atc_get_desc_by_cookie() argument
257 list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) { in atc_get_desc_by_cookie()
262 list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) { in atc_get_desc_by_cookie()
298 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_get_bytes_left() local
299 struct at_desc *desc_first = atc_first_active(atchan); in atc_get_bytes_left()
310 desc = atc_get_desc_by_cookie(atchan, cookie); in atc_get_bytes_left()
370 dscr = channel_readl(atchan, DSCR); in atc_get_bytes_left()
372 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
377 new_dscr = channel_readl(atchan, DSCR); in atc_get_bytes_left()
397 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
421 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
434 atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc) in atc_chain_complete() argument
437 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_chain_complete()
440 dev_vdbg(chan2dev(&atchan->chan_common), in atc_chain_complete()
443 spin_lock_irqsave(&atchan->lock, flags); in atc_chain_complete()
446 if (!atc_chan_is_cyclic(atchan)) in atc_chain_complete()
449 spin_unlock_irqrestore(&atchan->lock, flags); in atc_chain_complete()
454 if (!atc_chan_is_cyclic(atchan)) in atc_chain_complete()
459 spin_lock_irqsave(&atchan->lock, flags); in atc_chain_complete()
461 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_chain_complete()
463 list_add(&desc->desc_node, &atchan->free_list); in atc_chain_complete()
464 spin_unlock_irqrestore(&atchan->lock, flags); in atc_chain_complete()
478 static void atc_advance_work(struct at_dma_chan *atchan) in atc_advance_work() argument
483 dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n"); in atc_advance_work()
485 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
486 if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list)) in atc_advance_work()
487 return spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
489 desc = atc_first_active(atchan); in atc_advance_work()
492 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
493 atc_chain_complete(atchan, desc); in atc_advance_work()
496 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
497 if (!list_empty(&atchan->active_list)) { in atc_advance_work()
498 desc = atc_first_queued(atchan); in atc_advance_work()
499 list_move_tail(&desc->desc_node, &atchan->active_list); in atc_advance_work()
500 atc_dostart(atchan, desc); in atc_advance_work()
502 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
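
atc_advance_work() is the normal completion path run from the tasklet. A simplified sketch of the flow implied by the hits, in which the finished head of the active list is detached and completed outside the lock, and queued work is promoted only when the queue is non-empty:

	static void atc_advance_work(struct at_dma_chan *atchan)
	{
		struct at_desc *desc;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);
		/* Nothing to advance while the hardware is still running or
		 * when no descriptor is in flight. */
		if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list)) {
			spin_unlock_irqrestore(&atchan->lock, flags);
			return;
		}

		desc = atc_first_active(atchan);
		list_del_init(&desc->desc_node);
		spin_unlock_irqrestore(&atchan->lock, flags);

		/* Run callbacks and recycle the finished chain outside the lock. */
		atc_chain_complete(atchan, desc);

		/* Promote the next queued descriptor, if any, and restart. */
		spin_lock_irqsave(&atchan->lock, flags);
		if (!list_empty(&atchan->queue)) {
			desc = atc_first_queued(atchan);
			list_move_tail(&desc->desc_node, &atchan->active_list);
			atc_dostart(atchan, desc);
		}
		spin_unlock_irqrestore(&atchan->lock, flags);
	}
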
510 static void atc_handle_error(struct at_dma_chan *atchan) in atc_handle_error() argument
517 spin_lock_irqsave(&atchan->lock, flags); in atc_handle_error()
523 bad_desc = atc_first_active(atchan); in atc_handle_error()
527 if (!list_empty(&atchan->active_list)) { in atc_handle_error()
528 desc = atc_first_queued(atchan); in atc_handle_error()
529 list_move_tail(&desc->desc_node, &atchan->active_list); in atc_handle_error()
530 atc_dostart(atchan, desc); in atc_handle_error()
540 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
542 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
544 atc_dump_lli(atchan, &bad_desc->lli); in atc_handle_error()
546 atc_dump_lli(atchan, &child->lli); in atc_handle_error()
548 spin_unlock_irqrestore(&atchan->lock, flags); in atc_handle_error()
551 atc_chain_complete(atchan, bad_desc); in atc_handle_error()
558 static void atc_handle_cyclic(struct at_dma_chan *atchan) in atc_handle_cyclic() argument
560 struct at_desc *first = atc_first_active(atchan); in atc_handle_cyclic()
563 dev_vdbg(chan2dev(&atchan->chan_common), in atc_handle_cyclic()
565 channel_readl(atchan, DSCR)); in atc_handle_cyclic()
574 struct at_dma_chan *atchan = from_tasklet(atchan, t, tasklet); in atc_tasklet() local
576 if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status)) in atc_tasklet()
577 return atc_handle_error(atchan); in atc_tasklet()
579 if (atc_chan_is_cyclic(atchan)) in atc_tasklet()
580 return atc_handle_cyclic(atchan); in atc_tasklet()
582 atc_advance_work(atchan); in atc_tasklet()
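
The tasklet is the single bottom half per channel and simply dispatches on the channel state: errors first, then cyclic housekeeping, otherwise the normal advance path. A sketch matching the hits above:

	static void atc_tasklet(struct tasklet_struct *t)
	{
		struct at_dma_chan *atchan = from_tasklet(atchan, t, tasklet);

		if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status))
			return atc_handle_error(atchan);

		if (atc_chan_is_cyclic(atchan))
			return atc_handle_cyclic(atchan);

		atc_advance_work(atchan);
	}
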
588 struct at_dma_chan *atchan; in at_dma_interrupt() local
606 atchan = &atdma->chan[i]; in at_dma_interrupt()
611 AT_DMA_RES(i) | atchan->mask); in at_dma_interrupt()
613 set_bit(ATC_IS_ERROR, &atchan->status); in at_dma_interrupt()
615 tasklet_schedule(&atchan->tasklet); in at_dma_interrupt()
639 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan); in atc_tx_submit() local
643 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_submit()
646 list_add_tail(&desc->desc_node, &atchan->queue); in atc_tx_submit()
647 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_submit()
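
atc_tx_submit() only assigns a cookie and appends the descriptor to the software queue; the hardware is kicked later from issue_pending()/advance_work(). A sketch, where txd_to_at_desc() stands in for the driver's container_of() helper (name assumed):

	static dma_cookie_t atc_tx_submit(struct dma_async_tx_descriptor *tx)
	{
		struct at_desc *desc = txd_to_at_desc(tx);	/* container_of() helper, assumed */
		struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);
		dma_cookie_t cookie;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);
		cookie = dma_cookie_assign(tx);
		list_add_tail(&desc->desc_node, &atchan->queue);
		spin_unlock_irqrestore(&atchan->lock, flags);

		return cookie;
	}
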
665 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_interleaved() local
726 desc = atc_desc_get(atchan); in atc_prep_dma_interleaved()
765 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_memcpy() local
802 desc = atc_desc_get(atchan); in atc_prep_dma_memcpy()
829 atc_desc_put(atchan, first); in atc_prep_dma_memcpy()
838 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_create_memset_desc() local
855 desc = atc_desc_get(atchan); in atc_create_memset_desc()
944 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_memset_sg() local
1011 atc_desc_put(atchan, first); in atc_prep_dma_memset_sg()
1029 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_slave_sg() local
1031 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_slave_sg()
1064 | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if); in atc_prep_slave_sg()
1071 desc = atc_desc_get(atchan); in atc_prep_slave_sg()
1104 | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if); in atc_prep_slave_sg()
1112 desc = atc_desc_get(atchan); in atc_prep_slave_sg()
1158 atc_desc_put(atchan, first); in atc_prep_slave_sg()
1192 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_dma_cyclic_fill_desc() local
1193 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_dma_cyclic_fill_desc()
1211 | ATC_SIF(atchan->mem_if) in atc_dma_cyclic_fill_desc()
1212 | ATC_DIF(atchan->per_if); in atc_dma_cyclic_fill_desc()
1223 | ATC_SIF(atchan->per_if) in atc_dma_cyclic_fill_desc()
1224 | ATC_DIF(atchan->mem_if); in atc_dma_cyclic_fill_desc()
1249 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_cyclic() local
1251 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_dma_cyclic()
1269 was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1291 desc = atc_desc_get(atchan); in atc_prep_dma_cyclic()
1313 atc_desc_put(atchan, first); in atc_prep_dma_cyclic()
1315 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1322 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_config() local
1330 memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig)); in atc_config()
1332 convert_burst(&atchan->dma_sconfig.src_maxburst); in atc_config()
1333 convert_burst(&atchan->dma_sconfig.dst_maxburst); in atc_config()
1340 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_pause() local
1342 int chan_id = atchan->chan_common.chan_id; in atc_pause()
1347 spin_lock_irqsave(&atchan->lock, flags); in atc_pause()
1350 set_bit(ATC_IS_PAUSED, &atchan->status); in atc_pause()
1352 spin_unlock_irqrestore(&atchan->lock, flags); in atc_pause()
1359 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_resume() local
1361 int chan_id = atchan->chan_common.chan_id; in atc_resume()
1366 if (!atc_chan_is_paused(atchan)) in atc_resume()
1369 spin_lock_irqsave(&atchan->lock, flags); in atc_resume()
1372 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_resume()
1374 spin_unlock_irqrestore(&atchan->lock, flags); in atc_resume()
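
atc_pause()/atc_resume() track the paused state in atchan->status under the channel lock. The sketch below assumes AT_DMA_SUSP()/AT_DMA_RES() are the controller's per-channel suspend and resume request bits; only AT_DMA_RES appears in the hits, AT_DMA_SUSP is an assumption.

	static int atc_pause(struct dma_chan *chan)
	{
		struct at_dma_chan *atchan = to_at_dma_chan(chan);
		struct at_dma *atdma = to_at_dma(chan->device);
		int chan_id = atchan->chan_common.chan_id;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);
		dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));	/* assumed suspend bit */
		set_bit(ATC_IS_PAUSED, &atchan->status);
		spin_unlock_irqrestore(&atchan->lock, flags);

		return 0;
	}

	static int atc_resume(struct dma_chan *chan)
	{
		struct at_dma_chan *atchan = to_at_dma_chan(chan);
		struct at_dma *atdma = to_at_dma(chan->device);
		int chan_id = atchan->chan_common.chan_id;
		unsigned long flags;

		if (!atc_chan_is_paused(atchan))
			return 0;

		spin_lock_irqsave(&atchan->lock, flags);
		dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
		clear_bit(ATC_IS_PAUSED, &atchan->status);
		spin_unlock_irqrestore(&atchan->lock, flags);

		return 0;
	}
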
1381 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_terminate_all() local
1383 int chan_id = atchan->chan_common.chan_id; in atc_terminate_all()
1394 spin_lock_irqsave(&atchan->lock, flags); in atc_terminate_all()
1397 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1400 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
1404 list_splice_tail_init(&atchan->queue, &atchan->free_list); in atc_terminate_all()
1405 list_splice_tail_init(&atchan->active_list, &atchan->free_list); in atc_terminate_all()
1407 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_terminate_all()
1409 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_terminate_all()
1411 spin_unlock_irqrestore(&atchan->lock, flags); in atc_terminate_all()
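
atc_terminate_all() force-disables the channel, waits for the controller to report it idle, then recycles everything that was queued or in flight and clears the sticky state bits. A sketch built from the hits above:

	static int atc_terminate_all(struct dma_chan *chan)
	{
		struct at_dma_chan *atchan = to_at_dma_chan(chan);
		struct at_dma *atdma = to_at_dma(chan->device);
		int chan_id = atchan->chan_common.chan_id;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);

		/* Disable the channel and clear any pending resume request... */
		dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

		/* ...then wait for the hardware to confirm the channel is idle. */
		while (dma_readl(atdma, CHSR) & atchan->mask)
			cpu_relax();

		/* Recycle everything that was queued or in flight. */
		list_splice_tail_init(&atchan->queue, &atchan->free_list);
		list_splice_tail_init(&atchan->active_list, &atchan->free_list);

		clear_bit(ATC_IS_PAUSED, &atchan->status);
		clear_bit(ATC_IS_CYCLIC, &atchan->status);

		spin_unlock_irqrestore(&atchan->lock, flags);

		return 0;
	}
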
1431 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_tx_status() local
1446 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_status()
1451 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_status()
1473 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_issue_pending() local
1479 spin_lock_irqsave(&atchan->lock, flags); in atc_issue_pending()
1480 if (atc_chan_is_enabled(atchan) || list_empty(&atchan->queue)) in atc_issue_pending()
1481 return spin_unlock_irqrestore(&atchan->lock, flags); in atc_issue_pending()
1483 desc = atc_first_queued(atchan); in atc_issue_pending()
1484 list_move_tail(&desc->desc_node, &atchan->active_list); in atc_issue_pending()
1485 atc_dostart(atchan, desc); in atc_issue_pending()
1486 spin_unlock_irqrestore(&atchan->lock, flags); in atc_issue_pending()
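
atc_issue_pending() starts the head of the software queue only if the hardware is idle; otherwise advance_work() picks the queue up on the next completion. A sketch matching the hits:

	static void atc_issue_pending(struct dma_chan *chan)
	{
		struct at_dma_chan *atchan = to_at_dma_chan(chan);
		struct at_desc *desc;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);
		/* Nothing to do while the channel is busy or nothing is queued. */
		if (atc_chan_is_enabled(atchan) || list_empty(&atchan->queue)) {
			spin_unlock_irqrestore(&atchan->lock, flags);
			return;
		}

		desc = atc_first_queued(atchan);
		list_move_tail(&desc->desc_node, &atchan->active_list);
		atc_dostart(atchan, desc);
		spin_unlock_irqrestore(&atchan->lock, flags);
	}
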
1497 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_alloc_chan_resources() local
1507 if (atc_chan_is_enabled(atchan)) { in atc_alloc_chan_resources()
1512 if (!list_empty(&atchan->free_list)) { in atc_alloc_chan_resources()
1540 list_add_tail(&desc->desc_node, &atchan->free_list); in atc_alloc_chan_resources()
1546 channel_writel(atchan, CFG, cfg); in atc_alloc_chan_resources()
1560 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_free_chan_resources() local
1566 BUG_ON(!list_empty(&atchan->active_list)); in atc_free_chan_resources()
1567 BUG_ON(!list_empty(&atchan->queue)); in atc_free_chan_resources()
1568 BUG_ON(atc_chan_is_enabled(atchan)); in atc_free_chan_resources()
1570 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_free_chan_resources()
1576 list_splice_init(&atchan->free_list, &list); in atc_free_chan_resources()
1577 atchan->status = 0; in atc_free_chan_resources()
1605 struct at_dma_chan *atchan; in at_dma_xlate() local
1660 atchan = to_at_dma_chan(chan); in at_dma_xlate()
1661 atchan->per_if = dma_spec->args[0] & 0xff; in at_dma_xlate()
1662 atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff; in at_dma_xlate()
1840 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe() local
1842 atchan->mem_if = AT_DMA_MEM_IF; in at_dma_probe()
1843 atchan->per_if = AT_DMA_PER_IF; in at_dma_probe()
1844 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1845 dma_cookie_init(&atchan->chan_common); in at_dma_probe()
1846 list_add_tail(&atchan->chan_common.device_node, in at_dma_probe()
1849 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1850 spin_lock_init(&atchan->lock); in at_dma_probe()
1851 atchan->mask = 1 << i; in at_dma_probe()
1853 INIT_LIST_HEAD(&atchan->active_list); in at_dma_probe()
1854 INIT_LIST_HEAD(&atchan->queue); in at_dma_probe()
1855 INIT_LIST_HEAD(&atchan->free_list); in at_dma_probe()
1857 tasklet_setup(&atchan->tasklet, atc_tasklet); in at_dma_probe()
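
The per-channel loop in at_dma_probe() wires each at_dma_chan into the dmaengine device, maps its register window and initialises the state used throughout this listing. A sketch of the loop body; the channel count variable nr_channels and the ch_regs() offset helper are assumptions taken from context:

	for (i = 0; i < nr_channels; i++) {	/* nr_channels: from platform data, name assumed */
		struct at_dma_chan *atchan = &atdma->chan[i];

		atchan->mem_if = AT_DMA_MEM_IF;
		atchan->per_if = AT_DMA_PER_IF;
		atchan->chan_common.device = &atdma->dma_common;
		dma_cookie_init(&atchan->chan_common);
		list_add_tail(&atchan->chan_common.device_node,
			      &atdma->dma_common.channels);

		atchan->ch_regs = atdma->regs + ch_regs(i);	/* per-channel register window */
		spin_lock_init(&atchan->lock);
		atchan->mask = 1 << i;

		INIT_LIST_HEAD(&atchan->active_list);
		INIT_LIST_HEAD(&atchan->queue);
		INIT_LIST_HEAD(&atchan->free_list);

		tasklet_setup(&atchan->tasklet, atc_tasklet);
	}
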
1965 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_remove() local
1970 tasklet_kill(&atchan->tasklet); in at_dma_remove()
2003 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_prepare() local
2005 if (atc_chan_is_enabled(atchan) && !atc_chan_is_cyclic(atchan)) in at_dma_prepare()
2011 static void atc_suspend_cyclic(struct at_dma_chan *atchan) in atc_suspend_cyclic() argument
2013 struct dma_chan *chan = &atchan->chan_common; in atc_suspend_cyclic()
2017 if (!atc_chan_is_paused(atchan)) { in atc_suspend_cyclic()
2025 atchan->save_dscr = channel_readl(atchan, DSCR); in atc_suspend_cyclic()
2027 vdbg_dump_regs(atchan); in atc_suspend_cyclic()
2038 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_suspend_noirq() local
2040 if (atc_chan_is_cyclic(atchan)) in at_dma_suspend_noirq()
2041 atc_suspend_cyclic(atchan); in at_dma_suspend_noirq()
2042 atchan->save_cfg = channel_readl(atchan, CFG); in at_dma_suspend_noirq()
2052 static void atc_resume_cyclic(struct at_dma_chan *atchan) in atc_resume_cyclic() argument
2054 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_resume_cyclic()
2058 channel_writel(atchan, SADDR, 0); in atc_resume_cyclic()
2059 channel_writel(atchan, DADDR, 0); in atc_resume_cyclic()
2060 channel_writel(atchan, CTRLA, 0); in atc_resume_cyclic()
2061 channel_writel(atchan, CTRLB, 0); in atc_resume_cyclic()
2062 channel_writel(atchan, DSCR, atchan->save_dscr); in atc_resume_cyclic()
2063 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2068 vdbg_dump_regs(atchan); in atc_resume_cyclic()
2088 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_resume_noirq() local
2090 channel_writel(atchan, CFG, atchan->save_cfg); in at_dma_resume_noirq()
2091 if (atc_chan_is_cyclic(atchan)) in at_dma_resume_noirq()
2092 atc_resume_cyclic(atchan); in at_dma_resume_noirq()
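
On resume, each channel gets its saved CFG back, and cyclic channels are re-armed from the DSCR value captured in atc_suspend_cyclic(). A sketch of the two pieces shown above:

	static void atc_resume_cyclic(struct at_dma_chan *atchan)
	{
		struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

		/* Restart the ring exactly where it was suspended. */
		channel_writel(atchan, SADDR, 0);
		channel_writel(atchan, DADDR, 0);
		channel_writel(atchan, CTRLA, 0);
		channel_writel(atchan, CTRLB, 0);
		channel_writel(atchan, DSCR, atchan->save_dscr);
		dma_writel(atdma, CHER, atchan->mask);
	}

	/* ...and in at_dma_resume_noirq(), per channel: */
		channel_writel(atchan, CFG, atchan->save_cfg);
		if (atc_chan_is_cyclic(atchan))
			atc_resume_cyclic(atchan);
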