Matched lines from the Atmel AHB DMA controller driver (at_hdmac)
// SPDX-License-Identifier: GPL-2.0-or-later

#include <dt-bindings/dma/at91.h>
#include <linux/dma-mapping.h>

/*----------------------------------------------------------------------*/
/* in atc_first_active(): */
    return list_first_entry(&atchan->active_list, struct at_desc, desc_node);

/* in atc_first_queued(): */
    return list_first_entry(&atchan->queue, struct at_desc, desc_node);
/*
 * atc_alloc_descriptor - allocate and return an initialized descriptor
 *
 * Note: the ack bit is set in the descriptor flags at creation time.
 */
    struct at_dma *atdma = to_at_dma(chan->device);

    desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
    INIT_LIST_HEAD(&desc->tx_list);
    dma_async_tx_descriptor_init(&desc->txd, chan);
    desc->txd.flags = DMA_CTRL_ACK;
    desc->txd.tx_submit = atc_tx_submit;
    desc->txd.phys = phys;
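Pieced together, the allocation helper is a thin wrapper around the descriptor dma_pool; a minimal sketch of how the fragments above fit, assuming the NULL check and local declarations that the match listing elides:

    static struct at_desc *atc_alloc_descriptor(struct dma_chan *chan, gfp_t gfp_flags)
    {
        struct at_dma *atdma = to_at_dma(chan->device);
        struct at_desc *desc;
        dma_addr_t phys;

        desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
        if (!desc)
            return NULL;    /* assumed error path, not shown in the listing */

        INIT_LIST_HEAD(&desc->tx_list);
        dma_async_tx_descriptor_init(&desc->txd, chan);
        desc->txd.flags = DMA_CTRL_ACK;     /* acked by default, per the note above */
        desc->txd.tx_submit = atc_tx_submit;
        desc->txd.phys = phys;

        return desc;
    }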
/*
 * atc_desc_get - get an unused descriptor from free_list
 */
    spin_lock_irqsave(&atchan->lock, flags);
    list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
        if (async_tx_test_ack(&desc->txd)) {
            list_del(&desc->desc_node);
            ...
        }
        dev_dbg(chan2dev(&atchan->chan_common), ...);
    }
    spin_unlock_irqrestore(&atchan->lock, flags);
    dev_vdbg(chan2dev(&atchan->chan_common), ...);

    ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT);
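The shape of this helper is a common dmaengine pattern: scan the free list for an already-acked descriptor under the channel lock, else fall back to a fresh atomic allocation. A condensed sketch with the debug prints dropped (variable names here are illustrative):

    static struct at_desc *atc_desc_get(struct at_dma_chan *atchan)
    {
        struct at_desc *desc = NULL, *iter, *_iter;
        unsigned long flags;

        spin_lock_irqsave(&atchan->lock, flags);
        list_for_each_entry_safe(iter, _iter, &atchan->free_list, desc_node) {
            if (async_tx_test_ack(&iter->txd)) {
                /* the client no longer needs it: safe to recycle */
                list_del(&iter->desc_node);
                desc = iter;
                break;
            }
        }
        spin_unlock_irqrestore(&atchan->lock, flags);

        /* nothing reusable: allocate, but never sleep on this path */
        if (!desc)
            desc = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT);

        return desc;
    }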
/*
 * atc_desc_put - move a descriptor, including any children, to the free list
 */
    spin_lock_irqsave(&atchan->lock, flags);
    list_for_each_entry(child, &desc->tx_list, desc_node)
        dev_vdbg(chan2dev(&atchan->chan_common), ...);
    list_splice_init(&desc->tx_list, &atchan->free_list);
    dev_vdbg(chan2dev(&atchan->chan_common), ...);
    list_add(&desc->desc_node, &atchan->free_list);
    spin_unlock_irqrestore(&atchan->lock, flags);
/*
 * atc_desc_chain - build chain adding a descriptor
 */
        (*prev)->lli.dscr = desc->txd.phys;
        list_add_tail(&desc->desc_node,
                      &(*first)->tx_list);
/*
 * atc_dostart - starts the DMA engine for real
 *
 * Called with atchan->lock held and bh disabled
 */
    struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

        dev_err(chan2dev(&atchan->chan_common),
            "BUG: Attempted to start non-idle channel\n");
        dev_err(chan2dev(&atchan->chan_common), ...);

    channel_writel(atchan, DSCR, first->txd.phys);
    channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) |
               ATC_SPIP_BOUNDARY(first->boundary));
    channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) |
               ATC_DPIP_BOUNDARY(first->boundary));

    dma_writel(atdma, CHER, atchan->mask);
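Read together with the two dev_err() lines, the start path clearly refuses to reprogram a live channel before handing it the first linked-list item; a sketch of that guard, assuming atc_chan_is_enabled() (used later in this listing) is the idleness test:

    if (atc_chan_is_enabled(atchan)) {
        dev_err(chan2dev(&atchan->chan_common),
            "BUG: Attempted to start non-idle channel\n");
        return;     /* assumed bail-out; register dump elided */
    }

    channel_writel(atchan, DSCR, first->txd.phys);  /* physical address of the first LLI */
    dma_writel(atdma, CHER, atchan->mask);          /* set this channel's enable bit */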
/*
 * atc_get_desc_by_cookie - get the descriptor of a cookie
 */
    list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) {
        if (desc->txd.cookie == cookie)
            return desc;    /* assumed body; elided by the match listing */
    }

    list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) {
        if (desc->txd.cookie == cookie)
            return desc;    /* assumed body; elided by the match listing */
    }
/*
 * atc_calc_bytes_left - calculates the number of bytes left according to the ...
 */
    return current_len - (btsize << src_width);
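BTSIZE counts transfers in units of the source width, so the residue math is a subtract and a shift; a worked example, assuming the usual log2 encoding of the width (0 = byte, 1 = halfword, 2 = word):

    /*
     * 32-bit source: src_width = 2, so one BTSIZE unit is 4 bytes.
     *   current_len = 4096, btsize = 100
     *   bytes done  = 100 << 2 = 400
     *   bytes left  = 4096 - 400 = 3696
     */
    return current_len - (btsize << src_width);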
/*
 * atc_get_bytes_left - get the number of bytes residue for a cookie
 */
        return -EINVAL;

        return desc->total_len;

    ret = desc_first->total_len;

    if (desc_first->lli.dscr) {
        /*
         * - If the DMA transfer is paused, RX overruns or TX underruns ...
         * - The atc_pause() function masks interrupts but we'd rather ...
         */
            return -ETIMEDOUT;

        if (desc_first->lli.dscr == dscr)
            ...

        ret -= desc_first->len;
        list_for_each_entry(desc, &desc_first->tx_list, desc_node) {
            if (desc->lli.dscr == dscr)
                ...
            ret -= desc->len;
        }
/*
 * atc_chain_complete - finish work for one transaction chain
 */
    struct dma_async_tx_descriptor *txd = &desc->txd;
    struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

    dev_vdbg(chan2dev(&atchan->chan_common),
         "descriptor %u complete\n", txd->cookie);

    spin_lock_irqsave(&atchan->lock, flags);
    spin_unlock_irqrestore(&atchan->lock, flags);

    spin_lock_irqsave(&atchan->lock, flags);
    list_splice_init(&desc->tx_list, &atchan->free_list);
    list_add(&desc->desc_node, &atchan->free_list);
    spin_unlock_irqrestore(&atchan->lock, flags);

    if (desc->memset_buffer) {
        dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
                  desc->memset_paddr);
        desc->memset_buffer = false;
    }
/*
 * atc_advance_work - at the end of a transaction, move forward
 */
    dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n");

    spin_lock_irqsave(&atchan->lock, flags);
    if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list))
        return spin_unlock_irqrestore(&atchan->lock, flags);

    list_del_init(&desc->desc_node);
    spin_unlock_irqrestore(&atchan->lock, flags);

    spin_lock_irqsave(&atchan->lock, flags);
    if (!list_empty(&atchan->active_list)) {
        list_move_tail(&desc->desc_node, &atchan->active_list);
    }
    spin_unlock_irqrestore(&atchan->lock, flags);
/*
 * atc_handle_error - handle errors reported by DMA controller
 */
    spin_lock_irqsave(&atchan->lock, flags);

    list_del_init(&bad_desc->desc_node);
    if (!list_empty(&atchan->active_list)) {
        list_move_tail(&desc->desc_node, &atchan->active_list);
    }

    dev_crit(chan2dev(&atchan->chan_common), ...);
    dev_crit(chan2dev(&atchan->chan_common),
         " cookie: %d\n", bad_desc->txd.cookie);
    atc_dump_lli(atchan, &bad_desc->lli);
    list_for_each_entry(child, &bad_desc->tx_list, desc_node)
        atc_dump_lli(atchan, &child->lli);

    spin_unlock_irqrestore(&atchan->lock, flags);
/*
 * atc_handle_cyclic - at the end of a period, run callback function
 */
    struct dma_async_tx_descriptor *txd = &first->txd;

    dev_vdbg(chan2dev(&atchan->chan_common),
         "new cyclic period llp 0x%08x\n", ...);
/*-- IRQ & Tasklet ---------------------------------------------------*/

/* in atc_tasklet(): */
    if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status))
        ...

/* in at_dma_interrupt(): */
    dev_vdbg(atdma->dma_common.dev, ...);

    for (i = 0; i < atdma->dma_common.chancnt; i++) {
        atchan = &atdma->chan[i];
            ... AT_DMA_RES(i) | atchan->mask);
            set_bit(ATC_IS_ERROR, &atchan->status);
        tasklet_schedule(&atchan->tasklet);
    }
/*-- DMA Engine API --------------------------------------------------*/

/*
 * atc_tx_submit - set the prepared descriptor(s) to be executed by the engine
 */
    struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);

    spin_lock_irqsave(&atchan->lock, flags);
    list_add_tail(&desc->desc_node, &atchan->queue);
    spin_unlock_irqrestore(&atchan->lock, flags);

    dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n",
         desc->txd.cookie);
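tx_submit only assigns a cookie and queues the descriptor; nothing touches the hardware until the client issues the pending work. A minimal client-side sketch using the generic dmaengine calls (txd would come from one of the atc_prep_*() callbacks below):

    dma_cookie_t cookie;

    cookie = dmaengine_submit(txd);     /* lands in atc_tx_submit() */
    if (dma_submit_error(cookie))
        return -EINVAL;

    dma_async_issue_pending(chan);      /* lands in atc_issue_pending() */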
/*
 * atc_prep_dma_interleaved - prepare memory to memory interleaved operation
 */
    if (unlikely(!xt || xt->numf != 1 || !xt->frame_size))
        ...

    first = xt->sgl;

        ... __func__, &xt->src_start, &xt->dst_start, xt->numf,
        xt->frame_size, flags);

    for (i = 0; i < xt->frame_size; i++) {
        struct data_chunk *chunk = xt->sgl + i;

        if ((chunk->size != xt->sgl->size) ||
            ...

        len += chunk->size;
    }

    dwidth = atc_get_xfer_width(xt->src_start,
                    xt->dst_start, len);

    desc->lli.saddr = xt->src_start;
    desc->lli.daddr = xt->dst_start;
    desc->lli.ctrla = ctrla | xfer_count;
    desc->lli.ctrlb = ctrlb;

    desc->boundary = first->size >> dwidth;
    desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1;
    desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1;

    desc->txd.cookie = -EBUSY;
    desc->total_len = desc->len = len;

    /* set end-of-link to the last link descriptor of list */

    desc->txd.flags = flags; /* client is in control of this ack */

    return &desc->txd;
/*
 * atc_prep_dma_memcpy - prepare a memcpy operation
 */
        xfer_count = min_t(size_t, (len - offset) >> src_width,
                   ...);

        desc->lli.saddr = src + offset;
        desc->lli.daddr = dest + offset;
        desc->lli.ctrla = ctrla | xfer_count;
        desc->lli.ctrlb = ctrlb;

        desc->txd.cookie = 0;
        desc->len = xfer_count << src_width;

    first->txd.cookie = -EBUSY;
    first->total_len = len;

    /* set end-of-link to the last link descriptor of list */

    first->txd.flags = flags; /* client is in control of this ack */

    return &first->txd;
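From the client side this path is reached through the generic wrapper; a hedged sketch, assuming streaming mappings on a DMA-capable struct device (buffer and length names are illustrative):

    dma_addr_t src, dst;
    struct dma_async_tx_descriptor *txd;

    src = dma_map_single(dev, src_buf, len, DMA_TO_DEVICE);
    dst = dma_map_single(dev, dst_buf, len, DMA_FROM_DEVICE);

    /* dispatched to atc_prep_dma_memcpy() via the device_prep_dma_memcpy hook */
    txd = dmaengine_prep_dma_memcpy(chan, dst, src, len,
                    DMA_PREP_INTERRUPT | DMA_CTRL_ACK);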
/* in atc_create_memset_desc(): */
    desc->lli.saddr = psrc;
    desc->lli.daddr = pdst;
    desc->lli.ctrla = ctrla | xfer_count;
    desc->lli.ctrlb = ctrlb;

    desc->txd.cookie = 0;
    desc->len = len;
/*
 * atc_prep_dma_memset - prepare a memset operation
 */
    struct at_dma *atdma = to_at_dma(chan->device);

    if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
        ...

    vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);

    desc->memset_paddr = paddr;
    desc->memset_vaddr = vaddr;
    desc->memset_buffer = true;

    desc->txd.cookie = -EBUSY;
    desc->total_len = len;

    /* set end-of-link on the descriptor */

    desc->txd.flags = flags;

    return &desc->txd;

    dma_pool_free(atdma->memset_pool, vaddr, paddr);
/* in atc_prep_dma_memset_sg(): */
    struct at_dma *atdma = to_at_dma(chan->device);

    vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);

        if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
            ...

        desc->memset_paddr = paddr;
        desc->memset_vaddr = vaddr;
        desc->memset_buffer = true;

    first->txd.cookie = -EBUSY;
    first->total_len = total_len;

    /* set end-of-link on the descriptor */

    first->txd.flags = flags;

    return &first->txd;
/*
 * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
 */
    struct at_dma_slave *atslave = chan->private;
    struct dma_slave_config *sconfig = &atchan->dma_sconfig;

    ctrla = ATC_SCSIZE(sconfig->src_maxburst)
          | ATC_DCSIZE(sconfig->dst_maxburst);

    /* memory -> peripheral */
    reg_width = convert_buswidth(sconfig->dst_addr_width);
        ... | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if);
    reg = sconfig->dst_addr;

        desc->lli.saddr = mem;
        desc->lli.daddr = reg;
        desc->lli.ctrla = ctrla
            ...
        desc->lli.ctrlb = ctrlb;
        desc->len = len;

    /* peripheral -> memory */
    reg_width = convert_buswidth(sconfig->src_addr_width);
        ... | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if);
    reg = sconfig->src_addr;

        desc->lli.saddr = reg;
        desc->lli.daddr = mem;
        desc->lli.ctrla = ctrla
            ...
        desc->lli.ctrlb = ctrlb;
        desc->len = len;

    /* set end-of-link to the last link descriptor of list */

    first->txd.cookie = -EBUSY;
    first->total_len = total_len;

    first->txd.flags = flags; /* client is in control of this ack */

    return &first->txd;
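ctrla and the SIF/DIF routing above are all derived from the dma_slave_config the client installed beforehand; a minimal client-side sketch, assuming a peripheral FIFO at fifo_phys (an illustrative name):

    struct dma_slave_config cfg = {
        .direction      = DMA_MEM_TO_DEV,
        .dst_addr       = fifo_phys,            /* becomes 'reg' above */
        .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
        .dst_maxburst   = 16,                   /* feeds ATC_DCSIZE() */
    };

    if (dmaengine_slave_config(chan, &cfg))     /* lands in atc_config() */
        return -EINVAL;

    txd = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
                      DMA_PREP_INTERRUPT);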
/* in atc_dma_cyclic_check_values(): */
    if (unlikely(period_len & ((1 << reg_width) - 1)))
        ...
    if (unlikely(buf_addr & ((1 << reg_width) - 1)))
        ...

    return -EINVAL;
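Both tests are power-of-two alignment checks against the programmed bus width; a worked example for a 4-byte width:

    /*
     * reg_width = 2 (4-byte bus): mask = (1 << 2) - 1 = 0x3
     *   period_len = 510 -> 510 & 0x3 = 2 -> rejected, -EINVAL
     *   period_len = 512 -> 512 & 0x3 = 0 -> accepted
     * buf_addr must clear the same mask.
     */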
/*
 * atc_dma_cyclic_fill_desc - Fill one period descriptor
 */
    struct dma_slave_config *sconfig = &atchan->dma_sconfig;

    ctrla = ATC_SCSIZE(sconfig->src_maxburst)
          | ATC_DCSIZE(sconfig->dst_maxburst)
          ...

    /* memory -> peripheral */
        desc->lli.saddr = buf_addr + (period_len * period_index);
        desc->lli.daddr = sconfig->dst_addr;
        desc->lli.ctrla = ctrla;
        desc->lli.ctrlb = ATC_DST_ADDR_MODE_FIXED
                ...
                | ATC_SIF(atchan->mem_if)
                | ATC_DIF(atchan->per_if);
        desc->len = period_len;

    /* peripheral -> memory */
        desc->lli.saddr = sconfig->src_addr;
        desc->lli.daddr = buf_addr + (period_len * period_index);
        desc->lli.ctrla = ctrla;
        desc->lli.ctrlb = ATC_DST_ADDR_MODE_INCR
                ...
                | ATC_SIF(atchan->per_if)
                | ATC_DIF(atchan->mem_if);
        desc->len = period_len;

    return -EINVAL;
/*
 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
 */
    struct at_dma_slave *atslave = chan->private;
    struct dma_slave_config *sconfig = &atchan->dma_sconfig;

    dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@%pad - %d (%d/%d)\n",
         ...);

    was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status);

        reg_width = convert_buswidth(sconfig->dst_addr_width);
        reg_width = convert_buswidth(sconfig->src_addr_width);

    prev->lli.dscr = first->txd.phys;

    first->txd.cookie = -EBUSY;
    first->total_len = buf_len;

    return &first->txd;

    clear_bit(ATC_IS_CYCLIC, &atchan->status);
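The closing write is what makes the chain cyclic: the last LLI's dscr points back at the first descriptor's physical address, so the controller loops until terminated. A client-side sketch, hedged, using the generic wrapper (buffer layout is illustrative):

    /* e.g. an audio ring of 8 periods x 4 KiB */
    txd = dmaengine_prep_dma_cyclic(chan, buf_phys, 8 * 4096, 4096,
                    DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
    if (!txd)
        return -ENOMEM;

    txd->callback = period_done;    /* fired per period from atc_handle_cyclic() */
    dmaengine_submit(txd);
    dma_async_issue_pending(chan);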
/* in atc_config(): */
    if (!chan->private)
        return -EINVAL;

    memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig));

    convert_burst(&atchan->dma_sconfig.src_maxburst);
    convert_burst(&atchan->dma_sconfig.dst_maxburst);
/* in atc_pause(): */
    struct at_dma *atdma = to_at_dma(chan->device);
    int chan_id = atchan->chan_common.chan_id;

    spin_lock_irqsave(&atchan->lock, flags);
    set_bit(ATC_IS_PAUSED, &atchan->status);
    spin_unlock_irqrestore(&atchan->lock, flags);

/* in atc_resume(): */
    struct at_dma *atdma = to_at_dma(chan->device);
    int chan_id = atchan->chan_common.chan_id;

    spin_lock_irqsave(&atchan->lock, flags);
    clear_bit(ATC_IS_PAUSED, &atchan->status);
    spin_unlock_irqrestore(&atchan->lock, flags);
/* in atc_terminate_all(): */
    struct at_dma *atdma = to_at_dma(chan->device);
    int chan_id = atchan->chan_common.chan_id;

    spin_lock_irqsave(&atchan->lock, flags);

    dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

    /* wait for the channel to actually stop */
    while (dma_readl(atdma, CHSR) & atchan->mask)
        cpu_relax();    /* assumed busy-wait body, elided by the listing */

    list_splice_tail_init(&atchan->queue, &atchan->free_list);
    list_splice_tail_init(&atchan->active_list, &atchan->free_list);

    clear_bit(ATC_IS_PAUSED, &atchan->status);
    clear_bit(ATC_IS_CYCLIC, &atchan->status);

    spin_unlock_irqrestore(&atchan->lock, flags);
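All three hooks surface through the generic dmaengine control calls; the client-side view, for reference:

    dmaengine_pause(chan);          /* -> atc_pause(): suspend, state kept */
    dmaengine_resume(chan);         /* -> atc_resume(): clears ATC_IS_PAUSED */
    dmaengine_terminate_all(chan);  /* -> atc_terminate_all(): CHDR write, lists recycled */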
/*
 * atc_tx_status - poll for transaction completion
 *
 * ... the status of multiple cookies without re-checking hardware state.
 */
    spin_lock_irqsave(&atchan->lock, flags);
    spin_unlock_irqrestore(&atchan->lock, flags);
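Completion is polled through the generic helper, which ends up here; residue reporting relies on atc_get_bytes_left() above. A minimal sketch:

    struct dma_tx_state state;
    enum dma_status status;

    status = dmaengine_tx_status(chan, cookie, &state); /* -> atc_tx_status() */
    if (status == DMA_COMPLETE)
        pr_info("transfer done\n");
    else
        pr_info("residue: %u bytes\n", state.residue);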
/*
 * atc_issue_pending - takes the first transaction descriptor in the pending ...
 */
    spin_lock_irqsave(&atchan->lock, flags);
    if (atc_chan_is_enabled(atchan) || list_empty(&atchan->queue))
        return spin_unlock_irqrestore(&atchan->lock, flags);

    list_move_tail(&desc->desc_node, &atchan->active_list);
    spin_unlock_irqrestore(&atchan->lock, flags);
/*
 * atc_alloc_chan_resources - allocate resources for DMA channel
 *
 * return - the number of allocated descriptors
 */
    struct at_dma *atdma = to_at_dma(chan->device);

        return -EIO;

    if (!list_empty(&atchan->free_list)) {
        return -EIO;

    atslave = chan->private;

    /*
     * We need controller-specific data to set up slave ...
     */
    BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_common.dev);

    if (atslave->cfg)
        cfg = atslave->cfg;

    dev_err(atdma->dma_common.dev, ...);

    list_add_tail(&desc->desc_node, &atchan->free_list);
/*
 * atc_free_chan_resources - free all channel resources
 */
    struct at_dma *atdma = to_at_dma(chan->device);

    BUG_ON(!list_empty(&atchan->active_list));
    BUG_ON(!list_empty(&atchan->queue));

    list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
        list_del(&desc->desc_node);
        dma_pool_free(atdma->dma_desc_pool, desc, desc->txd.phys);
    }
    list_splice_init(&atchan->free_list, &list);
    atchan->status = 0;

    kfree(chan->private);
    chan->private = NULL;
/* in at_dma_filter(): */
    if (atslave->dma_dev == chan->device->dev) {
        chan->private = atslave;
/* in at_dma_xlate(): */
    if (dma_spec->args_count != 2)
        ...

    dmac_pdev = of_find_device_by_node(dma_spec->np);

        put_device(&dmac_pdev->dev);

    atslave->cfg = ATC_DST_H2SEL_HW | ATC_SRC_H2SEL_HW;

    per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK;
    atslave->cfg |= ATC_DST_PER_MSB(per_id) | ATC_DST_PER(per_id)
            ...

    switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) {
        atslave->cfg |= ATC_FIFOCFG_LARGESTBURST;
        atslave->cfg |= ATC_FIFOCFG_ENOUGHSPACE;
        atslave->cfg |= ATC_FIFOCFG_HALFFIFO;
    }

    atslave->dma_dev = &dmac_pdev->dev;

    put_device(&dmac_pdev->dev);

    atchan->per_if = dma_spec->args[0] & 0xff;
    atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff;
/*-- Module Management -----------------------------------------------*/

/* cap_mask is a multi-u32 bitfield, fill it with proper C code. */

    .compatible = "atmel,at91sam9rl-dma",
    .compatible = "atmel,at91sam9g45-dma",

/* in at_dma_get_driver_data(): */
    if (pdev->dev.of_node) {
        match = of_match_node(atmel_dma_dt_ids, pdev->dev.of_node);
        return match->data;
    }
    ... platform_get_device_id(pdev)->driver_data;
/*
 * at_dma_off - disable DMA controller
 */
    dma_writel(atdma, EBCIDR, -1L);

    while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
        ...
/* in at_dma_probe(): */
        return -ENODEV;

        return -EINVAL;

    size += plat_dat->nr_channels * sizeof(struct at_dma_chan);
        return -ENOMEM;

    atdma->dma_common.cap_mask = plat_dat->cap_mask;
    atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;

    if (!request_mem_region(io->start, size, pdev->dev.driver->name)) {
        err = -EBUSY;

    atdma->regs = ioremap(io->start, size);
    if (!atdma->regs) {
        err = -ENOMEM;

    atdma->clk = clk_get(&pdev->dev, "dma_clk");
    if (IS_ERR(atdma->clk)) {
        err = PTR_ERR(atdma->clk);
    err = clk_prepare_enable(atdma->clk);

    atdma->dma_desc_pool = dma_pool_create("at_hdmac_desc_pool",
                    &pdev->dev, sizeof(struct at_desc), ...);
    if (!atdma->dma_desc_pool) {
        dev_err(&pdev->dev, "No memory for descriptors dma pool\n");
        err = -ENOMEM;

    atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
                    &pdev->dev, sizeof(int), 4, 0);
    if (!atdma->memset_pool) {
        dev_err(&pdev->dev, "No memory for memset dma pool\n");
        err = -ENOMEM;
1839 for (i = 0; i < plat_dat->nr_channels; i++) { in at_dma_probe()
1840 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe()
1842 atchan->mem_if = AT_DMA_MEM_IF; in at_dma_probe()
1843 atchan->per_if = AT_DMA_PER_IF; in at_dma_probe()
1844 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1845 dma_cookie_init(&atchan->chan_common); in at_dma_probe()
1846 list_add_tail(&atchan->chan_common.device_node, in at_dma_probe()
1847 &atdma->dma_common.channels); in at_dma_probe()
1849 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1850 spin_lock_init(&atchan->lock); in at_dma_probe()
1851 atchan->mask = 1 << i; in at_dma_probe()
1853 INIT_LIST_HEAD(&atchan->active_list); in at_dma_probe()
1854 INIT_LIST_HEAD(&atchan->queue); in at_dma_probe()
1855 INIT_LIST_HEAD(&atchan->free_list); in at_dma_probe()
1857 tasklet_setup(&atchan->tasklet, atc_tasklet); in at_dma_probe()
    atdma->dma_common.device_alloc_chan_resources = atc_alloc_chan_resources;
    atdma->dma_common.device_free_chan_resources = atc_free_chan_resources;
    atdma->dma_common.device_tx_status = atc_tx_status;
    atdma->dma_common.device_issue_pending = atc_issue_pending;
    atdma->dma_common.dev = &pdev->dev;

    /* set prep routines based on capability */
    if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_common.cap_mask))
        atdma->dma_common.device_prep_interleaved_dma = atc_prep_dma_interleaved;

    if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask))
        atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy;

    if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) {
        atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset;
        atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
        atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES;
    }

    if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) {
        atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg;
        dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask);
        atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic;
        atdma->dma_common.device_config = atc_config;
        atdma->dma_common.device_pause = atc_pause;
        atdma->dma_common.device_resume = atc_resume;
        atdma->dma_common.device_terminate_all = atc_terminate_all;
        atdma->dma_common.src_addr_widths = ATC_DMA_BUSWIDTHS;
        atdma->dma_common.dst_addr_widths = ATC_DMA_BUSWIDTHS;
        atdma->dma_common.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
        atdma->dma_common.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
    }

    dev_info(&pdev->dev, "Atmel AHB DMA Controller ( %s%s%s), %d channels\n",
         dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "",
         dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask) ? "set " : "",
         dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "",
         plat_dat->nr_channels);
    err = dma_async_device_register(&atdma->dma_common);
    if (err)
        dev_err(&pdev->dev, "Unable to register: %d.\n", err);

    if (pdev->dev.of_node) {
        err = of_dma_controller_register(pdev->dev.of_node, ...);
        if (err)
            dev_err(&pdev->dev, "could not register of_dma_controller\n");
    }

    /* unwind on error: */
    dma_async_device_unregister(&atdma->dma_common);
    dma_pool_destroy(atdma->memset_pool);
    dma_pool_destroy(atdma->dma_desc_pool);
    clk_disable_unprepare(atdma->clk);
    clk_put(atdma->clk);
    iounmap(atdma->regs);
    atdma->regs = NULL;
    release_mem_region(io->start, size);
/* in at_dma_remove(): */
    if (pdev->dev.of_node)
        of_dma_controller_free(pdev->dev.of_node);
    dma_async_device_unregister(&atdma->dma_common);

    dma_pool_destroy(atdma->memset_pool);
    dma_pool_destroy(atdma->dma_desc_pool);

    list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
                 device_node) {
        atc_disable_chan_irq(atdma, chan->chan_id);
        tasklet_kill(&atchan->tasklet);
        list_del(&chan->device_node);
    }

    clk_disable_unprepare(atdma->clk);
    clk_put(atdma->clk);

    iounmap(atdma->regs);
    atdma->regs = NULL;

    release_mem_region(io->start, resource_size(io));
/* in at_dma_shutdown(): */
    clk_disable_unprepare(atdma->clk);
/* in at_dma_prepare(): */
    list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
                 device_node) {
        ...
            return -EAGAIN;
    }
/* in atc_suspend_cyclic(): */
    struct dma_chan *chan = &atchan->chan_common;

    atchan->save_dscr = channel_readl(atchan, DSCR);
/* in at_dma_suspend_noirq(): */
    list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
                 device_node) {
        atchan->save_cfg = channel_readl(atchan, CFG);
    }
    atdma->save_imr = dma_readl(atdma, EBCIMR);

    clk_disable_unprepare(atdma->clk);
/* in atc_resume_cyclic(): */
    struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

    channel_writel(atchan, DSCR, atchan->save_dscr);
    dma_writel(atdma, CHER, atchan->mask);
/* in at_dma_resume_noirq(): */
    clk_prepare_enable(atdma->clk);

    dma_writel(atdma, EBCIER, atdma->save_imr);
    list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels,
                 device_node) {
        channel_writel(atchan, CFG, atchan->save_cfg);
    }