Lines matching refs:dmatx — all hits are in the ARM AMBA PL011 UART driver (drivers/tty/serial/amba-pl011.c); the numbers below are that file's source lines.
273 struct pl011_dmatx_data dmatx; member
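
For orientation: the dmatx member at line 273 is a struct pl011_dmatx_data, which the driver defines as the complete TX-DMA state that every hit below touches:

struct pl011_dmatx_data {
	struct dma_chan		*chan;	/* TX DMA channel, set at probe time */
	struct scatterlist	sg;	/* one-entry list describing buf */
	char			*buf;	/* bounce buffer for outgoing bytes */
	bool			queued;	/* a TX descriptor is in flight */
};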
441 uap->dmatx.chan = chan; in pl011_dma_probe()
444 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
532 if (uap->dmatx.chan) in pl011_dma_remove()
533 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
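
pl011_dma_probe() stores the channel (lines 441-444) and pl011_dma_remove() hands it back (lines 532-533). A minimal sketch of that acquire/release pairing, assuming the channel is requested by name with dma_request_chan(); the example_* names here and below are illustrative, not the driver's:

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>

/* Illustrative acquire/release pairing for the TX channel. */
static int example_dma_probe(struct device *dev, struct pl011_dmatx_data *dmatx)
{
	struct dma_chan *chan = dma_request_chan(dev, "tx");

	if (IS_ERR(chan))
		return PTR_ERR(chan);	/* may be -EPROBE_DEFER */

	dmatx->chan = chan;
	dev_info(dev, "DMA channel TX %s\n", dma_chan_name(chan));
	return 0;
}

static void example_dma_remove(struct pl011_dmatx_data *dmatx)
{
	if (dmatx->chan)
		dma_release_channel(dmatx->chan);
}

The NULL check on remove mirrors line 532: DMA is optional for this UART, so the channel may never have been acquired.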
549 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback() local
554 if (uap->dmatx.queued) in pl011_dma_tx_callback()
555 dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1, in pl011_dma_tx_callback()
573 uap->dmatx.queued = false; in pl011_dma_tx_callback()
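
pl011_dma_tx_callback() (lines 549-573) is the completion side of the TX path. A reduced sketch of what those hits show, assuming the dmaengine invokes it with the callback_param set at submit time; the real driver additionally takes the port lock and starts the next refill here:

/* Illustrative completion callback, called in tasklet context when the
 * transfer finishes: unmap the bounce buffer and clear the in-flight
 * flag so the next refill may queue again. */
static void example_dma_tx_callback(void *data)
{
	struct pl011_dmatx_data *dmatx = data;

	if (dmatx->queued)
		dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
			     DMA_TO_DEVICE);
	dmatx->queued = false;
}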
598 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill() local
599 struct dma_chan *chan = dmatx->chan; in pl011_dma_tx_refill()
613 uap->dmatx.queued = false; in pl011_dma_tx_refill()
628 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count); in pl011_dma_tx_refill()
637 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first); in pl011_dma_tx_refill()
639 memcpy(&dmatx->buf[first], &xmit->buf[0], second); in pl011_dma_tx_refill()
642 dmatx->sg.length = count; in pl011_dma_tx_refill()
644 if (dma_map_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1) { in pl011_dma_tx_refill()
645 uap->dmatx.queued = false; in pl011_dma_tx_refill()
650 desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV, in pl011_dma_tx_refill()
653 dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE); in pl011_dma_tx_refill()
654 uap->dmatx.queued = false; in pl011_dma_tx_refill()
675 uap->dmatx.queued = true; in pl011_dma_tx_refill()
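
pl011_dma_tx_refill() (lines 598-675) is the heart of the TX path: copy pending bytes from the circular transmit buffer into the bounce buffer (two memcpy() calls when the data wraps), map the single-entry scatterlist, and queue a mem-to-dev descriptor. A sketch under the assumption that the transmit state is a standard struct circ_buf of UART_XMIT_SIZE bytes and that PL011_DMA_BUFFER_SIZE is PAGE_SIZE, as in the driver:

#include <linux/circ_buf.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/serial_core.h>
#include <linux/string.h>

#define PL011_DMA_BUFFER_SIZE	PAGE_SIZE

static int example_dma_tx_refill(struct pl011_dmatx_data *dmatx,
				 struct circ_buf *xmit, struct device *dma_dev)
{
	struct dma_chan *chan = dmatx->chan;
	struct dma_async_tx_descriptor *desc;
	unsigned int count;

	/* Take at most one bounce buffer's worth of pending bytes. */
	count = CIRC_CNT(xmit->head, xmit->tail, UART_XMIT_SIZE);
	if (count > PL011_DMA_BUFFER_SIZE)
		count = PL011_DMA_BUFFER_SIZE;

	if (xmit->tail < xmit->head) {
		/* Contiguous run: one copy suffices. */
		memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count);
	} else {
		/* Data wraps: copy tail..end first, then from the start. */
		size_t first = UART_XMIT_SIZE - xmit->tail;
		size_t second;

		if (first > count)
			first = count;
		second = count - first;

		memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first);
		if (second)
			memcpy(&dmatx->buf[first], &xmit->buf[0], second);
	}

	dmatx->sg.length = count;
	if (dma_map_sg(dma_dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1) {
		dmatx->queued = false;
		return -EBUSY;
	}

	desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dma_unmap_sg(dma_dev, &dmatx->sg, 1, DMA_TO_DEVICE);
		dmatx->queued = false;
		return -EBUSY;
	}

	/* Reuse the completion sketch from above. */
	desc->callback = example_dma_tx_callback;
	desc->callback_param = dmatx;
	dmaengine_submit(desc);
	dma_async_issue_pending(chan);

	dmatx->queued = true;
	xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1);
	return count;
}

Marking queued only after a successful submit is what lets every error path simply clear the flag and fall back to interrupt-driven TX.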
708 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
734 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
759 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
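
Lines 708, 734, and 759 are three instances of the same gate: dmatx.queued decides whether TX is currently DMA-driven or should fall back to the PIO/IRQ path. A sketch of the stop side, assuming UART011_TXDMAE is the TX-DMA enable bit of the PL011 DMA control register and that the register shadow and MMIO address are passed in (in the driver both live in struct uart_amba_port, and the port lock is held):

#include <linux/io.h>

#define UART011_TXDMAE	(1 << 1)	/* TX DMA enable in the PL011 DMACR */

/* Illustrative TX-DMA stop: only touch DMACR when a transfer is
 * actually in flight; otherwise there is nothing to disable. */
static void example_dma_tx_stop(struct pl011_dmatx_data *dmatx,
				u32 *dmacr_shadow, void __iomem *dmacr_reg)
{
	if (dmatx->queued) {
		*dmacr_shadow &= ~UART011_TXDMAE;
		writel_relaxed(*dmacr_shadow, dmacr_reg);
	}
}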
814 dmaengine_terminate_async(uap->dmatx.chan); in pl011_dma_flush_buffer()
816 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
817 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_flush_buffer()
819 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
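
pl011_dma_flush_buffer() (lines 814-819) runs under the port spinlock, so it cancels with the non-sleeping dmaengine_terminate_async() rather than a synchronous terminate. A reduced sketch of the cancel-and-unmap sequence the hits show:

/* Illustrative flush: abort any in-flight descriptor without sleeping,
 * then undo the mapping if a transfer had been queued. */
static void example_dma_flush_buffer(struct pl011_dmatx_data *dmatx)
{
	dmaengine_terminate_async(dmatx->chan);
	if (dmatx->queued) {
		dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
			     DMA_TO_DEVICE);
		dmatx->queued = false;
	}
}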
1116 if (!uap->dmatx.chan) in pl011_dma_startup()
1119 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
1120 if (!uap->dmatx.buf) { in pl011_dma_startup()
1126 sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE); in pl011_dma_startup()
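
pl011_dma_startup() (lines 1116-1126) allocates the bounce buffer and describes it once with a one-entry scatterlist; only sg.length changes per transfer (line 642). A sketch:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Illustrative startup: DMA is optional, so bail quietly when no
 * channel was probed.  __GFP_DMA keeps the buffer in a DMA-able zone
 * on platforms that still need that restriction. */
static int example_dma_startup(struct pl011_dmatx_data *dmatx)
{
	if (!dmatx->chan)
		return -ENODEV;

	dmatx->buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA);
	if (!dmatx->buf)
		return -ENOMEM;

	sg_init_one(&dmatx->sg, dmatx->buf, PL011_DMA_BUFFER_SIZE);
	return 0;
}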
1201 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1202 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1203 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_shutdown()
1205 uap->dmatx.queued = false; in pl011_dma_shutdown()
1208 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
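
pl011_dma_shutdown() (lines 1201-1208) is the teardown mirror: sleeping is allowed here, so the synchronous dmaengine_terminate_all() is used before unmapping and freeing. A reduced sketch:

/* Illustrative shutdown: stop the channel for good, release the
 * mapping if a transfer was still queued, then free the bounce
 * buffer allocated at startup. */
static void example_dma_shutdown(struct pl011_dmatx_data *dmatx)
{
	dmaengine_terminate_all(dmatx->chan);
	if (dmatx->queued) {
		dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
			     DMA_TO_DEVICE);
		dmatx->queued = false;
	}
	kfree(dmatx->buf);
	dmatx->buf = NULL;
}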