Lines matching refs: sw_desc — every reference to the sw_desc identifier in the Xilinx DPDMA driver (drivers/dma/xilinx/xilinx_dpdma.c). Each entry shows the source line number, the matching code, and the enclosing function; "argument" and "local" mark the lines where sw_desc is declared.
517 struct xilinx_dpdma_sw_desc *sw_desc, in xilinx_dpdma_sw_desc_set_dma_addrs() argument
522 struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw; in xilinx_dpdma_sw_desc_set_dma_addrs()
549 prev->hw.next_desc = lower_32_bits(sw_desc->dma_addr); in xilinx_dpdma_sw_desc_set_dma_addrs()
553 upper_32_bits(sw_desc->dma_addr)); in xilinx_dpdma_sw_desc_set_dma_addrs()
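The fragments above (lines 517-553) come from the helper that stores a software descriptor's bus address in its hardware view and links the previous descriptor to it. Because the hardware descriptor's next_desc field holds only 32 bits, the upper bits of the 64-bit dma_addr go into a separate extension field. A minimal kernel-style sketch of that split follows; the struct and helper names are illustrative, only lower_32_bits()/upper_32_bits() are the real kernel macros, and the real driver additionally packs the upper bits with a field mask (hence the extra parentheses on line 880):

#include <linux/kernel.h>
#include <linux/types.h>

struct my_hw_desc {
	u32 next_desc;	/* lower 32 bits of the next descriptor's bus address */
	u32 addr_ext;	/* upper bits; the driver packs these with a field mask */
	u32 desc_id;	/* transaction ID, stamped at queue time (lines 869-870) */
};

static void my_chain_to(struct my_hw_desc *prev, dma_addr_t next_dma_addr)
{
	prev->next_desc = lower_32_bits(next_dma_addr);
	prev->addr_ext = upper_32_bits(next_dma_addr);	/* packing omitted */
}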
567 struct xilinx_dpdma_sw_desc *sw_desc; in xilinx_dpdma_chan_alloc_sw_desc() local
570 sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr); in xilinx_dpdma_chan_alloc_sw_desc()
571 if (!sw_desc) in xilinx_dpdma_chan_alloc_sw_desc()
574 sw_desc->dma_addr = dma_addr; in xilinx_dpdma_chan_alloc_sw_desc()
576 return sw_desc; in xilinx_dpdma_chan_alloc_sw_desc()
588 struct xilinx_dpdma_sw_desc *sw_desc) in xilinx_dpdma_chan_free_sw_desc() argument
590 dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr); in xilinx_dpdma_chan_free_sw_desc()
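xilinx_dpdma_chan_alloc_sw_desc() and xilinx_dpdma_chan_free_sw_desc() (lines 567-590) manage one descriptor's lifetime through the channel's DMA pool. dma_pool_zalloc() returns both the CPU pointer and the device-visible bus address; the driver saves that address in sw_desc->dma_addr because dma_pool_free() needs it again at release time. A sketch of the pattern, reusing struct my_hw_desc from above; the dma_pool_*() calls are the real API, everything else is illustrative:

#include <linux/dmapool.h>
#include <linux/list.h>

struct my_sw_desc {
	struct my_hw_desc hw;	/* first member: this is what the device reads */
	dma_addr_t dma_addr;	/* bus address of this descriptor */
	struct list_head node;	/* links the descriptors of one transaction */
};

static struct my_sw_desc *my_alloc_desc(struct dma_pool *pool)
{
	struct my_sw_desc *sw_desc;
	dma_addr_t dma_addr;

	/* GFP_ATOMIC: descriptor preparation may run in atomic context. */
	sw_desc = dma_pool_zalloc(pool, GFP_ATOMIC, &dma_addr);
	if (!sw_desc)
		return NULL;

	sw_desc->dma_addr = dma_addr;
	return sw_desc;
}

static void my_free_desc(struct dma_pool *pool, struct my_sw_desc *sw_desc)
{
	/* dma_pool_free() needs the bus address saved at allocation time. */
	dma_pool_free(pool, sw_desc, sw_desc->dma_addr);
}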
603 struct xilinx_dpdma_sw_desc *sw_desc; in xilinx_dpdma_chan_dump_tx_desc() local
610 list_for_each_entry(sw_desc, &tx_desc->descriptors, node) { in xilinx_dpdma_chan_dump_tx_desc()
611 struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw; in xilinx_dpdma_chan_dump_tx_desc()
614 dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr); in xilinx_dpdma_chan_dump_tx_desc()
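The dump helper (lines 603-614) walks a transaction's descriptor list and prints each bus address with the %pad printk specifier, which takes a pointer to the dma_addr_t because its width is configuration-dependent. A sketch of the same walk over the illustrative types above:

#include <linux/device.h>

static void my_dump_descs(struct device *dev, struct list_head *descriptors)
{
	struct my_sw_desc *sw_desc;

	list_for_each_entry(sw_desc, descriptors, node)
		dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr);
}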
668 struct xilinx_dpdma_sw_desc *sw_desc, *next; in xilinx_dpdma_chan_free_tx_desc() local
676 list_for_each_entry_safe(sw_desc, next, &desc->descriptors, node) { in xilinx_dpdma_chan_free_tx_desc()
677 list_del(&sw_desc->node); in xilinx_dpdma_chan_free_tx_desc()
678 xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc); in xilinx_dpdma_chan_free_tx_desc()
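Teardown (lines 668-678) uses the _safe variant of the list iterator because each node is unlinked and freed inside the loop body; the iterator caches the next pointer before the current entry is destroyed. Sketch, continuing the illustrative names above:

static void my_free_all_descs(struct dma_pool *pool, struct list_head *descriptors)
{
	struct my_sw_desc *sw_desc, *next;

	/* list_del() inside the body is safe: "next" was fetched beforehand. */
	list_for_each_entry_safe(sw_desc, next, descriptors, node) {
		list_del(&sw_desc->node);
		my_free_desc(pool, sw_desc);
	}
}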
700 struct xilinx_dpdma_sw_desc *sw_desc; in xilinx_dpdma_chan_prep_interleaved_dma() local
715 sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan); in xilinx_dpdma_chan_prep_interleaved_dma()
716 if (!sw_desc) { in xilinx_dpdma_chan_prep_interleaved_dma()
721 xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc, in xilinx_dpdma_chan_prep_interleaved_dma()
724 hw_desc = &sw_desc->hw; in xilinx_dpdma_chan_prep_interleaved_dma()
736 list_add_tail(&sw_desc->node, &tx_desc->descriptors); in xilinx_dpdma_chan_prep_interleaved_dma()
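Note line 721: in the interleaved prep path the new descriptor is passed to xilinx_dpdma_sw_desc_set_dma_addrs() as its own predecessor, so its next pointer ends up holding its own bus address. That yields a one-entry circular chain the engine can keep re-fetching, which suits a display pipeline that scans out the same frame until a new one is queued. A sketch of that self-chaining step with the illustrative helpers above:

static struct my_sw_desc *my_prep_one_frame(struct dma_pool *pool,
					    struct list_head *descriptors)
{
	struct my_sw_desc *sw_desc;

	sw_desc = my_alloc_desc(pool);
	if (!sw_desc)
		return NULL;

	/* Self-link: next_desc points back at this descriptor's own address. */
	my_chain_to(&sw_desc->hw, sw_desc->dma_addr);

	list_add_tail(&sw_desc->node, descriptors);
	return sw_desc;
}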
839 struct xilinx_dpdma_sw_desc *sw_desc; in xilinx_dpdma_chan_queue_transfer() local
869 list_for_each_entry(sw_desc, &desc->descriptors, node) in xilinx_dpdma_chan_queue_transfer()
870 sw_desc->hw.desc_id = desc->vdesc.tx.cookie in xilinx_dpdma_chan_queue_transfer()
873 sw_desc = list_first_entry(&desc->descriptors, in xilinx_dpdma_chan_queue_transfer()
876 lower_32_bits(sw_desc->dma_addr)); in xilinx_dpdma_chan_queue_transfer()
880 upper_32_bits(sw_desc->dma_addr))); in xilinx_dpdma_chan_queue_transfer()
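When a transaction is queued (lines 839-880), every hardware descriptor is stamped with an ID derived from the DMA engine cookie so interrupt handling can tell which transfer the hardware is on, and the channel's start-address register pair is pointed at the first descriptor, again split into lower and upper 32 bits. A sketch; the register offsets are placeholders rather than the DPDMA register map, while writel() and the list/bit helpers are the real kernel API:

#include <linux/io.h>

#define MY_DESC_START_ADDR	0x00	/* placeholder offset */
#define MY_DESC_START_ADDRE	0x04	/* placeholder offset, upper bits */

static void my_queue_transfer(void __iomem *base, struct list_head *descriptors,
			      u32 desc_id)
{
	struct my_sw_desc *sw_desc, *first;

	/* Stamp each descriptor so IRQ handlers can match hw progress to it. */
	list_for_each_entry(sw_desc, descriptors, node)
		sw_desc->hw.desc_id = desc_id;

	first = list_first_entry(descriptors, struct my_sw_desc, node);
	writel(lower_32_bits(first->dma_addr), base + MY_DESC_START_ADDR);
	writel(upper_32_bits(first->dma_addr), base + MY_DESC_START_ADDRE);
}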
1081 struct xilinx_dpdma_sw_desc *sw_desc; in xilinx_dpdma_chan_vsync_irq() local
1095 sw_desc = list_first_entry(&pending->descriptors, in xilinx_dpdma_chan_vsync_irq()
1097 if (sw_desc->hw.desc_id != desc_id) in xilinx_dpdma_chan_vsync_irq()
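Finally, the vsync interrupt handler (lines 1081-1097) compares the descriptor ID the hardware reports against the ID stamped on the first pending descriptor; if they differ, the engine has not yet fetched the newly queued chain and the pending transaction is left in place. A sketch of that check with the illustrative types above:

static bool my_hw_reached_pending(struct list_head *pending_descs, u32 hw_desc_id)
{
	struct my_sw_desc *sw_desc;

	sw_desc = list_first_entry(pending_descs, struct my_sw_desc, node);

	/* Only when the IDs match has the engine picked up the pending chain. */
	return sw_desc->hw.desc_id == hw_desc_id;
}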