Lines Matching full:pch (identifier references in the ARM PL330 DMA controller driver, drivers/dma/pl330.c)
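
Every hit below dereferences struct dma_pl330_chan, the driver's per-channel state. A minimal sketch of that structure, reconstructed only from the fields referenced in this listing (member order and exact types are assumptions, and src/dst_interlace_size in the slave config is a vendor extension of struct dma_slave_config, not a mainline field):

#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>

struct dma_pl330_chan {
        struct dma_chan chan;            /* generic dmaengine channel (pch->chan) */
        struct list_head submitted_list; /* descriptors queued by tx_submit() */
        struct list_head work_list;      /* descriptors handed to the hardware thread */
        struct list_head completed_list; /* descriptors awaiting their callbacks */
        spinlock_t lock;                 /* protects the lists and ->active */
        struct tasklet_struct task;      /* pl330_tasklet(): completion/refill bottom half */
        struct pl330_thread *thread;     /* hardware thread backing this channel */
        struct pl330_dmac *dmac;         /* owning controller; ->ddma is its dma_device */
        bool active;                     /* transfer in flight, runtime-PM reference held */
        /* slave-side parameters latched by pl330_config_write() */
        struct dma_slave_config slave_config;
        phys_addr_t fifo_addr;           /* device FIFO address from dma_slave_config */
        dma_addr_t fifo_dma;             /* fifo_addr mapped via dma_map_resource() */
        enum dma_data_direction dir;     /* direction of the FIFO mapping, DMA_NONE if unmapped */
        unsigned int burst_sz;           /* log2 of the burst size (hence 1 << pch->burst_sz) */
        unsigned int burst_len;          /* burst length after fixup_burst_len() */
};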

1758 struct dma_pl330_chan *pch; in dma_pl330_rqcb() local
1764 pch = desc->pchan; in dma_pl330_rqcb()
1767 if (!pch) in dma_pl330_rqcb()
1770 spin_lock_irqsave(&pch->lock, flags); in dma_pl330_rqcb()
1774 spin_unlock_irqrestore(&pch->lock, flags); in dma_pl330_rqcb()
1776 tasklet_schedule(&pch->task); in dma_pl330_rqcb()
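
The dma_pl330_rqcb() hits above (lines 1758-1776) are the request-done callback: the channel lock only guards a status update, and the real completion work is deferred to the channel tasklet. A hedged sketch (the enum pl330_op_err argument and the DONE status value are assumptions, not shown in this listing):

static void dma_pl330_rqcb(struct dma_pl330_desc *desc, enum pl330_op_err err)
{
        struct dma_pl330_chan *pch;
        unsigned long flags;

        if (!desc)
                return;

        pch = desc->pchan;

        /* If the descriptor was aborted, there is no channel to notify */
        if (!pch)
                return;

        spin_lock_irqsave(&pch->lock, flags);
        desc->status = DONE;                    /* assumed status value */
        spin_unlock_irqrestore(&pch->lock, flags);

        tasklet_schedule(&pch->task);           /* defer to pl330_tasklet() */
}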
2243 static inline void fill_queue(struct dma_pl330_chan *pch) in fill_queue() argument
2248 list_for_each_entry(desc, &pch->work_list, node) { in fill_queue()
2254 ret = pl330_submit_req(pch->thread, desc); in fill_queue()
2263 dev_err(pch->dmac->ddma.dev, "%s:%d Bad Desc(%d)\n", in fill_queue()
2265 tasklet_schedule(&pch->task); in fill_queue()
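
The fill_queue() hits (lines 2243-2265) amount to: submit every not-yet-busy descriptor on work_list to the hardware thread, and schedule the tasklet to clean up any descriptor the thread rejects outright. A hedged reconstruction (the BUSY/DONE status values and the -EAGAIN handling are assumptions):

static inline void fill_queue(struct dma_pl330_chan *pch)
{
        struct dma_pl330_desc *desc;
        int ret;

        list_for_each_entry(desc, &pch->work_list, node) {
                /* Skip descriptors already queued on the thread */
                if (desc->status == BUSY)
                        continue;

                ret = pl330_submit_req(pch->thread, desc);
                if (!ret) {
                        desc->status = BUSY;
                } else if (ret == -EAGAIN) {
                        /* Thread queue full: retry on the next tasklet run */
                        break;
                } else {
                        /* Unacceptable request: fail it and let the tasklet recycle it */
                        desc->status = DONE;
                        dev_err(pch->dmac->ddma.dev, "%s:%d Bad Desc(%d)\n",
                                __func__, __LINE__, desc->txd.cookie);
                        tasklet_schedule(&pch->task);
                }
        }
}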
2272 struct dma_pl330_chan *pch = from_tasklet(pch, t, task); in pl330_tasklet() local
2277 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2280 list_for_each_entry_safe(desc, _dt, &pch->work_list, node) { in pl330_tasklet()
2284 list_move_tail(&desc->node, &pch->completed_list); in pl330_tasklet()
2292 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2294 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2301 fill_queue(pch); in pl330_tasklet()
2303 if (list_empty(&pch->work_list)) { in pl330_tasklet()
2304 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2305 _stop(pch->thread); in pl330_tasklet()
2306 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2307 power_down = pch->active; in pl330_tasklet()
2308 pch->active = false; in pl330_tasklet()
2311 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2312 _start(pch->thread); in pl330_tasklet()
2313 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2316 while (!list_empty(&pch->completed_list)) { in pl330_tasklet()
2319 desc = list_first_entry(&pch->completed_list, in pl330_tasklet()
2325 list_move_tail(&desc->node, &pch->dmac->desc_pool); in pl330_tasklet()
2330 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2332 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2335 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2339 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_tasklet()
2340 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_tasklet()
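
The pl330_tasklet() hits (lines 2272-2340) outline the channel bottom half: reap DONE descriptors into completed_list, refill the thread queue, stop the thread and remember to drop the runtime-PM reference once work_list drains (otherwise keep it started), then run the completion callbacks with the channel lock released. A condensed, hedged sketch; the cyclic re-queueing of the real driver is omitted and the DONE status value is an assumption:

static void pl330_tasklet(struct tasklet_struct *t)
{
        struct dma_pl330_chan *pch = from_tasklet(pch, t, task);
        struct dma_pl330_desc *desc, *_dt;
        unsigned long flags;
        bool power_down = false;

        spin_lock_irqsave(&pch->lock, flags);

        /* Move finished descriptors from work_list to completed_list */
        list_for_each_entry_safe(desc, _dt, &pch->work_list, node) {
                if (desc->status == DONE) {             /* assumed status value */
                        dma_cookie_complete(&desc->txd);
                        list_move_tail(&desc->node, &pch->completed_list);
                }
        }

        /* Push any still-pending descriptors down to the hardware thread */
        fill_queue(pch);

        if (list_empty(&pch->work_list)) {
                /* Nothing left: idle the thread and remember to drop the PM reference */
                spin_lock(&pch->thread->dmac->lock);
                _stop(pch->thread);
                spin_unlock(&pch->thread->dmac->lock);
                power_down = pch->active;
                pch->active = false;
        } else {
                /* Make sure the thread keeps running */
                spin_lock(&pch->thread->dmac->lock);
                _start(pch->thread);
                spin_unlock(&pch->thread->dmac->lock);
        }

        /* Recycle completed descriptors and invoke their callbacks unlocked */
        while (!list_empty(&pch->completed_list)) {
                struct dmaengine_desc_callback cb;

                desc = list_first_entry(&pch->completed_list,
                                        struct dma_pl330_desc, node);

                dmaengine_desc_get_callback(&desc->txd, &cb);
                list_move_tail(&desc->node, &pch->dmac->desc_pool);

                if (dmaengine_desc_callback_valid(&cb)) {
                        spin_unlock_irqrestore(&pch->lock, flags);
                        dmaengine_desc_callback_invoke(&cb, NULL);
                        spin_lock_irqsave(&pch->lock, flags);
                }
        }
        spin_unlock_irqrestore(&pch->lock, flags);

        if (power_down) {
                pm_runtime_mark_last_busy(pch->dmac->ddma.dev);
                pm_runtime_put_autosuspend(pch->dmac->ddma.dev);
        }
}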
2366 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_alloc_chan_resources() local
2367 struct pl330_dmac *pl330 = pch->dmac; in pl330_alloc_chan_resources()
2374 pch->thread = pl330_request_channel(pl330); in pl330_alloc_chan_resources()
2375 if (!pch->thread) { in pl330_alloc_chan_resources()
2380 tasklet_setup(&pch->task, pl330_tasklet); in pl330_alloc_chan_resources()
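
pl330_alloc_chan_resources() (lines 2366-2380) binds the channel to a free hardware thread and sets up its tasklet. A hedged sketch of the core steps (cookie initialisation and the controller locking around the request are omitted, and the -ENOMEM error value is an assumption):

static int pl330_alloc_chan_resources(struct dma_chan *chan)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;

        pch->thread = pl330_request_channel(pl330);
        if (!pch->thread)
                return -ENOMEM;         /* no free hardware thread */

        tasklet_setup(&pch->task, pl330_tasklet);

        return 1;                       /* dmaengine expects a positive count on success */
}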
2406 static void pl330_unprep_slave_fifo(struct dma_pl330_chan *pch) in pl330_unprep_slave_fifo() argument
2408 if (pch->dir != DMA_NONE) in pl330_unprep_slave_fifo()
2409 dma_unmap_resource(pch->chan.device->dev, pch->fifo_dma, in pl330_unprep_slave_fifo()
2410 1 << pch->burst_sz, pch->dir, 0); in pl330_unprep_slave_fifo()
2411 pch->dir = DMA_NONE; in pl330_unprep_slave_fifo()
2415 static bool pl330_prep_slave_fifo(struct dma_pl330_chan *pch, in pl330_prep_slave_fifo() argument
2418 struct device *dev = pch->chan.device->dev; in pl330_prep_slave_fifo()
2422 if (pch->dir == dma_dir) in pl330_prep_slave_fifo()
2425 pl330_unprep_slave_fifo(pch); in pl330_prep_slave_fifo()
2426 pch->fifo_dma = dma_map_resource(dev, pch->fifo_addr, in pl330_prep_slave_fifo()
2427 1 << pch->burst_sz, dma_dir, 0); in pl330_prep_slave_fifo()
2428 if (dma_mapping_error(dev, pch->fifo_dma)) in pl330_prep_slave_fifo()
2431 pch->dir = dma_dir; in pl330_prep_slave_fifo()
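
Lines 2406-2431 show how the device FIFO is mapped for DMA once per direction: dma_map_resource() maps a single burst-sized window at pch->fifo_addr, and the mapping is torn down whenever the direction changes (pch->dir doubles as the "is mapped" flag). A hedged sketch:

static void pl330_unprep_slave_fifo(struct dma_pl330_chan *pch)
{
        if (pch->dir != DMA_NONE)
                dma_unmap_resource(pch->chan.device->dev, pch->fifo_dma,
                                   1 << pch->burst_sz, pch->dir, 0);
        pch->dir = DMA_NONE;
}

static bool pl330_prep_slave_fifo(struct dma_pl330_chan *pch,
                                  enum dma_transfer_direction dir)
{
        struct device *dev = pch->chan.device->dev;
        enum dma_data_direction dma_dir = (dir == DMA_MEM_TO_DEV) ?
                                          DMA_TO_DEVICE : DMA_FROM_DEVICE;

        /* Already mapped for this direction: nothing to do */
        if (pch->dir == dma_dir)
                return true;

        pl330_unprep_slave_fifo(pch);
        pch->fifo_dma = dma_map_resource(dev, pch->fifo_addr,
                                         1 << pch->burst_sz, dma_dir, 0);
        if (dma_mapping_error(dev, pch->fifo_dma))
                return false;

        pch->dir = dma_dir;
        return true;
}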
2449 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_config_write() local
2451 pl330_unprep_slave_fifo(pch); in pl330_config_write()
2454 pch->fifo_addr = slave_config->dst_addr; in pl330_config_write()
2456 pch->burst_sz = __ffs(slave_config->dst_addr_width); in pl330_config_write()
2459 pch->slave_config.src_interlace_size = slave_config->src_interlace_size; in pl330_config_write()
2461 pch->burst_len = fixup_burst_len(slave_config->dst_maxburst, in pl330_config_write()
2462 pch->dmac->quirks); in pl330_config_write()
2465 pch->fifo_addr = slave_config->src_addr; in pl330_config_write()
2467 pch->burst_sz = __ffs(slave_config->src_addr_width); in pl330_config_write()
2470 pch->slave_config.dst_interlace_size = slave_config->dst_interlace_size; in pl330_config_write()
2472 pch->burst_len = fixup_burst_len(slave_config->src_maxburst, in pl330_config_write()
2473 pch->dmac->quirks); in pl330_config_write()
2482 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_config() local
2484 memcpy(&pch->slave_config, slave_config, sizeof(*slave_config)); in pl330_config()
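
pl330_config_write() (lines 2449-2473) latches a dma_slave_config into per-channel fields, taking the destination parameters for DMA_MEM_TO_DEV and the source parameters for DMA_DEV_TO_MEM; pl330_config() just stores a copy so the prep routines can replay it per direction. A hedged sketch (the zero-value guards are assumptions, and the interlace_size fields are a vendor extension):

static int pl330_config_write(struct dma_chan *chan,
                              struct dma_slave_config *slave_config,
                              enum dma_transfer_direction direction)
{
        struct dma_pl330_chan *pch = to_pchan(chan);

        pl330_unprep_slave_fifo(pch);   /* force a remap with the new parameters */

        if (direction == DMA_MEM_TO_DEV) {
                if (slave_config->dst_addr)
                        pch->fifo_addr = slave_config->dst_addr;
                if (slave_config->dst_addr_width)
                        pch->burst_sz = __ffs(slave_config->dst_addr_width);
                pch->slave_config.src_interlace_size = slave_config->src_interlace_size;
                pch->burst_len = fixup_burst_len(slave_config->dst_maxburst,
                                                 pch->dmac->quirks);
        } else if (direction == DMA_DEV_TO_MEM) {
                if (slave_config->src_addr)
                        pch->fifo_addr = slave_config->src_addr;
                if (slave_config->src_addr_width)
                        pch->burst_sz = __ffs(slave_config->src_addr_width);
                pch->slave_config.dst_interlace_size = slave_config->dst_interlace_size;
                pch->burst_len = fixup_burst_len(slave_config->src_maxburst,
                                                 pch->dmac->quirks);
        }

        return 0;
}

static int pl330_config(struct dma_chan *chan,
                        struct dma_slave_config *slave_config)
{
        struct dma_pl330_chan *pch = to_pchan(chan);

        memcpy(&pch->slave_config, slave_config, sizeof(*slave_config));

        return 0;
}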
2491 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_terminate_all() local
2494 struct pl330_dmac *pl330 = pch->dmac; in pl330_terminate_all()
2498 spin_lock_irqsave(&pch->lock, flags); in pl330_terminate_all()
2501 _stop(pch->thread); in pl330_terminate_all()
2502 pch->thread->req[0].desc = NULL; in pl330_terminate_all()
2503 pch->thread->req[1].desc = NULL; in pl330_terminate_all()
2504 pch->thread->req_running = -1; in pl330_terminate_all()
2507 power_down = pch->active; in pl330_terminate_all()
2508 pch->active = false; in pl330_terminate_all()
2511 list_for_each_entry(desc, &pch->submitted_list, node) { in pl330_terminate_all()
2516 list_for_each_entry(desc, &pch->work_list , node) { in pl330_terminate_all()
2521 list_splice_tail_init(&pch->submitted_list, &pl330->desc_pool); in pl330_terminate_all()
2522 list_splice_tail_init(&pch->work_list, &pl330->desc_pool); in pl330_terminate_all()
2523 list_splice_tail_init(&pch->completed_list, &pl330->desc_pool); in pl330_terminate_all()
2524 spin_unlock_irqrestore(&pch->lock, flags); in pl330_terminate_all()
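
pl330_terminate_all() (lines 2491-2524) stops the hardware thread under the controller lock, clears its two request slots, marks every submitted and queued descriptor as finished, and splices all three channel lists back into the controller's descriptor pool; a runtime-PM reference is dropped afterwards if the channel was active. A hedged sketch (the FREE status value and the PM tail are assumptions):

static int pl330_terminate_all(struct dma_chan *chan)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;
        struct dma_pl330_desc *desc;
        unsigned long flags;
        bool power_down;

        spin_lock_irqsave(&pch->lock, flags);

        spin_lock(&pl330->lock);
        _stop(pch->thread);
        pch->thread->req[0].desc = NULL;
        pch->thread->req[1].desc = NULL;
        pch->thread->req_running = -1;
        spin_unlock(&pl330->lock);

        power_down = pch->active;
        pch->active = false;

        /* Mark all descriptors as settled before recycling them */
        list_for_each_entry(desc, &pch->submitted_list, node) {
                desc->status = FREE;            /* assumed status value */
                dma_cookie_complete(&desc->txd);
        }
        list_for_each_entry(desc, &pch->work_list, node) {
                desc->status = FREE;
                dma_cookie_complete(&desc->txd);
        }

        list_splice_tail_init(&pch->submitted_list, &pl330->desc_pool);
        list_splice_tail_init(&pch->work_list, &pl330->desc_pool);
        list_splice_tail_init(&pch->completed_list, &pl330->desc_pool);
        spin_unlock_irqrestore(&pch->lock, flags);

        if (power_down)
                pm_runtime_put_autosuspend(pl330->ddma.dev);

        return 0;
}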
2542 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_pause() local
2543 struct pl330_dmac *pl330 = pch->dmac; in pl330_pause()
2547 spin_lock_irqsave(&pch->lock, flags); in pl330_pause()
2550 _stop(pch->thread); in pl330_pause()
2553 spin_unlock_irqrestore(&pch->lock, flags); in pl330_pause()
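
pl330_pause() (lines 2542-2553) only halts the hardware thread under both locks; nothing is removed from the descriptor lists, so a later resume or terminate still sees them. A hedged sketch (the runtime-PM bracketing is an assumption):

static int pl330_pause(struct dma_chan *chan)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;
        unsigned long flags;

        pm_runtime_get_sync(pl330->ddma.dev);
        spin_lock_irqsave(&pch->lock, flags);

        spin_lock(&pl330->lock);
        _stop(pch->thread);             /* halt the thread; descriptors stay queued */
        spin_unlock(&pl330->lock);

        spin_unlock_irqrestore(&pch->lock, flags);
        pm_runtime_put_autosuspend(pl330->ddma.dev);

        return 0;
}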
2562 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_free_chan_resources() local
2563 struct pl330_dmac *pl330 = pch->dmac; in pl330_free_chan_resources()
2566 tasklet_kill(&pch->task); in pl330_free_chan_resources()
2568 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2571 pl330_release_channel(pch->thread); in pl330_free_chan_resources()
2572 pch->thread = NULL; in pl330_free_chan_resources()
2574 list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool); in pl330_free_chan_resources()
2577 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2578 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2579 pl330_unprep_slave_fifo(pch); in pl330_free_chan_resources()
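
pl330_free_chan_resources() (lines 2562-2579) is the teardown mirror of the allocation path: kill the tasklet, release the hardware thread, recycle any leftover work_list descriptors into the pool, unmap the FIFO and balance the runtime-PM reference. A hedged sketch (the controller locking is an assumption):

static void pl330_free_chan_resources(struct dma_chan *chan)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;
        unsigned long flags;

        tasklet_kill(&pch->task);

        pm_runtime_get_sync(pch->dmac->ddma.dev);
        spin_lock_irqsave(&pl330->lock, flags);

        pl330_release_channel(pch->thread);
        pch->thread = NULL;

        list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool);

        spin_unlock_irqrestore(&pl330->lock, flags);
        pm_runtime_mark_last_busy(pch->dmac->ddma.dev);
        pm_runtime_put_autosuspend(pch->dmac->ddma.dev);
        pl330_unprep_slave_fifo(pch);
}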
2582 static int pl330_get_current_xferred_count(struct dma_pl330_chan *pch, in pl330_get_current_xferred_count() argument
2585 struct pl330_thread *thrd = pch->thread; in pl330_get_current_xferred_count()
2586 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_current_xferred_count()
2599 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_get_current_xferred_count()
2616 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_tx_status() local
2627 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_status()
2628 spin_lock(&pch->thread->dmac->lock); in pl330_tx_status()
2630 if (pch->thread->req_running != -1) in pl330_tx_status()
2631 running = pch->thread->req[pch->thread->req_running].desc; in pl330_tx_status()
2633 last_enq = pch->thread->req[pch->thread->lstenq].desc; in pl330_tx_status()
2636 list_for_each_entry(desc, &pch->work_list, node) { in pl330_tx_status()
2641 pl330_get_current_xferred_count(pch, desc); in pl330_tx_status()
2671 spin_unlock(&pch->thread->dmac->lock); in pl330_tx_status()
2672 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_status()
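
pl330_tx_status() (lines 2616-2672), together with pl330_get_current_xferred_count() at 2582-2599, computes the residue for a cookie: it takes the channel lock and the controller lock, finds the request currently running on the hardware thread, and walks work_list summing the untransferred bytes of each descriptor up to the queried one. A condensed, hedged sketch; bytes_requested, the status values and the last-enqueued corner case of the real driver are simplified or assumed:

static enum dma_status
pl330_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
                struct dma_tx_state *txstate)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct dma_pl330_desc *desc, *running = NULL;
        unsigned int transferred, residual = 0;
        unsigned long flags;
        enum dma_status ret;

        ret = dma_cookie_status(chan, cookie, txstate);
        if (!txstate || ret == DMA_COMPLETE)
                return ret;

        spin_lock_irqsave(&pch->lock, flags);
        spin_lock(&pch->thread->dmac->lock);

        if (pch->thread->req_running != -1)
                running = pch->thread->req[pch->thread->req_running].desc;

        /* Sum what is still outstanding, up to the queried descriptor */
        list_for_each_entry(desc, &pch->work_list, node) {
                if (desc->status == DONE)
                        transferred = desc->bytes_requested;
                else if (desc == running)
                        transferred = pl330_get_current_xferred_count(pch, desc);
                else
                        transferred = 0;

                residual += desc->bytes_requested - transferred;

                if (desc->txd.cookie == cookie) {
                        ret = (desc->status == DONE) ? DMA_COMPLETE
                                                     : DMA_IN_PROGRESS;
                        break;
                }
        }

        spin_unlock(&pch->thread->dmac->lock);
        spin_unlock_irqrestore(&pch->lock, flags);

        dma_set_residue(txstate, residual);
        return ret;
}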
2682 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_issue_pending() local
2685 spin_lock_irqsave(&pch->lock, flags); in pl330_issue_pending()
2686 if (list_empty(&pch->work_list)) { in pl330_issue_pending()
2692 WARN_ON(list_empty(&pch->submitted_list)); in pl330_issue_pending()
2693 pch->active = true; in pl330_issue_pending()
2694 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_issue_pending()
2696 list_splice_tail_init(&pch->submitted_list, &pch->work_list); in pl330_issue_pending()
2697 spin_unlock_irqrestore(&pch->lock, flags); in pl330_issue_pending()
2699 pl330_tasklet(&pch->task); in pl330_issue_pending()
2710 struct dma_pl330_chan *pch = to_pchan(tx->chan); in pl330_tx_submit() local
2714 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_submit()
2724 list_move_tail(&desc->node, &pch->submitted_list); in pl330_tx_submit()
2729 list_add_tail(&last->node, &pch->submitted_list); in pl330_tx_submit()
2730 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_submit()
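
Lines 2682-2730 show the submit/issue split the dmaengine API requires: pl330_tx_submit() only assigns cookies and chains descriptors onto submitted_list, while pl330_issue_pending() takes the runtime-PM reference when the channel goes busy, moves everything onto work_list and kicks the tasklet. A hedged sketch; to_desc() is assumed to be the driver's container_of helper, and the cyclic-specific callback propagation is omitted:

static void pl330_issue_pending(struct dma_chan *chan)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);
        if (list_empty(&pch->work_list)) {
                /* Going from idle to busy: pin the controller via runtime PM */
                WARN_ON(list_empty(&pch->submitted_list));
                pch->active = true;
                pm_runtime_get_sync(pch->dmac->ddma.dev);
        }
        list_splice_tail_init(&pch->submitted_list, &pch->work_list);
        spin_unlock_irqrestore(&pch->lock, flags);

        pl330_tasklet(&pch->task);
}

static dma_cookie_t pl330_tx_submit(struct dma_async_tx_descriptor *tx)
{
        struct dma_pl330_desc *desc, *last = to_desc(tx);
        struct dma_pl330_chan *pch = to_pchan(tx->chan);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);

        /* Assign cookies to every descriptor chained behind the last one */
        while (!list_empty(&last->node)) {
                desc = list_entry(last->node.next, struct dma_pl330_desc, node);
                dma_cookie_assign(&desc->txd);
                list_move_tail(&desc->node, &pch->submitted_list);
        }

        cookie = dma_cookie_assign(&last->txd);
        list_add_tail(&last->node, &pch->submitted_list);
        spin_unlock_irqrestore(&pch->lock, flags);

        return cookie;
}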
2792 static struct dma_pl330_desc *pl330_get_desc(struct dma_pl330_chan *pch) in pl330_get_desc() argument
2794 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_desc()
2795 u8 *peri_id = pch->chan.private; in pl330_get_desc()
2814 desc->pchan = pch; in pl330_get_desc()
2818 desc->peri = peri_id ? pch->chan.chan_id : 0; in pl330_get_desc()
2819 desc->rqcfg.pcfg = &pch->dmac->pcfg; in pl330_get_desc()
2824 dma_async_tx_descriptor_init(&desc->txd, &pch->chan); in pl330_get_desc()
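
pl330_get_desc() (lines 2792-2824) pulls a free descriptor from the controller pool and initialises it for this channel; in the real driver the pool is refilled if it runs dry. A hedged sketch (pluck_desc() and the pool refill path are assumptions):

static struct dma_pl330_desc *pl330_get_desc(struct dma_pl330_chan *pch)
{
        struct pl330_dmac *pl330 = pch->dmac;
        u8 *peri_id = pch->chan.private;
        struct dma_pl330_desc *desc;

        /* Take one descriptor from the controller's free pool (assumed helper) */
        desc = pluck_desc(&pl330->desc_pool, &pl330->pool_lock);
        if (!desc)
                return NULL;

        /* Initialize the descriptor for this channel */
        desc->pchan = pch;
        desc->txd.cookie = 0;
        async_tx_ack(&desc->txd);

        desc->peri = peri_id ? pch->chan.chan_id : 0;
        desc->rqcfg.pcfg = &pch->dmac->pcfg;

        dma_async_tx_descriptor_init(&desc->txd, &pch->chan);

        return desc;
}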
2838 __pl330_prep_dma_memcpy(struct dma_pl330_chan *pch, dma_addr_t dst, in __pl330_prep_dma_memcpy() argument
2841 struct dma_pl330_desc *desc = pl330_get_desc(pch); in __pl330_prep_dma_memcpy()
2844 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in __pl330_prep_dma_memcpy()
2867 struct dma_pl330_chan *pch = desc->pchan; in get_burst_len() local
2868 struct pl330_dmac *pl330 = pch->dmac; in get_burst_len()
2888 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_cyclic() local
2896 dev_err(pch->dmac->ddma.dev, "%s:%d Invalid dma direction\n", in pl330_prep_dma_cyclic()
2901 pl330_config_write(chan, &pch->slave_config, direction); in pl330_prep_dma_cyclic()
2903 if (!pl330_prep_slave_fifo(pch, direction)) in pl330_prep_dma_cyclic()
2906 desc = pl330_get_desc(pch); in pl330_prep_dma_cyclic()
2908 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in pl330_prep_dma_cyclic()
2918 dst = pch->fifo_dma; in pl330_prep_dma_cyclic()
2923 src = pch->fifo_dma; in pl330_prep_dma_cyclic()
2931 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_dma_cyclic()
2932 desc->rqcfg.brst_len = pch->burst_len; in pl330_prep_dma_cyclic()
2940 desc->src_interlace_size = pch->slave_config.src_interlace_size; in pl330_prep_dma_cyclic()
2941 desc->dst_interlace_size = pch->slave_config.dst_interlace_size; in pl330_prep_dma_cyclic()
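
pl330_prep_dma_cyclic() (lines 2888-2941) validates the direction, replays the stored slave config for it, maps the FIFO, then builds the period descriptors with src/dst chosen by direction and the burst parameters copied from the channel. A heavily condensed, hedged sketch showing a single period; the real driver loops over all periods, chains the descriptors and marks the channel cyclic:

static struct dma_async_tx_descriptor *
pl330_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
                      size_t len, size_t period_len,
                      enum dma_transfer_direction direction, unsigned long flags)
{
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct dma_pl330_desc *desc;
        dma_addr_t dst, src;

        if (!is_slave_direction(direction)) {
                dev_err(pch->dmac->ddma.dev, "%s:%d Invalid dma direction\n",
                        __func__, __LINE__);
                return NULL;
        }

        pl330_config_write(chan, &pch->slave_config, direction);

        if (!pl330_prep_slave_fifo(pch, direction))
                return NULL;

        desc = pl330_get_desc(pch);
        if (!desc) {
                dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n",
                        __func__, __LINE__);
                return NULL;
        }

        if (direction == DMA_MEM_TO_DEV) {
                src = dma_addr;                 /* memory period -> device FIFO */
                dst = pch->fifo_dma;
        } else {
                src = pch->fifo_dma;            /* device FIFO -> memory period */
                dst = dma_addr;
        }

        desc->rqcfg.brst_size = pch->burst_sz;
        desc->rqcfg.brst_len = pch->burst_len;
        desc->src_interlace_size = pch->slave_config.src_interlace_size;
        desc->dst_interlace_size = pch->slave_config.dst_interlace_size;
        fill_px(&desc->px, dst, src, period_len);

        return &desc->txd;
}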
2951 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_memcpy() local
2955 if (unlikely(!pch || !len)) in pl330_prep_dma_memcpy()
2958 pl330 = pch->dmac; in pl330_prep_dma_memcpy()
2960 desc = __pl330_prep_dma_memcpy(pch, dst, src, len); in pl330_prep_dma_memcpy()
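
Lines 2838-2844 and 2951-2960 cover the memcpy path: __pl330_prep_dma_memcpy() grabs a descriptor and records the transfer in its px block, while pl330_prep_dma_memcpy() (with get_burst_len(), lines 2867-2868) derives the widest burst the addresses, the length and the controller's data-bus width allow. A hedged sketch of the helper:

static struct dma_pl330_desc *
__pl330_prep_dma_memcpy(struct dma_pl330_chan *pch, dma_addr_t dst,
                        dma_addr_t src, size_t len)
{
        struct dma_pl330_desc *desc = pl330_get_desc(pch);

        if (!desc) {
                dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n",
                        __func__, __LINE__);
                return NULL;
        }

        /* Record source, destination and length for the microcode generator */
        fill_px(&desc->px, dst, src, len);

        return desc;
}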
3026 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_slave_sg() local
3030 if (unlikely(!pch || !sgl || !sg_len)) in pl330_prep_slave_sg()
3033 pl330_config_write(chan, &pch->slave_config, direction); in pl330_prep_slave_sg()
3035 if (!pl330_prep_slave_fifo(pch, direction)) in pl330_prep_slave_sg()
3042 desc = pl330_get_desc(pch); in pl330_prep_slave_sg()
3044 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_slave_sg()
3046 dev_err(pch->dmac->ddma.dev, in pl330_prep_slave_sg()
3062 fill_px(&desc->px, pch->fifo_dma, sg_dma_address(sg), in pl330_prep_slave_sg()
3067 fill_px(&desc->px, sg_dma_address(sg), pch->fifo_dma, in pl330_prep_slave_sg()
3071 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_slave_sg()
3072 desc->rqcfg.brst_len = pch->burst_len; in pl330_prep_slave_sg()
3076 desc->src_interlace_size = pch->slave_config.src_interlace_size; in pl330_prep_slave_sg()
3077 desc->dst_interlace_size = pch->slave_config.dst_interlace_size; in pl330_prep_slave_sg()
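
pl330_prep_slave_sg() (lines 3026-3077) follows the same pattern per scatterlist entry: replay the config, map the FIFO, then allocate one descriptor per sg element and point it at the FIFO on the device side and at sg_dma_address() on the memory side. A condensed, hedged sketch of the per-entry loop body; the descriptor chaining and the error unwinding of the real driver are omitted:

        for_each_sg(sgl, sg, sg_len, i) {
                desc = pl330_get_desc(pch);
                if (!desc) {
                        dev_err(pch->dmac->ddma.dev,
                                "%s:%d Unable to fetch desc\n",
                                __func__, __LINE__);
                        return NULL;    /* real driver recycles the partial chain first */
                }

                if (direction == DMA_MEM_TO_DEV)
                        /* memory -> device FIFO */
                        fill_px(&desc->px, pch->fifo_dma, sg_dma_address(sg),
                                sg_dma_len(sg));
                else
                        /* device FIFO -> memory */
                        fill_px(&desc->px, sg_dma_address(sg), pch->fifo_dma,
                                sg_dma_len(sg));

                desc->rqcfg.brst_size = pch->burst_sz;
                desc->rqcfg.brst_len = pch->burst_len;
                desc->src_interlace_size = pch->slave_config.src_interlace_size;
                desc->dst_interlace_size = pch->slave_config.dst_interlace_size;
        }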
3118 struct dma_pl330_chan *pch = &pl330->peripherals[pr]; in pl330_debugfs_show() local
3120 if (!pch->thread || thrd->id != pch->thread->id) in pl330_debugfs_show()
3189 struct dma_pl330_chan *pch, *_p; in pl330_probe() local
3280 pl330->peripherals = kcalloc(num_chan, sizeof(*pch), GFP_KERNEL); in pl330_probe()
3287 pch = &pl330->peripherals[i]; in pl330_probe()
3289 pch->chan.private = adev->dev.of_node; in pl330_probe()
3290 INIT_LIST_HEAD(&pch->submitted_list); in pl330_probe()
3291 INIT_LIST_HEAD(&pch->work_list); in pl330_probe()
3292 INIT_LIST_HEAD(&pch->completed_list); in pl330_probe()
3293 spin_lock_init(&pch->lock); in pl330_probe()
3294 pch->thread = NULL; in pl330_probe()
3295 pch->chan.device = pd; in pl330_probe()
3296 pch->dmac = pl330; in pl330_probe()
3297 pch->dir = DMA_NONE; in pl330_probe()
3300 list_add_tail(&pch->chan.device_node, &pd->channels); in pl330_probe()
3367 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_probe()
3371 list_del(&pch->chan.device_node); in pl330_probe()
3374 if (pch->thread) { in pl330_probe()
3375 pl330_terminate_all(&pch->chan); in pl330_probe()
3376 pl330_free_chan_resources(&pch->chan); in pl330_probe()
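
The pl330_probe() hits (lines 3189-3376) show the per-channel initialisation and its unwind path: the peripherals array is allocated with kcalloc(), each pch gets its lists, lock and back-pointers initialised before being added to the dma_device channel list, and on failure every registered channel is terminated and its resources freed again (the pl330_remove() hits below, lines 3393-3419, run the same loop at driver removal). A hedged sketch of the init loop; pd is assumed to be the controller's struct dma_device and the error-unwind label is omitted:

        pl330->peripherals = kcalloc(num_chan, sizeof(*pch), GFP_KERNEL);
        if (!pl330->peripherals)
                return -ENOMEM;

        for (i = 0; i < num_chan; i++) {
                pch = &pl330->peripherals[i];

                pch->chan.private = adev->dev.of_node;
                INIT_LIST_HEAD(&pch->submitted_list);
                INIT_LIST_HEAD(&pch->work_list);
                INIT_LIST_HEAD(&pch->completed_list);
                spin_lock_init(&pch->lock);
                pch->thread = NULL;
                pch->chan.device = pd;          /* pd: the controller's struct dma_device */
                pch->dmac = pl330;
                pch->dir = DMA_NONE;

                /* Add the channel to the DMAC's channel list */
                list_add_tail(&pch->chan.device_node, &pd->channels);
        }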
3393 struct dma_pl330_chan *pch, *_p; in pl330_remove() local
3410 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_remove()
3414 list_del(&pch->chan.device_node); in pl330_remove()
3417 if (pch->thread) { in pl330_remove()
3418 pl330_terminate_all(&pch->chan); in pl330_remove()
3419 pl330_free_chan_resources(&pch->chan); in pl330_remove()