Lines matching refs: schan

Each entry below gives the source line number, the matching line, and the enclosing function; "local" and "argument" mark the lines where schan is declared. Every function is a sirfsoc_dma_* entry point (apparently drivers/dma/sirf-dma.c), so the listing doubles as a map of how the per-channel state, its descriptor lists, spinlock, mode flag, and cyclic counters, is used.

158 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c);  in dma_chan_to_sirfsoc_dma() local
159 return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]); in dma_chan_to_sirfsoc_dma()
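Lines 158-159 above are the whole body of the chan-to-controller helper: container_of() is applied to an array member, so a pointer to one element of the channels[] array is climbed up to the enclosing struct sirfsoc_dma, with c->chan_id as the index. A minimal sketch of the pattern; the struct layouts are abbreviated to the fields this listing actually touches, and the channel count macro is a placeholder assumption. Later sketches in this listing reuse these abbreviated types.

```c
#include <linux/dmaengine.h>
#include <linux/kernel.h>	/* container_of() */
#include <linux/list.h>
#include <linux/spinlock.h>

#define SIRFSOC_DMA_CHANNELS	16	/* assumption: real count lives in the driver */

/* Abbreviated types: only the fields visible in this listing. */
struct sirfsoc_dma_desc {
	struct dma_async_tx_descriptor	desc;
	struct list_head		node;
	bool				cyclic;		/* assumed flag, cf. IRQ path */
};

struct sirfsoc_dma_chan {
	struct dma_chan		chan;
	struct list_head	free, prepared, queued, active, completed;
	spinlock_t		lock;
	int			mode;
	unsigned long		happened_cyclic, completed_cyclic;
};

struct sirfsoc_dma {
	struct dma_device	dma;
	struct sirfsoc_dma_chan	channels[SIRFSOC_DMA_CHANNELS];
	void __iomem		*base;
	/* per-SoC hook, argument order taken from line 257 */
	void (*exec_desc)(struct sirfsoc_dma_desc *sdesc, int cid,
			  int burst_mode, void __iomem *base);
};

static inline struct sirfsoc_dma_chan *
dma_chan_to_sirfsoc_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sirfsoc_dma_chan, chan);
}

/* Lines 158-159: climb from dma_chan to the controller via channels[] */
static inline struct sirfsoc_dma *dma_chan_to_sirfsoc_dma(struct dma_chan *c)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c);

	return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]);
}
```

The trick works because channels[] is embedded in (not pointed to by) struct sirfsoc_dma, so the offsetof() arithmetic inside container_of() is valid for any element.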
236 static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan) in sirfsoc_dma_execute() argument
238 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_execute()
239 int cid = schan->chan.chan_id; in sirfsoc_dma_execute()
248 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc, in sirfsoc_dma_execute()
251 list_move_tail(&sdesc->node, &schan->active); in sirfsoc_dma_execute()
257 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
260 schan->happened_cyclic = schan->completed_cyclic = 0; in sirfsoc_dma_execute()
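Lines 236-260 show the hand-off that starts a transfer: the head of the queued list moves onto active, then the controller's exec_desc hook programs the hardware. A sketch under the abbreviated types above. No locking appears here because every call site in this listing (lines 292, 318, 633) is already inside a locked region; whether the counter reset on line 260 is guarded by a cyclic test is not visible, so that guard is marked as an assumption.

```c
/* Start the first queued descriptor; called with schan->lock held. */
static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan)
{
	struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
	int cid = schan->chan.chan_id;
	struct sirfsoc_dma_desc *sdesc;

	sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc,
				 node);
	/* Line 251: queued -> active */
	list_move_tail(&sdesc->node, &schan->active);

	/* Line 257: program the hardware through the per-SoC hook */
	sdma->exec_desc(sdesc, cid, schan->mode, sdma->base);

	/* Line 260: restart cyclic bookkeeping (guard is an assumption) */
	if (sdesc->cyclic)
		schan->happened_cyclic = schan->completed_cyclic = 0;
}
```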
267 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_irq() local
282 schan = &sdma->channels[ch]; in sirfsoc_dma_irq()
283 spin_lock(&schan->lock); in sirfsoc_dma_irq()
284 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
288 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
289 &schan->completed); in sirfsoc_dma_irq()
291 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
292 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
294 schan->happened_cyclic++; in sirfsoc_dma_irq()
295 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
304 schan = &sdma->channels[0]; in sirfsoc_dma_irq()
305 spin_lock(&schan->lock); in sirfsoc_dma_irq()
306 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
314 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
315 &schan->completed); in sirfsoc_dma_irq()
317 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
318 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
322 schan->happened_cyclic++; in sirfsoc_dma_irq()
324 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
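Lines 267-324 are the hard-IRQ half of the completion path, and the listing shows the same logic twice: once in a per-channel loop (282-295) and once against channel 0 only (304-324), presumably a controller variant with a single interrupt line. Under a plain spin_lock (no irqsave, since this already runs in hard-IRQ context), the head of active is examined; a one-shot transfer retires the whole active list onto completed and restarts anything queued, while a cyclic transfer just counts the period in happened_cyclic. A sketch of the per-channel variant (needs linux/interrupt.h, linux/io.h, linux/bitops.h); the status-register read, the cyclic test at the elided lines 285-287, and the tasklet field are assumptions:

```c
static irqreturn_t sirfsoc_dma_irq(int irq, void *data)
{
	struct sirfsoc_dma *sdma = data;
	struct sirfsoc_dma_chan *schan;
	struct sirfsoc_dma_desc *sdesc;
	u32 is;
	int ch;

	/* Placeholder: the real interrupt-status read is not in the listing */
	is = readl(sdma->base + 0x0 /* assumed offset */);
	while ((ch = fls(is) - 1) >= 0) {
		is &= ~(1 << ch);
		schan = &sdma->channels[ch];		/* line 282 */

		spin_lock(&schan->lock);
		sdesc = list_first_entry(&schan->active,
					 struct sirfsoc_dma_desc, node);
		if (!sdesc->cyclic) {	/* assumed test (elided lines 285-287) */
			/* Lines 288-292: retire finished work, chain the next */
			list_splice_tail_init(&schan->active,
					      &schan->completed);
			if (!list_empty(&schan->queued))
				sirfsoc_dma_execute(schan);
		} else
			schan->happened_cyclic++;	/* line 294 */
		spin_unlock(&schan->lock);
	}

	/* Defer callback invocation out of hard-IRQ context (field assumed) */
	tasklet_schedule(&sdma->tasklet);

	return IRQ_HANDLED;
}
```

Splitting the work this way keeps the handler down to list splices; client callbacks run later in sirfsoc_dma_process_completed (lines 341 onward).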
341 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_process_completed() local
350 schan = &sdma->channels[i]; in sirfsoc_dma_process_completed()
353 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
354 if (!list_empty(&schan->completed)) { in sirfsoc_dma_process_completed()
355 list_splice_tail_init(&schan->completed, &list); in sirfsoc_dma_process_completed()
356 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
368 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
369 list_splice_tail_init(&list, &schan->free); in sirfsoc_dma_process_completed()
370 schan->chan.completed_cookie = last_cookie; in sirfsoc_dma_process_completed()
371 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
373 if (list_empty(&schan->active)) { in sirfsoc_dma_process_completed()
374 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
379 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_process_completed()
383 happened_cyclic = schan->happened_cyclic; in sirfsoc_dma_process_completed()
384 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
387 while (happened_cyclic != schan->completed_cyclic) { in sirfsoc_dma_process_completed()
389 schan->completed_cyclic++; in sirfsoc_dma_process_completed()
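Lines 341-389 are that deferred half: per channel, the completed list is spliced onto a private list under the lock, callbacks run with the lock dropped, then the descriptors return to free while completed_cookie is advanced (lines 369-370). For a cyclic channel (nothing completed, something active) the callback fires once per period counted by the IRQ, reconciling completed_cyclic with happened_cyclic (lines 383-389). A sketch; chancnt and the callback plumbing are assumptions where the listing elides them:

```c
static void sirfsoc_dma_process_completed(struct sirfsoc_dma *sdma)
{
	dma_cookie_t last_cookie = 0;
	struct sirfsoc_dma_chan *schan;
	struct sirfsoc_dma_desc *sdesc;
	struct dma_async_tx_descriptor *desc;
	unsigned long flags, happened_cyclic;
	LIST_HEAD(list);
	int i;

	for (i = 0; i < sdma->dma.chancnt; i++) {
		schan = &sdma->channels[i];

		/* Lines 353-356: steal the whole completed list in one go */
		spin_lock_irqsave(&schan->lock, flags);
		if (!list_empty(&schan->completed)) {
			list_splice_tail_init(&schan->completed, &list);
			spin_unlock_irqrestore(&schan->lock, flags);

			/* Invoke callbacks with the lock dropped */
			list_for_each_entry(sdesc, &list, node) {
				desc = &sdesc->desc;
				if (desc->callback)
					desc->callback(desc->callback_param);
				last_cookie = desc->cookie;
				dma_run_dependencies(desc);
			}

			/* Lines 368-371: recycle, publish the newest cookie */
			spin_lock_irqsave(&schan->lock, flags);
			list_splice_tail_init(&list, &schan->free);
			schan->chan.completed_cookie = last_cookie;
			spin_unlock_irqrestore(&schan->lock, flags);
		} else {
			if (list_empty(&schan->active)) {
				spin_unlock_irqrestore(&schan->lock, flags);
				continue;
			}

			/* Cyclic: one callback per period the IRQ counted */
			sdesc = list_first_entry(&schan->active,
						 struct sirfsoc_dma_desc, node);
			happened_cyclic = schan->happened_cyclic;
			spin_unlock_irqrestore(&schan->lock, flags);

			desc = &sdesc->desc;
			while (happened_cyclic != schan->completed_cyclic) {
				if (desc->callback)
					desc->callback(desc->callback_param);
				schan->completed_cyclic++;
			}
		}
	}
}
```

Running callbacks without schan->lock is what forces the splice-to-local-list dance: a client callback may immediately submit a new descriptor, which takes the same lock.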
406 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan); in sirfsoc_dma_tx_submit() local
413 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_submit()
416 list_move_tail(&sdesc->node, &schan->queued); in sirfsoc_dma_tx_submit()
420 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_submit()
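Lines 406-420 show submit doing pure bookkeeping: the descriptor moves from prepared to queued under the lock and a cookie is assigned; nothing touches hardware until issue_pending. A sketch (dma_cookie_assign is the standard helper from drivers/dma/dmaengine.h; its use at the elided line is an assumption, though it fits the usual pattern):

```c
static dma_cookie_t sirfsoc_dma_tx_submit(struct dma_async_tx_descriptor *txd)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan);
	struct sirfsoc_dma_desc *sdesc;
	unsigned long flags;
	dma_cookie_t cookie;

	sdesc = container_of(txd, struct sirfsoc_dma_desc, desc);

	spin_lock_irqsave(&schan->lock, flags);

	/* Line 416: prepared -> queued */
	list_move_tail(&sdesc->node, &schan->queued);

	cookie = dma_cookie_assign(txd);

	spin_unlock_irqrestore(&schan->lock, flags);

	return cookie;
}
```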
428 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_slave_config() local
435 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_slave_config()
436 schan->mode = (config->src_maxburst == 4 ? 1 : 0); in sirfsoc_dma_slave_config()
437 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_slave_config()
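Line 436 is the only device-specific part of slave_config: the channel's mode flag is derived from the burst size, a source burst of 4 selecting mode 1. A sketch; whatever validation precedes the locked region (lines 429-434) is elided:

```c
static int sirfsoc_dma_slave_config(struct dma_chan *chan,
				    struct dma_slave_config *config)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	unsigned long flags;

	/* parameter validation elided (lines 429-434) */

	spin_lock_irqsave(&schan->lock, flags);
	/* Line 436: a burst of 4 words selects the alternate transfer mode */
	schan->mode = (config->src_maxburst == 4 ? 1 : 0);
	spin_unlock_irqrestore(&schan->lock, flags);

	return 0;
}
```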
444 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_terminate_all() local
445 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_terminate_all()
446 int cid = schan->chan.chan_id; in sirfsoc_dma_terminate_all()
449 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_terminate_all()
480 list_splice_tail_init(&schan->active, &schan->free); in sirfsoc_dma_terminate_all()
481 list_splice_tail_init(&schan->queued, &schan->free); in sirfsoc_dma_terminate_all()
483 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_terminate_all()
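Lines 444-483 show terminate_all's two halves: channel-stopping register writes (elided, lines 450-479) and, at lines 480-481, splicing both active and queued back onto free so every outstanding descriptor is reclaimed without ever completing. A sketch:

```c
static int sirfsoc_dma_terminate_all(struct dma_chan *chan)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
	int cid = schan->chan.chan_id;
	unsigned long flags;

	spin_lock_irqsave(&schan->lock, flags);

	/* register writes that stop channel `cid` elided (lines 450-479) */

	/* Lines 480-481: reclaim everything in flight or pending */
	list_splice_tail_init(&schan->active, &schan->free);
	list_splice_tail_init(&schan->queued, &schan->free);

	spin_unlock_irqrestore(&schan->lock, flags);

	return 0;
}
```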
490 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_pause_chan() local
491 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_pause_chan()
492 int cid = schan->chan.chan_id; in sirfsoc_dma_pause_chan()
495 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_pause_chan()
517 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_pause_chan()
524 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_resume_chan() local
525 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_resume_chan()
526 int cid = schan->chan.chan_id; in sirfsoc_dma_resume_chan()
529 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_resume_chan()
550 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_resume_chan()
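Lines 490-517 and 524-550 are symmetric pause/resume entry points: both resolve the controller and channel id, then do their register writes entirely under schan->lock. A sketch of pause; resume has the identical shape with the inverse writes:

```c
static int sirfsoc_dma_pause_chan(struct dma_chan *chan)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
	int cid = schan->chan.chan_id;
	unsigned long flags;

	spin_lock_irqsave(&schan->lock, flags);
	/* hardware-specific pause of channel `cid` elided (lines 496-516) */
	spin_unlock_irqrestore(&schan->lock, flags);

	return 0;
}
```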
559 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_alloc_chan_resources() local
587 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
589 list_splice_tail_init(&descs, &schan->free); in sirfsoc_dma_alloc_chan_resources()
590 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
598 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_free_chan_resources() local
604 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
607 BUG_ON(!list_empty(&schan->prepared)); in sirfsoc_dma_free_chan_resources()
608 BUG_ON(!list_empty(&schan->queued)); in sirfsoc_dma_free_chan_resources()
609 BUG_ON(!list_empty(&schan->active)); in sirfsoc_dma_free_chan_resources()
610 BUG_ON(!list_empty(&schan->completed)); in sirfsoc_dma_free_chan_resources()
613 list_splice_tail_init(&schan->free, &descs); in sirfsoc_dma_free_chan_resources()
615 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
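Lines 559-590 and 598-615 bracket a channel's descriptor pool. Allocation builds descriptors on a local list and splices them onto free in one locked step (line 589); teardown asserts via BUG_ON (lines 607-610) that prepared, queued, active, and completed are all empty, i.e. that the channel is idle, before draining free and kfree()ing outside the lock. A sketch of both (plus linux/slab.h); the pool size is an assumed constant:

```c
#define SIRFSOC_DMA_DESCRIPTORS	16	/* assumption: pool size */

static int sirfsoc_dma_alloc_chan_resources(struct dma_chan *chan)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma_desc *sdesc;
	unsigned long flags;
	LIST_HEAD(descs);
	int i;

	for (i = 0; i < SIRFSOC_DMA_DESCRIPTORS; i++) {
		sdesc = kzalloc(sizeof(*sdesc), GFP_KERNEL);
		if (!sdesc)
			break;
		dma_async_tx_descriptor_init(&sdesc->desc, chan);
		sdesc->desc.tx_submit = sirfsoc_dma_tx_submit;
		list_add_tail(&sdesc->node, &descs);
	}

	spin_lock_irqsave(&schan->lock, flags);
	list_splice_tail_init(&descs, &schan->free);	/* line 589 */
	spin_unlock_irqrestore(&schan->lock, flags);

	return i;	/* number of descriptors actually allocated */
}

static void sirfsoc_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma_desc *sdesc, *tmp;
	unsigned long flags;
	LIST_HEAD(descs);

	spin_lock_irqsave(&schan->lock, flags);

	/* Lines 607-610: the channel must be idle at teardown */
	BUG_ON(!list_empty(&schan->prepared));
	BUG_ON(!list_empty(&schan->queued));
	BUG_ON(!list_empty(&schan->active));
	BUG_ON(!list_empty(&schan->completed));

	list_splice_tail_init(&schan->free, &descs);	/* line 613 */

	spin_unlock_irqrestore(&schan->lock, flags);

	list_for_each_entry_safe(sdesc, tmp, &descs, node)
		kfree(sdesc);
}
```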
627 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_issue_pending() local
630 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_issue_pending()
632 if (list_empty(&schan->active) && !list_empty(&schan->queued)) in sirfsoc_dma_issue_pending()
633 sirfsoc_dma_execute(schan); in sirfsoc_dma_issue_pending()
635 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_issue_pending()
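Lines 627-635 make issue_pending a three-line decision: kick the hardware only when nothing is active and something is queued, so a transfer already in flight is never disturbed and the IRQ path (line 292) chains the rest. Reconstructed:

```c
static void sirfsoc_dma_issue_pending(struct dma_chan *chan)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->lock, flags);

	/* Line 632: start only if idle and work is queued */
	if (list_empty(&schan->active) && !list_empty(&schan->queued))
		sirfsoc_dma_execute(schan);

	spin_unlock_irqrestore(&schan->lock, flags);
}
```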
644 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_tx_status() local
648 int cid = schan->chan.chan_id; in sirfsoc_dma_tx_status()
653 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_status()
655 if (list_empty(&schan->active)) { in sirfsoc_dma_tx_status()
658 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
661 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node); in sirfsoc_dma_tx_status()
683 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
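Lines 644-683 show tx_status taking the lock to get a stable view: with nothing active it just reports the cookie state; otherwise the head of active plus hardware counters (elided, lines 662-682) yield a residue. A sketch using the generic dma_cookie_status/dma_set_residue helpers from drivers/dma/dmaengine.h, which is an assumption about the elided lines:

```c
static enum dma_status
sirfsoc_dma_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
		      struct dma_tx_state *txstate)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma_desc *sdesc;
	enum dma_status ret;
	unsigned long flags;
	size_t residue = 0;

	spin_lock_irqsave(&schan->lock, flags);

	if (list_empty(&schan->active)) {	/* line 655 */
		ret = dma_cookie_status(chan, cookie, txstate);
		dma_set_residue(txstate, 0);
		spin_unlock_irqrestore(&schan->lock, flags);
		return ret;
	}

	sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc,
				 node);	/* line 661 */

	/* residue computed from sdesc + hardware counters (lines 662-682) */
	ret = dma_cookie_status(chan, cookie, txstate);
	dma_set_residue(txstate, residue);

	spin_unlock_irqrestore(&schan->lock, flags);

	return ret;
}
```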
693 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_interleaved() local
704 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
705 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_interleaved()
706 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_interleaved()
710 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
720 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
740 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_interleaved()
746 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
750 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
761 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_cyclic() local
780 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
781 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_cyclic()
782 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_cyclic()
786 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
792 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
798 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_cyclic()
799 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
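Lines 693-750 (prep_interleaved) and 761-799 (prep_cyclic) share one pool pattern: hold the lock just long enough to pop a descriptor off free, drop it while filling in the transfer template, then retake it to park the descriptor on prepared. A sketch of the cyclic variant; the interleaved one differs only in the template it fills and in having an error path that re-locks before returning (lines 746-750):

```c
static struct dma_async_tx_descriptor *
sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr,
			size_t buf_len, size_t period_len,
			enum dma_transfer_direction direction,
			unsigned long flags)
{
	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
	struct sirfsoc_dma_desc *sdesc = NULL;
	unsigned long iflags;

	/* Lines 780-786: pull a descriptor from the free pool */
	spin_lock_irqsave(&schan->lock, iflags);
	if (!list_empty(&schan->free)) {
		sdesc = list_first_entry(&schan->free,
					 struct sirfsoc_dma_desc, node);
		list_del(&sdesc->node);
	}
	spin_unlock_irqrestore(&schan->lock, iflags);

	if (!sdesc)
		return NULL;

	/* fill in the cyclic parameters outside the lock (lines 787-791) */

	/* Lines 792-799: park it on prepared until tx_submit */
	spin_lock_irqsave(&schan->lock, iflags);
	list_add_tail(&sdesc->node, &schan->prepared);
	spin_unlock_irqrestore(&schan->lock, iflags);

	return &sdesc->desc;
}
```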
845 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_probe() local
926 schan = &sdma->channels[i]; in sirfsoc_dma_probe()
928 schan->chan.device = dma; in sirfsoc_dma_probe()
929 dma_cookie_init(&schan->chan); in sirfsoc_dma_probe()
931 INIT_LIST_HEAD(&schan->free); in sirfsoc_dma_probe()
932 INIT_LIST_HEAD(&schan->prepared); in sirfsoc_dma_probe()
933 INIT_LIST_HEAD(&schan->queued); in sirfsoc_dma_probe()
934 INIT_LIST_HEAD(&schan->active); in sirfsoc_dma_probe()
935 INIT_LIST_HEAD(&schan->completed); in sirfsoc_dma_probe()
937 spin_lock_init(&schan->lock); in sirfsoc_dma_probe()
938 list_add_tail(&schan->chan.device_node, &dma->channels); in sirfsoc_dma_probe()
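Lines 845-938 are probe wiring each channel into the dmaengine core; the five INIT_LIST_HEADs at 931-935 create the descriptor lifecycle the rest of this listing revolves around: free -> prepared (prep_*) -> queued (tx_submit) -> active (execute) -> completed (IRQ) -> free again (process_completed). The loop, essentially as listed, where `dma` is the embedded struct dma_device per lines 928 and 938:

```c
	for (i = 0; i < SIRFSOC_DMA_CHANNELS; i++) {
		schan = &sdma->channels[i];

		schan->chan.device = dma;
		dma_cookie_init(&schan->chan);		/* line 929 */

		INIT_LIST_HEAD(&schan->free);
		INIT_LIST_HEAD(&schan->prepared);
		INIT_LIST_HEAD(&schan->queued);
		INIT_LIST_HEAD(&schan->active);
		INIT_LIST_HEAD(&schan->completed);

		spin_lock_init(&schan->lock);
		list_add_tail(&schan->chan.device_node, &dma->channels);
	}
```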
1013 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_suspend() local
1042 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_suspend()
1043 if (list_empty(&schan->active)) in sirfsoc_dma_pm_suspend()
1061 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_resume() local
1085 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_resume()
1086 if (list_empty(&schan->active)) in sirfsoc_dma_pm_resume()
1088 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_resume()
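Finally, lines 1013-1088 show both PM callbacks using list_empty(&schan->active) as an "is this channel idle?" test: suspend skips saving per-channel registers for idle channels (line 1043), and resume reprograms each busy channel from the first descriptor on its active list (lines 1086-1088), since that descriptor still describes the interrupted transfer. A sketch of the resume loop; the register save/restore and the drvdata plumbing are assumptions:

```c
static int sirfsoc_dma_pm_resume(struct device *dev)
{
	struct sirfsoc_dma *sdma = dev_get_drvdata(dev);	/* assumed */
	struct sirfsoc_dma_chan *schan;
	struct sirfsoc_dma_desc *sdesc;
	int ch;

	/* global register restore elided (lines 1062-1084) */

	for (ch = 0; ch < SIRFSOC_DMA_CHANNELS; ch++) {
		schan = &sdma->channels[ch];
		if (list_empty(&schan->active))
			continue;	/* line 1086: idle, nothing to redo */
		sdesc = list_first_entry(&schan->active,
					 struct sirfsoc_dma_desc, node);
		/* reprogram the channel from sdesc (elided) */
	}

	return 0;
}
```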