Lines matching refs:hvc — every reference to the per-channel pointer hvc (a struct mtk_hsdma_vchan *), listed with its source line number and the function it appears in:
495 struct mtk_hsdma_vchan *hvc) in mtk_hsdma_issue_vchan_pending() argument
500 lockdep_assert_held(&hvc->vc.lock); in mtk_hsdma_issue_vchan_pending()
502 list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) { in mtk_hsdma_issue_vchan_pending()
528 list_move_tail(&vd->node, &hvc->desc_hw_processing); in mtk_hsdma_issue_vchan_pending()
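The mtk_hsdma_issue_vchan_pending() references above show the core issue pattern: with the virtual channel's lock held, each descriptor on vc.desc_issued is handed toward the hardware and then tracked on desc_hw_processing. Below is a minimal sketch of that pattern. The struct layouts, the include list, the mtk_hsdma_device field names and the sketch_to_hsdma_dev() accessor are reconstructions from these lines plus the standard dmaengine/virt-dma API, not the driver's actual definitions.

        #include <linux/dmaengine.h>
        #include <linux/list.h>
        #include <linux/spinlock.h>
        #include <linux/lockdep.h>
        #include <linux/completion.h>
        #include "../virt-dma.h"        /* struct virt_dma_chan, virt_dma_desc */
        #include "../dmaengine.h"       /* dma_cookie_status() */

        /* Per-channel state as reconstructed from the references in this listing. */
        struct mtk_hsdma_vchan {
                struct virt_dma_chan vc;
                struct completion issue_completion;
                bool issue_synchronize;
                struct list_head desc_hw_processing;
        };

        /* Device container; only what the listing implies, field names are guesses. */
        struct mtk_hsdma_device {
                struct dma_device ddev;
                struct mtk_hsdma_vchan *vc;     /* per-channel array, indexed as vc[i] */
        };

        static inline struct mtk_hsdma_vchan *to_hsdma_vchan(struct dma_chan *chan)
        {
                return container_of(chan, struct mtk_hsdma_vchan, vc.chan);
        }

        /* Hypothetical accessor; the driver's own helper is not shown in the listing. */
        static inline struct mtk_hsdma_device *sketch_to_hsdma_dev(struct dma_chan *chan)
        {
                return container_of(chan->device, struct mtk_hsdma_device, ddev);
        }

        /*
         * Sketch only: with vc.lock held, hand each issued descriptor to the
         * hardware ring and keep it on desc_hw_processing until it retires.
         * Ring-pointer and doorbell updates done by the real function are
         * omitted here.
         */
        static void sketch_issue_vchan_pending(struct mtk_hsdma_device *hsdma,
                                               struct mtk_hsdma_vchan *hvc)
        {
                struct virt_dma_desc *vd, *vd2;

                lockdep_assert_held(&hvc->vc.lock);

                list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
                        /* ... program the descriptor into the ring via hsdma ... */
                        list_move_tail(&vd->node, &hvc->desc_hw_processing);
                }
        }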
534 struct mtk_hsdma_vchan *hvc; in mtk_hsdma_free_rooms_in_ring() local
583 hvc = to_hsdma_vchan(cb->vd->tx.chan); in mtk_hsdma_free_rooms_in_ring()
585 spin_lock(&hvc->vc.lock); in mtk_hsdma_free_rooms_in_ring()
593 if (hvc->issue_synchronize && in mtk_hsdma_free_rooms_in_ring()
594 list_empty(&hvc->desc_hw_processing)) { in mtk_hsdma_free_rooms_in_ring()
595 complete(&hvc->issue_completion); in mtk_hsdma_free_rooms_in_ring()
596 hvc->issue_synchronize = false; in mtk_hsdma_free_rooms_in_ring()
598 spin_unlock(&hvc->vc.lock); in mtk_hsdma_free_rooms_in_ring()
633 hvc = &hsdma->vc[i]; in mtk_hsdma_free_rooms_in_ring()
634 spin_lock(&hvc->vc.lock); in mtk_hsdma_free_rooms_in_ring()
635 mtk_hsdma_issue_vchan_pending(hsdma, hvc); in mtk_hsdma_free_rooms_in_ring()
636 spin_unlock(&hvc->vc.lock); in mtk_hsdma_free_rooms_in_ring()
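The mtk_hsdma_free_rooms_in_ring() references point to a two-part completion path: the retired descriptor's channel is recovered with to_hsdma_vchan(cb->vd->tx.chan); once that channel's desc_hw_processing list is empty and a waiter has requested issue synchronization, the completion is signalled; finally the &hsdma->vc[i] entries at the end of that group show each virtual channel being re-kicked under its lock. A sketch of the signalling half, reusing the assumptions above:

        /*
         * Sketch: run from the descriptor-completion path after the retired
         * descriptor has been dropped from desc_hw_processing. If a
         * terminate/synchronize caller is waiting (see the last sketch in
         * this listing) and nothing is left in flight, wake it exactly once.
         */
        static void sketch_signal_issue_sync(struct mtk_hsdma_vchan *hvc)
        {
                spin_lock(&hvc->vc.lock);
                if (hvc->issue_synchronize &&
                    list_empty(&hvc->desc_hw_processing)) {
                        complete(&hvc->issue_completion);
                        hvc->issue_synchronize = false;
                }
                spin_unlock(&hvc->vc.lock);
        }

Using a completion here lets the terminate path sleep instead of spinning while the hardware finishes the descriptors it already owns.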
662 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c); in mtk_hsdma_find_active_desc() local
665 list_for_each_entry(vd, &hvc->desc_hw_processing, node) in mtk_hsdma_find_active_desc()
669 list_for_each_entry(vd, &hvc->vc.desc_issued, node) in mtk_hsdma_find_active_desc()
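mtk_hsdma_find_active_desc() walks desc_hw_processing first and falls back to vc.desc_issued. Matching on the transaction cookie is an assumption; the listing only shows which lists are searched:

        /* Sketch: caller is expected to hold hvc->vc.lock. */
        static struct virt_dma_desc *sketch_find_active_desc(struct dma_chan *c,
                                                             dma_cookie_t cookie)
        {
                struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
                struct virt_dma_desc *vd;

                list_for_each_entry(vd, &hvc->desc_hw_processing, node)
                        if (vd->tx.cookie == cookie)
                                return vd;

                list_for_each_entry(vd, &hvc->vc.desc_issued, node)
                        if (vd->tx.cookie == cookie)
                                return vd;

                return NULL;
        }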
680 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c); in mtk_hsdma_tx_status() local
691 spin_lock_irqsave(&hvc->vc.lock, flags); in mtk_hsdma_tx_status()
693 spin_unlock_irqrestore(&hvc->vc.lock, flags); in mtk_hsdma_tx_status()
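mtk_hsdma_tx_status() brackets the lookup with spin_lock_irqsave()/spin_unlock_irqrestore() on the channel lock. The sketch below assumes the usual dmaengine status pattern built on dma_cookie_status() (from drivers/dma/dmaengine.h); dma_cookie_status() and the residue handling are not shown in the listing and are assumptions:

        /*
         * Sketch of the status callback: resolve the cookie first, then look
         * the descriptor up under the channel lock to report what is left.
         */
        static enum dma_status sketch_tx_status(struct dma_chan *c, dma_cookie_t cookie,
                                                struct dma_tx_state *txstate)
        {
                struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
                struct virt_dma_desc *vd;
                enum dma_status ret;
                unsigned long flags;

                ret = dma_cookie_status(c, cookie, txstate);
                if (ret == DMA_COMPLETE || !txstate)
                        return ret;

                spin_lock_irqsave(&hvc->vc.lock, flags);
                vd = sketch_find_active_desc(c, cookie);
                spin_unlock_irqrestore(&hvc->vc.lock, flags);

                if (vd) {
                        /* dma_set_residue(txstate, <bytes still pending>); the
                         * residue field is driver-specific and not shown here. */
                }

                return ret;
        }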
708 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c); in mtk_hsdma_issue_pending() local
711 spin_lock_irqsave(&hvc->vc.lock, flags); in mtk_hsdma_issue_pending()
713 if (vchan_issue_pending(&hvc->vc)) in mtk_hsdma_issue_pending()
714 mtk_hsdma_issue_vchan_pending(hsdma, hvc); in mtk_hsdma_issue_pending()
716 spin_unlock_irqrestore(&hvc->vc.lock, flags); in mtk_hsdma_issue_pending()
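mtk_hsdma_issue_pending() follows the standard virt-dma issue path: take the channel lock with interrupts disabled and only touch the hardware when vchan_issue_pending() reports newly issued descriptors. Sketch, with the device pointer obtained through the assumed accessor from the first sketch:

        static void sketch_issue_pending(struct dma_chan *c)
        {
                struct mtk_hsdma_device *hsdma = sketch_to_hsdma_dev(c);
                struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
                unsigned long flags;

                spin_lock_irqsave(&hvc->vc.lock, flags);

                /* vchan_issue_pending() splices desc_submitted onto desc_issued. */
                if (vchan_issue_pending(&hvc->vc))
                        sketch_issue_vchan_pending(hsdma, hvc);

                spin_unlock_irqrestore(&hvc->vc.lock, flags);
        }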
757 struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c); in mtk_hsdma_free_active_desc() local
765 spin_lock(&hvc->vc.lock); in mtk_hsdma_free_active_desc()
766 if (!list_empty(&hvc->desc_hw_processing)) { in mtk_hsdma_free_active_desc()
767 hvc->issue_synchronize = true; in mtk_hsdma_free_active_desc()
770 spin_unlock(&hvc->vc.lock); in mtk_hsdma_free_active_desc()
773 wait_for_completion(&hvc->issue_completion); in mtk_hsdma_free_active_desc()
778 WARN_ONCE(!list_empty(&hvc->desc_hw_processing), in mtk_hsdma_free_active_desc()
782 vchan_synchronize(&hvc->vc); in mtk_hsdma_free_active_desc()
784 WARN_ONCE(!list_empty(&hvc->vc.desc_completed), in mtk_hsdma_free_active_desc()
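mtk_hsdma_free_active_desc() is the other half of the handshake signalled from mtk_hsdma_free_rooms_in_ring(): it sets issue_synchronize under the lock, sleeps on issue_completion until the hardware-processing list drains, then lets vchan_synchronize() flush the virt-dma side. The WARN_ONCE() messages below are placeholders; only the checked conditions come from the listing:

        /*
         * Sketch of the terminate/synchronize path: request a wake-up, wait
         * for in-flight descriptors to retire, then drain the virt-dma lists.
         */
        static void sketch_free_active_desc(struct dma_chan *c)
        {
                struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
                bool sync_needed = false;

                spin_lock(&hvc->vc.lock);
                if (!list_empty(&hvc->desc_hw_processing)) {
                        hvc->issue_synchronize = true;
                        sync_needed = true;
                }
                spin_unlock(&hvc->vc.lock);

                if (sync_needed)
                        wait_for_completion(&hvc->issue_completion);

                WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
                          "descriptors still being processed by hardware\n");

                vchan_synchronize(&hvc->vc);

                WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
                          "completed descriptors left on the channel\n");
        }

Waiting on the completion before vchan_synchronize() matters because the virt-dma core only knows about descriptors it owns; the ones already handed to the ring have to drain first.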