Searched refs:dma_caps (Results 1 – 5 of 5) sorted by relevance
411 struct dma_slave_caps dma_caps; in snd_dmaengine_pcm_refine_runtime_hwparams() local
421 ret = dma_get_slave_caps(chan, &dma_caps); in snd_dmaengine_pcm_refine_runtime_hwparams()
423 if (dma_caps.cmd_pause && dma_caps.cmd_resume) in snd_dmaengine_pcm_refine_runtime_hwparams()
425 if (dma_caps.residue_granularity <= DMA_RESIDUE_GRANULARITY_SEGMENT) in snd_dmaengine_pcm_refine_runtime_hwparams()
429 addr_widths = dma_caps.dst_addr_widths; in snd_dmaengine_pcm_refine_runtime_hwparams()
431 addr_widths = dma_caps.src_addr_widths; in snd_dmaengine_pcm_refine_runtime_hwparams()
203 struct dma_slave_caps dma_caps; in dmaengine_pcm_can_report_residue() local
206 ret = dma_get_slave_caps(chan, &dma_caps); in dmaengine_pcm_can_report_residue()
213 if (dma_caps.residue_granularity == DMA_RESIDUE_GRANULARITY_DESCRIPTOR) in dmaengine_pcm_can_report_residue()
1053 struct dma_slave_caps dma_caps; in s3c24xx_serial_request_dma() local
1076 ret = dma_get_slave_caps(dma->rx_chan, &dma_caps); in s3c24xx_serial_request_dma()
1078 dma_caps.residue_granularity < DMA_RESIDUE_GRANULARITY_BURST) { in s3c24xx_serial_request_dma()
1093 ret = dma_get_slave_caps(dma->tx_chan, &dma_caps); in s3c24xx_serial_request_dma()
1095 dma_caps.residue_granularity < DMA_RESIDUE_GRANULARITY_BURST) { in s3c24xx_serial_request_dma()
4971 struct wmi_dma_ring_capabilities *dma_caps; in ath11k_wmi_tlv_dma_ring_caps() local
4977 dma_caps = (struct wmi_dma_ring_capabilities *)ptr; in ath11k_wmi_tlv_dma_ring_caps()
5000 if (dma_caps[i].module_id >= WMI_DIRECT_BUF_MAX) { in ath11k_wmi_tlv_dma_ring_caps()
5001 ath11k_warn(ab, "Invalid module id %d\n", dma_caps[i].module_id); in ath11k_wmi_tlv_dma_ring_caps()
5006 dir_buff_caps[i].id = dma_caps[i].module_id; in ath11k_wmi_tlv_dma_ring_caps()
5007 dir_buff_caps[i].pdev_id = DP_HW2SW_MACID(dma_caps[i].pdev_id); in ath11k_wmi_tlv_dma_ring_caps()
5008 dir_buff_caps[i].min_elem = dma_caps[i].min_elem; in ath11k_wmi_tlv_dma_ring_caps()
5009 dir_buff_caps[i].min_buf_sz = dma_caps[i].min_buf_sz; in ath11k_wmi_tlv_dma_ring_caps()
5010 dir_buff_caps[i].min_buf_align = dma_caps[i].min_buf_align; in ath11k_wmi_tlv_dma_ring_caps()
4790 struct ath12k_wmi_dma_ring_caps_params *dma_caps; in ath12k_wmi_dma_ring_caps() local
4796 dma_caps = (struct ath12k_wmi_dma_ring_caps_params *)ptr; in ath12k_wmi_dma_ring_caps()
4819 if (le32_to_cpu(dma_caps[i].module_id) >= WMI_DIRECT_BUF_MAX) { in ath12k_wmi_dma_ring_caps()
4821 le32_to_cpu(dma_caps[i].module_id)); in ath12k_wmi_dma_ring_caps()
4826 dir_buff_caps[i].id = le32_to_cpu(dma_caps[i].module_id); in ath12k_wmi_dma_ring_caps()
4828 DP_HW2SW_MACID(le32_to_cpu(dma_caps[i].pdev_id)); in ath12k_wmi_dma_ring_caps()
4829 dir_buff_caps[i].min_elem = le32_to_cpu(dma_caps[i].min_elem); in ath12k_wmi_dma_ring_caps()
4830 dir_buff_caps[i].min_buf_sz = le32_to_cpu(dma_caps[i].min_buf_sz); in ath12k_wmi_dma_ring_caps()
4831 dir_buff_caps[i].min_buf_align = le32_to_cpu(dma_caps[i].min_buf_align); in ath12k_wmi_dma_ring_caps()