/linux/drivers/crypto/intel/qat/qat_common/

qat_bl.c
     26  bl_dma_dir = blp != blpout ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;  in qat_bl_free_bufl()
     32  dma_unmap_single(dev, blp, sz, DMA_TO_DEVICE);  in qat_bl_free_bufl()
     43  dma_unmap_single(dev, blpout, sz_out, DMA_TO_DEVICE);  in qat_bl_free_bufl()
     90  bufl_dma_dir = sgl != sglout ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;  in __qat_bl_sgl_to_bufl()
    120  blp = dma_map_single(dev, bufl, sz, DMA_TO_DEVICE);  in __qat_bl_sgl_to_bufl()
    183  bloutp = dma_map_single(dev, buflout, sz_out, DMA_TO_DEVICE);  in __qat_bl_sgl_to_bufl()
    198  dma_unmap_single(dev, bloutp, sz_out, DMA_TO_DEVICE);  in __qat_bl_sgl_to_bufl()
    215  dma_unmap_single(dev, blp, sz, DMA_TO_DEVICE);  in __qat_bl_sgl_to_bufl()
    386  new_blp = dma_map_single(dev, new_bl, new_bl_size, DMA_TO_DEVICE);  in qat_bl_realloc_map_new_dst()
    391  dma_unmap_single(dev, qat_bufs->bloutp, qat_bufs->sz_out, DMA_TO_DEVICE);  in qat_bl_realloc_map_new_dst()
    [all …]

qat_asym_algs.c
    170  DMA_TO_DEVICE);  in qat_dh_cb()
    184  DMA_TO_DEVICE);  in qat_dh_cb()
    187  DMA_TO_DEVICE);  in qat_dh_cb()
    307  DMA_TO_DEVICE);  in qat_dh_compute_value()
    338  DMA_TO_DEVICE);  in qat_dh_compute_value()
    344  DMA_TO_DEVICE);  in qat_dh_compute_value()
    364  DMA_TO_DEVICE);  in qat_dh_compute_value()
    369  DMA_TO_DEVICE);  in qat_dh_compute_value()
    380  DMA_TO_DEVICE);  in qat_dh_compute_value()
    577  DMA_TO_DEVICE);  in qat_rsa_cb()
    [all …]
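The qat matches above (and the ccree entry that follows) share one idiom: the source buffer is mapped DMA_TO_DEVICE when the output goes elsewhere, but DMA_BIDIRECTIONAL when the operation runs in place and the engine writes back into the same buffer. A minimal, hedged sketch of that decision, using hypothetical example_* names rather than the drivers' own helpers:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/*
 * Sketch only, not qat/ccree code: when source and destination are the
 * same scatterlist (in-place operation) the engine both reads and
 * writes it, so it must be mapped DMA_BIDIRECTIONAL; a separate source
 * is only read by the device and DMA_TO_DEVICE is enough.
 */
static int example_map_src(struct device *dev, struct scatterlist *src,
			   struct scatterlist *dst, int nents)
{
	enum dma_data_direction dir =
		src != dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
	int mapped;

	mapped = dma_map_sg(dev, src, nents, dir);
	if (!mapped)		/* 0 means the mapping failed */
		return -ENOMEM;

	/* ... hand the mapped entries to the engine, wait, then ... */

	dma_unmap_sg(dev, src, nents, dir);	/* unmap with the original nents */
	return 0;
}
```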
/linux/drivers/crypto/ccree/

cc_buffer_mgr.c
    297  if (dma_map_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE) != 1) {  in cc_set_aead_conf_buf()
    322  if (dma_map_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) {  in cc_set_hash_buf()
    360  dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_TO_DEVICE);  in cc_unmap_cipher_request()
    382  int src_direction = (src != dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL);  in cc_map_cipher_request()
    462  int src_direction = (req->src != req->dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL);  in cc_unmap_aead_request()
    477  AES_BLOCK_SIZE, DMA_TO_DEVICE);  in cc_unmap_aead_request()
    482  AES_BLOCK_SIZE, DMA_TO_DEVICE);  in cc_unmap_aead_request()
    487  AES_BLOCK_SIZE, DMA_TO_DEVICE);  in cc_unmap_aead_request()
    494  AES_BLOCK_SIZE, DMA_TO_DEVICE);  in cc_unmap_aead_request()
    497  dma_unmap_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE);  in cc_unmap_aead_request()
    [all …]
/linux/drivers/crypto/caam/

caamhash.c
    175  DMA_TO_DEVICE);  in buf_map_to_sec4_sg()
    478  DMA_TO_DEVICE);  in ahash_setkey()
    504  DMA_TO_DEVICE);  in axcbc_setkey()
    558  dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);  in ahash_unmap()
    562  edesc->sec4_sg_bytes, DMA_TO_DEVICE);  in ahash_unmap()
    566  DMA_TO_DEVICE);  in ahash_unmap()
    740  src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE);  in ahash_edesc_add_src()
    862  DMA_TO_DEVICE);  in ahash_update_ctx()
    882  dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);  in ahash_update_ctx()
    910  DMA_TO_DEVICE);  in ahash_update_ctx()
    [all …]

caamalg_qi2.c
    158  dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);  in caam_unmap()
    169  dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);  in caam_unmap()
    403  DMA_TO_DEVICE);  in aead_edesc_alloc()
    419  DMA_TO_DEVICE);  in aead_edesc_alloc()
    490  iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);  in aead_edesc_alloc()
    514  DMA_TO_DEVICE);  in aead_edesc_alloc()
    518  iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);  in aead_edesc_alloc()
    535  qm_sg_dma = dma_map_single(dev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);  in aead_edesc_alloc()
    538  dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);  in aead_edesc_alloc()
    540  iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);  in aead_edesc_alloc()
    [all …]
/linux/drivers/net/ethernet/marvell/octeon_ep/

octep_tx.c
     60  tx_buffer->skb->len, DMA_TO_DEVICE);  in octep_iq_process_completions()
     71  tx_buffer->sglist[0].len[3], DMA_TO_DEVICE);  in octep_iq_process_completions()
     76  tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE);  in octep_iq_process_completions()
    121  tx_buffer->skb->len, DMA_TO_DEVICE);  in octep_iq_free_pending()
    133  DMA_TO_DEVICE);  in octep_iq_free_pending()
    138  tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE);  in octep_iq_free_pending()
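Both octeon_ep variants (the octep_vf listing follows) tear down their transmit mappings once the hardware reports completion. A hedged sketch of that completion-side cleanup, with a hypothetical example_tx_buffer structure rather than the drivers' real descriptors:

```c
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Hypothetical per-descriptor bookkeeping, not the octep structures. */
struct example_tx_buffer {
	struct sk_buff *skb;
	dma_addr_t dma;
};

/*
 * Once the NIC signals completion it no longer reads the buffer, so the
 * DMA_TO_DEVICE mapping made at transmit time is released and the skb
 * freed.
 */
static void example_tx_complete(struct device *dev,
				struct example_tx_buffer *tx)
{
	dma_unmap_single(dev, tx->dma, tx->skb->len, DMA_TO_DEVICE);
	dev_kfree_skb_any(tx->skb);
	tx->skb = NULL;
}
```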
/linux/drivers/net/ethernet/marvell/octeon_ep_vf/

octep_vf_tx.c
     61  tx_buffer->skb->len, DMA_TO_DEVICE);  in octep_vf_iq_process_completions()
     72  tx_buffer->sglist[0].len[3], DMA_TO_DEVICE);  in octep_vf_iq_process_completions()
     77  tx_buffer->sglist[i >> 2].len[3 - (i & 3)], DMA_TO_DEVICE);  in octep_vf_iq_process_completions()
    120  tx_buffer->skb->len, DMA_TO_DEVICE);  in octep_vf_iq_free_pending()
    132  DMA_TO_DEVICE);  in octep_vf_iq_free_pending()
    137  tx_buffer->sglist[i >> 2].len[i & 3], DMA_TO_DEVICE);  in octep_vf_iq_free_pending()
/linux/drivers/media/pci/ivtv/

ivtv-udma.c
     87  DMA_TO_DEVICE);  in ivtv_udma_alloc()
    143  dma->page_count, DMA_TO_DEVICE);  in ivtv_udma_setup()
    174  DMA_TO_DEVICE);  in ivtv_udma_unmap()
    191  sizeof(itv->udma.SGarray), DMA_TO_DEVICE);  in ivtv_udma_free()
    197  itv->udma.page_count, DMA_TO_DEVICE);  in ivtv_udma_free()

ivtv-udma.h
     27  sizeof(itv->udma.SGarray), DMA_TO_DEVICE);  in ivtv_udma_sync_for_device()
     33  sizeof(itv->udma.SGarray), DMA_TO_DEVICE);  in ivtv_udma_sync_for_cpu()
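As the two header matches show, ivtv keeps its user-DMA mapping alive across transfers and only performs cache maintenance around each one. A minimal sketch of that ownership hand-off for a long-lived DMA_TO_DEVICE scatterlist; the example_* wrappers are hypothetical and not the ivtv helpers themselves:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/*
 * Sketch only: the scatter-gather list stays mapped, so each transfer
 * just moves ownership between the CPU and the device.
 */
static void example_give_to_device(struct device *dev,
				   struct scatterlist *sg, int nents)
{
	/* write back CPU caches so the device reads current data */
	dma_sync_sg_for_device(dev, sg, nents, DMA_TO_DEVICE);
}

static void example_take_from_device(struct device *dev,
				     struct scatterlist *sg, int nents)
{
	/* return ownership to the CPU once the device is done reading */
	dma_sync_sg_for_cpu(dev, sg, nents, DMA_TO_DEVICE);
}
```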
/linux/include/linux/

dma-direction.h
      7  DMA_TO_DEVICE = 1,  enumerator
     14  return dir == DMA_BIDIRECTIONAL || dir == DMA_TO_DEVICE ||  in valid_dma_direction()
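This header is where DMA_TO_DEVICE is defined. The block below reconstructs the enum and valid_dma_direction() from the matches above; the comments are added here, and the exact declaration details (e.g. int vs. bool return type) may differ between kernel versions:

```c
/* The streaming-DMA directions from include/linux/dma-direction.h */
enum dma_data_direction {
	DMA_BIDIRECTIONAL = 0,	/* device may both read and write the buffer */
	DMA_TO_DEVICE = 1,	/* CPU fills the buffer, device only reads it */
	DMA_FROM_DEVICE = 2,	/* device fills the buffer, CPU only reads it */
	DMA_NONE = 3,		/* no transfer; rejected by the DMA API */
};

static inline int valid_dma_direction(enum dma_data_direction dir)
{
	return dir == DMA_BIDIRECTIONAL || dir == DMA_TO_DEVICE ||
	       dir == DMA_FROM_DEVICE;
}
```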
/linux/drivers/crypto/allwinner/sun8i-ce/

sun8i-ce-cipher.c
    187  rctx->addr_key = dma_map_single(ce->dev, op->key, op->keylen, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    205  DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    223  nr_sgs = dma_map_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    277  dma_unmap_sg(ce->dev, areq->src, ns, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    284  dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    296  dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    326  dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);  in sun8i_ce_cipher_unprepare()
    332  dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);  in sun8i_ce_cipher_unprepare()
    344  dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);  in sun8i_ce_cipher_unprepare()
/linux/drivers/infiniband/ulp/iser/

iser_initiator.c
    101  DMA_TO_DEVICE);  in iser_prepare_write_cmd()
    138  iser_dma_unmap_task_data(iser_task, ISER_DIR_OUT, DMA_TO_DEVICE);  in iser_prepare_write_cmd()
    153  tx_desc->dma_addr, ISER_HEADERS_LEN, DMA_TO_DEVICE);  in iser_create_send_desc()
    169  ISCSI_DEF_MAX_RECV_SEG_LEN, DMA_TO_DEVICE);  in iser_free_login_buf()
    193  DMA_TO_DEVICE);  in iser_alloc_login_buf()
    216  DMA_TO_DEVICE);  in iser_alloc_login_buf()
    500  task->data_count, DMA_TO_DEVICE);  in iser_send_control()
    505  task->data_count, DMA_TO_DEVICE);  in iser_send_control()
    706  ISER_HEADERS_LEN, DMA_TO_DEVICE);  in iser_dataout_comp()
    745  DMA_TO_DEVICE);  in iser_task_rdma_finalize()
/linux/arch/arc/mm/

dma.c
     55  case DMA_TO_DEVICE:  in arch_sync_dma_for_device()
     76  case DMA_TO_DEVICE:  in arch_sync_dma_for_cpu()
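The architecture hooks above (and the nios2 and csky entries elsewhere in this listing) switch on the direction to pick the cache maintenance a streaming mapping needs. A hedged sketch of that shape; cache_wb(), cache_inv() and cache_wbinv() are hypothetical stand-ins for the real per-arch primitives:

```c
#include <linux/dma-direction.h>
#include <linux/types.h>

/* Hypothetical cache primitives standing in for the per-arch ones. */
extern void cache_wb(phys_addr_t paddr, size_t size);
extern void cache_inv(phys_addr_t paddr, size_t size);
extern void cache_wbinv(phys_addr_t paddr, size_t size);

/*
 * Typical shape of arch_sync_dma_for_device(): a DMA_TO_DEVICE buffer
 * only needs its dirty cache lines written back before the device
 * reads it.
 */
void example_sync_dma_for_device(phys_addr_t paddr, size_t size,
				 enum dma_data_direction dir)
{
	switch (dir) {
	case DMA_TO_DEVICE:
		cache_wb(paddr, size);		/* clean dirty lines */
		break;
	case DMA_FROM_DEVICE:
		cache_inv(paddr, size);		/* drop lines the device will overwrite */
		break;
	case DMA_BIDIRECTIONAL:
		cache_wbinv(paddr, size);	/* clean, then invalidate */
		break;
	default:
		break;
	}
}
```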
/linux/arch/nios2/mm/

dma-mapping.c
     31  case DMA_TO_DEVICE:  in arch_sync_dma_for_device()
     56  case DMA_TO_DEVICE:  in arch_sync_dma_for_cpu()
/linux/drivers/spi/

spi-fsl-cpm.c
    138  DMA_TO_DEVICE);  in fsl_spi_cpm_bufs()
    171  dma_unmap_single(dev, mspi->tx_dma, t->len, DMA_TO_DEVICE);  in fsl_spi_cpm_bufs()
    182  dma_unmap_single(dev, mspi->tx_dma, t->len, DMA_TO_DEVICE);  in fsl_spi_cpm_bufs_complete()
    354  DMA_TO_DEVICE);  in fsl_spi_cpm_init()
    390  dma_unmap_single(dev, mspi->dma_dummy_tx, PAGE_SIZE, DMA_TO_DEVICE);  in fsl_spi_cpm_init()
    410  dma_unmap_single(dev, mspi->dma_dummy_tx, PAGE_SIZE, DMA_TO_DEVICE);  in fsl_spi_cpm_free()
/linux/drivers/net/ethernet/altera/

altera_sgdma.c
     81  priv->txdescmem, DMA_TO_DEVICE);  in sgdma_initialize()
     94  priv->txdescmem, DMA_TO_DEVICE);  in sgdma_initialize()
     97  priv->rxdescmem, DMA_TO_DEVICE);  in sgdma_initialize()
    110  priv->txdescmem, DMA_TO_DEVICE);  in sgdma_uninitialize()
    365  DMA_TO_DEVICE);  in sgdma_async_read()
    392  SGDMA_DESC_LEN, DMA_TO_DEVICE);  in sgdma_async_write()
/linux/drivers/net/ethernet/mellanox/mlxbf_gige/

mlxbf_gige_tx.c
     69  priv->tx_skb[i]->len, DMA_TO_DEVICE);  in mlxbf_gige_tx_deinit()
    159  priv->tx_skb[tx_wqe_index]->len, DMA_TO_DEVICE);  in mlxbf_gige_handle_tx_complete()
    219  &tx_buf_dma, DMA_TO_DEVICE);  in mlxbf_gige_start_xmit()
    234  skb->len, DMA_TO_DEVICE);  in mlxbf_gige_start_xmit()
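mlxbf_gige maps the skb at transmit time and unmaps it from the completion handler, like the other NIC drivers in this listing. A minimal transmit-side sketch that checks the mapping with dma_mapping_error() before touching the hardware; every example_* name is hypothetical and this is not the mlxbf_gige code itself:

```c
#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/*
 * Hedged sketch: map the skb head for the device to read and bail out
 * cleanly if the DMA layer refuses the mapping.
 */
static netdev_tx_t example_start_xmit(struct device *dev, struct sk_buff *skb)
{
	dma_addr_t dma;

	dma = dma_map_single(dev, skb->data, skb_headlen(skb), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma)) {
		dev_kfree_skb_any(skb);		/* drop; nothing was queued */
		return NETDEV_TX_OK;
	}

	/* ... write 'dma' and the length into a TX descriptor, ring the
	 * doorbell, and let the completion handler unmap and free ... */

	return NETDEV_TX_OK;
}
```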
/linux/drivers/net/wireless/intel/iwlwifi/pcie/

tx-gen2.c
     37  DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);  in get_workaround_page()
    121  DMA_TO_DEVICE);  in iwl_txq_gen2_set_tb_with_wa()
    127  DMA_TO_DEVICE);  in iwl_txq_gen2_set_tb_with_wa()
    149  dma_unmap_page(trans->dev, oldphys, len, DMA_TO_DEVICE);  in iwl_txq_gen2_set_tb_with_wa()
    151  dma_unmap_single(trans->dev, oldphys, len, DMA_TO_DEVICE);  in iwl_txq_gen2_set_tb_with_wa()
    283  DMA_TO_DEVICE);  in iwl_txq_gen2_build_amsdu()
    332  tb_phys = dma_map_single(trans->dev, tb1_addr, len, DMA_TO_DEVICE);  in iwl_txq_gen2_build_tx_amsdu()
    371  fragsz, DMA_TO_DEVICE);  in iwl_txq_gen2_tx_add_frags()
    427  tb_phys = dma_map_single(trans->dev, tb1_addr, tb1_len, DMA_TO_DEVICE);  in iwl_txq_gen2_build_tx()
    445  tb2_len, DMA_TO_DEVICE);  in iwl_txq_gen2_build_tx()
    [all …]
/linux/drivers/gpu/drm/nouveau/nvkm/core/

firmware.c
    209  fw->img, fw->phys, DMA_TO_DEVICE);  in nvkm_firmware_dtor()
    213  dma_unmap_sgtable(fw->device->dev, &fw->mem.sgt, DMA_TO_DEVICE, 0);  in nvkm_firmware_dtor()
    244  DMA_TO_DEVICE,  in nvkm_firmware_ctor()
    286  DMA_TO_DEVICE, 0);  in nvkm_firmware_ctor()
/linux/drivers/net/ethernet/stmicro/stmmac/

chain_mode.c
     37  bmax, DMA_TO_DEVICE);  in jumbo_frm()
     55  bmax, DMA_TO_DEVICE);  in jumbo_frm()
     68  DMA_TO_DEVICE);  in jumbo_frm()

ring_mode.c
     41  DMA_TO_DEVICE);  in jumbo_frm()
     62  DMA_TO_DEVICE);  in jumbo_frm()
     76  nopaged_len, DMA_TO_DEVICE);  in jumbo_frm()
/linux/drivers/scsi/snic/

snic_io.c
     92  DMA_TO_DEVICE);  in snic_free_wq_buf()
    162  pa = dma_map_single(&snic->pdev->dev, os_buf, len, DMA_TO_DEVICE);  in snic_queue_wq_desc()
    176  dma_unmap_single(&snic->pdev->dev, pa, len, DMA_TO_DEVICE);  in snic_queue_wq_desc()
    343  DMA_TO_DEVICE);  in snic_req_free()
    353  DMA_TO_DEVICE);  in snic_req_free()
    362  DMA_TO_DEVICE);  in snic_req_free()
/linux/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss-cipher.c
    150  a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);  in sun8i_ss_setup_ivs()
    176  dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);  in sun8i_ss_setup_ivs()
    217  rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);  in sun8i_ss_cipher()
    239  nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);  in sun8i_ss_cipher()
    303  dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);  in sun8i_ss_cipher()
    310  dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);  in sun8i_ss_cipher()
    325  dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);  in sun8i_ss_cipher()
/linux/arch/csky/mm/

dma-mapping.c
     62  case DMA_TO_DEVICE:  in arch_sync_dma_for_device()
     78  case DMA_TO_DEVICE:  in arch_sync_dma_for_cpu()
/linux/drivers/scsi/ibmvscsi_tgt/

libsrp.c
    212  (dir == DMA_TO_DEVICE && nmd == srp_cmd->data_out_desc_cnt)) {  in srp_indirect_data()
    230  err = rdma_io(cmd, &dummy, 1, &id->table_desc, 1, DMA_TO_DEVICE,  in srp_indirect_data()
    313  if (dir == DMA_TO_DEVICE)  in srp_transfer_data()
    346  if (dir == DMA_TO_DEVICE) {  in srp_data_length()
    398  *dir = DMA_TO_DEVICE;  in srp_get_desc_table()