/titanic_44/usr/src/uts/common/io/drm/

drm_dma.c
    50  dev->dma = drm_calloc(1, sizeof (*dev->dma), DRM_MEM_DMA);    in drm_dma_setup()
    51  if (dev->dma == NULL)    in drm_dma_setup()
    55  pbuf = &(dev->dma->bufs[0]);    in drm_dma_setup()
    65  drm_device_dma_t *dma = dev->dma;    in drm_dma_takedown() local
    68  if (dma == NULL)    in drm_dma_takedown()
    73  if (dma->bufs[i].seg_count) {    in drm_dma_takedown()
    74  drm_free(dma->bufs[i].seglist,    in drm_dma_takedown()
    75  dma->bufs[i].seg_count *    in drm_dma_takedown()
    76  sizeof (*dma->bufs[0].seglist), DRM_MEM_SEGS);    in drm_dma_takedown()
    79  for (j = 0; j < dma->bufs[i].buf_count; j++) {    in drm_dma_takedown()
    [all …]

drm_bufs.c
   397  drm_device_dma_t *dma = dev->dma;    in drm_do_addbufs_agp() local
   411  if (!dma)    in drm_do_addbufs_agp()
   425  entry = &dma->bufs[order];    in drm_do_addbufs_agp()
   444  buf->idx = dma->buf_count + entry->buf_count;    in drm_do_addbufs_agp()
   449  buf->offset = (dma->byte_count + offset);    in drm_do_addbufs_agp()
   471  (dma->buf_count + entry->buf_count) * sizeof (*dma->buflist),    in drm_do_addbufs_agp()
   481  bcopy(temp_buflist, dma->buflist,    in drm_do_addbufs_agp()
   482  dma->buf_count * sizeof (*dma->buflist));    in drm_do_addbufs_agp()
   483  kmem_free(dma->buflist, dma->buf_count *sizeof (*dma->buflist));    in drm_do_addbufs_agp()
   484  dma->buflist = temp_buflist;    in drm_do_addbufs_agp()
   [all …]

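The drm_bufs.c matches show drm_do_addbufs_agp() growing its buffer-pointer array: allocate a larger temp_buflist, bcopy() the old entries across, kmem_free() the old array, then swap the pointer. A minimal userland sketch of that grow-and-swap pattern, with calloc/free standing in for the kernel's allocators and hypothetical names (grow_buflist, buf_t):

#include <stdlib.h>
#include <string.h>

typedef struct buf buf_t;	/* opaque buffer record, as drm_buf_t is here */

/*
 * Grow an array of buffer pointers from old_count to old_count + add_count
 * entries. *listp is only updated on success, so a failed allocation leaves
 * the caller's existing list untouched.
 */
static int
grow_buflist(buf_t ***listp, size_t old_count, size_t add_count)
{
	buf_t **new_list;

	new_list = calloc(old_count + add_count, sizeof (*new_list));
	if (new_list == NULL)
		return (-1);

	if (old_count != 0)
		memcpy(new_list, *listp, old_count * sizeof (*new_list));

	free(*listp);
	*listp = new_list;
	return (0);
}

Publishing the new pointer last is what keeps the list consistent if the allocation fails partway through, which is why the driver builds temp_buflist first and assigns dma->buflist only at the end.
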
/titanic_44/usr/src/uts/i86pc/io/

rootnex.c
   389  rootnex_dma_t *dma, ddi_dma_attr_t *attr, ddi_dma_obj_t *dmao, int kmflag);
   391  rootnex_dma_t *dma, ddi_dma_attr_t *attr);
   392  static void rootnex_teardown_copybuf(rootnex_dma_t *dma);
   393  static int rootnex_setup_windows(ddi_dma_impl_t *hp, rootnex_dma_t *dma,
   395  static void rootnex_teardown_windows(rootnex_dma_t *dma);
   396  static void rootnex_init_win(ddi_dma_impl_t *hp, rootnex_dma_t *dma,
   399  rootnex_dma_t *dma, ddi_dma_cookie_t *cookie, off_t cur_offset,
   402  rootnex_dma_t *dma, rootnex_window_t **windowp, ddi_dma_cookie_t *cookie,
   405  rootnex_dma_t *dma, rootnex_window_t **windowp,
   408  rootnex_dma_t *dma, rootnex_window_t **windowp, ddi_dma_cookie_t *cookie);
   [all …]

/titanic_44/usr/src/uts/common/io/atge/

atge_l1.c
   129  atge_dma_t *dma;    in atge_l1_alloc_dma() local
   143  dma = atge_alloc_a_dma_blk(atgep, &atge_l1_dma_attr_tx_desc,    in atge_l1_alloc_dma()
   145  if (dma == NULL) {    in atge_l1_alloc_dma()
   150  atgep->atge_tx_ring->r_desc_ring = dma;    in atge_l1_alloc_dma()
   171  dma = atge_alloc_a_dma_blk(atgep, &atge_l1_dma_attr_rx_desc,    in atge_l1_alloc_dma()
   173  if (dma == NULL) {    in atge_l1_alloc_dma()
   178  l1->atge_rx_ring->r_desc_ring = dma;    in atge_l1_alloc_dma()
   197  dma = atge_alloc_a_dma_blk(atgep, &atge_l1_dma_attr_cmb,    in atge_l1_alloc_dma()
   199  l1->atge_l1_cmb = dma;    in atge_l1_alloc_dma()
   200  if (dma == NULL) {    in atge_l1_alloc_dma()
   [all …]

atge_l1c.c
   157  atge_dma_t *dma;    in atge_l1c_alloc_dma() local
   171  dma = atge_alloc_a_dma_blk(atgep, &atge_l1c_dma_attr_tx_desc,    in atge_l1c_alloc_dma()
   173  if (dma == NULL) {    in atge_l1c_alloc_dma()
   178  atgep->atge_tx_ring->r_desc_ring = dma;    in atge_l1c_alloc_dma()
   199  dma = atge_alloc_a_dma_blk(atgep, &atge_l1c_dma_attr_rx_desc,    in atge_l1c_alloc_dma()
   201  if (dma == NULL) {    in atge_l1c_alloc_dma()
   206  l1c->atge_rx_ring->r_desc_ring = dma;    in atge_l1c_alloc_dma()
   225  dma = atge_alloc_a_dma_blk(atgep, &atge_l1c_dma_attr_cmb,    in atge_l1c_alloc_dma()
   227  l1c->atge_l1c_cmb = dma;    in atge_l1c_alloc_dma()
   228  if (dma == NULL) {    in atge_l1c_alloc_dma()
   [all …]

atge_l1e.c
   110  atge_dma_t *dma;    in atge_rx_desc_free() local
   121  dma = l1e->atge_l1e_rx_page[pages];    in atge_rx_desc_free()
   122  if (dma != NULL) {    in atge_rx_desc_free()
   123  (void) ddi_dma_unbind_handle(dma->hdl);    in atge_rx_desc_free()
   124  ddi_dma_mem_free(&dma->acchdl);    in atge_rx_desc_free()
   125  ddi_dma_free_handle(&dma->hdl);    in atge_rx_desc_free()
   126  kmem_free(dma, sizeof (atge_dma_t));    in atge_rx_desc_free()
   137  atge_dma_t *dma;    in atge_l1e_alloc_dma() local
   154  dma = atge_alloc_a_dma_blk(atgep, &atge_l1e_dma_attr_tx_desc,    in atge_l1e_alloc_dma()
   156  if (dma == NULL) {    in atge_l1e_alloc_dma()
   [all …]

atge.h
   134  #define ATGE_PUT64(dma, addr, v) \    argument
   135  ddi_put64(dma->acchdl, (addr), (v))
   137  #define ATGE_PUT32(dma, addr, v) \    argument
   138  ddi_put32(dma->acchdl, (addr), (v))
   140  #define ATGE_GET32(dma, addr) \    argument
   141  ddi_get32(dma->acchdl, (addr))
   143  #define ATGE_GET64(dma, addr) \    argument
   144  ddi_get64(dma->acchdl, (addr))
   146  #define DMA_SYNC(dma, s, l, d) \    argument
   147  (void) ddi_dma_sync(dma->hdl, (off_t)(s), (l), d)

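The atge.h macros wrap the DDI access routines: memory obtained from ddi_dma_mem_alloc() is read and written through its access handle (ddi_put32/ddi_get32), never through raw pointers, and ddi_dma_sync() publishes or fetches the device's view of it. A small sketch of how such macros are typically paired when updating a descriptor; the helper name and the descriptor word are illustrative, not taken from the driver:

/*
 * Hypothetical helper: store one 32-bit descriptor word through the access
 * handle, then sync the DMA object toward the device. A length of 0 tells
 * ddi_dma_sync() to sync the entire object.
 */
static void
desc_word_update(atge_dma_t *dma, uint32_t *wordp, uint32_t value)
{
	ATGE_PUT32(dma, wordp, value);
	DMA_SYNC(dma, 0, 0, DDI_DMA_SYNC_FORDEV);
}
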
atge_main.c
  1431  atge_dma_t *dma;    in atge_alloc_buffers() local
  1440  dma = atge_buf_alloc(r->r_atge, buflen, f);    in atge_alloc_buffers()
  1441  if (dma == NULL) {    in atge_alloc_buffers()
  1446  tbl[i] = dma;    in atge_alloc_buffers()
  1475  atge_dma_t *dma;    in atge_alloc_a_dma_blk() local
  1477  dma = kmem_zalloc(sizeof (atge_dma_t), KM_SLEEP);    in atge_alloc_a_dma_blk()
  1480  DDI_DMA_SLEEP, NULL, &dma->hdl);    in atge_alloc_a_dma_blk()
  1488  err = ddi_dma_mem_alloc(dma->hdl,    in atge_alloc_a_dma_blk()
  1490  &dma->addr, &dma->len, &dma->acchdl);    in atge_alloc_a_dma_blk()
  1495  ddi_dma_free_handle(&dma->hdl);    in atge_alloc_a_dma_blk()
  [all …]

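atge_alloc_a_dma_blk() follows the standard DDI allocation sequence: ddi_dma_alloc_handle(), ddi_dma_mem_alloc(), then a bind, unwinding in reverse order when a step fails (line 1495 above frees the handle after a failed memory allocation). A sketch of that sequence, assuming the atge_dma_t fields visible in the matches (hdl, acchdl, addr, len) and caller-supplied DMA and access attributes; the function name is hypothetical:

/*
 * Allocate a single DMA-able block: handle, memory, and binding. Each failure
 * path releases exactly the resources acquired before it.
 */
static atge_dma_t *
dma_blk_alloc(dev_info_t *dip, ddi_dma_attr_t *attrp,
    ddi_device_acc_attr_t *accattrp, size_t size)
{
	atge_dma_t *dma;
	ddi_dma_cookie_t cookie;
	uint_t count;

	dma = kmem_zalloc(sizeof (atge_dma_t), KM_SLEEP);

	if (ddi_dma_alloc_handle(dip, attrp, DDI_DMA_SLEEP, NULL,
	    &dma->hdl) != DDI_SUCCESS)
		goto fail_handle;

	if (ddi_dma_mem_alloc(dma->hdl, size, accattrp, DDI_DMA_CONSISTENT,
	    DDI_DMA_SLEEP, NULL, &dma->addr, &dma->len,
	    &dma->acchdl) != DDI_SUCCESS)
		goto fail_mem;

	if (ddi_dma_addr_bind_handle(dma->hdl, NULL, dma->addr, dma->len,
	    DDI_DMA_RDWR | DDI_DMA_CONSISTENT, DDI_DMA_SLEEP, NULL,
	    &cookie, &count) != DDI_DMA_MAPPED)
		goto fail_bind;

	return (dma);

fail_bind:
	ddi_dma_mem_free(&dma->acchdl);
fail_mem:
	ddi_dma_free_handle(&dma->hdl);
fail_handle:
	kmem_free(dma, sizeof (atge_dma_t));
	return (NULL);
}

The teardown in atge_rx_desc_free() above (unbind, mem_free, free_handle, kmem_free) is this same sequence run backwards.
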
/titanic_44/usr/src/uts/common/io/1394/adapters/

hci1394_ixl_misc.c
   103  ixl_exec_startp->compiler_privatep)->dma[0].dma_bound;    in hci1394_ixl_set_start()
   167  &xferctlp->dma[ixldepth], ixlcur->ixl_opcode,    in hci1394_ixl_reset_status()
   270  hci1394_ixl_check_status(hci1394_xfer_ctl_dma_t *dma, uint16_t ixlopcode,    in hci1394_ixl_check_status() argument
   288  hcidescp = (hci1394_desc_t *)(dma->dma_descp);    in hci1394_ixl_check_status()
   289  hcidesc_off = (off_t)hcidescp - (off_t)dma->dma_buf->bi_kaddr;    in hci1394_ixl_check_status()
   290  acc_hdl = dma->dma_buf->bi_handle;    in hci1394_ixl_check_status()
   291  dma_hdl = dma->dma_buf->bi_dma_handle;    in hci1394_ixl_check_status()
   345  hcicnt = dma->dma_bound & DESC_Z_MASK;    in hci1394_ixl_check_status()
   347  hcidesc_off = (off_t)hcidescp - (off_t)dma->dma_buf->bi_kaddr;    in hci1394_ixl_check_status()
   407  (off_t)dma->dma_buf->bi_kaddr;    in hci1394_ixl_check_status()

hci1394_ixl_isr.c
   339  if (hci1394_ixl_check_status(&xferctlp->dma[ixldepth],    in hci1394_ixl_intr_check_xfer()
   375  if (hci1394_ixl_check_status(&xferctlp->dma[ixldepth],    in hci1394_ixl_intr_check_xfer()
   543  hci1394_xfer_ctl_dma_t *dma;    in hci1394_ixl_intr_check_done() local
   576  dma = &xferctlp->dma[ixldepth];    in hci1394_ixl_intr_check_done()
   603  dmastartp = dma->dma_bound & ~DESC_Z_MASK;    in hci1394_ixl_intr_check_done()
   604  dmaendp = dmastartp + ((dma->dma_bound & DESC_Z_MASK) << 4);    in hci1394_ixl_intr_check_done()
   654  acc_hdl = dma->dma_buf->bi_handle;    in hci1394_ixl_intr_check_done()
   655  dma_hdl = dma->dma_buf->bi_dma_handle;    in hci1394_ixl_intr_check_done()
   656  hcidescp = (hci1394_desc_t *)dma->dma_descp;    in hci1394_ixl_intr_check_done()
   657  hcidesc_off = (off_t)hcidescp - (off_t)dma->dma_buf->bi_kaddr;    in hci1394_ixl_intr_check_done()
   [all …]

hci1394_ixl_update.c
   584  ixlp->compiler_privatep)->dma[0].dma_bound;    in hci1394_ixl_update_prep_jump()
   609  hcidescp = (hci1394_desc_t *)xferctlp->dma[xferctlp->cnt - 1].dma_descp;    in hci1394_ixl_update_prep_jump()
   610  acc_hdl = xferctlp->dma[xferctlp->cnt - 1].dma_buf->bi_handle;    in hci1394_ixl_update_prep_jump()
   611  dma_hdl = xferctlp->dma[xferctlp->cnt - 1].dma_buf->bi_dma_handle;    in hci1394_ixl_update_prep_jump()
   765  uvp->hci_offset = xferctlp->dma[0].dma_bound & DESC_Z_MASK;    in hci1394_ixl_update_prep_set_skipmode()
   842  uvp->skipaddr = xferctlp->dma[0].dma_bound;    in hci1394_ixl_update_prep_set_skipmode()
   865  uvp->skipaddr = xferctlp->dma[0].dma_bound;    in hci1394_ixl_update_prep_set_skipmode()
   948  uvp->hdr_offset = xferctlp->dma[0].dma_bound & DESC_Z_MASK;    in hci1394_ixl_update_prep_set_tagsync()
  1068  uvp->hci_offset = xferctlp->dma[0].dma_bound & DESC_Z_MASK;    in hci1394_ixl_update_prep_recv_pkt()
  1088  hcidescp = (hci1394_desc_t *)xferctlp->dma[0].dma_descp -    in hci1394_ixl_update_prep_recv_pkt()
   [all …]

hci1394_ixl_comp.c
   325  ixl_exec_stp->compiler_privatep)->dma[0].dma_bound;    in hci1394_compile_ixl_endup()
  1049  xferctl_curp->dma[0].dma_descp;    in hci1394_finalize_all_xfer_desc()
  1050  acc_hdl = xferctl_curp->dma[0].dma_buf->bi_handle;    in hci1394_finalize_all_xfer_desc()
  1082  xferctl_curp->dma[repcnt - 1].dma_descp;    in hci1394_finalize_all_xfer_desc()
  1084  xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle;    in hci1394_finalize_all_xfer_desc()
  1099  dma_execnext_addr = xferctl_nxtp->dma[0].dma_bound;    in hci1394_finalize_all_xfer_desc()
  1108  xferctl_curp->dma[repcnt - 1].dma_descp;    in hci1394_finalize_all_xfer_desc()
  1110  xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle;    in hci1394_finalize_all_xfer_desc()
  1121  xferctl_curp->dma[repcnt - 1].dma_descp;    in hci1394_finalize_all_xfer_desc()
  1122  acc_hdl = xferctl_curp->dma[repcnt - 1].dma_buf->bi_handle;    in hci1394_finalize_all_xfer_desc()
  [all …]

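One pattern repeats across these hci1394_ixl_* matches: a descriptor block lives somewhere inside a larger bound DMA buffer, so its offset is computed as the difference between the descriptor's kernel address and the buffer's base address (bi_kaddr), and that offset is what gets handed to ddi_dma_sync() before the CPU reads status the controller wrote. A sketch of that idiom; the helper name is hypothetical, and the status field name assumes the OpenHCI descriptor layout this driver uses:

/*
 * Compute the descriptor's offset within its DMA buffer, pull the device's
 * view of it back into host memory, and read its status word through the
 * access handle.
 */
static uint32_t
desc_read_status(ddi_dma_handle_t dma_hdl, ddi_acc_handle_t acc_hdl,
    caddr_t buf_kaddr, hci1394_desc_t *hcidescp)
{
	off_t hcidesc_off;

	/* Offset of this descriptor relative to the buffer's base address. */
	hcidesc_off = (off_t)((caddr_t)hcidescp - buf_kaddr);

	/* Sync only the descriptor, not the whole buffer. */
	(void) ddi_dma_sync(dma_hdl, hcidesc_off, sizeof (hci1394_desc_t),
	    DDI_DMA_SYNC_FORCPU);

	return (ddi_get32(acc_hdl, &hcidescp->status));
}
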
/titanic_44/usr/src/cmd/fm/schemes/mem/sparc/

mem_disc.c
   255  dimm_map_arg_t *dma = arg;    in picl_frutree_parse() local
   271  for (pm = dma->dma_pm; pm != NULL; pm = pm->pm_next) {    in picl_frutree_parse()
   282  dm->dm_next = dma->dma_dm;    in picl_frutree_parse()
   283  dma->dma_dm = dm;    in picl_frutree_parse()
   355  dimm_map_arg_t dma;    in mem_discover() local
   362  dma.dma_pm = path_map;    in mem_discover()
   363  dma.dma_dm = NULL;    in mem_discover()
   366  &dma)) < 0 && errno == ENOENT && path_map == NULL) {    in mem_discover()
   379  if (dma.dma_dm == NULL) {    in mem_discover()
   387  mem.mem_dm = dma.dma_dm;    in mem_discover()

/titanic_44/usr/src/uts/sun4u/sys/

zuluvm.h
   103  int zuluvm_dma_alloc_ctx(zuluvm_info_t devp, int dma, short *ctx,
   105  int zuluvm_dma_preload(zuluvm_info_t devp, int dma, int num,
   107  int zuluvm_dma_free_ctx(zuluvm_info_t devp, int dma);

/titanic_44/usr/src/uts/intel/io/drm/

radeon_cp.c
  2078  drm_device_dma_t *dma = dev->dma;    in radeon_freelist_get() local
  2085  if (++dev_priv->last_buf >= dma->buf_count)    in radeon_freelist_get()
  2093  for (i = start; i < dma->buf_count; i++) {    in radeon_freelist_get()
  2094  buf = dma->buflist[i];    in radeon_freelist_get()
  2119  drm_device_dma_t *dma = dev->dma;
  2127  if (++dev_priv->last_buf >= dma->buf_count)
  2134  for (i = start; i < dma->buf_count; i++) {
  2135  buf = dma->buflist[i];
  2154  drm_device_dma_t *dma = dev->dma;    in radeon_freelist_reset() local
  2159  for (i = 0; i < dma->buf_count; i++) {    in radeon_freelist_reset()
  [all …]

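radeon_freelist_get() scans the shared buffer list round-robin: it resumes just past the last buffer handed out, wraps once, and claims the first buffer the hardware is no longer using. A stripped-down sketch of that scan under assumed names (try_get_buffer, the pending flag, and the freelist_t bookkeeping stand in for the drm_buf_t state the driver keeps):

#include <stddef.h>

typedef struct buf {
	int pending;		/* still owned by the hardware */
} buf_t;

typedef struct freelist {
	buf_t	**buflist;	/* array of buffer pointers */
	int	buf_count;	/* number of entries in buflist */
	int	last_buf;	/* index of the most recently claimed buffer */
} freelist_t;

static buf_t *
try_get_buffer(freelist_t *fl)
{
	int i;

	/* Resume just past the last buffer handed out, wrapping once. */
	if (++fl->last_buf >= fl->buf_count)
		fl->last_buf = 0;

	for (i = fl->last_buf; i < fl->buf_count; i++) {
		buf_t *buf = fl->buflist[i];

		if (!buf->pending) {
			buf->pending = 1;
			fl->last_buf = i;
			return (buf);
		}
	}
	return (NULL);	/* caller retries after the GPU retires a buffer */
}
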
radeon_state.c
  2345  drm_device_dma_t *dma = dev->dma;    in radeon_cp_vertex() local
  2364  if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {    in radeon_cp_vertex()
  2366  vertex.idx, dma->buf_count - 1);    in radeon_cp_vertex()
  2377  buf = dma->buflist[vertex.idx];    in radeon_cp_vertex()
  2433  drm_device_dma_t *dma = dev->dma;    in radeon_cp_indices() local
  2452  if (elts.idx < 0 || elts.idx >= dma->buf_count) {    in radeon_cp_indices()
  2454  elts.idx, dma->buf_count - 1);    in radeon_cp_indices()
  2465  buf = dma->buflist[elts.idx];    in radeon_cp_indices()
  2628  drm_device_dma_t *dma = dev->dma;    in radeon_cp_indirect() local
  2645  if (indirect.idx < 0 || indirect.idx >= dma->buf_count) {    in radeon_cp_indirect()
  [all …]

r300_cmdbuf.c
   815  drm_device_dma_t *dma = dev->dma;    in r300_do_cp_cmdbuf() local
   909  idx = header.dma.buf_idx;    in r300_do_cp_cmdbuf()
   910  if (idx < 0 || idx >= dma->buf_count) {    in r300_do_cp_cmdbuf()
   912  idx, dma->buf_count - 1);    in r300_do_cp_cmdbuf()
   917  buf = dma->buflist[idx];    in r300_do_cp_cmdbuf()

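The radeon_state.c and r300_cmdbuf.c matches all repeat one defensive step: a buffer index arriving from an ioctl is range-checked against buf_count before it is used to index buflist. A tiny sketch of that check (lookup_buf and the opaque buf type are hypothetical):

#include <errno.h>
#include <stddef.h>

struct buf;

/* Return the buffer at idx, or NULL with *errp = EINVAL if idx is bogus. */
static struct buf *
lookup_buf(struct buf **buflist, int buf_count, int idx, int *errp)
{
	if (idx < 0 || idx >= buf_count) {
		*errp = EINVAL;
		return (NULL);
	}
	*errp = 0;
	return (buflist[idx]);
}
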
/titanic_44/usr/src/uts/common/io/nvme/

nvme.c
   635  nvme_check_dma_hdl(nvme_dma_t *dma)    in nvme_check_dma_hdl() argument
   639  if (dma == NULL)    in nvme_check_dma_hdl()
   642  ddi_fm_dma_err_get(dma->nd_dmah, &error, DDI_FME_VERSION);    in nvme_check_dma_hdl()
   651  nvme_free_dma_common(nvme_dma_t *dma)    in nvme_free_dma_common() argument
   653  if (dma->nd_dmah != NULL)    in nvme_free_dma_common()
   654  (void) ddi_dma_unbind_handle(dma->nd_dmah);    in nvme_free_dma_common()
   655  if (dma->nd_acch != NULL)    in nvme_free_dma_common()
   656  ddi_dma_mem_free(&dma->nd_acch);    in nvme_free_dma_common()
   657  if (dma->nd_dmah != NULL)    in nvme_free_dma_common()
   658  ddi_dma_free_handle(&dma->nd_dmah);    in nvme_free_dma_common()
   [all …]

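nvme_free_dma_common() tears down in the reverse of the allocation order (unbind the handle, free the DMA memory, free the handle), and nvme_check_dma_hdl() asks the FMA framework whether a DMA fault has been recorded against the handle. A sketch of the latter check, assuming the nd_dmah field from the matches above; the wrapper name is illustrative:

/*
 * Query the fault-management state of a DMA handle and fold it into the
 * usual DDI_SUCCESS/DDI_FAILURE convention.
 */
static int
dma_hdl_check(nvme_dma_t *dma)
{
	ddi_fm_error_t error;

	if (dma == NULL)
		return (DDI_SUCCESS);

	ddi_fm_dma_err_get(dma->nd_dmah, &error, DDI_FME_VERSION);

	return (error.fme_status == DDI_FM_OK ? DDI_SUCCESS : DDI_FAILURE);
}
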
/titanic_44/usr/src/uts/common/fs/nfs/

nfs4_stub_vnops.c
   213  static void nfs4_trigger_domount_args_destroy(domount_args_t *dma,
   742  domount_args_t *dma;    in nfs4_trigger_mount() local
   834  error = nfs4_trigger_domount_args_create(vp, cr, &dma);    in nfs4_trigger_mount()
   847  nfs4_trigger_domount_args_destroy(dma, vp);    in nfs4_trigger_mount()
   855  error = nfs4_trigger_domount(vp, dma, &vfsp, mcred, newvpp);    in nfs4_trigger_mount()
   856  nfs4_trigger_domount_args_destroy(dma, vp);    in nfs4_trigger_mount()
   905  domount_args_t *dma;    in nfs4_trigger_domount_args_create() local
  1044  dma = kmem_zalloc(sizeof (domount_args_t), KM_SLEEP);    in nfs4_trigger_domount_args_create()
  1045  dma->dma_esi = esi_first;    in nfs4_trigger_domount_args_create()
  1046  dma->dma_hostlist = hostlist;    in nfs4_trigger_domount_args_create()
   [all …]

/titanic_44/usr/src/uts/common/io/myri10ge/drv/

myri10ge_var.h
   121  struct myri10ge_dma_stuff dma;    member
   151  mcp_dma_addr_t dma; /* Kept in network byte order */    member
   307  struct myri10ge_dma_stuff dma;    member
   496  uint_t alloc_flags, int bind_flags, struct myri10ge_dma_stuff *dma,
   498  void myri10ge_dma_free(struct myri10ge_dma_stuff *dma);

/titanic_44/usr/src/uts/sun4u/io/

zuluvm.c
  1055  zuluvm_dma_alloc_ctx(zuluvm_info_t devp, int dma, short *mmuctx,    in zuluvm_dma_alloc_ctx() argument
  1075  tnf_int, dma, dma,    in zuluvm_dma_alloc_ctx()
  1108  switch (dma) {    in zuluvm_dma_alloc_ctx()
  1156  zuluvm_dma_preload(zuluvm_info_t devp, int dma,    in zuluvm_dma_preload() argument
  1168  tnf_int, dma, dma,    in zuluvm_dma_preload()
  1172  switch (dma) {    in zuluvm_dma_preload()
  1239  zuluvm_dma_free_ctx(zuluvm_info_t devp, int dma)    in zuluvm_dma_free_ctx() argument
  1250  tnf_int, dma, dma,    in zuluvm_dma_free_ctx()
  1300  switch (dma) {    in zuluvm_dma_free_ctx()

/titanic_44/usr/src/uts/common/io/sdcard/impl/

sda_host.c
    55  sda_host_alloc(dev_info_t *dip, int nslot, sda_ops_t *ops, ddi_dma_attr_t *dma)    in sda_host_alloc() argument
    66  h->h_dma = dma;    in sda_host_alloc()

/titanic_44/usr/src/uts/intel/os/

bootenv.rc
    27  setprop ata-dma-enabled 1
    28  setprop atapi-cd-dma-enabled 1

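These two bootenv.rc lines enable DMA for ATA disks and ATAPI CD devices at boot; the ata(7D) driver consumes the properties. If DMA ever needs to be turned off, for example while isolating a misbehaving controller, the same properties can be set to 0 instead. A hedged example in the same setprop format (verify against your platform's ata(7D) documentation before relying on it):

setprop ata-dma-enabled 0
setprop atapi-cd-dma-enabled 0
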
/titanic_44/usr/src/uts/common/io/ib/adapters/tavor/

tavor.conf
    34  active-dma-flush=1;

/titanic_44/usr/src/uts/common/sys/1394/adapters/

hci1394_ixl.h
   138  hci1394_xfer_ctl_dma_t dma[1]; /* addrs of descriptor blocks, cnt of */    member
   280  int hci1394_ixl_check_status(hci1394_xfer_ctl_dma_t *dma, uint16_t ixlopcode,

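The dma[1] member flagged in hci1394_ixl.h is the classic pre-C99 "one-element array at the end of a struct" idiom: the structure is allocated with extra space so that dma[0] through dma[cnt - 1] are all valid entries, which is why so many of the matches above index dma[ixldepth] or dma[repcnt - 1]. A userland sketch of the idiom under assumed names (xfer_ctl_t, dma_slot_t); the kernel code would size the allocation the same way but use kmem_zalloc():

#include <stdlib.h>

typedef struct dma_slot {
	unsigned int dma_bound;		/* bound address of a descriptor block */
} dma_slot_t;

typedef struct xfer_ctl {
	unsigned int	cnt;		/* number of valid entries in dma[] */
	dma_slot_t	dma[1];		/* really cnt entries long */
} xfer_ctl_t;

static xfer_ctl_t *
xfer_ctl_alloc(unsigned int cnt)
{
	xfer_ctl_t *xcp;

	if (cnt == 0)
		return (NULL);

	/* One dma_slot_t is already inside xfer_ctl_t; add room for the rest. */
	xcp = calloc(1, sizeof (xfer_ctl_t) + (cnt - 1) * sizeof (dma_slot_t));
	if (xcp != NULL)
		xcp->cnt = cnt;
	return (xcp);
}
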