/illumos-gate/usr/src/uts/sun4/io/efcode/

  fc_ddi.c
      62  struct ddi_dma_req dmareq;  in fc_ddi_dma_buf_bind_handle() local
      71  dmareq.dmar_flags = flags;  in fc_ddi_dma_buf_bind_handle()
      72  dmareq.dmar_fp = waitfp;  in fc_ddi_dma_buf_bind_handle()
      73  dmareq.dmar_arg = arg;  in fc_ddi_dma_buf_bind_handle()
      74  dmareq.dmar_object.dmao_size = (uint_t)bp->b_bcount;  in fc_ddi_dma_buf_bind_handle()
      77  dmareq.dmar_object.dmao_type = DMA_OTYP_PAGES;  in fc_ddi_dma_buf_bind_handle()
      78  dmareq.dmar_object.dmao_obj.pp_obj.pp_pp = bp->b_pages;  in fc_ddi_dma_buf_bind_handle()
      79  dmareq.dmar_object.dmao_obj.pp_obj.pp_offset =  in fc_ddi_dma_buf_bind_handle()
      82  dmareq.dmar_object.dmao_obj.virt_obj.v_addr = bp->b_un.b_addr;  in fc_ddi_dma_buf_bind_handle()
      84  dmareq.dmar_object.dmao_obj.virt_obj.v_priv =  in fc_ddi_dma_buf_bind_handle()
      [all …]

/illumos-gate/usr/src/uts/sun4/os/

  dvma.c
      60  struct ddi_dma_req dmareq;  in dvma_reserve() local
      70  bzero(&dmareq, sizeof (dmareq));  in dvma_reserve()
      71  dmareq.dmar_fp = DDI_DMA_DONTWAIT;  in dvma_reserve()
      72  dmareq.dmar_flags = DDI_DMA_RDWR | DDI_DMA_STREAMING;  in dvma_reserve()
      73  dmareq.dmar_limits = &dma_lim;  in dvma_reserve()
      74  dmareq.dmar_object.dmao_size = pages;  in dvma_reserve()
      84  ret = ddi_dma_mctl(dip, dip, reqhdl, DDI_DMA_RESERVE, (off_t *)&dmareq,  in dvma_reserve()

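The dvma_reserve() hits above are the kernel-side implementation of the documented dvma_reserve(9F) "fast DVMA" interface: dvma.c builds a ddi_dma_req by hand (dmar_fp = DDI_DMA_DONTWAIT, dmar_flags = DDI_DMA_RDWR | DDI_DMA_STREAMING) and pushes it down through ddi_dma_mctl(DDI_DMA_RESERVE). From a driver's side the same facility is reached through the dvma_reserve(9F)/dvma_kaddr_load(9F) family. The following is a minimal sketch only; the xx_ names, the single-slot layout, and the limit values are assumptions, not taken from the files listed here:

    #include <sys/types.h>
    #include <sys/ddi.h>
    #include <sys/sunddi.h>

    static int
    xx_fast_dvma_xfer(dev_info_t *dip, caddr_t kaddr, uint_t len)
    {
            ddi_dma_lim_t lim;
            ddi_dma_handle_t h;
            ddi_dma_cookie_t cookie;

            bzero(&lim, sizeof (lim));
            lim.dlim_addr_lo = 0;
            lim.dlim_addr_hi = 0xffffffffu;
            lim.dlim_burstsizes = 0x3f;     /* assumed burst sizes */
            lim.dlim_minxfer = 1;

            /* Reserve one page of DVMA space for repeated reuse. */
            if (dvma_reserve(dip, &lim, 1, &h) != DDI_SUCCESS)
                    return (DDI_FAILURE);

            /* Load the kernel buffer into slot 0 and fetch its DVMA cookie. */
            dvma_kaddr_load(h, kaddr, len, 0, &cookie);
            dvma_sync(h, 0, DDI_DMA_SYNC_FORDEV);

            /* ... program the device with cookie.dmac_address / dmac_size ... */

            dvma_sync(h, 0, DDI_DMA_SYNC_FORCPU);
            dvma_unload(h, 0, DDI_DMA_SYNC_FORCPU);
            dvma_release(h);
            return (DDI_SUCCESS);
    }
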
/illumos-gate/usr/src/uts/sun4u/io/pci/

  pci_dma.c
      360  ddi_dma_req_t *dmareq)  in pci_dma_lmts2hdl() argument
      367  ddi_dma_lim_t *lim_p = dmareq->dmar_limits;  in pci_dma_lmts2hdl()
      378  if (!(mp = pci_dma_allocmp(dip, rdip, dmareq->dmar_fp,  in pci_dma_lmts2hdl()
      379  dmareq->dmar_arg)))  in pci_dma_lmts2hdl()
      552  pci_dma_type(pci_t *pci_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp)  in pci_dma_type() argument
      555  ddi_dma_obj_t *dobj_p = &dmareq->dmar_object;  in pci_dma_type()
      563  mp->dmai_rflags = pci_dma_consist_check(dmareq->dmar_flags, pbm_p);  in pci_dma_type()
      584  int (*waitfp)(caddr_t) = dmareq->dmar_fp;  in pci_dma_type()
      597  dmareq->dmar_arg,  in pci_dma_type()
      701  pci_dma_vapfn(pci_t *pci_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp,  in pci_dma_vapfn() argument
      [all …]

  pci_fdvma.c
      179  ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep)  in pci_fdvma_reserve() argument
      186  ddi_dma_lim_t *lim_p = dmareq->dmar_limits;  in pci_fdvma_reserve()
      206  npages = dmareq->dmar_object.dmao_size;  in pci_fdvma_reserve()
      227  dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP));  in pci_fdvma_reserve()
      250  pci_dma_consist_check(dmareq->dmar_flags, pci_p->pci_pbm_p);  in pci_fdvma_reserve()
      251  if (!(dmareq->dmar_flags & DDI_DMA_RDWR))  in pci_fdvma_reserve()
      255  mp->dmai_minxfer = dmareq->dmar_limits->dlim_minxfer;  in pci_fdvma_reserve()
      256  mp->dmai_burstsizes = dmareq->dmar_limits->dlim_burstsizes;  in pci_fdvma_reserve()

  pci.c
      571  pci_dma_setup(dev_info_t *dip, dev_info_t *rdip, ddi_dma_req_t *dmareq,  in pci_dma_setup() argument
      583  if (!(mp = pci_dma_lmts2hdl(dip, rdip, iommu_p, dmareq)))  in pci_dma_setup()
      587  if (ret = pci_dma_type(pci_p, dmareq, mp))  in pci_dma_setup()
      589  if (ret = pci_dma_pfn(pci_p, dmareq, mp))  in pci_dma_setup()
      594  if ((ret = pci_dvma_win(pci_p, dmareq, mp)) || !handlep)  in pci_dma_setup()
      605  if (ret = pci_dvma_map(mp, dmareq, iommu_p))  in pci_dma_setup()
      609  if ((ret = pci_dma_physwin(pci_p, dmareq, mp)) || !handlep)  in pci_dma_setup()
      695  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,  in pci_dma_bindhdl() argument
      704  ddi_driver_name(rdip), ddi_get_instance(rdip), mp, dmareq);  in pci_dma_bindhdl()
      712  if (ret = pci_dma_type(pci_p, dmareq, mp))  in pci_dma_bindhdl()
      [all …]

/illumos-gate/usr/src/uts/sun4/io/px/

  px_dma.c
      179  ddi_dma_req_t *dmareq)  in px_dma_lmts2hdl() argument
      186  ddi_dma_lim_t *lim_p = dmareq->dmar_limits;  in px_dma_lmts2hdl()
      197  if (!(mp = px_dma_allocmp(dip, rdip, dmareq->dmar_fp,  in px_dma_lmts2hdl()
      198  dmareq->dmar_arg)))  in px_dma_lmts2hdl()
      388  px_dma_type(px_t *px_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp)  in px_dma_type() argument
      391  ddi_dma_obj_t *dobj_p = &dmareq->dmar_object;  in px_dma_type()
      397  mp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS;  in px_dma_type()
      552  px_dma_pfn(px_t *px_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp)  in px_dma_pfn() argument
      555  int (*waitfp)(caddr_t) = dmareq->dmar_fp;  in px_dma_pfn()
      578  ddi_set_callback(waitfp, dmareq->dmar_arg,  in px_dma_pfn()
      [all …]

  px_fdvma.c
      132  ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep)  in px_fdvma_reserve() argument
      139  ddi_dma_lim_t *lim_p = dmareq->dmar_limits;  in px_fdvma_reserve()
      159  npages = dmareq->dmar_object.dmao_size;  in px_fdvma_reserve()
      179  dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP));  in px_fdvma_reserve()
      200  mp->dmai_burstsizes = dmareq->dmar_limits->dlim_burstsizes;  in px_fdvma_reserve()

  px.c
      837  px_dma_setup(dev_info_t *dip, dev_info_t *rdip, ddi_dma_req_t *dmareq,  in px_dma_setup() argument
      849  mp = px_dma_lmts2hdl(dip, rdip, mmu_p, dmareq);  in px_dma_setup()
      854  if (ret = px_dma_type(px_p, dmareq, mp))  in px_dma_setup()
      856  if (ret = px_dma_pfn(px_p, dmareq, mp))  in px_dma_setup()
      861  ret = px_dvma_win(px_p, dmareq, mp);  in px_dma_setup()
      872  if (ret = px_dvma_map(mp, dmareq, mmu_p))  in px_dma_setup()
      876  ret = px_dma_physwin(px_p, dmareq, mp);  in px_dma_setup()
      964  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,  in px_dma_bindhdl() argument
      973  ddi_driver_name(rdip), ddi_get_instance(rdip), mp, dmareq);  in px_dma_bindhdl()
      981  if (ret = px_dma_type(px_p, dmareq, mp))  in px_dma_bindhdl()
      [all …]

  px_var.h
      160  ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep);
      166  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,

  px_dma.h
      213  px_mmu_t *mmu_p, ddi_dma_req_t *dmareq);
      220  extern int px_dvma_map(ddi_dma_impl_t *mp, ddi_dma_req_t *dmareq,
      223  extern int px_dma_physwin(px_t *px_p, ddi_dma_req_t *dmareq,

  px_fdvma.h
      37  struct ddi_dma_req *dmareq, ddi_dma_handle_t *handlep);

/illumos-gate/usr/src/uts/sun4u/io/

  iommu.c
      344  #define OBJSIZE dmareq->dmar_object.dmao_size
      821  check_dma_attr(struct ddi_dma_req *dmareq, ddi_dma_attr_t *dma_attr,  in check_dma_attr() argument
      832  if ((dmareq->dmar_flags & DDI_DMA_PARTIAL) == 0)  in check_dma_attr()
      839  if (!((addrlow + dmareq->dmar_object.dmao_size == 0) &&  in check_dma_attr()
      841  if ((dmareq->dmar_flags & DDI_DMA_PARTIAL) == 0)  in check_dma_attr()
      851  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,  in iommu_dma_bindhdl() argument
      876  size = (uint32_t)dmareq->dmar_object.dmao_size;  in iommu_dma_bindhdl()
      878  rval = check_dma_attr(dmareq, dma_attr, &size);  in iommu_dma_bindhdl()
      884  mp->dmai_rflags = (dmareq->dmar_flags & DMP_DDIFLAGS) |  in iommu_dma_bindhdl()
      887  switch (dmareq->dmar_object.dmao_type) {  in iommu_dma_bindhdl()
      [all …]

/illumos-gate/usr/src/uts/intel/sys/

  iommulib.h
      70  struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep,
      87  struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
      141  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
      227  ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,
      242  ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,
      275  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,

/illumos-gate/usr/src/uts/i86pc/io/amd_iommu/

  amd_iommu_page_tables.c
      1000  struct ddi_dma_req *dmareq)  in init_pte() argument
      1019  if (R == 0 && ((dmareq->dmar_flags & DDI_DMA_WRITE) ||  in init_pte()
      1020  (dmareq->dmar_flags & DDI_DMA_RDWR))) {  in init_pte()
      1023  if (W == 0 && ((dmareq->dmar_flags & DDI_DMA_READ) ||  in init_pte()
      1024  (dmareq->dmar_flags & DDI_DMA_RDWR))) {  in init_pte()
      1042  if (dmareq->dmar_flags & DDI_DMA_RDWR) {  in init_pte()
      1046  if (dmareq->dmar_flags & DDI_DMA_WRITE) {  in init_pte()
      1049  if (dmareq->dmar_flags & DDI_DMA_READ) {  in init_pte()
      1092  struct ddi_dma_req *dmareq,  in amd_iommu_setup_1_pgtable() argument
      1146  error = init_pte(pt, pa, AMD_IOMMU_VA_BITS(va, level), dmareq);  in amd_iommu_setup_1_pgtable()
      [all …]

  amd_iommu_page_tables.h
      119  ddi_dma_attr_t *attrp, struct ddi_dma_req *dmareq,

  amd_iommu_impl.c
      52  struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep,
      65  struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
      1494  struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookie_array, uint_t ccount,  in map_current_window() argument
      1522  if ((error = amd_iommu_map_pa2va(iommu, rdip, attrp, dmareq,  in map_current_window()
      1616  struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep,  in amd_iommu_bindhdl() argument
      1633  dmareq, cookiep, ccountp);  in amd_iommu_bindhdl()
      1668  error = map_current_window(iommu, rdip, attrp, dmareq,  in amd_iommu_bindhdl()
      1894  struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao)  in amd_iommu_mapobject() argument

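One detail worth noting in the init_pte() hits: the DDI flags name the transfer direction from host memory's point of view, so DDI_DMA_WRITE (memory to device) needs the device to *read* memory and therefore read permission in the IOMMU page table, while DDI_DMA_READ (device to memory) needs write permission, which is why line 1019 pairs R with DDI_DMA_WRITE and line 1023 pairs W with DDI_DMA_READ. A small sketch of that mapping; the XX_PTE_* bits are hypothetical stand-ins, since the real code sets fields of the AMD IOMMU page-table entry directly:

    #include <sys/types.h>
    #include <sys/ddidmareq.h>

    #define XX_PTE_IR       0x1     /* device may read host memory (hypothetical) */
    #define XX_PTE_IW       0x2     /* device may write host memory (hypothetical) */

    static uint_t
    xx_pte_perms(struct ddi_dma_req *dmareq)
    {
            uint_t perms = 0;

            /*
             * DDI_DMA_RDWR is just (DDI_DMA_READ | DDI_DMA_WRITE), so the
             * two tests below also cover the combined case.
             */
            if (dmareq->dmar_flags & DDI_DMA_WRITE) /* memory -> device */
                    perms |= XX_PTE_IR;
            if (dmareq->dmar_flags & DDI_DMA_READ)  /* device -> memory */
                    perms |= XX_PTE_IW;

            return (perms);
    }
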
/illumos-gate/usr/src/uts/intel/io/

  iommulib.c
      672  ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,  in iommulib_nexdma_bindhdl() argument
      682  dmareq, cookiep, ccountp));  in iommulib_nexdma_bindhdl()
      731  ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,  in iommulib_nexdma_mapobject() argument
      738  dma_handle, dmareq, dmao));  in iommulib_nexdma_mapobject()
      778  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,  in iommulib_iommu_dma_bindhdl() argument
      784  return (nexops->nops_dma_bindhdl(dip, rdip, handle, dmareq,  in iommulib_iommu_dma_bindhdl()

/illumos-gate/usr/src/uts/sun4u/sys/pci/

  pci_fdvma.h
      39  struct ddi_dma_req *dmareq, ddi_dma_handle_t *handlep);

  pci_var.h
      220  ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep);
      226  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,

  pci_dma.h
      239  iommu_t *iommu_p, ddi_dma_req_t *dmareq);
      247  extern int pci_dvma_map(ddi_dma_impl_t *mp, ddi_dma_req_t *dmareq,
      252  extern int pci_dma_physwin(pci_t *pci_p, ddi_dma_req_t *dmareq,

/illumos-gate/usr/src/uts/i86pc/io/

  rootnex.c
      196  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
      224  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
      378  static int rootnex_bind_slowpath(ddi_dma_impl_t *hp, struct ddi_dma_req *dmareq,
      380  static int rootnex_setup_copybuf(ddi_dma_impl_t *hp, struct ddi_dma_req *dmareq,
      1953  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,  in rootnex_coredma_bindhdl() argument
      1976  if (dmareq->dmar_fp == DDI_DMA_SLEEP) {  in rootnex_coredma_bindhdl()
      1982  hp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS;  in rootnex_coredma_bindhdl()
      2009  dma->dp_dma = dmareq->dmar_object;  in rootnex_coredma_bindhdl()
      2014  e = iommulib_nexdma_mapobject(dip, rdip, handle, dmareq, dmao);  in rootnex_coredma_bindhdl()
      2200  e = rootnex_bind_slowpath(hp, dmareq, dma, attr, &dma->dp_dma,  in rootnex_coredma_bindhdl()
      [all …]

  immu_dvma.c
      110  struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
      380  dma_to_immu_flags(struct ddi_dma_req *dmareq)  in dma_to_immu_flags() argument
      384  if (dmareq->dmar_fp == DDI_DMA_SLEEP) {  in dma_to_immu_flags()
      400  if (dmareq->dmar_flags & DDI_DMA_READ)  in dma_to_immu_flags()
      403  if (dmareq->dmar_flags & DDI_DMA_WRITE)  in dma_to_immu_flags()
      410  if ((dmareq->dmar_flags & (DDI_DMA_READ | DDI_DMA_WRITE)) == 0) {  in dma_to_immu_flags()
      2553  immu_hdl_priv_t *ihp, struct ddi_dma_req *dmareq,  in immu_map_dvmaseg() argument
      2576  immu_flags = dma_to_immu_flags(dmareq);  in immu_map_dvmaseg()
      2580  dmar_object = &dmareq->dmar_object;  in immu_map_dvmaseg()
      2641  dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP);  in immu_map_dvmaseg()
      [all …]

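dma_to_immu_flags() above, like the VM_SLEEP/VM_NOSLEEP tests in pci_fdvma.c and px_fdvma.c, shows the usual recipe for consuming a ddi_dma_req in a nexus or IOMMU driver: dmar_fp selects blocking behaviour, dmar_flags selects the access mode. A generic sketch of that translation follows; the XX_F_* names are hypothetical stand-ins for whatever internal flags the consuming driver defines (immu_dvma.c uses its own IMMU-specific values):

    #include <sys/types.h>
    #include <sys/ddidmareq.h>

    #define XX_F_SLEEP      0x01    /* hypothetical internal flags */
    #define XX_F_NOSLEEP    0x02
    #define XX_F_READ       0x04
    #define XX_F_WRITE      0x08

    static uint_t
    xx_dmareq_to_flags(struct ddi_dma_req *dmareq)
    {
            uint_t f = 0;

            /*
             * dmar_fp is a tri-state: DDI_DMA_SLEEP, DDI_DMA_DONTWAIT, or a
             * driver callback to invoke when resources become available.
             * Only DDI_DMA_SLEEP permits blocking allocations here.
             */
            if (dmareq->dmar_fp == DDI_DMA_SLEEP)
                    f |= XX_F_SLEEP;
            else
                    f |= XX_F_NOSLEEP;

            /*
             * Direction bits; the real code also rejects a request with
             * neither bit set (see line 410 of immu_dvma.c above).
             */
            if (dmareq->dmar_flags & DDI_DMA_READ)
                    f |= XX_F_READ;
            if (dmareq->dmar_flags & DDI_DMA_WRITE)
                    f |= XX_F_WRITE;

            return (f);
    }
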
/illumos-gate/usr/src/uts/common/sys/

  devops.h
      205  struct ddi_dma_req *dmareq,
      213  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
      330  struct ddi_dma_req *dmareq,

/illumos-gate/usr/src/uts/sun4v/io/niumx/

  niumx.c
      68  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,
      725  ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,  in niumx_dma_bindhdl() argument
      728  int (*waitfp)(caddr_t) = dmareq->dmar_fp;  in niumx_dma_bindhdl()
      730  ddi_dma_obj_t *dobj_p = &dmareq->dmar_object;  in niumx_dma_bindhdl()
      736  NIUMX_NAMEINST(rdip), mp, dmareq);  in niumx_dma_bindhdl()
      739  mp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS | DMP_NOSYNC;  in niumx_dma_bindhdl()

/illumos-gate/usr/src/uts/common/os/

  sunddi.c
      724  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,  in ddi_dma_bindhdl() argument
      734  return ((*funcp)(dip, rdip, handle, dmareq, cp, ccountp));  in ddi_dma_bindhdl()
      5168  ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,  in ddi_no_dma_bindhdl() argument
      5171  _NOTE(ARGUNUSED(dip, rdip, handle, dmareq, cp, ccountp))  in ddi_no_dma_bindhdl()
      6992  struct ddi_dma_req dmareq;  in ddi_dma_buf_bind_handle() local
      7003  dmareq.dmar_flags = flags;  in ddi_dma_buf_bind_handle()
      7004  dmareq.dmar_fp = waitfp;  in ddi_dma_buf_bind_handle()
      7005  dmareq.dmar_arg = arg;  in ddi_dma_buf_bind_handle()
      7006  dmareq.dmar_object.dmao_size = (uint_t)bp->b_bcount;  in ddi_dma_buf_bind_handle()
      7009  dmareq.dmar_object.dmao_type = DMA_OTYP_PAGES;  in ddi_dma_buf_bind_handle()
      [all …]

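The ddi_dma_buf_bind_handle() hits in sunddi.c (and the near-identical copy in fc_ddi.c at the top of this listing) show how the DDI packages a struct buf into a ddi_dma_req — dmar_flags, dmar_fp, dmar_arg, and a dmar_object of page-list or virtual-address type — before ddi_dma_bindhdl() dispatches it to the parent nexus (pci, px, rootnex, niumx, ...). From a leaf driver the whole path is driven by the documented 9F calls; a minimal sketch, in which the xx_ names, attribute values and abbreviated error handling are illustrative assumptions:

    #include <sys/types.h>
    #include <sys/buf.h>
    #include <sys/ddi.h>
    #include <sys/sunddi.h>

    static ddi_dma_attr_t xx_dma_attr = {
            DMA_ATTR_V0,            /* dma_attr_version */
            0x0,                    /* dma_attr_addr_lo */
            0xffffffffULL,          /* dma_attr_addr_hi */
            0x00ffffffULL,          /* dma_attr_count_max */
            1,                      /* dma_attr_align */
            0x3f,                   /* dma_attr_burstsizes (assumed) */
            1,                      /* dma_attr_minxfer */
            0x00ffffffULL,          /* dma_attr_maxxfer */
            0xffffffffULL,          /* dma_attr_seg */
            1,                      /* dma_attr_sgllen */
            1,                      /* dma_attr_granular */
            0                       /* dma_attr_flags */
    };

    static int
    xx_start_io(dev_info_t *dip, struct buf *bp)
    {
            ddi_dma_handle_t dh;
            ddi_dma_cookie_t cookie;
            uint_t ccount;

            if (ddi_dma_alloc_handle(dip, &xx_dma_attr, DDI_DMA_SLEEP,
                NULL, &dh) != DDI_SUCCESS)
                    return (DDI_FAILURE);

            /*
             * This call builds the ddi_dma_req seen above and hands it to
             * the parent nexus bus_dma_bindhdl entry point.
             */
            if (ddi_dma_buf_bind_handle(dh, bp,
                DDI_DMA_READ | DDI_DMA_STREAMING, DDI_DMA_SLEEP, NULL,
                &cookie, &ccount) != DDI_DMA_MAPPED) {
                    ddi_dma_free_handle(&dh);
                    return (DDI_FAILURE);
            }

            /*
             * ... program the device from cookie, using
             * ddi_dma_nextcookie() for the remaining (ccount - 1)
             * cookies, then tear the binding down on completion ...
             */
            (void) ddi_dma_unbind_handle(dh);
            ddi_dma_free_handle(&dh);
            return (DDI_SUCCESS);
    }
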