
Searched refs:dmareq (Results 1 – 25 of 31) sorted by relevance


/titanic_50/usr/src/uts/sun4/io/efcode/
fc_ddi.c
61 struct ddi_dma_req dmareq; in fc_ddi_dma_buf_bind_handle() local
70 dmareq.dmar_flags = flags; in fc_ddi_dma_buf_bind_handle()
71 dmareq.dmar_fp = waitfp; in fc_ddi_dma_buf_bind_handle()
72 dmareq.dmar_arg = arg; in fc_ddi_dma_buf_bind_handle()
73 dmareq.dmar_object.dmao_size = (uint_t)bp->b_bcount; in fc_ddi_dma_buf_bind_handle()
76 dmareq.dmar_object.dmao_type = DMA_OTYP_PAGES; in fc_ddi_dma_buf_bind_handle()
77 dmareq.dmar_object.dmao_obj.pp_obj.pp_pp = bp->b_pages; in fc_ddi_dma_buf_bind_handle()
78 dmareq.dmar_object.dmao_obj.pp_obj.pp_offset = in fc_ddi_dma_buf_bind_handle()
81 dmareq.dmar_object.dmao_obj.virt_obj.v_addr = bp->b_un.b_addr; in fc_ddi_dma_buf_bind_handle()
83 dmareq.dmar_object.dmao_obj.virt_obj.v_priv = in fc_ddi_dma_buf_bind_handle()
[all …]
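
The fc_ddi.c hits above (and the matching sunddi.c hits at the end of this list) show the one place a ddi_dma_req gets built on a driver's behalf: *_dma_buf_bind_handle() translates a buf(9S) into the request before handing it to the nexus bind routine. A minimal sketch of that pattern; the B_PAGEIO test, the MMU_PAGEOFFSET masking and the DMA_OTYP_BUFVADDR type are assumed details that the truncated hits do not show:

    #include <sys/types.h>
    #include <sys/param.h>
    #include <sys/buf.h>
    #include <sys/ddidmareq.h>
    #include <sys/sunddi.h>

    /*
     * Sketch: fill a ddi_dma_req from a buf(9S) the way the
     * *_dma_buf_bind_handle() hits above do.
     */
    static void
    fill_dmareq_from_buf(struct buf *bp, uint_t flags,
        int (*waitfp)(caddr_t), caddr_t arg, struct ddi_dma_req *dmareq)
    {
        dmareq->dmar_flags = flags;
        dmareq->dmar_fp = waitfp;
        dmareq->dmar_arg = arg;
        dmareq->dmar_object.dmao_size = (uint_t)bp->b_bcount;

        if (bp->b_flags & B_PAGEIO) {
            /* Page-list I/O: describe the object as a page chain. */
            dmareq->dmar_object.dmao_type = DMA_OTYP_PAGES;
            dmareq->dmar_object.dmao_obj.pp_obj.pp_pp = bp->b_pages;
            dmareq->dmar_object.dmao_obj.pp_obj.pp_offset =
                (uint_t)((uintptr_t)bp->b_un.b_addr & MMU_PAGEOFFSET);
        } else {
            /* Ordinary buffer: describe it by virtual address. */
            dmareq->dmar_object.dmao_type = DMA_OTYP_BUFVADDR;
            dmareq->dmar_object.dmao_obj.virt_obj.v_addr = bp->b_un.b_addr;
            /* The real code may attach a shadow page list here. */
            dmareq->dmar_object.dmao_obj.virt_obj.v_priv = NULL;
        }
    }
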
/titanic_50/usr/src/uts/sun4/os/
dvma.c
60 struct ddi_dma_req dmareq; in dvma_reserve() local
70 bzero(&dmareq, sizeof (dmareq)); in dvma_reserve()
71 dmareq.dmar_fp = DDI_DMA_DONTWAIT; in dvma_reserve()
72 dmareq.dmar_flags = DDI_DMA_RDWR | DDI_DMA_STREAMING; in dvma_reserve()
73 dmareq.dmar_limits = &dma_lim; in dvma_reserve()
74 dmareq.dmar_object.dmao_size = pages; in dvma_reserve()
84 ret = ddi_dma_mctl(dip, dip, reqhdl, DDI_DMA_RESERVE, (off_t *)&dmareq, in dvma_reserve()
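
dvma_reserve() above is the one caller in this list that uses a ddi_dma_req purely as a resource-reservation descriptor: it zeroes the structure and fills in only dmar_fp, dmar_flags, dmar_limits and a page count before issuing the DDI_DMA_RESERVE control operation. A hedged sketch of that sequence; the ddi_dma_mctl() arguments after &dmareq are truncated in the hit and are assumed here:

    #include <sys/systm.h>
    #include <sys/ddidmareq.h>
    #include <sys/sunddi.h>

    /*
     * Sketch of a dvma_reserve()-style DDI_DMA_RESERVE request: no memory
     * object is bound, the request only sizes the reservation in pages.
     */
    static int
    reserve_dvma_pages(dev_info_t *dip, ddi_dma_lim_t *dma_lim,
        uint_t pages, ddi_dma_handle_t reqhdl, caddr_t *objp)
    {
        struct ddi_dma_req dmareq;

        bzero(&dmareq, sizeof (dmareq));
        dmareq.dmar_fp = DDI_DMA_DONTWAIT;          /* never block */
        dmareq.dmar_flags = DDI_DMA_RDWR | DDI_DMA_STREAMING;
        dmareq.dmar_limits = dma_lim;
        dmareq.dmar_object.dmao_size = pages;       /* pages, not bytes */

        /* Trailing arguments are assumed; the hit ends after &dmareq. */
        return (ddi_dma_mctl(dip, dip, reqhdl, DDI_DMA_RESERVE,
            (off_t *)&dmareq, 0, objp, 0));
    }
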
/titanic_50/usr/src/uts/sun4u/io/pci/
pci_dma.c
358 ddi_dma_req_t *dmareq) in pci_dma_lmts2hdl() argument
365 ddi_dma_lim_t *lim_p = dmareq->dmar_limits; in pci_dma_lmts2hdl()
376 if (!(mp = pci_dma_allocmp(dip, rdip, dmareq->dmar_fp, in pci_dma_lmts2hdl()
377 dmareq->dmar_arg))) in pci_dma_lmts2hdl()
550 pci_dma_type(pci_t *pci_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp) in pci_dma_type() argument
553 ddi_dma_obj_t *dobj_p = &dmareq->dmar_object; in pci_dma_type()
561 mp->dmai_rflags = pci_dma_consist_check(dmareq->dmar_flags, pbm_p); in pci_dma_type()
582 int (*waitfp)(caddr_t) = dmareq->dmar_fp; in pci_dma_type()
595 dmareq->dmar_arg, in pci_dma_type()
699 pci_dma_vapfn(pci_t *pci_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp, in pci_dma_vapfn() argument
[all …]
pci_fdvma.c
179 ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep) in pci_fdvma_reserve() argument
186 ddi_dma_lim_t *lim_p = dmareq->dmar_limits; in pci_fdvma_reserve()
206 npages = dmareq->dmar_object.dmao_size; in pci_fdvma_reserve()
227 dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP)); in pci_fdvma_reserve()
250 pci_dma_consist_check(dmareq->dmar_flags, pci_p->pci_pbm_p); in pci_fdvma_reserve()
251 if (!(dmareq->dmar_flags & DDI_DMA_RDWR)) in pci_fdvma_reserve()
255 mp->dmai_minxfer = dmareq->dmar_limits->dlim_minxfer; in pci_fdvma_reserve()
256 mp->dmai_burstsizes = dmareq->dmar_limits->dlim_burstsizes; in pci_fdvma_reserve()
pci.c
569 pci_dma_setup(dev_info_t *dip, dev_info_t *rdip, ddi_dma_req_t *dmareq, in pci_dma_setup() argument
581 if (!(mp = pci_dma_lmts2hdl(dip, rdip, iommu_p, dmareq))) in pci_dma_setup()
585 if (ret = pci_dma_type(pci_p, dmareq, mp)) in pci_dma_setup()
587 if (ret = pci_dma_pfn(pci_p, dmareq, mp)) in pci_dma_setup()
592 if ((ret = pci_dvma_win(pci_p, dmareq, mp)) || !handlep) in pci_dma_setup()
603 if (ret = pci_dvma_map(mp, dmareq, iommu_p)) in pci_dma_setup()
607 if ((ret = pci_dma_physwin(pci_p, dmareq, mp)) || !handlep) in pci_dma_setup()
693 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq, in pci_dma_bindhdl() argument
702 ddi_driver_name(rdip), ddi_get_instance(rdip), mp, dmareq); in pci_dma_bindhdl()
710 if (ret = pci_dma_type(pci_p, dmareq, mp)) in pci_dma_bindhdl()
[all …]
/titanic_50/usr/src/uts/sun4/io/px/
px_dma.c
177 ddi_dma_req_t *dmareq) in px_dma_lmts2hdl() argument
184 ddi_dma_lim_t *lim_p = dmareq->dmar_limits; in px_dma_lmts2hdl()
195 if (!(mp = px_dma_allocmp(dip, rdip, dmareq->dmar_fp, in px_dma_lmts2hdl()
196 dmareq->dmar_arg))) in px_dma_lmts2hdl()
386 px_dma_type(px_t *px_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp) in px_dma_type() argument
389 ddi_dma_obj_t *dobj_p = &dmareq->dmar_object; in px_dma_type()
395 mp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS; in px_dma_type()
550 px_dma_pfn(px_t *px_p, ddi_dma_req_t *dmareq, ddi_dma_impl_t *mp) in px_dma_pfn() argument
553 int (*waitfp)(caddr_t) = dmareq->dmar_fp; in px_dma_pfn()
576 ddi_set_callback(waitfp, dmareq->dmar_arg, in px_dma_pfn()
[all …]
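
px_dma_pfn() above (line 576) shows the standard role of dmar_fp and dmar_arg when an allocation fails mid-bind: a caller that supplied a real wait function is queued with ddi_set_callback() and the bind returns DDI_DMA_NORESOURCES so it can be retried. A minimal sketch, assuming a hypothetical callback-list id in place of the one the truncated hit hides:

    #include <sys/ddidmareq.h>
    #include <sys/sunddi.h>

    /* Hypothetical callback-list id; the real one in px_dma.c is not shown. */
    static uintptr_t example_dma_cb_id;

    /*
     * Sketch: fail a resource allocation the way px_dma_pfn() does,
     * arranging a retry callback when the caller asked for one.
     */
    static int
    fail_noresources(struct ddi_dma_req *dmareq)
    {
        int (*waitfp)(caddr_t) = dmareq->dmar_fp;

        if (waitfp != DDI_DMA_DONTWAIT) {
            /* Re-drive the caller when resources may be available again. */
            ddi_set_callback(waitfp, dmareq->dmar_arg, &example_dma_cb_id);
        }
        return (DDI_DMA_NORESOURCES);
    }
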
px_fdvma.c
132 ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep) in px_fdvma_reserve() argument
139 ddi_dma_lim_t *lim_p = dmareq->dmar_limits; in px_fdvma_reserve()
159 npages = dmareq->dmar_object.dmao_size; in px_fdvma_reserve()
179 dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP)); in px_fdvma_reserve()
200 mp->dmai_burstsizes = dmareq->dmar_limits->dlim_burstsizes; in px_fdvma_reserve()
px.c
839 px_dma_setup(dev_info_t *dip, dev_info_t *rdip, ddi_dma_req_t *dmareq, in px_dma_setup() argument
851 if (!(mp = px_dma_lmts2hdl(dip, rdip, mmu_p, dmareq))) in px_dma_setup()
855 if (ret = px_dma_type(px_p, dmareq, mp)) in px_dma_setup()
857 if (ret = px_dma_pfn(px_p, dmareq, mp)) in px_dma_setup()
862 if ((ret = px_dvma_win(px_p, dmareq, mp)) || !handlep) in px_dma_setup()
873 if (ret = px_dvma_map(mp, dmareq, mmu_p)) in px_dma_setup()
877 if ((ret = px_dma_physwin(px_p, dmareq, mp)) || !handlep) in px_dma_setup()
963 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq, in px_dma_bindhdl() argument
972 ddi_driver_name(rdip), ddi_get_instance(rdip), mp, dmareq); in px_dma_bindhdl()
980 if (ret = px_dma_type(px_p, dmareq, mp)) in px_dma_bindhdl()
[all …]
px_var.h
160 ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep);
166 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,
px_fdvma.h
37 struct ddi_dma_req *dmareq, ddi_dma_handle_t *handlep);
px_dma.h
213 px_mmu_t *mmu_p, ddi_dma_req_t *dmareq);
220 extern int px_dvma_map(ddi_dma_impl_t *mp, ddi_dma_req_t *dmareq,
223 extern int px_dma_physwin(px_t *px_p, ddi_dma_req_t *dmareq,
/titanic_50/usr/src/uts/sun4u/io/
iommu.c
344 #define OBJSIZE dmareq->dmar_object.dmao_size
821 check_dma_attr(struct ddi_dma_req *dmareq, ddi_dma_attr_t *dma_attr, in check_dma_attr() argument
832 if ((dmareq->dmar_flags & DDI_DMA_PARTIAL) == 0) in check_dma_attr()
839 if (!((addrlow + dmareq->dmar_object.dmao_size == 0) && in check_dma_attr()
841 if ((dmareq->dmar_flags & DDI_DMA_PARTIAL) == 0) in check_dma_attr()
851 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq, in iommu_dma_bindhdl() argument
876 size = (uint32_t)dmareq->dmar_object.dmao_size; in iommu_dma_bindhdl()
878 rval = check_dma_attr(dmareq, dma_attr, &size); in iommu_dma_bindhdl()
884 mp->dmai_rflags = (dmareq->dmar_flags & DMP_DDIFLAGS) | in iommu_dma_bindhdl()
887 switch (dmareq->dmar_object.dmao_type) { in iommu_dma_bindhdl()
[all …]
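
check_dma_attr() above enforces the usual rule for over-large requests: a transfer that exceeds what the handle's DMA attributes allow may proceed only if the caller set DDI_DMA_PARTIAL, otherwise the bind fails. The exact attribute and address-range comparisons are truncated in the hits, so the condition in this sketch is a simplified stand-in:

    #include <sys/ddidmareq.h>
    #include <sys/sunddi.h>

    /*
     * Sketch: refuse an over-sized request unless the caller accepted a
     * partial mapping, mirroring the check_dma_attr() pattern above.
     */
    static int
    check_request_size(struct ddi_dma_req *dmareq, ddi_dma_attr_t *dma_attr,
        uint32_t *sizep)
    {
        uint32_t size = (uint32_t)dmareq->dmar_object.dmao_size;

        if ((uint64_t)size > dma_attr->dma_attr_maxxfer) {
            if ((dmareq->dmar_flags & DDI_DMA_PARTIAL) == 0)
                return (DDI_DMA_TOOBIG);
            /* Partial binding: clamp to what the attributes allow. */
            size = (uint32_t)dma_attr->dma_attr_maxxfer;
        }
        *sizep = size;
        return (DDI_DMA_MAPPED);
    }
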
/titanic_50/usr/src/uts/intel/sys/
iommulib.h
70 struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep,
87 struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
141 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
227 ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,
242 ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq,
275 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
/titanic_50/usr/src/uts/i86pc/io/amd_iommu/
amd_iommu_page_tables.c
1000 struct ddi_dma_req *dmareq) in init_pte() argument
1019 if (R == 0 && ((dmareq->dmar_flags & DDI_DMA_WRITE) || in init_pte()
1020 (dmareq->dmar_flags & DDI_DMA_RDWR))) { in init_pte()
1023 if (W == 0 && ((dmareq->dmar_flags & DDI_DMA_READ) || in init_pte()
1024 (dmareq->dmar_flags & DDI_DMA_RDWR))) { in init_pte()
1042 if (dmareq->dmar_flags & DDI_DMA_RDWR) { in init_pte()
1046 if (dmareq->dmar_flags & DDI_DMA_WRITE) { in init_pte()
1049 if (dmareq->dmar_flags & DDI_DMA_READ) { in init_pte()
1092 struct ddi_dma_req *dmareq, in amd_iommu_setup_1_pgtable() argument
1146 error = init_pte(pt, pa, AMD_IOMMU_VA_BITS(va, level), dmareq); in amd_iommu_setup_1_pgtable()
[all …]
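
init_pte() above treats dmar_flags as the authoritative transfer direction: an existing IOMMU page-table entry that lacks read or write permission for the requested direction is rejected, and a fresh entry gets its R/W bits from the same flags (DDI_DMA_WRITE means the device reads memory, DDI_DMA_READ means the device writes it). A hedged sketch with a hypothetical pte_flags_t standing in for the real hardware PTE layout:

    #include <sys/ddidmareq.h>

    /* Hypothetical software view of an IOMMU PTE's permission bits. */
    typedef struct {
        int pte_read;       /* device may read memory (DDI_DMA_WRITE)  */
        int pte_write;      /* device may write memory (DDI_DMA_READ)  */
    } pte_flags_t;

    /*
     * Sketch: derive PTE permissions from dmar_flags in the spirit of the
     * init_pte() hits above.  Returns -1 if no direction was requested.
     */
    static int
    pte_perms_from_dmareq(struct ddi_dma_req *dmareq, pte_flags_t *pte)
    {
        uint_t flags = dmareq->dmar_flags;

        pte->pte_read = pte->pte_write = 0;
        if ((flags & DDI_DMA_RDWR) == DDI_DMA_RDWR) {
            pte->pte_read = pte->pte_write = 1;
        } else if (flags & DDI_DMA_WRITE) {
            pte->pte_read = 1;      /* memory -> device transfer */
        } else if (flags & DDI_DMA_READ) {
            pte->pte_write = 1;     /* device -> memory transfer */
        } else {
            return (-1);
        }
        return (0);
    }
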
amd_iommu_page_tables.h
119 ddi_dma_attr_t *attrp, struct ddi_dma_req *dmareq,
amd_iommu_impl.c
51 struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep,
64 struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
1495 struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookie_array, uint_t ccount, in map_current_window() argument
1523 if ((error = amd_iommu_map_pa2va(iommu, rdip, attrp, dmareq, in map_current_window()
1617 struct ddi_dma_req *dmareq, ddi_dma_cookie_t *cookiep, in amd_iommu_bindhdl() argument
1634 dmareq, cookiep, ccountp); in amd_iommu_bindhdl()
1669 error = map_current_window(iommu, rdip, attrp, dmareq, in amd_iommu_bindhdl()
1895 struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao) in amd_iommu_mapobject() argument
/titanic_50/usr/src/uts/i86pc/io/
rootnex.c
204 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
232 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
382 static int rootnex_valid_bind_parms(ddi_dma_req_t *dmareq,
388 static int rootnex_bind_slowpath(ddi_dma_impl_t *hp, struct ddi_dma_req *dmareq,
390 static int rootnex_setup_copybuf(ddi_dma_impl_t *hp, struct ddi_dma_req *dmareq,
1961 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq, in rootnex_coredma_bindhdl() argument
1984 if (dmareq->dmar_fp == DDI_DMA_SLEEP) { in rootnex_coredma_bindhdl()
1990 hp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS; in rootnex_coredma_bindhdl()
2018 e = rootnex_valid_bind_parms(dmareq, attr); in rootnex_coredma_bindhdl()
2027 dma->dp_dma = dmareq->dmar_object; in rootnex_coredma_bindhdl()
[all …]
immu_dvma.c
108 struct ddi_dma_req *dmareq, ddi_dma_obj_t *dmao);
378 dma_to_immu_flags(struct ddi_dma_req *dmareq) in dma_to_immu_flags() argument
382 if (dmareq->dmar_fp == DDI_DMA_SLEEP) { in dma_to_immu_flags()
398 if (dmareq->dmar_flags & DDI_DMA_READ) in dma_to_immu_flags()
401 if (dmareq->dmar_flags & DDI_DMA_WRITE) in dma_to_immu_flags()
408 if ((dmareq->dmar_flags & (DDI_DMA_READ | DDI_DMA_WRITE)) == 0) { in dma_to_immu_flags()
2551 immu_hdl_priv_t *ihp, struct ddi_dma_req *dmareq, in immu_map_dvmaseg() argument
2572 immu_flags = dma_to_immu_flags(dmareq); in immu_map_dvmaseg()
2576 dmar_object = &dmareq->dmar_object; in immu_map_dvmaseg()
2637 dmareq->dmar_fp == DDI_DMA_SLEEP ? VM_SLEEP : VM_NOSLEEP); in immu_map_dvmaseg()
[all …]
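
dma_to_immu_flags() above is the Intel-IOMMU counterpart: dmar_fp selects sleeping versus non-sleeping allocation, dmar_flags selects the device's read/write access, and a request that names neither direction is an error. A sketch using hypothetical EX_IMMU_* constants in place of the real immu flag values, which the hits do not show:

    #include <sys/ddidmareq.h>

    /* Hypothetical stand-ins for the real immu mapping-flag bits. */
    #define EX_IMMU_SLEEP   0x01
    #define EX_IMMU_NOSLEEP 0x02
    #define EX_IMMU_READ    0x04
    #define EX_IMMU_WRITE   0x08

    /*
     * Sketch: translate a ddi_dma_req into IOMMU mapping flags along the
     * lines of dma_to_immu_flags().  Returns 0 for a directionless request.
     */
    static uint_t
    dmareq_to_mapping_flags(struct ddi_dma_req *dmareq)
    {
        uint_t f;

        if ((dmareq->dmar_flags & (DDI_DMA_READ | DDI_DMA_WRITE)) == 0)
            return (0);     /* caller must specify a direction */

        /* dmar_fp doubles as the sleep/nosleep policy for allocations. */
        f = (dmareq->dmar_fp == DDI_DMA_SLEEP) ?
            EX_IMMU_SLEEP : EX_IMMU_NOSLEEP;

        if (dmareq->dmar_flags & DDI_DMA_READ)
            f |= EX_IMMU_READ;
        if (dmareq->dmar_flags & DDI_DMA_WRITE)
            f |= EX_IMMU_WRITE;
        return (f);
    }
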
/titanic_50/usr/src/uts/sun4u/sys/pci/
pci_fdvma.h
39 struct ddi_dma_req *dmareq, ddi_dma_handle_t *handlep);
pci_var.h
220 ddi_dma_req_t *dmareq, ddi_dma_handle_t *handlep);
226 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,
pci_dma.h
239 iommu_t *iommu_p, ddi_dma_req_t *dmareq);
247 extern int pci_dvma_map(ddi_dma_impl_t *mp, ddi_dma_req_t *dmareq,
252 extern int pci_dma_physwin(pci_t *pci_p, ddi_dma_req_t *dmareq,
/titanic_50/usr/src/uts/intel/io/
iommulib.c
674 ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq, in iommulib_nexdma_bindhdl() argument
684 dmareq, cookiep, ccountp)); in iommulib_nexdma_bindhdl()
733 ddi_dma_handle_t dma_handle, struct ddi_dma_req *dmareq, in iommulib_nexdma_mapobject() argument
740 dma_handle, dmareq, dmao)); in iommulib_nexdma_mapobject()
780 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq, in iommulib_iommu_dma_bindhdl() argument
786 return (nexops->nops_dma_bindhdl(dip, rdip, handle, dmareq, in iommulib_iommu_dma_bindhdl()
/titanic_50/usr/src/uts/common/sys/
devops.h
205 struct ddi_dma_req *dmareq,
213 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq,
330 struct ddi_dma_req *dmareq,
/titanic_50/usr/src/uts/sun4v/io/niumx/
niumx.c
68 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq,
723 ddi_dma_handle_t handle, ddi_dma_req_t *dmareq, in niumx_dma_bindhdl() argument
726 int (*waitfp)(caddr_t) = dmareq->dmar_fp; in niumx_dma_bindhdl()
728 ddi_dma_obj_t *dobj_p = &dmareq->dmar_object; in niumx_dma_bindhdl()
734 NIUMX_NAMEINST(rdip), mp, dmareq); in niumx_dma_bindhdl()
737 mp->dmai_rflags = dmareq->dmar_flags & DMP_DDIFLAGS | DMP_NOSYNC; in niumx_dma_bindhdl()
/titanic_50/usr/src/uts/common/os/
sunddi.c
794 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq, in ddi_dma_bindhdl() argument
804 return ((*funcp)(dip, rdip, handle, dmareq, cp, ccountp)); in ddi_dma_bindhdl()
5237 ddi_dma_handle_t handle, struct ddi_dma_req *dmareq, in ddi_no_dma_bindhdl() argument
5240 _NOTE(ARGUNUSED(dip, rdip, handle, dmareq, cp, ccountp)) in ddi_no_dma_bindhdl()
7074 struct ddi_dma_req dmareq; in ddi_dma_buf_bind_handle() local
7077 dmareq.dmar_flags = flags; in ddi_dma_buf_bind_handle()
7078 dmareq.dmar_fp = waitfp; in ddi_dma_buf_bind_handle()
7079 dmareq.dmar_arg = arg; in ddi_dma_buf_bind_handle()
7080 dmareq.dmar_object.dmao_size = (uint_t)bp->b_bcount; in ddi_dma_buf_bind_handle()
7083 dmareq.dmar_object.dmao_type = DMA_OTYP_PAGES; in ddi_dma_buf_bind_handle()
[all …]
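
The sunddi.c hits close the loop: a typical leaf driver never builds a ddi_dma_req itself. It calls ddi_dma_buf_bind_handle() (or ddi_dma_addr_bind_handle()), which constructs the request exactly as shown at lines 7077-7083 and passes it down to the parent nexus through ddi_dma_bindhdl() at line 794. A hedged usage sketch from the driver's side; my_dma_attr is a hypothetical, driver-specific attribute structure:

    #include <sys/buf.h>
    #include <sys/ddi.h>
    #include <sys/sunddi.h>

    /*
     * Sketch: how a leaf driver triggers all of the dmareq plumbing in
     * this listing.
     */
    static int
    bind_buf_for_dma(dev_info_t *dip, struct buf *bp,
        ddi_dma_attr_t *my_dma_attr, ddi_dma_handle_t *handlep,
        ddi_dma_cookie_t *cookiep, uint_t *ccountp)
    {
        int ret;

        ret = ddi_dma_alloc_handle(dip, my_dma_attr, DDI_DMA_SLEEP,
            NULL, handlep);
        if (ret != DDI_SUCCESS)
            return (ret);

        /*
         * This call builds the struct ddi_dma_req seen above and hands
         * it to the nexus bind routine.
         */
        ret = ddi_dma_buf_bind_handle(*handlep, bp,
            DDI_DMA_READ | DDI_DMA_STREAMING, DDI_DMA_SLEEP, NULL,
            cookiep, ccountp);
        if (ret != DDI_DMA_MAPPED) {
            ddi_dma_free_handle(handlep);
            return (ret);
        }
        return (DDI_DMA_MAPPED);
    }
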
