Lines Matching refs:softsp

118 iommu_init(struct sbus_soft_state *softsp, caddr_t address)  in iommu_init()  argument
138 softsp->iommu_ctrl_reg = REG_ADDR(address, OFF_IOMMU_CTRL_REG); in iommu_init()
139 softsp->tsb_base_addr = REG_ADDR(address, OFF_TSB_BASE_ADDR); in iommu_init()
140 softsp->iommu_flush_reg = REG_ADDR(address, OFF_IOMMU_FLUSH_REG); in iommu_init()
141 softsp->iommu_tlb_tag = REG_ADDR(address, OFF_IOMMU_TLB_TAG); in iommu_init()
142 softsp->iommu_tlb_data = REG_ADDR(address, OFF_IOMMU_TLB_DATA); in iommu_init()
146 mutex_init(&softsp->dma_pool_lock, NULL, MUTEX_DEFAULT, NULL); in iommu_init()
147 mutex_init(&softsp->intr_poll_list_lock, NULL, MUTEX_DEFAULT, NULL); in iommu_init()
150 if ((softsp->iommu_tsb_cookie = iommu_tsb_alloc(softsp->upa_id)) == in iommu_init()
153 ddi_driver_name(softsp->dip), in iommu_init()
154 ddi_get_instance(softsp->dip)); in iommu_init()
157 softsp->soft_tsb_base_addr = in iommu_init()
158 iommu_tsb_cookie_to_va(softsp->iommu_tsb_cookie); in iommu_init()
159 softsp->iommu_dvma_size = in iommu_init()
160 iommu_tsb_cookie_to_size(softsp->iommu_tsb_cookie) << in iommu_init()
162 softsp->iommu_dvma_base = (ioaddr_t) in iommu_init()
163 (0 - (ioaddr_t)softsp->iommu_dvma_size); in iommu_init()
166 ddi_driver_name(softsp->dip), ddi_get_instance(softsp->dip)); in iommu_init()
171 softsp->dvma_arena = vmem_create(name, in iommu_init()
172 (void *)(uintptr_t)softsp->iommu_dvma_base, in iommu_init()
173 softsp->iommu_dvma_size, PAGESIZE, NULL, NULL, NULL, in iommu_init()
177 softsp->dma_reserve = iommu_btop(softsp->iommu_dvma_size >> 1); in iommu_init()
180 mutex_init(&softsp->iomemlock, NULL, MUTEX_DEFAULT, NULL); in iommu_init()
181 softsp->iomem = (struct io_mem_list *)0; in iommu_init()
193 (void) iommu_resume_init(softsp); in iommu_init()
196 if (*softsp->tsb_base_addr != in iommu_init()
197 va_to_pa((caddr_t)softsp->soft_tsb_base_addr)) { in iommu_init()
198 iommu_tsb_free(softsp->iommu_tsb_cookie); in iommu_init()
202 softsp->sbus_io_lo_pfn = UINT32_MAX; in iommu_init()
203 softsp->sbus_io_hi_pfn = 0; in iommu_init()
204 for (i = 0; i < sysio_pd_getnrng(softsp->dip); i++) { in iommu_init()
209 rangep = sysio_pd_getrng(softsp->dip, i); in iommu_init()
216 softsp->sbus_io_lo_pfn = (lopfn < softsp->sbus_io_lo_pfn) ? in iommu_init()
217 lopfn : softsp->sbus_io_lo_pfn; in iommu_init()
219 softsp->sbus_io_hi_pfn = (hipfn > softsp->sbus_io_hi_pfn) ? in iommu_init()
220 hipfn : softsp->sbus_io_hi_pfn; in iommu_init()
225 (void *)softsp->iommu_ctrl_reg, (void *)softsp->tsb_base_addr, in iommu_init()
226 (void *)softsp->iommu_flush_reg, in iommu_init()
227 (void *)softsp->soft_tsb_base_addr)); in iommu_init()
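
The iommu_init() references above boil down to two setup steps: per-instance locks, and a page-granular vmem arena that hands out DVMA addresses from the top of the 32-bit I/O address space. A minimal sketch of that pattern, assuming a hypothetical demo_soft_state in place of struct sbus_soft_state and using only the standard mutex/vmem calls visible in the listing:

    #include <sys/types.h>
    #include <sys/param.h>
    #include <sys/ksynch.h>
    #include <sys/vmem.h>
    #include <sys/ddi.h>
    #include <sys/sunddi.h>

    /*
     * Hypothetical, trimmed-down soft state: just the fields needed to show
     * the lock and DVMA-arena setup seen in the iommu_init() lines above.
     */
    struct demo_soft_state {
        kmutex_t    dma_pool_lock;
        vmem_t      *dvma_arena;
        uint32_t    iommu_dvma_base;
        size_t      iommu_dvma_size;    /* assumed already derived from the TSB size */
    };

    static int
    demo_iommu_init(struct demo_soft_state *softsp, const char *arena_name)
    {
        /* Per-instance locks are created before any DVMA activity. */
        mutex_init(&softsp->dma_pool_lock, NULL, MUTEX_DEFAULT, NULL);

        /*
         * The DVMA window sits at the top of the 32-bit I/O address space,
         * so its base is simply 0 minus its size (mod 2^32).
         */
        softsp->iommu_dvma_base = (uint32_t)(0 - softsp->iommu_dvma_size);

        /* Page-granular arena from which DVMA addresses are handed out. */
        softsp->dvma_arena = vmem_create(arena_name,
            (void *)(uintptr_t)softsp->iommu_dvma_base,
            softsp->iommu_dvma_size, PAGESIZE,
            NULL, NULL, NULL, 0, VM_SLEEP);

        return (softsp->dvma_arena == NULL ? DDI_FAILURE : DDI_SUCCESS);
    }
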
238 iommu_uninit(struct sbus_soft_state *softsp) in iommu_uninit() argument
240 vmem_destroy(softsp->dvma_arena); in iommu_uninit()
243 *softsp->iommu_ctrl_reg &= in iommu_uninit()
246 iommu_tsb_free(softsp->iommu_tsb_cookie); in iommu_uninit()
256 iommu_resume_init(struct sbus_soft_state *softsp) in iommu_resume_init() argument
265 *softsp->tsb_base_addr = va_to_pa((caddr_t)softsp->soft_tsb_base_addr); in iommu_resume_init()
275 tsb_bytes = iommu_tsb_cookie_to_size(softsp->iommu_tsb_cookie); in iommu_resume_init()
286 *softsp->iommu_ctrl_reg = (uint64_t)(tsb_size << TSB_SIZE_SHIFT in iommu_resume_init()
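
The iommu_resume_init() references show the hardware being (re)pointed at the in-memory TSB and translation being re-enabled. A hedged sketch of that register programming, with DEMO_* placeholder constants standing in for the real TSB_SIZE_SHIFT and enable encodings from the platform headers:

    #include <sys/types.h>
    #include <sys/machsystm.h>      /* va_to_pa() on sun4u */

    /* Placeholder values; the real encodings come from the sun4u iommu header. */
    #define DEMO_TSB_SIZE_SHIFT     16
    #define DEMO_IOMMU_ENABLE       0x1ULL

    static void
    demo_iommu_resume(volatile uint64_t *tsb_base_reg,
        volatile uint64_t *ctrl_reg, caddr_t tsb_va, uint64_t tsb_size_code)
    {
        /* The hardware wants the physical address of the software TSB. */
        *tsb_base_reg = va_to_pa(tsb_va);

        /* Re-enable translation with the encoded TSB size. */
        *ctrl_reg = (tsb_size_code << DEMO_TSB_SIZE_SHIFT) | DEMO_IOMMU_ENABLE;
    }
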
293 iommu_tlb_flush(struct sbus_soft_state *softsp, ioaddr_t addr, pgcnt_t npages) in iommu_tlb_flush() argument
301 *softsp->iommu_flush_reg = (uint64_t)addr; in iommu_tlb_flush()
302 tmpreg = *softsp->sbus_ctrl_reg; in iommu_tlb_flush()
307 for (i = 0, vaddr_reg = softsp->iommu_tlb_tag, in iommu_tlb_flush()
308 valid_bit_reg = softsp->iommu_tlb_data; in iommu_tlb_flush()
327 *softsp->iommu_flush_reg = (uint64_t)ioaddr; in iommu_tlb_flush()
334 tmpreg = *softsp->sbus_ctrl_reg; in iommu_tlb_flush()
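
iommu_tlb_flush() shows a common posted-write idiom: store the DVMA address into the flush register, then read back another register on the same device so the store is known to have reached the hardware. A sketch of just that idiom, with the two register pointers passed in rather than taken from softsp:

    #include <sys/types.h>

    static void
    demo_tlb_flush(volatile uint64_t *flush_reg, volatile uint64_t *ctrl_reg,
        uint32_t ioaddr)
    {
        volatile uint64_t tmpreg;

        /* Writing a DVMA page address evicts its entry from the IOMMU TLB. */
        *flush_reg = (uint64_t)ioaddr;

        /*
         * The store may be posted; a read from the same device (the SBus
         * control register in the listing above) forces it to complete.
         */
        tmpreg = *ctrl_reg;
        (void) tmpreg;
    }
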
347 softsp->iommu_dvma_base)))
366 struct sbus_soft_state *softsp = mppriv->softsp; in iommu_remove_mappings() local
372 ASSERT(softsp != NULL); in iommu_remove_mappings()
381 mutex_enter(&softsp->iomemlock); in iommu_remove_mappings()
382 prevp = &softsp->iomem; in iommu_remove_mappings()
383 walk = softsp->iomem; in iommu_remove_mappings()
394 mutex_exit(&softsp->iomemlock); in iommu_remove_mappings()
400 iotte_ptr = IOTTE_NDX(ioaddr, softsp->soft_tsb_base_addr); in iommu_remove_mappings()
407 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_remove_mappings()
428 struct sbus_soft_state *softsp = mppriv->softsp; in iommu_create_vaddr_mappings() local
435 ASSERT(softsp != NULL); in iommu_create_vaddr_mappings()
443 iotte_ptr = IOTTE_NDX(ioaddr, softsp->soft_tsb_base_addr); in iommu_create_vaddr_mappings()
459 } else if (softsp->stream_buf_off) { in iommu_create_vaddr_mappings()
480 iommu_tlb_flush(softsp, ioaddr, npages); in iommu_create_vaddr_mappings()
504 if (IS_INTRA_SBUS(softsp, pfn)) { in iommu_create_vaddr_mappings()
536 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_create_vaddr_mappings()
551 mutex_enter(&softsp->iomemlock); in iommu_create_vaddr_mappings()
552 iomemp->next = softsp->iomem; in iommu_create_vaddr_mappings()
553 softsp->iomem = iomemp; in iommu_create_vaddr_mappings()
554 mutex_exit(&softsp->iomemlock); in iommu_create_vaddr_mappings()
578 struct sbus_soft_state *softsp = mppriv->softsp; in iommu_create_pp_mappings() local
589 ASSERT(softsp != NULL); in iommu_create_pp_mappings()
594 iotte_ptr = IOTTE_NDX(ioaddr, softsp->soft_tsb_base_addr); in iommu_create_pp_mappings()
605 } else if (softsp->stream_buf_off) { in iommu_create_pp_mappings()
626 iommu_tlb_flush(softsp, ioaddr, npages); in iommu_create_pp_mappings()
647 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_create_pp_mappings()
664 mutex_enter(&softsp->iomemlock); in iommu_create_pp_mappings()
665 iomemp->next = softsp->iomem; in iommu_create_pp_mappings()
666 softsp->iomem = iomemp; in iommu_create_pp_mappings()
667 mutex_exit(&softsp->iomemlock); in iommu_create_pp_mappings()
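
Several of the mapping and unload paths above maintain softsp->iomem, a mutex-protected singly linked list recording what is currently mapped. The head-insert and unlink steps they share look roughly like this, with demo_* types standing in for struct io_mem_list and the relevant soft-state fields:

    #include <sys/types.h>
    #include <sys/ksynch.h>
    #include <sys/kmem.h>

    struct demo_io_mem {
        struct demo_io_mem *next;
        uint32_t    ioaddr;
        pgcnt_t     npages;
    };

    struct demo_state {
        kmutex_t    iomemlock;
        struct demo_io_mem *iomem;
    };

    /* Head-insert a tracking record, as the mapping routines do. */
    static void
    demo_track_add(struct demo_state *sp, uint32_t ioaddr, pgcnt_t npages)
    {
        struct demo_io_mem *iomemp;

        iomemp = kmem_zalloc(sizeof (*iomemp), KM_SLEEP);
        iomemp->ioaddr = ioaddr;
        iomemp->npages = npages;

        mutex_enter(&sp->iomemlock);
        iomemp->next = sp->iomem;
        sp->iomem = iomemp;
        mutex_exit(&sp->iomemlock);
    }

    /* Unlink and free the record for ioaddr, as the unload/remove paths do. */
    static void
    demo_track_remove(struct demo_state *sp, uint32_t ioaddr)
    {
        struct demo_io_mem **prevp, *walk;

        mutex_enter(&sp->iomemlock);
        prevp = &sp->iomem;
        walk = sp->iomem;
        while (walk != NULL) {
            if (walk->ioaddr == ioaddr) {
                *prevp = walk->next;
                kmem_free(walk, sizeof (*walk));
                break;
            }
            prevp = &walk->next;
            walk = walk->next;
        }
        mutex_exit(&sp->iomemlock);
    }
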
676 struct sbus_soft_state *softsp, uint_t *burstsizep, uint_t burstsize64, in iommu_dma_lim_setup() argument
686 *burstsizep &= softsp->sbus_burst_sizes; in iommu_dma_lim_setup()
694 if (!softsp->sbus64_burst_sizes || (ddi_get_parent(rdip) != dip)) { in iommu_dma_lim_setup()
699 *burstsizep &= softsp->sbus_burst_sizes; in iommu_dma_lim_setup()
705 *burstsizep &= softsp->sbus_burst_sizes; in iommu_dma_lim_setup()
712 *burstsizep &= (softsp->sbus64_burst_sizes | in iommu_dma_lim_setup()
713 softsp->sbus_burst_sizes); in iommu_dma_lim_setup()
719 *burstsizep &= (softsp->sbus64_burst_sizes >> in iommu_dma_lim_setup()
728 (1 << (ddi_ffs(softsp->sbus64_burst_sizes) - 1))); in iommu_dma_lim_setup()
742 struct sbus_soft_state *softsp = (struct sbus_soft_state *) in iommu_dma_allochdl() local
748 (void) iommu_dma_lim_setup(dip, rdip, softsp, in iommu_dma_allochdl()
764 (addrhigh < (ioaddr_t)softsp->iommu_dvma_base)) { in iommu_dma_allochdl()
776 &softsp->dvma_call_list_id); in iommu_dma_allochdl()
797 mppriv->softsp = softsp; in iommu_dma_allochdl()
809 struct sbus_soft_state *softsp = mppriv->softsp; in iommu_dma_freehdl() local
810 ASSERT(softsp != NULL); in iommu_dma_freehdl()
814 if (softsp->dvma_call_list_id != 0) { in iommu_dma_freehdl()
815 ddi_run_callback(&softsp->dvma_call_list_id); in iommu_dma_freehdl()
862 struct sbus_soft_state *softsp; in iommu_dma_bindhdl() local
921 softsp = mppriv->softsp; in iommu_dma_bindhdl()
922 ASSERT(softsp != NULL); in iommu_dma_bindhdl()
955 if (npages >= iommu_btop(softsp->iommu_dvma_size) - in iommu_dma_bindhdl()
970 ioaddr = (ioaddr_t)(uintptr_t)vmem_alloc(softsp->dvma_arena, in iommu_dma_bindhdl()
993 softsp->soft_tsb_base_addr); in iommu_dma_bindhdl()
998 } else if (softsp->stream_buf_off) in iommu_dma_bindhdl()
1008 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_dma_bindhdl()
1037 mutex_enter(&softsp->iomemlock); in iommu_dma_bindhdl()
1038 iomemp->next = softsp->iomem; in iommu_dma_bindhdl()
1039 softsp->iomem = iomemp; in iommu_dma_bindhdl()
1040 mutex_exit(&softsp->iomemlock); in iommu_dma_bindhdl()
1046 ioaddr = (ioaddr_t)(uintptr_t)vmem_xalloc(softsp->dvma_arena, in iommu_dma_bindhdl()
1065 ASSERT(mp->dmai_mapping >= softsp->iommu_dvma_base); in iommu_dma_bindhdl()
1104 vmem_free(softsp->dvma_arena, (void *)(uintptr_t)ioaddr, in iommu_dma_bindhdl()
1107 vmem_xfree(softsp->dvma_arena, (void *)(uintptr_t)ioaddr, in iommu_dma_bindhdl()
1115 dmareq->dmar_arg, &softsp->dvma_call_list_id); in iommu_dma_bindhdl()
1131 struct sbus_soft_state *softsp = mppriv->softsp; in iommu_dma_unbindhdl() local
1132 ASSERT(softsp != NULL); in iommu_dma_unbindhdl()
1144 sync_stream_buf(softsp, addr, npages, (int *)&mppriv->sync_flag, in iommu_dma_unbindhdl()
1157 vmem_free(softsp->dvma_arena, (void *)(uintptr_t)addr, size); in iommu_dma_unbindhdl()
1159 vmem_xfree(softsp->dvma_arena, (void *)(uintptr_t)addr, size); in iommu_dma_unbindhdl()
1165 if (softsp->dvma_call_list_id != 0) in iommu_dma_unbindhdl()
1166 ddi_run_callback(&softsp->dvma_call_list_id); in iommu_dma_unbindhdl()
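
iommu_dma_bindhdl() and iommu_dma_unbindhdl() show the arena in use: unconstrained requests go through vmem_alloc()/vmem_free(), alignment- or boundary-constrained ones through vmem_xalloc()/vmem_xfree(), and every free kicks the DDI callback list so binds that were queued for space can retry. A sketch of that allocate/free symmetry, with the constraints passed explicitly instead of being derived from the DMA attributes:

    #include <sys/types.h>
    #include <sys/param.h>
    #include <sys/sysmacros.h>
    #include <sys/vmem.h>
    #include <sys/sunddi.h>

    /*
     * Illustrative only: "arena", "align" and "seg_boundary" stand in for
     * softsp->dvma_arena and the attribute-derived constraints computed in
     * iommu_dma_bindhdl() above.
     */
    static uint32_t
    demo_dvma_alloc(vmem_t *arena, pgcnt_t npages, size_t align,
        size_t seg_boundary, int cansleep)
    {
        void *va;
        int vmflag = cansleep ? VM_SLEEP : VM_NOSLEEP;

        if (align <= PAGESIZE && seg_boundary == 0) {
            /* Unconstrained request: the cheap path. */
            va = vmem_alloc(arena, ptob(npages), vmflag);
        } else {
            /* Alignment- or boundary-constrained request. */
            va = vmem_xalloc(arena, ptob(npages), MAX(align, PAGESIZE), 0,
                seg_boundary, NULL, NULL, vmflag);
        }

        /* DVMA addresses are 32-bit; NULL maps to 0, meaning failure. */
        return ((uint32_t)(uintptr_t)va);
    }

    /* Freeing must mirror the allocation; xalloc'ed space goes back via xfree. */
    static void
    demo_dvma_free(vmem_t *arena, uint32_t ioaddr, pgcnt_t npages,
        boolean_t constrained, uintptr_t *call_list_id)
    {
        if (constrained)
            vmem_xfree(arena, (void *)(uintptr_t)ioaddr, ptob(npages));
        else
            vmem_free(arena, (void *)(uintptr_t)ioaddr, ptob(npages));

        /* Space came back: run any callbacks queued with ddi_set_callback(). */
        if (*call_list_id != 0)
            ddi_run_callback(call_list_id);
    }
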
1181 sync_stream_buf(mppriv->softsp, mp->dmai_mapping, in iommu_dma_flush()
1296 sync_stream_buf(mppriv->softsp, mp->dmai_mapping, in iommu_map_window()
1322 return (iommu_dma_lim_setup(dip, rdip, mppriv->softsp, in iommu_dma_mctl()
1336 struct sbus_soft_state *softsp = in iommu_dma_mctl() local
1347 if ((AHI <= ALO) || (AHI < softsp->iommu_dvma_base)) { in iommu_dma_mctl()
1354 mutex_enter(&softsp->dma_pool_lock); in iommu_dma_mctl()
1355 if (np > softsp->dma_reserve) { in iommu_dma_mctl()
1356 mutex_exit(&softsp->dma_pool_lock); in iommu_dma_mctl()
1362 softsp->dma_reserve -= np; in iommu_dma_mctl()
1363 mutex_exit(&softsp->dma_pool_lock); in iommu_dma_mctl()
1370 ioaddr = (ioaddr_t)(uintptr_t)vmem_xalloc(softsp->dvma_arena, in iommu_dma_mctl()
1377 mutex_enter(&softsp->dma_pool_lock); in iommu_dma_mctl()
1378 softsp->dma_reserve += np; in iommu_dma_mctl()
1379 mutex_exit(&softsp->dma_pool_lock); in iommu_dma_mctl()
1413 iommu_fast_dvma->softsp = (caddr_t)softsp; in iommu_dma_mctl()
1433 struct sbus_soft_state *softsp = (struct sbus_soft_state *) in iommu_dma_mctl() local
1434 iommu_fast_dvma->softsp; in iommu_dma_mctl()
1436 ASSERT(softsp != NULL); in iommu_dma_mctl()
1439 iotte_ptr = IOTTE_NDX(ioaddr, softsp->soft_tsb_base_addr); in iommu_dma_mctl()
1443 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_dma_mctl()
1451 mutex_enter(&softsp->dma_pool_lock); in iommu_dma_mctl()
1452 softsp->dma_reserve += np; in iommu_dma_mctl()
1453 mutex_exit(&softsp->dma_pool_lock); in iommu_dma_mctl()
1456 vmem_free(softsp->dvma_arena, in iommu_dma_mctl()
1459 vmem_xfree(softsp->dvma_arena, in iommu_dma_mctl()
1477 if (softsp->dvma_call_list_id != 0) in iommu_dma_mctl()
1478 ddi_run_callback(&softsp->dvma_call_list_id); in iommu_dma_mctl()
1509 struct sbus_soft_state *softsp = (struct sbus_soft_state *) in iommu_dvma_kaddr_load() local
1510 iommu_fast_dvma->softsp; in iommu_dvma_kaddr_load()
1516 ASSERT(softsp != NULL); in iommu_dvma_kaddr_load()
1539 iotte_ptr = IOTTE_NDX(ioaddr, softsp->soft_tsb_base_addr); in iommu_dvma_kaddr_load()
1545 else if (!softsp->stream_buf_off) in iommu_dvma_kaddr_load()
1559 iommu_tlb_flush(softsp, ioaddr, 1); in iommu_dvma_kaddr_load()
1578 mutex_enter(&softsp->iomemlock); in iommu_dvma_kaddr_load()
1579 iomemp->next = softsp->iomem; in iommu_dvma_kaddr_load()
1580 softsp->iomem = iomemp; in iommu_dvma_kaddr_load()
1581 mutex_exit(&softsp->iomemlock); in iommu_dvma_kaddr_load()
1594 struct sbus_soft_state *softsp = (struct sbus_soft_state *) in iommu_dvma_unload() local
1595 iommu_fast_dvma->softsp; in iommu_dvma_unload()
1600 ASSERT(softsp != NULL); in iommu_dvma_unload()
1606 mutex_enter(&softsp->iomemlock); in iommu_dvma_unload()
1607 prevp = &softsp->iomem; in iommu_dvma_unload()
1608 walk = softsp->iomem; in iommu_dvma_unload()
1618 mutex_exit(&softsp->iomemlock); in iommu_dvma_unload()
1631 sync_stream_buf(softsp, ioaddr, npages, in iommu_dvma_unload()
1646 struct sbus_soft_state *softsp = (struct sbus_soft_state *) in iommu_dvma_sync() local
1647 iommu_fast_dvma->softsp; in iommu_dvma_sync()
1652 ASSERT(softsp != NULL); in iommu_dvma_sync()
1661 sync_stream_buf(softsp, ioaddr, npages, in iommu_dvma_sync()