Lines matching full:cfg
236 static inline bool arm_lpae_concat_mandatory(struct io_pgtable_cfg *cfg, in arm_lpae_concat_mandatory() argument
239 unsigned int ias = cfg->ias; in arm_lpae_concat_mandatory()
240 unsigned int oas = cfg->oas; in arm_lpae_concat_mandatory()
261 struct io_pgtable_cfg *cfg, in __arm_lpae_alloc_pages() argument
264 struct device *dev = cfg->iommu_dev; in __arm_lpae_alloc_pages()
274 if (cfg->alloc) in __arm_lpae_alloc_pages()
275 pages = cfg->alloc(cookie, alloc_size, gfp); in __arm_lpae_alloc_pages()
283 if (!cfg->coherent_walk) { in __arm_lpae_alloc_pages()
303 if (cfg->free) in __arm_lpae_alloc_pages()
304 cfg->free(cookie, pages, size); in __arm_lpae_alloc_pages()
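
These fragments appear to come from the Arm LPAE io-pgtable code (drivers/iommu/io-pgtable-arm.c in the Linux kernel). Below is a minimal sketch, not the kernel's actual implementation, of the allocation pattern the __arm_lpae_alloc_pages() lines above suggest: prefer the caller-supplied cfg->alloc()/cfg->free() hooks, and DMA-map the table memory when the page-table walker is not cache-coherent with the CPU. The fallback allocator and error path are simplified stand-ins.

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/io-pgtable.h>
#include <linux/mm.h>

static void *example_alloc_walk_mem(size_t size, gfp_t gfp,
				    struct io_pgtable_cfg *cfg, void *cookie)
{
	struct device *dev = cfg->iommu_dev;
	dma_addr_t dma;
	void *pages;

	if (cfg->alloc)	/* custom allocator supplied by the IOMMU driver */
		pages = cfg->alloc(cookie, size, gfp);
	else		/* stand-in fallback; the real code differs here */
		pages = (void *)__get_free_pages(gfp | __GFP_ZERO, get_order(size));

	if (!pages)
		return NULL;

	if (!cfg->coherent_walk) {
		/* Make the zeroed table visible to the walker before use. */
		dma = dma_map_single(dev, pages, size, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma))
			goto out_free;
	}

	return pages;

out_free:
	if (cfg->free)
		cfg->free(cookie, pages, size);
	else
		free_pages((unsigned long)pages, get_order(size));
	return NULL;
}
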
312 struct io_pgtable_cfg *cfg, in __arm_lpae_free_pages() argument
315 if (!cfg->coherent_walk) in __arm_lpae_free_pages()
316 dma_unmap_single(cfg->iommu_dev, __arm_lpae_dma_addr(pages), in __arm_lpae_free_pages()
319 if (cfg->free) in __arm_lpae_free_pages()
320 cfg->free(cookie, pages, size); in __arm_lpae_free_pages()
326 struct io_pgtable_cfg *cfg) in __arm_lpae_sync_pte() argument
328 dma_sync_single_for_device(cfg->iommu_dev, __arm_lpae_dma_addr(ptep), in __arm_lpae_sync_pte()
332 static void __arm_lpae_clear_pte(arm_lpae_iopte *ptep, struct io_pgtable_cfg *cfg, int num_entries) in __arm_lpae_clear_pte() argument
337 if (!cfg->coherent_walk && num_entries) in __arm_lpae_clear_pte()
338 __arm_lpae_sync_pte(ptep, num_entries, cfg); in __arm_lpae_clear_pte()
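
A minimal sketch of the clear-and-sync pattern in the __arm_lpae_sync_pte()/__arm_lpae_clear_pte() fragments above: zap the PTEs in memory, then, for a non-coherent walker, push the cache lines out so the hardware observes the cleared entries. arm_lpae_iopte and __arm_lpae_dma_addr() are the file's own type and helper; the per-entry loop is collapsed into a memset() here for brevity.

#include <linux/dma-mapping.h>
#include <linux/string.h>

static void example_clear_ptes(arm_lpae_iopte *ptep, int num_entries,
			       struct io_pgtable_cfg *cfg)
{
	memset(ptep, 0, sizeof(*ptep) * num_entries);

	if (!cfg->coherent_walk && num_entries)
		dma_sync_single_for_device(cfg->iommu_dev,
					   __arm_lpae_dma_addr(ptep),
					   sizeof(*ptep) * num_entries,
					   DMA_TO_DEVICE);
}
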
351 struct io_pgtable_cfg *cfg = &data->iop.cfg; in __arm_lpae_init_pte() local
363 if (!cfg->coherent_walk) in __arm_lpae_init_pte()
364 __arm_lpae_sync_pte(ptep, num_entries, cfg); in __arm_lpae_init_pte()
377 WARN_ON(!(data->iop.cfg.quirks & IO_PGTABLE_QUIRK_NO_WARN)); in arm_lpae_init_pte()
405 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_lpae_install_table() local
408 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_NS) in arm_lpae_install_table()
420 if (cfg->coherent_walk || (old & ARM_LPAE_PTE_SW_SYNC)) in arm_lpae_install_table()
424 __arm_lpae_sync_pte(ptep, 1, cfg); in arm_lpae_install_table()
439 struct io_pgtable_cfg *cfg = &data->iop.cfg; in __arm_lpae_map() local
464 cptep = __arm_lpae_alloc_pages(tblsz, gfp, cfg, data->iop.cookie); in __arm_lpae_map()
470 __arm_lpae_free_pages(cptep, tblsz, cfg, data->iop.cookie); in __arm_lpae_map()
471 } else if (!cfg->coherent_walk && !(pte & ARM_LPAE_PTE_SW_SYNC)) { in __arm_lpae_map()
472 __arm_lpae_sync_pte(ptep, 1, cfg); in __arm_lpae_map()
479 WARN_ON(!(cfg->quirks & IO_PGTABLE_QUIRK_NO_WARN)); in __arm_lpae_map()
498 else if (data->iop.cfg.quirks & IO_PGTABLE_QUIRK_ARM_HD) in arm_lpae_prot_to_pte()
519 if (data->iop.cfg.quirks & IO_PGTABLE_QUIRK_ARM_S2FWB) in arm_lpae_prot_to_pte()
549 if (data->iop.cfg.quirks & IO_PGTABLE_QUIRK_ARM_NS) in arm_lpae_prot_to_pte()
563 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_lpae_map_pages() local
567 long iaext = (s64)iova >> cfg->ias; in arm_lpae_map_pages()
569 if (WARN_ON(!pgsize || (pgsize & cfg->pgsize_bitmap) != pgsize)) in arm_lpae_map_pages()
572 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1) in arm_lpae_map_pages()
574 if (WARN_ON(iaext || paddr >> cfg->oas)) in arm_lpae_map_pages()
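
The arm_lpae_map_pages() lines above reject page sizes not present in cfg->pgsize_bitmap, IOVAs beyond cfg->ias (sign-extended when the TTBR1 quirk is set) and physical addresses beyond cfg->oas. A hedged caller-side sketch using the io_pgtable_ops map_pages() callback; names other than the ops callback and prot flags are illustrative.

#include <linux/gfp.h>
#include <linux/iommu.h>
#include <linux/io-pgtable.h>

/*
 * Sketch: map a physically contiguous region. pgsize must be a single
 * bit taken from cfg->pgsize_bitmap or the WARN_ON() above fires; on
 * error, *mapped reports how many bytes were installed so the caller
 * can unwind.
 */
static int example_map_region(struct io_pgtable_ops *ops, unsigned long iova,
			      phys_addr_t paddr, size_t size, size_t pgsize)
{
	size_t mapped = 0;

	return ops->map_pages(ops, iova, paddr, pgsize, size / pgsize,
			      IOMMU_READ | IOMMU_WRITE, GFP_KERNEL, &mapped);
}
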
620 __arm_lpae_free_pages(start, table_size, &data->iop.cfg, data->iop.cookie); in __arm_lpae_free_pgtable()
648 WARN_ON(!(data->iop.cfg.quirks & IO_PGTABLE_QUIRK_NO_WARN)); in __arm_lpae_unmap()
661 WARN_ON(!(data->iop.cfg.quirks & IO_PGTABLE_QUIRK_NO_WARN)); in __arm_lpae_unmap()
666 __arm_lpae_clear_pte(&ptep[i], &iop->cfg, 1); in __arm_lpae_unmap()
676 __arm_lpae_clear_pte(ptep, &iop->cfg, i); in __arm_lpae_unmap()
698 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_lpae_unmap_pages() local
700 long iaext = (s64)iova >> cfg->ias; in arm_lpae_unmap_pages()
702 if (WARN_ON(!pgsize || (pgsize & cfg->pgsize_bitmap) != pgsize || !pgcount)) in arm_lpae_unmap_pages()
705 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1) in arm_lpae_unmap_pages()
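
The unmap side performs the same pgsize and address checks. A matching caller-side sketch; the selftest fragments further down pass a NULL iommu_iotlb_gather, while real drivers typically pass a gather structure so TLB invalidation can be batched.

#include <linux/errno.h>
#include <linux/io-pgtable.h>

/* Sketch: unmap_pages() returns the number of bytes actually unmapped. */
static int example_unmap_region(struct io_pgtable_ops *ops, unsigned long iova,
				size_t size, size_t pgsize)
{
	size_t unmapped;

	unmapped = ops->unmap_pages(ops, iova, pgsize, size / pgsize, NULL);

	return unmapped == size ? 0 : -EINVAL;
}
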
860 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_lpae_read_and_clear_dirty() local
874 if (WARN_ON((iova + size - 1) & ~(BIT(cfg->ias) - 1))) in arm_lpae_read_and_clear_dirty()
882 static void arm_lpae_restrict_pgsizes(struct io_pgtable_cfg *cfg) in arm_lpae_restrict_pgsizes() argument
894 if (cfg->pgsize_bitmap & PAGE_SIZE) in arm_lpae_restrict_pgsizes()
896 else if (cfg->pgsize_bitmap & ~PAGE_MASK) in arm_lpae_restrict_pgsizes()
897 granule = 1UL << __fls(cfg->pgsize_bitmap & ~PAGE_MASK); in arm_lpae_restrict_pgsizes()
898 else if (cfg->pgsize_bitmap & PAGE_MASK) in arm_lpae_restrict_pgsizes()
899 granule = 1UL << __ffs(cfg->pgsize_bitmap & PAGE_MASK); in arm_lpae_restrict_pgsizes()
913 if (cfg->oas > 48) in arm_lpae_restrict_pgsizes()
920 cfg->pgsize_bitmap &= page_sizes; in arm_lpae_restrict_pgsizes()
921 cfg->ias = min(cfg->ias, max_addr_bits); in arm_lpae_restrict_pgsizes()
922 cfg->oas = min(cfg->oas, max_addr_bits); in arm_lpae_restrict_pgsizes()
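
A hedged sketch of the granule selection the arm_lpae_restrict_pgsizes() lines above perform, assuming the standard VMSAv8-64 block sizes per granule; the 52-bit/64K special case at oas > 48 is left out.

#include <linux/bitops.h>
#include <linux/mm.h>
#include <linux/sizes.h>

/*
 * Sketch only: pick one translation granule from a caller-supplied
 * pgsize_bitmap, preferring the CPU page size, then keep only the page
 * and block sizes that granule can actually express.
 */
static unsigned long example_restrict_pgsizes(unsigned long pgsize_bitmap)
{
	unsigned long granule, allowed;

	if (pgsize_bitmap & PAGE_SIZE)		/* match the CPU page size */
		granule = PAGE_SIZE;
	else if (pgsize_bitmap & ~PAGE_MASK)	/* else the largest size below it */
		granule = 1UL << __fls(pgsize_bitmap & ~PAGE_MASK);
	else					/* else the smallest size above it */
		granule = 1UL << __ffs(pgsize_bitmap & PAGE_MASK);

	switch (granule) {
	case SZ_4K:
		allowed = SZ_4K | SZ_2M | SZ_1G;
		break;
	case SZ_16K:
		allowed = SZ_16K | SZ_32M;
		break;
	case SZ_64K:
		allowed = SZ_64K | SZ_512M;
		break;
	default:
		allowed = 0;
	}

	return pgsize_bitmap & allowed;
}

With PAGE_SIZE == SZ_4K, a caller offering SZ_4K | SZ_64K | SZ_2M therefore ends up with SZ_4K | SZ_2M, and cfg->ias/cfg->oas are clamped to the 48-bit limit seen above.
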
926 arm_lpae_alloc_pgtable(struct io_pgtable_cfg *cfg) in arm_lpae_alloc_pgtable() argument
931 arm_lpae_restrict_pgsizes(cfg); in arm_lpae_alloc_pgtable()
933 if (!(cfg->pgsize_bitmap & (SZ_4K | SZ_16K | SZ_64K))) in arm_lpae_alloc_pgtable()
936 if (cfg->ias > ARM_LPAE_MAX_ADDR_BITS) in arm_lpae_alloc_pgtable()
939 if (cfg->oas > ARM_LPAE_MAX_ADDR_BITS) in arm_lpae_alloc_pgtable()
946 pg_shift = __ffs(cfg->pgsize_bitmap); in arm_lpae_alloc_pgtable()
949 va_bits = cfg->ias - pg_shift; in arm_lpae_alloc_pgtable()
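
The arm_lpae_alloc_pgtable() lines above compute the walk geometry from the chosen granule. A small worked sketch, assuming 8-byte descriptors; the bits-per-level and level-count expressions are reconstructed, not quoted from the file.

#include <linux/bitops.h>
#include <linux/log2.h>
#include <linux/math.h>
#include <linux/types.h>

/*
 * Worked example: with a 4K granule, pg_shift = 12 and each level
 * resolves 9 bits of VA, so a 48-bit IAS (va_bits = 36) needs a
 * four-level walk.
 */
static unsigned int example_lpae_levels(unsigned long pgsize_bitmap,
					unsigned int ias)
{
	unsigned int pg_shift = __ffs(pgsize_bitmap);		      /* 12 for 4K */
	unsigned int bits_per_level = pg_shift - ilog2(sizeof(u64)); /* 9 */
	unsigned int va_bits = ias - pg_shift;			      /* 36 for ias == 48 */

	return DIV_ROUND_UP(va_bits, bits_per_level);		      /* 4 levels */
}
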
968 arm_64_lpae_alloc_pgtable_s1(struct io_pgtable_cfg *cfg, void *cookie) in arm_64_lpae_alloc_pgtable_s1() argument
972 typeof(&cfg->arm_lpae_s1_cfg.tcr) tcr = &cfg->arm_lpae_s1_cfg.tcr; in arm_64_lpae_alloc_pgtable_s1()
975 if (cfg->quirks & ~(IO_PGTABLE_QUIRK_ARM_NS | in arm_64_lpae_alloc_pgtable_s1()
982 data = arm_lpae_alloc_pgtable(cfg); in arm_64_lpae_alloc_pgtable_s1()
987 if (cfg->coherent_walk) { in arm_64_lpae_alloc_pgtable_s1()
991 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_OUTER_WBWA) in arm_64_lpae_alloc_pgtable_s1()
996 if (!(cfg->quirks & IO_PGTABLE_QUIRK_ARM_OUTER_WBWA)) in arm_64_lpae_alloc_pgtable_s1()
1002 tg1 = cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1; in arm_64_lpae_alloc_pgtable_s1()
1015 switch (cfg->oas) { in arm_64_lpae_alloc_pgtable_s1()
1041 tcr->tsz = 64ULL - cfg->ias; in arm_64_lpae_alloc_pgtable_s1()
1053 cfg->arm_lpae_s1_cfg.mair = reg; in arm_64_lpae_alloc_pgtable_s1()
1057 GFP_KERNEL, cfg, cookie); in arm_64_lpae_alloc_pgtable_s1()
1065 cfg->arm_lpae_s1_cfg.ttbr = virt_to_phys(data->pgd); in arm_64_lpae_alloc_pgtable_s1()
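
Sketch of a caller bringing up a stage-1 LPAE table through the io-pgtable API; the helper name and the chosen values are illustrative only. On success the arm_64_lpae_alloc_pgtable_s1() path above has filled in cfg->arm_lpae_s1_cfg.ttbr, .tcr and .mair, which the driver then programs into its translation hardware.

#include <linux/io-pgtable.h>
#include <linux/sizes.h>

static struct io_pgtable_ops *example_alloc_s1(struct io_pgtable_cfg *cfg,
					       struct device *smmu_dev,
					       const struct iommu_flush_ops *tlb,
					       void *cookie)
{
	*cfg = (struct io_pgtable_cfg) {
		.pgsize_bitmap	= SZ_4K | SZ_2M | SZ_1G,
		.ias		= 48,
		.oas		= 48,
		.coherent_walk	= true,
		.tlb		= tlb,
		.iommu_dev	= smmu_dev,
	};

	return alloc_io_pgtable_ops(ARM_64_LPAE_S1, cfg, cookie);
}

The stage-2 variant (arm_64_lpae_alloc_pgtable_s2(), below) follows the same shape but reports its results in cfg->arm_lpae_s2_cfg.vttbr and .vtcr.
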
1074 arm_64_lpae_alloc_pgtable_s2(struct io_pgtable_cfg *cfg, void *cookie) in arm_64_lpae_alloc_pgtable_s2() argument
1078 typeof(&cfg->arm_lpae_s2_cfg.vtcr) vtcr = &cfg->arm_lpae_s2_cfg.vtcr; in arm_64_lpae_alloc_pgtable_s2()
1080 if (cfg->quirks & ~(IO_PGTABLE_QUIRK_ARM_S2FWB | in arm_64_lpae_alloc_pgtable_s2()
1084 data = arm_lpae_alloc_pgtable(cfg); in arm_64_lpae_alloc_pgtable_s2()
1088 if (arm_lpae_concat_mandatory(cfg, data)) { in arm_64_lpae_alloc_pgtable_s2()
1097 if (cfg->coherent_walk) { in arm_64_lpae_alloc_pgtable_s2()
1122 switch (cfg->oas) { in arm_64_lpae_alloc_pgtable_s2()
1148 vtcr->tsz = 64ULL - cfg->ias; in arm_64_lpae_alloc_pgtable_s2()
1153 GFP_KERNEL, cfg, cookie); in arm_64_lpae_alloc_pgtable_s2()
1161 cfg->arm_lpae_s2_cfg.vttbr = virt_to_phys(data->pgd); in arm_64_lpae_alloc_pgtable_s2()
1170 arm_32_lpae_alloc_pgtable_s1(struct io_pgtable_cfg *cfg, void *cookie) in arm_32_lpae_alloc_pgtable_s1() argument
1172 if (cfg->ias > 32 || cfg->oas > 40) in arm_32_lpae_alloc_pgtable_s1()
1175 cfg->pgsize_bitmap &= (SZ_4K | SZ_2M | SZ_1G); in arm_32_lpae_alloc_pgtable_s1()
1176 return arm_64_lpae_alloc_pgtable_s1(cfg, cookie); in arm_32_lpae_alloc_pgtable_s1()
1180 arm_32_lpae_alloc_pgtable_s2(struct io_pgtable_cfg *cfg, void *cookie) in arm_32_lpae_alloc_pgtable_s2() argument
1182 if (cfg->ias > 40 || cfg->oas > 40) in arm_32_lpae_alloc_pgtable_s2()
1185 cfg->pgsize_bitmap &= (SZ_4K | SZ_2M | SZ_1G); in arm_32_lpae_alloc_pgtable_s2()
1186 return arm_64_lpae_alloc_pgtable_s2(cfg, cookie); in arm_32_lpae_alloc_pgtable_s2()
1190 arm_mali_lpae_alloc_pgtable(struct io_pgtable_cfg *cfg, void *cookie) in arm_mali_lpae_alloc_pgtable() argument
1195 if (cfg->quirks) in arm_mali_lpae_alloc_pgtable()
1198 if (cfg->ias > 48 || cfg->oas > 40) in arm_mali_lpae_alloc_pgtable()
1201 cfg->pgsize_bitmap &= (SZ_4K | SZ_2M | SZ_1G); in arm_mali_lpae_alloc_pgtable()
1203 data = arm_lpae_alloc_pgtable(cfg); in arm_mali_lpae_alloc_pgtable()
1219 cfg->arm_mali_lpae_cfg.memattr = in arm_mali_lpae_alloc_pgtable()
1228 cfg, cookie); in arm_mali_lpae_alloc_pgtable()
1235 cfg->arm_mali_lpae_cfg.transtab = virt_to_phys(data->pgd) | in arm_mali_lpae_alloc_pgtable()
1238 if (cfg->coherent_walk) in arm_mali_lpae_alloc_pgtable()
1239 cfg->arm_mali_lpae_cfg.transtab |= ARM_MALI_LPAE_TTBR_SHARE_OUTER; in arm_mali_lpae_alloc_pgtable()
1310 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_lpae_dump_ops() local
1312 pr_err("cfg: pgsize_bitmap 0x%lx, ias %u-bit\n", in arm_lpae_dump_ops()
1313 cfg->pgsize_bitmap, cfg->ias); in arm_lpae_dump_ops()
1325 static int __init arm_lpae_run_tests(struct io_pgtable_cfg *cfg) in arm_lpae_run_tests() argument
1338 cfg_cookie = cfg; in arm_lpae_run_tests()
1339 ops = alloc_io_pgtable_ops(fmts[i], cfg, cfg); in arm_lpae_run_tests()
1362 for_each_set_bit(j, &cfg->pgsize_bitmap, BITS_PER_LONG) { in arm_lpae_run_tests()
1385 for_each_set_bit(j, &cfg->pgsize_bitmap, BITS_PER_LONG) { in arm_lpae_run_tests()
1410 size = 1UL << __fls(cfg->pgsize_bitmap); in arm_lpae_run_tests()
1411 iova = (1UL << cfg->ias) - size; in arm_lpae_run_tests()
1442 struct io_pgtable_cfg cfg = { in arm_lpae_do_selftests() local
1452 cfg.iommu_dev = &dev->dev; in arm_lpae_do_selftests()
1458 cfg.pgsize_bitmap = pgsize[i]; in arm_lpae_do_selftests()
1459 cfg.ias = address_size[k]; in arm_lpae_do_selftests()
1460 cfg.oas = address_size[j]; in arm_lpae_do_selftests()
1462 pgsize[i], cfg.ias, cfg.oas); in arm_lpae_do_selftests()
1463 if (arm_lpae_run_tests(&cfg)) in arm_lpae_do_selftests()
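
A sketch of the selftest loop suggested by the arm_lpae_do_selftests() fragments above: iterate over a few page-size bitmaps and address widths, build a cfg for each combination and hand it to the file's arm_lpae_run_tests(). The array contents, the k <= j bound and the helper signature are assumptions.

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/io-pgtable.h>
#include <linux/kernel.h>
#include <linux/sizes.h>

static int __init example_do_selftests(struct device *dev,
				       const struct iommu_flush_ops *tlb)
{
	static const unsigned long pgsize[] = {
		SZ_4K | SZ_2M | SZ_1G,
		SZ_16K | SZ_32M,
		SZ_64K | SZ_512M,
	};
	static const unsigned int address_size[] = { 32, 36, 40, 42, 44, 48 };
	struct io_pgtable_cfg cfg = {
		.tlb		= tlb,	/* no-op TLB callbacks in the real selftest */
		.coherent_walk	= true,
		.iommu_dev	= dev,
	};
	int i, j, k, fail = 0;

	for (i = 0; i < ARRAY_SIZE(pgsize); ++i)
		for (j = 0; j < ARRAY_SIZE(address_size); ++j)
			for (k = 0; k <= j; ++k) {	/* keep ias <= oas */
				cfg.pgsize_bitmap = pgsize[i];
				cfg.ias = address_size[k];
				cfg.oas = address_size[j];
				if (arm_lpae_run_tests(&cfg))
					fail++;
			}

	return fail ? -EFAULT : 0;
}
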