Lines matching refs:cfg in drivers/iommu/io-pgtable-arm-v7s.c (the Linux ARMv7 short-descriptor io-pgtable driver). Each entry shows the source line number and text, followed by a note marking cfg as a macro/function argument, a local, or a reference inside the named function.

51 #define _ARM_V7S_LVL_BITS(lvl, cfg)	((lvl) == 1 ? ((cfg)->ias - 20) : 8)  argument
55 #define ARM_V7S_PTES_PER_LVL(lvl, cfg) (1 << _ARM_V7S_LVL_BITS(lvl, cfg)) argument
56 #define ARM_V7S_TABLE_SIZE(lvl, cfg) \ argument
57 (ARM_V7S_PTES_PER_LVL(lvl, cfg) * sizeof(arm_v7s_iopte))
62 #define _ARM_V7S_IDX_MASK(lvl, cfg) (ARM_V7S_PTES_PER_LVL(lvl, cfg) - 1) argument
63 #define ARM_V7S_LVL_IDX(addr, lvl, cfg) ({ \ argument
65 ((addr) >> ARM_V7S_LVL_SHIFT(_l)) & _ARM_V7S_IDX_MASK(_l, cfg); \
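
The macros above derive each level's geometry from cfg: level 1 holds 2^(cfg->ias - 20) entries while level 2 is fixed at 256 (8 bits), and ARM_V7S_LVL_IDX() masks the shifted address with that entry count minus one. A standalone user-space sketch of the same arithmetic, assuming the usual short-descriptor level shifts of 20 bits (1 MB sections) and 12 bits (4 KB pages), which ARM_V7S_LVL_SHIFT() defines outside this listing:

#include <stdio.h>
#include <stdint.h>

/* Re-statement of the listed macros for illustration; "ias" stands in for
 * cfg->ias and the level shifts are assumed to be 20 and 12. */
#define LVL_SHIFT(lvl)		((lvl) == 1 ? 20 : 12)
#define LVL_BITS(lvl, ias)	((lvl) == 1 ? ((ias) - 20) : 8)
#define PTES_PER_LVL(lvl, ias)	(1u << LVL_BITS(lvl, ias))
#define IDX_MASK(lvl, ias)	(PTES_PER_LVL(lvl, ias) - 1)
#define LVL_IDX(addr, lvl, ias)	(((addr) >> LVL_SHIFT(lvl)) & IDX_MASK(lvl, ias))

int main(void)
{
	unsigned int ias = 32;		/* full 32-bit input address space */
	uint64_t iova = 0x12345000;

	printf("level-1 entries: %u\n", PTES_PER_LVL(1, ias));	/* 4096 */
	printf("level-2 entries: %u\n", PTES_PER_LVL(2, ias));	/* 256 */
	printf("l1 index: %u, l2 index: %u\n",
	       (unsigned int)LVL_IDX(iova, 1, ias),
	       (unsigned int)LVL_IDX(iova, 2, ias));		/* 291, 69 */
	return 0;
}
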
179 static bool arm_v7s_is_mtk_enabled(struct io_pgtable_cfg *cfg) in arm_v7s_is_mtk_enabled() argument
182 (cfg->quirks & IO_PGTABLE_QUIRK_ARM_MTK_EXT); in arm_v7s_is_mtk_enabled()
186 struct io_pgtable_cfg *cfg) in paddr_to_iopte() argument
190 if (!arm_v7s_is_mtk_enabled(cfg)) in paddr_to_iopte()
203 struct io_pgtable_cfg *cfg) in iopte_to_paddr() argument
216 if (!arm_v7s_is_mtk_enabled(cfg)) in iopte_to_paddr()
231 return phys_to_virt(iopte_to_paddr(pte, lvl, &data->iop.cfg)); in iopte_deref()
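
paddr_to_iopte() and iopte_to_paddr() are essentially straight copies unless the MediaTek extension is active: a 32-bit short-descriptor entry has no room for physical-address bits 32 and 33, which is why lines 790/793 further down only allow ias/oas of 34/35 bits with that quirk. A hedged illustration of the idea, assuming attribute bits have already been masked off; the spare descriptor bits chosen below are placeholders, not the driver's real encoding, and the real arm_v7s_is_mtk_enabled() check additionally depends on a 64-bit phys_addr_t build:

#include <linux/bits.h>
#include <linux/io-pgtable.h>

typedef u32 arm_v7s_iopte;

/* Hypothetical spare descriptor bits used to stash PA[32] and PA[33]. */
#define EXAMPLE_MTK_PA_BIT32	BIT(9)
#define EXAMPLE_MTK_PA_BIT33	BIT(4)

static arm_v7s_iopte example_paddr_to_iopte(phys_addr_t paddr,
					    struct io_pgtable_cfg *cfg)
{
	arm_v7s_iopte pte = paddr;	/* the low 32 bits always fit */

	if (!(cfg->quirks & IO_PGTABLE_QUIRK_ARM_MTK_EXT))
		return pte;

	if (paddr & BIT_ULL(32))
		pte |= EXAMPLE_MTK_PA_BIT32;
	if (paddr & BIT_ULL(33))
		pte |= EXAMPLE_MTK_PA_BIT33;
	return pte;
}

static phys_addr_t example_iopte_to_paddr(arm_v7s_iopte pte,
					  struct io_pgtable_cfg *cfg)
{
	phys_addr_t paddr = pte;

	if (!(cfg->quirks & IO_PGTABLE_QUIRK_ARM_MTK_EXT))
		return paddr;

	if (pte & EXAMPLE_MTK_PA_BIT32)
		paddr = (paddr & ~(phys_addr_t)EXAMPLE_MTK_PA_BIT32) | BIT_ULL(32);
	if (pte & EXAMPLE_MTK_PA_BIT33)
		paddr = (paddr & ~(phys_addr_t)EXAMPLE_MTK_PA_BIT33) | BIT_ULL(33);
	return paddr;
}
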
237 struct io_pgtable_cfg *cfg = &data->iop.cfg; in __arm_v7s_alloc_table() local
238 struct device *dev = cfg->iommu_dev; in __arm_v7s_alloc_table()
241 size_t size = ARM_V7S_TABLE_SIZE(lvl, cfg); in __arm_v7s_alloc_table()
259 if (!cfg->coherent_walk) { in __arm_v7s_alloc_table()
289 struct io_pgtable_cfg *cfg = &data->iop.cfg; in __arm_v7s_free_table() local
290 struct device *dev = cfg->iommu_dev; in __arm_v7s_free_table()
291 size_t size = ARM_V7S_TABLE_SIZE(lvl, cfg); in __arm_v7s_free_table()
293 if (!cfg->coherent_walk) in __arm_v7s_free_table()
303 struct io_pgtable_cfg *cfg) in __arm_v7s_pte_sync() argument
305 if (cfg->coherent_walk) in __arm_v7s_pte_sync()
308 dma_sync_single_for_device(cfg->iommu_dev, __arm_v7s_dma_addr(ptep), in __arm_v7s_pte_sync()
312 int num_entries, struct io_pgtable_cfg *cfg) in __arm_v7s_set_pte() argument
319 __arm_v7s_pte_sync(ptep, num_entries, cfg); in __arm_v7s_set_pte()
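
__arm_v7s_set_pte() and __arm_v7s_pte_sync() implement a publish-then-sync pattern: write the entries from the CPU, then push them out with dma_sync_single_for_device() only when cfg->coherent_walk says the IOMMU's table walker does not snoop the CPU caches. A minimal sketch of the same pattern, assuming the table page was previously mapped for DMA and its DMA address is passed in; the helper name and the table_dma parameter are illustrative (the driver derives the address itself):

#include <linux/dma-mapping.h>
#include <linux/io-pgtable.h>

typedef u32 arm_v7s_iopte;

/* Write 'num' identical entries, then make them visible to a non-coherent
 * table walker. 'table_dma' is assumed to be the DMA address of ptep[0]. */
static void example_set_ptes(struct io_pgtable_cfg *cfg, arm_v7s_iopte *ptep,
			     dma_addr_t table_dma, arm_v7s_iopte pte, int num)
{
	int i;

	for (i = 0; i < num; i++)
		ptep[i] = pte;

	if (!cfg->coherent_walk)
		dma_sync_single_for_device(cfg->iommu_dev, table_dma,
					   num * sizeof(*ptep), DMA_TO_DEVICE);
}
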
323 struct io_pgtable_cfg *cfg) in arm_v7s_prot_to_pte() argument
325 bool ap = !(cfg->quirks & IO_PGTABLE_QUIRK_NO_PERMS); in arm_v7s_prot_to_pte()
347 if (lvl == 1 && (cfg->quirks & IO_PGTABLE_QUIRK_ARM_NS)) in arm_v7s_prot_to_pte()
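
arm_v7s_prot_to_pte() builds descriptor attributes from the IOMMU prot flags, and cfg gates two of them: IO_PGTABLE_QUIRK_NO_PERMS suppresses the access-permission bits entirely (line 325), and IO_PGTABLE_QUIRK_ARM_NS marks level-1 entries non-secure (line 347). A hedged sketch of that gating only; the EXAMPLE_* bit positions are placeholders, not the real short-descriptor encoding:

#include <linux/bits.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>

typedef u32 arm_v7s_iopte;

/* Placeholder bit positions -- illustration only. */
#define EXAMPLE_ATTR_AF		BIT(10)	/* access flag */
#define EXAMPLE_ATTR_RDONLY	BIT(15)	/* read-only AP bit */
#define EXAMPLE_ATTR_NS		BIT(3)	/* non-secure, level-1 descriptors */

static arm_v7s_iopte example_prot_to_pte(int prot, int lvl,
					 struct io_pgtable_cfg *cfg)
{
	/* Line 325: permission bits are only emitted without NO_PERMS. */
	bool ap = !(cfg->quirks & IO_PGTABLE_QUIRK_NO_PERMS);
	arm_v7s_iopte pte = 0;

	if (ap) {
		pte |= EXAMPLE_ATTR_AF;
		if (!(prot & IOMMU_WRITE))
			pte |= EXAMPLE_ATTR_RDONLY;
	}

	/* Line 347: only level-1 (section/table) entries carry the NS bit. */
	if (lvl == 1 && (cfg->quirks & IO_PGTABLE_QUIRK_ARM_NS))
		pte |= EXAMPLE_ATTR_NS;

	return pte;
}
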
422 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_v7s_init_pte() local
435 tblp = ptep - ARM_V7S_LVL_IDX(iova, lvl, cfg); in arm_v7s_init_pte()
445 pte = arm_v7s_prot_to_pte(prot, lvl, cfg); in arm_v7s_init_pte()
449 pte |= paddr_to_iopte(paddr, lvl, cfg); in arm_v7s_init_pte()
451 __arm_v7s_set_pte(ptep, pte, num_entries, cfg); in arm_v7s_init_pte()
458 struct io_pgtable_cfg *cfg) in arm_v7s_install_table() argument
463 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_NS) in arm_v7s_install_table()
474 __arm_v7s_pte_sync(ptep, 1, cfg); in arm_v7s_install_table()
483 struct io_pgtable_cfg *cfg = &data->iop.cfg; in __arm_v7s_map() local
488 ptep += ARM_V7S_LVL_IDX(iova, lvl, cfg); in __arm_v7s_map()
506 pte = arm_v7s_install_table(cptep, ptep, 0, cfg); in __arm_v7s_map()
511 __arm_v7s_pte_sync(ptep, 1, cfg); in __arm_v7s_map()
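
When __arm_v7s_map() needs a new level-2 table it allocates one and then races to publish it through arm_v7s_install_table(), so two CPUs mapping into the same 1 MB region never corrupt the level-1 slot. A hedged sketch of that lock-free publish step, using cmpxchg as the io-pgtable code does; the helper below is illustrative and omits the NS-quirk and PTE-sync handling shown at lines 463/474:

#include <linux/atomic.h>

typedef u32 arm_v7s_iopte;

/* Try to install 'table_pte' over the expected old value 'curr'. A real
 * implementation must order the new table's contents before this publish
 * (typically with a write barrier such as dma_wmb()) and then sync the PTE
 * for non-coherent walkers. */
static arm_v7s_iopte example_install_table(arm_v7s_iopte *ptep,
					   arm_v7s_iopte curr,
					   arm_v7s_iopte table_pte)
{
	/* If the return value differs from 'curr', another CPU won the race;
	 * the caller frees its own table and continues with the winner's. */
	return cmpxchg_relaxed(ptep, curr, table_pte);
}
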
537 if (WARN_ON(iova >= (1ULL << data->iop.cfg.ias) || in arm_v7s_map_pages()
538 paddr >= (1ULL << data->iop.cfg.oas))) in arm_v7s_map_pages()
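
The WARN_ON() guards in arm_v7s_map_pages() shift 1ULL rather than 1UL because cfg->ias can legitimately reach 32 (and 34/35 with the MediaTek quirk per lines 790/793), and shifting a 32-bit unsigned long by 32 or more is undefined on a 32-bit build. A small guard in the same style; the helper name is made up for illustration:

#include <linux/bug.h>
#include <linux/io-pgtable.h>

/* 1ULL keeps the shift well-defined even when ias/oas reach 32 or more. */
static bool example_in_range(struct io_pgtable_cfg *cfg,
			     unsigned long iova, phys_addr_t paddr)
{
	if (WARN_ON(iova >= (1ULL << cfg->ias)))
		return false;
	if (WARN_ON(paddr >= (1ULL << cfg->oas)))
		return false;
	return true;
}
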
572 for (i = 0; i < ARM_V7S_PTES_PER_LVL(1, &data->iop.cfg); i++) { in arm_v7s_free_pgtable()
603 __arm_v7s_pte_sync(ptep, ARM_V7S_CONT_PAGES, &iop->cfg); in arm_v7s_split_cont()
616 struct io_pgtable_cfg *cfg = &data->iop.cfg; in arm_v7s_split_blk_unmap() local
624 num_ptes = ARM_V7S_PTES_PER_LVL(2, cfg); in arm_v7s_split_blk_unmap()
626 unmap_idx = ARM_V7S_LVL_IDX(iova, 2, cfg); in arm_v7s_split_blk_unmap()
628 pte = arm_v7s_prot_to_pte(arm_v7s_pte_to_prot(blk_pte, 1), 2, cfg); in arm_v7s_split_blk_unmap()
637 __arm_v7s_set_pte(&tablep[i], pte, num_entries, cfg); in arm_v7s_split_blk_unmap()
640 pte = arm_v7s_install_table(tablep, ptep, blk_pte, cfg); in arm_v7s_split_blk_unmap()
668 idx = ARM_V7S_LVL_IDX(iova, lvl, &iop->cfg); in __arm_v7s_unmap()
698 __arm_v7s_set_pte(ptep, 0, num_entries, &iop->cfg); in __arm_v7s_unmap()
707 } else if (iop->cfg.quirks & IO_PGTABLE_QUIRK_NON_STRICT) { in __arm_v7s_unmap()
741 if (WARN_ON(iova >= (1ULL << data->iop.cfg.ias))) in arm_v7s_unmap_pages()
771 ptep += ARM_V7S_LVL_IDX(iova, ++lvl, &data->iop.cfg); in arm_v7s_iova_to_phys()
782 return iopte_to_paddr(pte, lvl, &data->iop.cfg) | (iova & ~mask); in arm_v7s_iova_to_phys()
785 static struct io_pgtable *arm_v7s_alloc_pgtable(struct io_pgtable_cfg *cfg, in arm_v7s_alloc_pgtable() argument
790 if (cfg->ias > (arm_v7s_is_mtk_enabled(cfg) ? 34 : ARM_V7S_ADDR_BITS)) in arm_v7s_alloc_pgtable()
793 if (cfg->oas > (arm_v7s_is_mtk_enabled(cfg) ? 35 : ARM_V7S_ADDR_BITS)) in arm_v7s_alloc_pgtable()
796 if (cfg->quirks & ~(IO_PGTABLE_QUIRK_ARM_NS | in arm_v7s_alloc_pgtable()
803 if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_MTK_EXT && in arm_v7s_alloc_pgtable()
804 !(cfg->quirks & IO_PGTABLE_QUIRK_NO_PERMS)) in arm_v7s_alloc_pgtable()
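
arm_v7s_alloc_pgtable() first screens cfg: the address-size limits at lines 790/793, an allow-list of quirks at line 796, and the rule at lines 803-804 that the MediaTek extension is only accepted together with IO_PGTABLE_QUIRK_NO_PERMS. A hedged sketch of that screening; the allow-list below is inferred from the quirks this listing shows the driver using, not copied from the truncated line 796:

#include <linux/io-pgtable.h>

/* Illustrative pre-flight check in the style of arm_v7s_alloc_pgtable(). */
static bool example_v7s_cfg_ok(const struct io_pgtable_cfg *cfg, bool mtk)
{
	/* ARM_V7S_ADDR_BITS is 32; MTK stretches this to 34/35 bits. */
	if (cfg->ias > (mtk ? 34 : 32) || cfg->oas > (mtk ? 35 : 32))
		return false;

	/* Reject any quirk the format does not understand (inferred set). */
	if (cfg->quirks & ~(IO_PGTABLE_QUIRK_ARM_NS |
			    IO_PGTABLE_QUIRK_NO_PERMS |
			    IO_PGTABLE_QUIRK_ARM_MTK_EXT |
			    IO_PGTABLE_QUIRK_NON_STRICT))
		return false;

	/* Lines 803-804: the MTK extension is rejected without NO_PERMS,
	 * presumably because it repurposes permission-related bits. */
	if ((cfg->quirks & IO_PGTABLE_QUIRK_ARM_MTK_EXT) &&
	    !(cfg->quirks & IO_PGTABLE_QUIRK_NO_PERMS))
		return false;

	return true;
}
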
813 ARM_V7S_TABLE_SIZE(2, cfg), in arm_v7s_alloc_pgtable()
814 ARM_V7S_TABLE_SIZE(2, cfg), in arm_v7s_alloc_pgtable()
828 data->iop.cfg = *cfg; in arm_v7s_alloc_pgtable()
834 cfg->pgsize_bitmap &= SZ_4K | SZ_64K | SZ_1M | SZ_16M; in arm_v7s_alloc_pgtable()
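
Line 834 clamps the caller's requested page sizes to what the format can express: 4 KB small pages, 64 KB large pages, 1 MB sections and 16 MB supersections. Restated as a trivial helper for clarity:

#include <linux/io-pgtable.h>
#include <linux/sizes.h>

/* Any other size the caller asked for is simply masked away. */
static void example_clamp_pgsizes(struct io_pgtable_cfg *cfg)
{
	cfg->pgsize_bitmap &= SZ_4K | SZ_64K | SZ_1M | SZ_16M;
}
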
837 cfg->arm_v7s_cfg.tcr = 0; in arm_v7s_alloc_pgtable()
844 cfg->arm_v7s_cfg.prrr = ARM_V7S_PRRR_TR(1, ARM_V7S_PRRR_TYPE_DEVICE) | in arm_v7s_alloc_pgtable()
849 cfg->arm_v7s_cfg.nmrr = ARM_V7S_NMRR_IR(7, ARM_V7S_RGN_WBWA) | in arm_v7s_alloc_pgtable()
861 cfg->arm_v7s_cfg.ttbr = virt_to_phys(data->pgd) | ARM_V7S_TTBR_S | in arm_v7s_alloc_pgtable()
862 (cfg->coherent_walk ? (ARM_V7S_TTBR_NOS | in arm_v7s_alloc_pgtable()
918 struct io_pgtable_cfg cfg = { in arm_v7s_do_selftests() local
931 cfg_cookie = &cfg; in arm_v7s_do_selftests()
933 ops = alloc_io_pgtable_ops(ARM_V7S, &cfg, &cfg); in arm_v7s_do_selftests()
956 for_each_set_bit(i, &cfg.pgsize_bitmap, BITS_PER_LONG) { in arm_v7s_do_selftests()
978 size = 1UL << __ffs(cfg.pgsize_bitmap); in arm_v7s_do_selftests()
996 for_each_set_bit(i, &cfg.pgsize_bitmap, BITS_PER_LONG) { in arm_v7s_do_selftests()
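
The selftest exercises the format purely through the io-pgtable API: build a cfg, hand it to alloc_io_pgtable_ops() with the ARM_V7S format, then drive map_pages()/unmap_pages()/iova_to_phys() through the returned ops. A condensed sketch of that calling pattern, assuming no-op TLB callbacks similar to the selftest's dummies; all example_* names are illustrative:

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>
#include <linux/sizes.h>

static void example_tlb_flush_all(void *cookie) { }
static void example_tlb_flush_walk(unsigned long iova, size_t size,
				   size_t granule, void *cookie) { }
static void example_tlb_add_page(struct iommu_iotlb_gather *gather,
				 unsigned long iova, size_t granule,
				 void *cookie) { }

static const struct iommu_flush_ops example_tlb_ops = {
	.tlb_flush_all	= example_tlb_flush_all,
	.tlb_flush_walk	= example_tlb_flush_walk,
	.tlb_add_page	= example_tlb_add_page,
};

static int example_v7s_smoke_test(struct device *iommu_dev)
{
	struct io_pgtable_cfg cfg = {
		.pgsize_bitmap	= SZ_4K | SZ_64K | SZ_1M | SZ_16M,
		.ias		= 32,
		.oas		= 32,
		.coherent_walk	= true,
		.tlb		= &example_tlb_ops,
		.iommu_dev	= iommu_dev,
	};
	struct io_pgtable_ops *ops;
	size_t mapped = 0;

	ops = alloc_io_pgtable_ops(ARM_V7S, &cfg, NULL);
	if (!ops)
		return -ENOMEM;

	/* Map one 4K page at IOVA 1M, read it back, then tear it down. */
	if (ops->map_pages(ops, SZ_1M, SZ_1M, SZ_4K, 1,
			   IOMMU_READ | IOMMU_WRITE, GFP_KERNEL, &mapped))
		goto out;
	WARN_ON(ops->iova_to_phys(ops, SZ_1M) != SZ_1M);
	ops->unmap_pages(ops, SZ_1M, SZ_4K, 1, NULL);
out:
	free_io_pgtable_ops(ops);
	return 0;
}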