Lines Matching refs:smmu_domain

245 static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain)  in arm_smmu_tlb_sync_context()  argument
247 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_sync_context()
250 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
251 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
253 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
258 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s1() local
264 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
265 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
266 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_tlb_inv_context_s1()
271 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s2() local
272 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_context_s2()
276 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_inv_context_s2()
283 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s1() local
284 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s1()
285 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_range_s1()
311 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s2() local
312 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s2()
313 int idx = smmu_domain->cfg.cbndx; in arm_smmu_tlb_inv_range_s2()
320 if (smmu_domain->cfg.fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_tlb_inv_range_s2()
376 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_add_page_s2_v1() local
377 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_add_page_s2_v1()
382 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_add_page_s2_v1()
408 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_context_fault() local
409 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_context_fault()
410 int idx = smmu_domain->cfg.cbndx; in arm_smmu_context_fault()
461 static void arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_init_context_bank() argument
464 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_context_bank()
465 struct arm_smmu_cb *cb = &smmu_domain->smmu->cbs[cfg->cbndx]; in arm_smmu_init_context_bank()
604 static int arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_alloc_context_bank() argument
609 return smmu->impl->alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_alloc_context_bank()
623 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_init_domain_context() local
624 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_domain_context()
627 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
628 if (smmu_domain->smmu) in arm_smmu_init_domain_context()
632 smmu_domain->stage = ARM_SMMU_DOMAIN_BYPASS; in arm_smmu_init_domain_context()
633 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
656 smmu_domain->stage = ARM_SMMU_DOMAIN_S2; in arm_smmu_init_domain_context()
658 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_init_domain_context()
673 (smmu_domain->stage == ARM_SMMU_DOMAIN_S1)) in arm_smmu_init_domain_context()
686 switch (smmu_domain->stage) { in arm_smmu_init_domain_context()
703 smmu_domain->flush_ops = &arm_smmu_s1_tlb_ops; in arm_smmu_init_domain_context()
723 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v2; in arm_smmu_init_domain_context()
725 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v1; in arm_smmu_init_domain_context()
732 ret = arm_smmu_alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_init_domain_context()
737 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
747 if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2) in arm_smmu_init_domain_context()
757 .tlb = smmu_domain->flush_ops, in arm_smmu_init_domain_context()
762 ret = smmu->impl->init_context(smmu_domain, &pgtbl_cfg, dev); in arm_smmu_init_domain_context()
767 if (smmu_domain->non_strict) in arm_smmu_init_domain_context()
770 pgtbl_ops = alloc_io_pgtable_ops(fmt, &pgtbl_cfg, smmu_domain); in arm_smmu_init_domain_context()
789 arm_smmu_init_context_bank(smmu_domain, &pgtbl_cfg); in arm_smmu_init_domain_context()
811 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
814 smmu_domain->pgtbl_ops = pgtbl_ops; in arm_smmu_init_domain_context()
819 smmu_domain->smmu = NULL; in arm_smmu_init_domain_context()
821 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
827 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_destroy_domain_context() local
828 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_destroy_domain_context()
829 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_destroy_domain_context()
851 free_io_pgtable_ops(smmu_domain->pgtbl_ops); in arm_smmu_destroy_domain_context()
859 struct arm_smmu_domain *smmu_domain; in arm_smmu_domain_alloc() local
870 smmu_domain = kzalloc(sizeof(*smmu_domain), GFP_KERNEL); in arm_smmu_domain_alloc()
871 if (!smmu_domain) in arm_smmu_domain_alloc()
875 iommu_get_dma_cookie(&smmu_domain->domain))) { in arm_smmu_domain_alloc()
876 kfree(smmu_domain); in arm_smmu_domain_alloc()
880 mutex_init(&smmu_domain->init_mutex); in arm_smmu_domain_alloc()
881 spin_lock_init(&smmu_domain->cb_lock); in arm_smmu_domain_alloc()
883 return &smmu_domain->domain; in arm_smmu_domain_alloc()
888 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_free() local
896 kfree(smmu_domain); in arm_smmu_domain_free()
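
Note: the allocation and free paths above (lines 870-896) show that struct arm_smmu_domain embeds its struct iommu_domain directly, which is why arm_smmu_domain_alloc() returns &smmu_domain->domain rather than the wrapper itself. The to_smmu_domain() conversions seen throughout this listing are therefore a container_of() lookup. A minimal sketch of that helper, matching its definition in the driver for kernels of this vintage (see arm-smmu.h for the authoritative version):

static struct arm_smmu_domain *to_smmu_domain(struct iommu_domain *dom)
{
	/* Recover the containing arm_smmu_domain from the embedded iommu_domain */
	return container_of(dom, struct arm_smmu_domain, domain);
}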
1102 static int arm_smmu_domain_add_master(struct arm_smmu_domain *smmu_domain, in arm_smmu_domain_add_master() argument
1106 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_domain_add_master()
1108 u8 cbndx = smmu_domain->cfg.cbndx; in arm_smmu_domain_add_master()
1112 if (smmu_domain->stage == ARM_SMMU_DOMAIN_BYPASS) in arm_smmu_domain_add_master()
1135 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_attach_dev() local
1172 if (smmu_domain->smmu != smmu) { in arm_smmu_attach_dev()
1175 dev_name(smmu_domain->smmu->dev), dev_name(smmu->dev)); in arm_smmu_attach_dev()
1181 ret = arm_smmu_domain_add_master(smmu_domain, cfg, fwspec); in arm_smmu_attach_dev()
1240 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_flush_iotlb_all() local
1241 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_flush_iotlb_all()
1243 if (smmu_domain->flush_ops) { in arm_smmu_flush_iotlb_all()
1245 smmu_domain->flush_ops->tlb_flush_all(smmu_domain); in arm_smmu_flush_iotlb_all()
1253 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iotlb_sync() local
1254 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iotlb_sync()
1261 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iotlb_sync()
1262 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_iotlb_sync()
1271 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys_hard() local
1272 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iova_to_phys_hard()
1273 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_iova_to_phys_hard()
1274 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys_hard()
1287 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1297 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1306 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1323 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys() local
1324 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys()
1332 if (smmu_domain->smmu->features & ARM_SMMU_FEAT_TRANS_OPS && in arm_smmu_iova_to_phys()
1333 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iova_to_phys()
1501 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_get_attr() local
1507 *(int *)data = (smmu_domain->stage == ARM_SMMU_DOMAIN_NESTED); in arm_smmu_domain_get_attr()
1516 *(int *)data = smmu_domain->non_strict; in arm_smmu_domain_get_attr()
1531 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_set_attr() local
1533 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_domain_set_attr()
1539 if (smmu_domain->smmu) { in arm_smmu_domain_set_attr()
1545 smmu_domain->stage = ARM_SMMU_DOMAIN_NESTED; in arm_smmu_domain_set_attr()
1547 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_domain_set_attr()
1556 smmu_domain->non_strict = *(int *)data; in arm_smmu_domain_set_attr()
1566 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_domain_set_attr()
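
Taken together, the references above touch most fields of struct arm_smmu_domain. A sketch of the structure, reconstructed from the accesses listed here; field order and comments are assumptions, so consult arm-smmu.h for the authoritative layout:

struct arm_smmu_domain {
	struct arm_smmu_device		*smmu;		/* set under init_mutex; lines 628, 633, 737, 819 */
	struct io_pgtable_ops		*pgtbl_ops;	/* line 814; freed at line 851 */
	const struct iommu_flush_ops	*flush_ops;	/* S1 vs S2 v1/v2 ops; lines 703, 723, 725 */
	struct arm_smmu_cfg		cfg;		/* cbndx, asid, vmid, fmt */
	enum arm_smmu_domain_stage	stage;		/* S1, S2, NESTED or BYPASS */
	bool				non_strict;	/* lazy TLB invalidation; lines 767, 1516, 1556 */
	struct mutex			init_mutex;	/* protects the smmu pointer; lines 627, 811, 821 */
	spinlock_t			cb_lock;	/* serialises TLB syncs and ATS1 walks; lines 250, 1287 */
	struct iommu_domain		domain;		/* embedded; see to_smmu_domain() above */
};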