Lines Matching refs:cfg
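
The function names in this listing identify the file as the Arm SMMU v1/v2 driver (arm-smmu.c in the Linux tree), and the references resolve to two distinct types: smmu_domain->cfg is a struct arm_smmu_cfg carrying per-domain context-bank state, while the pointers returned by dev_iommu_priv_get(dev) are struct arm_smmu_master_cfg, the per-master stream-mapping state. A minimal sketch of both, assuming the layout the driver used around the kernel versions this listing appears to index (the real definitions, and the enums they reference, live in arm-smmu.h):

	struct arm_smmu_cfg {
		u8				cbndx;		/* context bank index */
		u8				irptndx;	/* context interrupt index */
		union {
			u16			asid;		/* stage 1 TLB tag */
			u16			vmid;		/* stage 2 TLB tag */
		};
		enum arm_smmu_cbar_type		cbar;		/* CBAR translation type */
		enum arm_smmu_context_fmt	fmt;		/* AArch32 S/L or AArch64 */
	};

	struct arm_smmu_master_cfg {
		struct arm_smmu_device		*smmu;		/* SMMU this master sits behind */
		s16				smendx[];	/* one SMR/S2CR index per fwspec stream ID */
	};

The asid/vmid union reflects that a context bank is either stage 1 (TLB entries tagged by ASID) or stage 2 (tagged by VMID), never both, which is why the TLB-invalidation paths at lines 264-276 below pick one field or the other.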

251 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
264 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
265 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
276 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_inv_context_s2()
285 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_range_s1() local
286 int idx = cfg->cbndx; in arm_smmu_tlb_inv_range_s1()
291 if (cfg->fmt != ARM_SMMU_CTX_FMT_AARCH64) { in arm_smmu_tlb_inv_range_s1()
293 iova |= cfg->asid; in arm_smmu_tlb_inv_range_s1()
300 iova |= (u64)cfg->asid << 48; in arm_smmu_tlb_inv_range_s1()
313 int idx = smmu_domain->cfg.cbndx; in arm_smmu_tlb_inv_range_s2()
320 if (smmu_domain->cfg.fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_tlb_inv_range_s2()
382 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_add_page_s2_v1()
410 int idx = smmu_domain->cfg.cbndx; in arm_smmu_context_fault()
464 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_context_bank() local
465 struct arm_smmu_cb *cb = &smmu_domain->smmu->cbs[cfg->cbndx]; in arm_smmu_init_context_bank()
466 bool stage1 = cfg->cbar != CBAR_TYPE_S2_TRANS; in arm_smmu_init_context_bank()
468 cb->cfg = cfg; in arm_smmu_init_context_bank()
472 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH32_S) { in arm_smmu_init_context_bank()
477 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_init_context_bank()
488 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH32_S) { in arm_smmu_init_context_bank()
493 cfg->asid); in arm_smmu_init_context_bank()
495 cfg->asid); in arm_smmu_init_context_bank()
508 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH32_S) { in arm_smmu_init_context_bank()
523 struct arm_smmu_cfg *cfg = cb->cfg; in arm_smmu_write_context_bank() local
526 if (!cfg) { in arm_smmu_write_context_bank()
531 stage1 = cfg->cbar != CBAR_TYPE_S2_TRANS; in arm_smmu_write_context_bank()
535 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_write_context_bank()
541 reg |= FIELD_PREP(ARM_SMMU_CBA2R_VMID16, cfg->vmid); in arm_smmu_write_context_bank()
547 reg = FIELD_PREP(ARM_SMMU_CBAR_TYPE, cfg->cbar); in arm_smmu_write_context_bank()
549 reg |= FIELD_PREP(ARM_SMMU_CBAR_IRPTNDX, cfg->irptndx); in arm_smmu_write_context_bank()
562 reg |= FIELD_PREP(ARM_SMMU_CBAR_VMID, cfg->vmid); in arm_smmu_write_context_bank()
576 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH32_S) { in arm_smmu_write_context_bank()
577 arm_smmu_cb_write(smmu, idx, ARM_SMMU_CB_CONTEXTIDR, cfg->asid); in arm_smmu_write_context_bank()
624 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_domain_context() local
669 cfg->fmt = ARM_SMMU_CTX_FMT_AARCH32_L; in arm_smmu_init_domain_context()
674 cfg->fmt = ARM_SMMU_CTX_FMT_AARCH32_S; in arm_smmu_init_domain_context()
675 if ((IS_ENABLED(CONFIG_64BIT) || cfg->fmt == ARM_SMMU_CTX_FMT_NONE) && in arm_smmu_init_domain_context()
679 cfg->fmt = ARM_SMMU_CTX_FMT_AARCH64; in arm_smmu_init_domain_context()
681 if (cfg->fmt == ARM_SMMU_CTX_FMT_NONE) { in arm_smmu_init_domain_context()
688 cfg->cbar = CBAR_TYPE_S1_TRANS_S2_BYPASS; in arm_smmu_init_domain_context()
692 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH64) { in arm_smmu_init_domain_context()
694 } else if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH32_L) { in arm_smmu_init_domain_context()
711 cfg->cbar = CBAR_TYPE_S2_TRANS; in arm_smmu_init_domain_context()
715 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH64) { in arm_smmu_init_domain_context()
739 cfg->cbndx = ret; in arm_smmu_init_domain_context()
741 cfg->irptndx = atomic_inc_return(&smmu->irptndx); in arm_smmu_init_domain_context()
742 cfg->irptndx %= smmu->num_context_irqs; in arm_smmu_init_domain_context()
744 cfg->irptndx = cfg->cbndx; in arm_smmu_init_domain_context()
748 cfg->vmid = cfg->cbndx + 1; in arm_smmu_init_domain_context()
750 cfg->asid = cfg->cbndx; in arm_smmu_init_domain_context()
790 arm_smmu_write_context_bank(smmu, cfg->cbndx); in arm_smmu_init_domain_context()
796 irq = smmu->irqs[smmu->num_global_irqs + cfg->irptndx]; in arm_smmu_init_domain_context()
807 cfg->irptndx, irq); in arm_smmu_init_domain_context()
808 cfg->irptndx = ARM_SMMU_INVALID_IRPTNDX; in arm_smmu_init_domain_context()
818 __arm_smmu_free_bitmap(smmu->context_map, cfg->cbndx); in arm_smmu_init_domain_context()
829 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_destroy_domain_context() local
843 smmu->cbs[cfg->cbndx].cfg = NULL; in arm_smmu_destroy_domain_context()
844 arm_smmu_write_context_bank(smmu, cfg->cbndx); in arm_smmu_destroy_domain_context()
846 if (cfg->irptndx != ARM_SMMU_INVALID_IRPTNDX) { in arm_smmu_destroy_domain_context()
847 irq = smmu->irqs[smmu->num_global_irqs + cfg->irptndx]; in arm_smmu_destroy_domain_context()
852 __arm_smmu_free_bitmap(smmu->context_map, cfg->cbndx); in arm_smmu_destroy_domain_context()
1041 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_master_alloc_smes() local
1042 struct arm_smmu_device *smmu = cfg->smmu; in arm_smmu_master_alloc_smes()
1048 for_each_cfg_sme(cfg, fwspec, i, idx) { in arm_smmu_master_alloc_smes()
1068 cfg->smendx[i] = (s16)idx; in arm_smmu_master_alloc_smes()
1072 for_each_cfg_sme(cfg, fwspec, i, idx) in arm_smmu_master_alloc_smes()
1080 arm_smmu_free_sme(smmu, cfg->smendx[i]); in arm_smmu_master_alloc_smes()
1081 cfg->smendx[i] = INVALID_SMENDX; in arm_smmu_master_alloc_smes()
1087 static void arm_smmu_master_free_smes(struct arm_smmu_master_cfg *cfg, in arm_smmu_master_free_smes() argument
1090 struct arm_smmu_device *smmu = cfg->smmu; in arm_smmu_master_free_smes()
1094 for_each_cfg_sme(cfg, fwspec, i, idx) { in arm_smmu_master_free_smes()
1097 cfg->smendx[i] = INVALID_SMENDX; in arm_smmu_master_free_smes()
1103 struct arm_smmu_master_cfg *cfg, in arm_smmu_domain_add_master() argument
1108 u8 cbndx = smmu_domain->cfg.cbndx; in arm_smmu_domain_add_master()
1117 for_each_cfg_sme(cfg, fwspec, i, idx) { in arm_smmu_domain_add_master()
1137 struct arm_smmu_master_cfg *cfg; in arm_smmu_attach_dev() local
1153 cfg = dev_iommu_priv_get(dev); in arm_smmu_attach_dev()
1154 if (!cfg) in arm_smmu_attach_dev()
1157 smmu = cfg->smmu; in arm_smmu_attach_dev()
1181 ret = arm_smmu_domain_add_master(smmu_domain, cfg, fwspec); in arm_smmu_attach_dev()
1273 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_iova_to_phys_hard() local
1280 int ret, idx = cfg->cbndx; in arm_smmu_iova_to_phys_hard()
1289 if (cfg->fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_iova_to_phys_hard()
1367 struct arm_smmu_master_cfg *cfg; in arm_smmu_probe_device() local
1406 cfg = kzalloc(offsetof(struct arm_smmu_master_cfg, smendx[i]), in arm_smmu_probe_device()
1408 if (!cfg) in arm_smmu_probe_device()
1411 cfg->smmu = smmu; in arm_smmu_probe_device()
1412 dev_iommu_priv_set(dev, cfg); in arm_smmu_probe_device()
1414 cfg->smendx[i] = INVALID_SMENDX; in arm_smmu_probe_device()
1432 kfree(cfg); in arm_smmu_probe_device()
1441 struct arm_smmu_master_cfg *cfg; in arm_smmu_release_device() local
1448 cfg = dev_iommu_priv_get(dev); in arm_smmu_release_device()
1449 smmu = cfg->smmu; in arm_smmu_release_device()
1455 arm_smmu_master_free_smes(cfg, fwspec); in arm_smmu_release_device()
1460 kfree(cfg); in arm_smmu_release_device()
1466 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_device_group() local
1468 struct arm_smmu_device *smmu = cfg->smmu; in arm_smmu_device_group()
1472 for_each_cfg_sme(cfg, fwspec, i, idx) { in arm_smmu_device_group()
1492 for_each_cfg_sme(cfg, fwspec, i, idx) in arm_smmu_device_group()
1603 struct arm_smmu_master_cfg *cfg = dev_iommu_priv_get(dev); in arm_smmu_def_domain_type() local
1604 const struct arm_smmu_impl *impl = cfg->smmu->impl; in arm_smmu_def_domain_type()
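
The for_each_cfg_sme() iterator that dominates the master-cfg references (lines 1048, 1072, 1094, 1117, 1472 and 1492) walks smendx[] in lock-step with the firmware-provided stream IDs. Only the macro's name and parameter order are confirmed by the listing itself; a plausible sketch of its definition, assuming the in-tree form of this era:

	/* Out-of-range slots read as INVALID_SMENDX rather than past the array. */
	#define cfg_smendx(cfg, fw, i) \
		((i) >= (fw)->num_ids ? INVALID_SMENDX : (cfg)->smendx[i])

	/* For each stream ID i in the fwspec, yield the allocated SMR/S2CR index. */
	#define for_each_cfg_sme(cfg, fw, i, idx) \
		for (i = 0; idx = cfg_smendx(cfg, fw, i), i < (fw)->num_ids; ++i)

The allocation convention visible at lines 739-750 follows from the same per-context-bank design: cbndx doubles as the TLB tag, with stage-1 domains taking asid = cbndx and stage-2 domains taking vmid = cbndx + 1, the offset presumably keeping VMID 0 unused.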