Lines Matching refs:smmu_domain

226 static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain)  in arm_smmu_tlb_sync_context()  argument
228 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_sync_context()
231 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
232 __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx), in arm_smmu_tlb_sync_context()
234 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_tlb_sync_context()
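
Lines 226-234 above all belong to one helper: it takes the domain's cb_lock and issues a TLB sync against the domain's context bank. A sketch of how those fragments plausibly fit together; the local declarations and the ARM_SMMU_CB_TLBSYNC/ARM_SMMU_CB_TLBSTATUS register arguments are filled in by assumption, as they are not shown in the listing:

    static void arm_smmu_tlb_sync_context(struct arm_smmu_domain *smmu_domain)
    {
            struct arm_smmu_device *smmu = smmu_domain->smmu;
            unsigned long flags;

            /* Serialise TLB maintenance on this context bank */
            spin_lock_irqsave(&smmu_domain->cb_lock, flags);
            /* Write TLBSYNC, then poll TLBSTATUS until the sync completes */
            __arm_smmu_tlb_sync(smmu, ARM_SMMU_CB(smmu, smmu_domain->cfg.cbndx),
                                ARM_SMMU_CB_TLBSYNC, ARM_SMMU_CB_TLBSTATUS);
            spin_unlock_irqrestore(&smmu_domain->cb_lock, flags);
    }
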
239 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s1() local
245 arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx, in arm_smmu_tlb_inv_context_s1()
246 ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid); in arm_smmu_tlb_inv_context_s1()
247 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_tlb_inv_context_s1()
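
Stage-1 context invalidation (lines 239-247) targets the TLB entries tagged with the domain's ASID, then syncs on the context bank. A plausible reconstruction; the wmb() is an assumption, justified by the relaxed TLBI write needing prior PTE updates to be visible first:

    static void arm_smmu_tlb_inv_context_s1(void *cookie)
    {
            struct arm_smmu_domain *smmu_domain = cookie;

            /* TLBI writes are relaxed; make cleared PTEs visible first */
            wmb();
            /* Invalidate all stage-1 entries tagged with this domain's ASID */
            arm_smmu_cb_write(smmu_domain->smmu, smmu_domain->cfg.cbndx,
                              ARM_SMMU_CB_S1_TLBIASID, smmu_domain->cfg.asid);
            arm_smmu_tlb_sync_context(smmu_domain);
    }
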
252 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_context_s2() local
253 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_context_s2()
257 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_inv_context_s2()
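
The stage-2 variant (lines 252-257) has no per-context ASID to key on, so it invalidates by VMID through the global register space. Sketch; arm_smmu_tlb_sync_global() is assumed as the global counterpart of the per-context sync:

    static void arm_smmu_tlb_inv_context_s2(void *cookie)
    {
            struct arm_smmu_domain *smmu_domain = cookie;
            struct arm_smmu_device *smmu = smmu_domain->smmu;

            /* Same barrier reasoning as the stage-1 path */
            wmb();
            arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid);
            arm_smmu_tlb_sync_global(smmu);
    }
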
264 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s1() local
265 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s1()
266 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_range_s1()
292 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_range_s2() local
293 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_inv_range_s2()
294 int idx = smmu_domain->cfg.cbndx; in arm_smmu_tlb_inv_range_s2()
301 if (smmu_domain->cfg.fmt == ARM_SMMU_CTX_FMT_AARCH64) in arm_smmu_tlb_inv_range_s2()
312 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_inv_walk_s1() local
313 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_tlb_inv_walk_s1()
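
Lines 292-305 show the stage-2 range invalidation stepping the IOVA in granule-sized increments, with a 64-bit register write when the context bank uses the AArch64 format (line 301). A sketch under those assumptions; the parameter list, the COHERENT_WALK barrier, and the 12-bit shift are reconstructed, not shown in the listing:

    static void arm_smmu_tlb_inv_range_s2(unsigned long iova, size_t size,
                                          size_t granule, void *cookie, int reg)
    {
            struct arm_smmu_domain *smmu_domain = cookie;
            struct arm_smmu_device *smmu = smmu_domain->smmu;
            int idx = smmu_domain->cfg.cbndx;

            if (smmu->features & ARM_SMMU_FEAT_COHERENT_WALK)
                    wmb();

            /* The by-IPA invalidation registers take address bits [N:12] */
            iova >>= 12;
            do {
                    if (smmu_domain->cfg.fmt == ARM_SMMU_CTX_FMT_AARCH64)
                            arm_smmu_cb_writeq(smmu, idx, reg, iova);
                    else
                            arm_smmu_cb_write(smmu, idx, reg, iova);
                    iova += granule >> 12;
            } while (size -= granule);
    }
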
364 struct arm_smmu_domain *smmu_domain = cookie; in arm_smmu_tlb_add_page_s2_v1() local
365 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_tlb_add_page_s2_v1()
370 arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid); in arm_smmu_tlb_add_page_s2_v1()
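
SMMUv1 cannot invalidate stage 2 by address at all, so the per-page hook at lines 364-370 falls back to dropping everything for the VMID. Sketch; the signature follows the io-pgtable tlb_add_page callback and is an assumption here:

    static void arm_smmu_tlb_add_page_s2_v1(struct iommu_iotlb_gather *gather,
                                            unsigned long iova, size_t granule,
                                            void *cookie)
    {
            struct arm_smmu_domain *smmu_domain = cookie;
            struct arm_smmu_device *smmu = smmu_domain->smmu;

            if (smmu->features & ARM_SMMU_FEAT_COHERENT_WALK)
                    wmb();

            /* No by-IPA op on SMMUv1: invalidate the whole VMID instead */
            arm_smmu_gr0_write(smmu, ARM_SMMU_GR0_TLBIVMID, smmu_domain->cfg.vmid);
    }
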
396 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_context_fault() local
397 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_context_fault()
398 int idx = smmu_domain->cfg.cbndx; in arm_smmu_context_fault()
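
The context fault handler (lines 396-398) recovers the domain from the IRQ cookie and reads fault state out of that domain's context bank. A plausible body; the FSR/FSYNR0/FAR reads, the IRQ_NONE bail-out, and the write-one-to-clear of FSR are assumptions based on typical SMMUv2 fault handling:

    static irqreturn_t arm_smmu_context_fault(int irq, void *dev)
    {
            u32 fsr, fsynr, cbfrsynra;
            unsigned long iova;
            struct iommu_domain *domain = dev;
            struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
            struct arm_smmu_device *smmu = smmu_domain->smmu;
            int idx = smmu_domain->cfg.cbndx;

            fsr = arm_smmu_cb_read(smmu, idx, ARM_SMMU_CB_FSR);
            if (!(fsr & ARM_SMMU_FSR_FAULT))
                    return IRQ_NONE;

            fsynr = arm_smmu_cb_read(smmu, idx, ARM_SMMU_CB_FSYNR0);
            iova = arm_smmu_cb_readq(smmu, idx, ARM_SMMU_CB_FAR);
            cbfrsynra = arm_smmu_gr1_read(smmu, ARM_SMMU_GR1_CBFRSYNRA(idx));

            dev_err_ratelimited(smmu->dev,
                    "Unhandled context fault: fsr=0x%x, iova=0x%08lx, fsynr=0x%x, cbfrsynra=0x%x, cb=%d\n",
                    fsr, iova, fsynr, cbfrsynra, idx);

            /* FSR is write-one-to-clear */
            arm_smmu_cb_write(smmu, idx, ARM_SMMU_CB_FSR, fsr);
            return IRQ_HANDLED;
    }
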
454 static void arm_smmu_init_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_init_context_bank() argument
457 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_context_bank()
458 struct arm_smmu_cb *cb = &smmu_domain->smmu->cbs[cfg->cbndx]; in arm_smmu_init_context_bank()
600 static int arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain, in arm_smmu_alloc_context_bank() argument
605 return smmu->impl->alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_alloc_context_bank()
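
Line 605 shows context-bank allocation deferring to an implementation hook. The generic fallback is presumably a bitmap scan over the free banks; __arm_smmu_alloc_bitmap, context_map, and num_context_banks below are names assumed for illustration:

    static int arm_smmu_alloc_context_bank(struct arm_smmu_domain *smmu_domain,
                                           struct arm_smmu_device *smmu,
                                           struct device *dev, unsigned int start)
    {
            /* Let an SoC-specific implementation pick the bank if it wants to */
            if (smmu->impl && smmu->impl->alloc_context_bank)
                    return smmu->impl->alloc_context_bank(smmu_domain, smmu,
                                                          dev, start);

            /* Otherwise grab the first free bank at or above 'start' */
            return __arm_smmu_alloc_bitmap(smmu->context_map, start,
                                           smmu->num_context_banks);
    }
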
619 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_init_domain_context() local
620 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_init_domain_context()
623 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
624 if (smmu_domain->smmu) in arm_smmu_init_domain_context()
628 smmu_domain->stage = ARM_SMMU_DOMAIN_BYPASS; in arm_smmu_init_domain_context()
629 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
652 smmu_domain->stage = ARM_SMMU_DOMAIN_S2; in arm_smmu_init_domain_context()
654 smmu_domain->stage = ARM_SMMU_DOMAIN_S1; in arm_smmu_init_domain_context()
669 (smmu_domain->stage == ARM_SMMU_DOMAIN_S1)) in arm_smmu_init_domain_context()
682 switch (smmu_domain->stage) { in arm_smmu_init_domain_context()
699 smmu_domain->flush_ops = &arm_smmu_s1_tlb_ops; in arm_smmu_init_domain_context()
719 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v2; in arm_smmu_init_domain_context()
721 smmu_domain->flush_ops = &arm_smmu_s2_tlb_ops_v1; in arm_smmu_init_domain_context()
728 ret = arm_smmu_alloc_context_bank(smmu_domain, smmu, dev, start); in arm_smmu_init_domain_context()
733 smmu_domain->smmu = smmu; in arm_smmu_init_domain_context()
743 if (smmu_domain->stage == ARM_SMMU_DOMAIN_S2) in arm_smmu_init_domain_context()
753 .tlb = smmu_domain->flush_ops, in arm_smmu_init_domain_context()
758 ret = smmu->impl->init_context(smmu_domain, &pgtbl_cfg, dev); in arm_smmu_init_domain_context()
763 if (smmu_domain->pgtbl_quirks) in arm_smmu_init_domain_context()
764 pgtbl_cfg.quirks |= smmu_domain->pgtbl_quirks; in arm_smmu_init_domain_context()
766 pgtbl_ops = alloc_io_pgtable_ops(fmt, &pgtbl_cfg, smmu_domain); in arm_smmu_init_domain_context()
785 arm_smmu_init_context_bank(smmu_domain, &pgtbl_cfg); in arm_smmu_init_domain_context()
807 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
810 smmu_domain->pgtbl_ops = pgtbl_ops; in arm_smmu_init_domain_context()
815 smmu_domain->smmu = NULL; in arm_smmu_init_domain_context()
817 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_init_domain_context()
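
Read in order, lines 619-817 give the whole shape of domain initialisation: it runs under init_mutex, returns early if smmu_domain->smmu is already set (line 624), short-circuits identity domains to ARM_SMMU_DOMAIN_BYPASS (lines 628-629), claims a context bank before publishing smmu_domain->smmu (lines 728-733), and clears smmu_domain->smmu on the failure path (line 815) so a later attach can retry. A condensed, non-compilable skeleton; the "..." comments stand for elided feature selection and hardware programming, and start/fmt would be set by that elided code:

    static int arm_smmu_init_domain_context(struct iommu_domain *domain,
                                            struct arm_smmu_device *smmu,
                                            struct device *dev)
    {
            struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
            struct io_pgtable_ops *pgtbl_ops;
            struct io_pgtable_cfg pgtbl_cfg;
            enum io_pgtable_fmt fmt;
            unsigned int start;
            int ret = 0;

            mutex_lock(&smmu_domain->init_mutex);
            if (smmu_domain->smmu)
                    goto out_unlock;                /* already initialised */

            if (domain->type == IOMMU_DOMAIN_IDENTITY) {
                    smmu_domain->stage = ARM_SMMU_DOMAIN_BYPASS;
                    smmu_domain->smmu = smmu;
                    goto out_unlock;                /* no context bank needed */
            }

            /* ... choose stage (S1/S2), fmt, start and flush_ops here ... */

            ret = arm_smmu_alloc_context_bank(smmu_domain, smmu, dev, start);
            if (ret < 0)
                    goto out_unlock;
            smmu_domain->cfg.cbndx = ret;
            smmu_domain->smmu = smmu;

            /* ... fill pgtbl_cfg, including smmu_domain->pgtbl_quirks ... */
            pgtbl_ops = alloc_io_pgtable_ops(fmt, &pgtbl_cfg, smmu_domain);
            if (!pgtbl_ops) {
                    ret = -ENOMEM;
                    goto out_clear_smmu;
            }

            arm_smmu_init_context_bank(smmu_domain, &pgtbl_cfg);
            /* ... write the context bank registers, request the fault IRQ ... */

            mutex_unlock(&smmu_domain->init_mutex);
            /* Publish the page-table ops only once everything else is live */
            smmu_domain->pgtbl_ops = pgtbl_ops;
            return 0;

    out_clear_smmu:
            smmu_domain->smmu = NULL;
    out_unlock:
            mutex_unlock(&smmu_domain->init_mutex);
            return ret;
    }
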
823 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_destroy_domain_context() local
824 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_destroy_domain_context()
825 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_destroy_domain_context()
847 free_io_pgtable_ops(smmu_domain->pgtbl_ops); in arm_smmu_destroy_domain_context()
855 struct arm_smmu_domain *smmu_domain; in arm_smmu_domain_alloc() local
866 smmu_domain = kzalloc(sizeof(*smmu_domain), GFP_KERNEL); in arm_smmu_domain_alloc()
867 if (!smmu_domain) in arm_smmu_domain_alloc()
870 mutex_init(&smmu_domain->init_mutex); in arm_smmu_domain_alloc()
871 spin_lock_init(&smmu_domain->cb_lock); in arm_smmu_domain_alloc()
873 return &smmu_domain->domain; in arm_smmu_domain_alloc()
878 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_domain_free() local
885 kfree(smmu_domain); in arm_smmu_domain_free()
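
Allocation (lines 855-873) does nothing hardware-facing: it zero-allocates the domain and initialises the two locks, leaving everything else for init-on-first-attach; free (lines 878-885) is the mirror image. A sketch of the allocator; the domain-type check is assumed from the standard iommu_domain_alloc contract:

    static struct iommu_domain *arm_smmu_domain_alloc(unsigned type)
    {
            struct arm_smmu_domain *smmu_domain;

            if (type != IOMMU_DOMAIN_UNMANAGED && type != IOMMU_DOMAIN_DMA &&
                type != IOMMU_DOMAIN_IDENTITY)
                    return NULL;

            /* Nothing useful can happen until a master is attached */
            smmu_domain = kzalloc(sizeof(*smmu_domain), GFP_KERNEL);
            if (!smmu_domain)
                    return NULL;

            mutex_init(&smmu_domain->init_mutex);
            spin_lock_init(&smmu_domain->cb_lock);

            return &smmu_domain->domain;
    }
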
1084 static int arm_smmu_domain_add_master(struct arm_smmu_domain *smmu_domain, in arm_smmu_domain_add_master() argument
1088 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_domain_add_master()
1090 u8 cbndx = smmu_domain->cfg.cbndx; in arm_smmu_domain_add_master()
1094 if (smmu_domain->stage == ARM_SMMU_DOMAIN_BYPASS) in arm_smmu_domain_add_master()
1113 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_attach_dev() local
1150 if (smmu_domain->smmu != smmu) { in arm_smmu_attach_dev()
1156 ret = arm_smmu_domain_add_master(smmu_domain, cfg, fwspec); in arm_smmu_attach_dev()
1215 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_flush_iotlb_all() local
1216 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_flush_iotlb_all()
1218 if (smmu_domain->flush_ops) { in arm_smmu_flush_iotlb_all()
1220 smmu_domain->flush_ops->tlb_flush_all(smmu_domain); in arm_smmu_flush_iotlb_all()
1228 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iotlb_sync() local
1229 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iotlb_sync()
1236 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iotlb_sync()
1237 arm_smmu_tlb_sync_context(smmu_domain); in arm_smmu_iotlb_sync()
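
The sync path (lines 1228-1237) picks between a per-context and a global TLBSYNC: SMMUv2 and all stage-1 domains can sync on their own context bank; anything else needs a global sync. Sketch; the arm_smmu_rpm_get/put runtime-PM bracketing and the global fallback are assumptions:

    static void arm_smmu_iotlb_sync(struct iommu_domain *domain,
                                    struct iommu_iotlb_gather *gather)
    {
            struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
            struct arm_smmu_device *smmu = smmu_domain->smmu;

            if (!smmu)
                    return;

            arm_smmu_rpm_get(smmu);
            if (smmu->version == ARM_SMMU_V2 ||
                smmu_domain->stage == ARM_SMMU_DOMAIN_S1)
                    arm_smmu_tlb_sync_context(smmu_domain);
            else
                    arm_smmu_tlb_sync_global(smmu);
            arm_smmu_rpm_put(smmu);
    }
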
1246 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys_hard() local
1247 struct arm_smmu_device *smmu = smmu_domain->smmu; in arm_smmu_iova_to_phys_hard()
1248 struct arm_smmu_cfg *cfg = &smmu_domain->cfg; in arm_smmu_iova_to_phys_hard()
1249 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys_hard()
1262 spin_lock_irqsave(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1272 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1281 spin_unlock_irqrestore(&smmu_domain->cb_lock, flags); in arm_smmu_iova_to_phys_hard()
1298 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_iova_to_phys() local
1299 struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops; in arm_smmu_iova_to_phys()
1304 if (smmu_domain->smmu->features & ARM_SMMU_FEAT_TRANS_OPS && in arm_smmu_iova_to_phys()
1305 smmu_domain->stage == ARM_SMMU_DOMAIN_S1) in arm_smmu_iova_to_phys()
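
Lines 1298-1305 show the software/hardware split for address translation: the hardware path (arm_smmu_iova_to_phys_hard, which takes cb_lock per lines 1262-1281) is only used when the SMMU advertises ARM_SMMU_FEAT_TRANS_OPS and the domain is stage 1; everything else walks the page tables in software. Sketch; the !ops early return is an assumption:

    static phys_addr_t arm_smmu_iova_to_phys(struct iommu_domain *domain,
                                             dma_addr_t iova)
    {
            struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
            struct io_pgtable_ops *ops = smmu_domain->pgtbl_ops;

            if (!ops)
                    return 0;

            /* Use the hardware address-translation op when it is usable */
            if (smmu_domain->smmu->features & ARM_SMMU_FEAT_TRANS_OPS &&
                smmu_domain->stage == ARM_SMMU_DOMAIN_S1)
                    return arm_smmu_iova_to_phys_hard(domain, iova);

            /* Otherwise walk the io-pgtable in software */
            return ops->iova_to_phys(ops, iova);
    }
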
1488 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_enable_nesting() local
1491 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_enable_nesting()
1492 if (smmu_domain->smmu) in arm_smmu_enable_nesting()
1495 smmu_domain->stage = ARM_SMMU_DOMAIN_NESTED; in arm_smmu_enable_nesting()
1496 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_enable_nesting()
1504 struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain); in arm_smmu_set_pgtable_quirks() local
1507 mutex_lock(&smmu_domain->init_mutex); in arm_smmu_set_pgtable_quirks()
1508 if (smmu_domain->smmu) in arm_smmu_set_pgtable_quirks()
1511 smmu_domain->pgtbl_quirks = quirks; in arm_smmu_set_pgtable_quirks()
1512 mutex_unlock(&smmu_domain->init_mutex); in arm_smmu_set_pgtable_quirks()
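
Both late-configuration hooks (lines 1488-1512) share a lock-check-set pattern: they only succeed before the domain has been bound to an SMMU, since stage and quirks feed into context-bank setup. Sketch of the quirks setter; the -EPERM return for an already-initialised domain is an assumption, and enable_nesting differs only in setting smmu_domain->stage = ARM_SMMU_DOMAIN_NESTED instead:

    static int arm_smmu_set_pgtable_quirks(struct iommu_domain *domain,
                                           unsigned long quirks)
    {
            struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
            int ret = 0;

            mutex_lock(&smmu_domain->init_mutex);
            if (smmu_domain->smmu)
                    ret = -EPERM;   /* too late: context bank already set up */
            else
                    smmu_domain->pgtbl_quirks = quirks;
            mutex_unlock(&smmu_domain->init_mutex);

            return ret;
    }
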