
Searched refs:domain (Results 1 – 25 of 536) sorted by relevance


/drivers/iommu/
ipmmu-vmsa.c
267 static u32 ipmmu_ctx_read_root(struct ipmmu_vmsa_domain *domain, in ipmmu_ctx_read_root() argument
270 return ipmmu_read(domain->mmu->root, in ipmmu_ctx_read_root()
271 domain->context_id * IM_CTX_SIZE + reg); in ipmmu_ctx_read_root()
274 static void ipmmu_ctx_write_root(struct ipmmu_vmsa_domain *domain, in ipmmu_ctx_write_root() argument
277 ipmmu_write(domain->mmu->root, in ipmmu_ctx_write_root()
278 domain->context_id * IM_CTX_SIZE + reg, data); in ipmmu_ctx_write_root()
281 static void ipmmu_ctx_write_all(struct ipmmu_vmsa_domain *domain, in ipmmu_ctx_write_all() argument
284 if (domain->mmu != domain->mmu->root) in ipmmu_ctx_write_all()
285 ipmmu_write(domain->mmu, in ipmmu_ctx_write_all()
286 domain->context_id * IM_CTX_SIZE + reg, data); in ipmmu_ctx_write_all()
[all …]
intel-iommu.c
310 #define for_each_domain_iommu(idx, domain) \ argument
312 if (domain->iommu_refcnt[idx])
340 static void domain_exit(struct dmar_domain *domain);
341 static void domain_remove_dev_info(struct dmar_domain *domain);
346 static int domain_detach_iommu(struct dmar_domain *domain,
349 static int intel_iommu_attach_device(struct iommu_domain *domain,
351 static phys_addr_t intel_iommu_iova_to_phys(struct iommu_domain *domain,
434 return container_of(dom, struct dmar_domain, domain); in to_dmar_domain()
497 struct dmar_domain *domain) in set_iommu_domain() argument
511 domains[did & 0xff] = domain; in set_iommu_domain()
[all …]
exynos-iommu.c
236 struct iommu_domain *domain; /* domain this device is attached */ member
252 struct iommu_domain domain; /* generic domain data structure */ member
272 struct exynos_iommu_domain *domain; /* domain we belong to */ member
283 return container_of(dom, struct exynos_iommu_domain, domain); in to_exynos_domain()
443 if (data->domain) in exynos_sysmmu_irq()
444 ret = report_iommu_fault(&data->domain->domain, in exynos_sysmmu_irq()
676 if (data->domain) { in exynos_sysmmu_suspend()
694 if (data->domain) { in exynos_sysmmu_resume()
735 struct exynos_iommu_domain *domain; in exynos_iommu_domain_alloc() local
742 domain = kzalloc(sizeof(*domain), GFP_KERNEL); in exynos_iommu_domain_alloc()
[all …]
amd_iommu.c
102 static void update_domain(struct protection_domain *domain);
103 static int protection_domain_init(struct protection_domain *domain);
112 struct protection_domain domain; member
187 return container_of(dom, struct protection_domain, domain); in to_pdomain()
190 static struct dma_ops_domain* to_dma_ops_domain(struct protection_domain *domain) in to_dma_ops_domain() argument
192 BUG_ON(domain->flags != PD_DMA_OPS_MASK); in to_dma_ops_domain()
193 return container_of(domain, struct dma_ops_domain, domain); in to_dma_ops_domain()
474 if (dev_data->domain) in iommu_uninit_device()
1269 static void __domain_flush_pages(struct protection_domain *domain, in __domain_flush_pages() argument
1276 build_inv_iommu_pages(&cmd, address, size, domain->id, pde); in __domain_flush_pages()
[all …]
iommu.c
46 struct iommu_domain *domain; member
222 static int __iommu_attach_device(struct iommu_domain *domain,
224 static int __iommu_attach_group(struct iommu_domain *domain,
226 static void __iommu_detach_group(struct iommu_domain *domain,
643 struct iommu_domain *domain = group->default_domain; in iommu_group_create_direct_mappings() local
649 if (!domain || domain->type != IOMMU_DOMAIN_DMA) in iommu_group_create_direct_mappings()
652 BUG_ON(!domain->pgsize_bitmap); in iommu_group_create_direct_mappings()
654 pg_size = 1UL << __ffs(domain->pgsize_bitmap); in iommu_group_create_direct_mappings()
663 if (domain->ops->apply_resv_region) in iommu_group_create_direct_mappings()
664 domain->ops->apply_resv_region(dev, domain, entry); in iommu_group_create_direct_mappings()
[all …]
s390-iommu.c
25 struct iommu_domain domain; member
39 return container_of(dom, struct s390_domain, domain); in to_s390_domain()
75 return &s390_domain->domain; in s390_domain_alloc()
78 static void s390_domain_free(struct iommu_domain *domain) in s390_domain_free() argument
80 struct s390_domain *s390_domain = to_s390_domain(domain); in s390_domain_free()
86 static int s390_iommu_attach_device(struct iommu_domain *domain, in s390_iommu_attach_device() argument
89 struct s390_domain *s390_domain = to_s390_domain(domain); in s390_iommu_attach_device()
114 domain->geometry.aperture_start = zdev->start_dma; in s390_iommu_attach_device()
115 domain->geometry.aperture_end = zdev->end_dma; in s390_iommu_attach_device()
116 domain->geometry.force_aperture = true; in s390_iommu_attach_device()
[all …]
dma-iommu.c
78 int iommu_get_dma_cookie(struct iommu_domain *domain) in iommu_get_dma_cookie() argument
80 if (domain->iova_cookie) in iommu_get_dma_cookie()
83 domain->iova_cookie = cookie_alloc(IOMMU_DMA_IOVA_COOKIE); in iommu_get_dma_cookie()
84 if (!domain->iova_cookie) in iommu_get_dma_cookie()
103 int iommu_get_msi_cookie(struct iommu_domain *domain, dma_addr_t base) in iommu_get_msi_cookie() argument
107 if (domain->type != IOMMU_DOMAIN_UNMANAGED) in iommu_get_msi_cookie()
110 if (domain->iova_cookie) in iommu_get_msi_cookie()
118 domain->iova_cookie = cookie; in iommu_get_msi_cookie()
130 void iommu_put_dma_cookie(struct iommu_domain *domain) in iommu_put_dma_cookie() argument
132 struct iommu_dma_cookie *cookie = domain->iova_cookie; in iommu_put_dma_cookie()
[all …]
tegra-gart.c
106 static int gart_iommu_attach_dev(struct iommu_domain *domain, in gart_iommu_attach_dev() argument
114 if (gart->active_domain && gart->active_domain != domain) { in gart_iommu_attach_dev()
116 } else if (dev->archdata.iommu != domain) { in gart_iommu_attach_dev()
117 dev->archdata.iommu = domain; in gart_iommu_attach_dev()
118 gart->active_domain = domain; in gart_iommu_attach_dev()
127 static void gart_iommu_detach_dev(struct iommu_domain *domain, in gart_iommu_detach_dev() argument
134 if (dev->archdata.iommu == domain) { in gart_iommu_detach_dev()
146 struct iommu_domain *domain; in gart_iommu_domain_alloc() local
151 domain = kzalloc(sizeof(*domain), GFP_KERNEL); in gart_iommu_domain_alloc()
152 if (domain) { in gart_iommu_domain_alloc()
[all …]
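
Several of the IOMMU hits above (exynos-iommu.c, amd_iommu.c, s390-iommu.c, tegra-gart.c) show the same idiom: the driver embeds the generic struct iommu_domain inside its own private domain structure and converts back with container_of() in a to_xxx_domain() helper. Below is a minimal sketch of that idiom under hypothetical names (struct my_domain, my_domain_alloc and friends are illustrative, not taken from any of the drivers listed):

#include <linux/kernel.h>
#include <linux/iommu.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/* Hypothetical driver-private domain wrapping the generic one. */
struct my_domain {
	spinlock_t		lock;	/* driver-specific state */
	struct iommu_domain	domain;	/* generic domain, as in the results above */
};

/* Convert the generic pointer handed in by the IOMMU core back to ours. */
static struct my_domain *to_my_domain(struct iommu_domain *dom)
{
	return container_of(dom, struct my_domain, domain);
}

static struct iommu_domain *my_domain_alloc(unsigned type)
{
	struct my_domain *md;

	if (type != IOMMU_DOMAIN_UNMANAGED)
		return NULL;

	md = kzalloc(sizeof(*md), GFP_KERNEL);
	if (!md)
		return NULL;

	spin_lock_init(&md->lock);
	return &md->domain;	/* the core only ever sees the embedded member */
}

static void my_domain_free(struct iommu_domain *dom)
{
	kfree(to_my_domain(dom));
}

The to_dmar_domain(), to_pdomain(), to_exynos_domain() and to_s390_domain() helpers in the snippets above all follow this shape; the generic IOMMU API only ever passes around the embedded struct iommu_domain.
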
/drivers/soc/imx/
gpcv2.c
132 struct imx_pgc_domain *domain = container_of(genpd, in imx_gpc_pu_pgc_sw_pxx_req() local
138 const bool has_regulator = !IS_ERR(domain->regulator); in imx_gpc_pu_pgc_sw_pxx_req()
142 regmap_update_bits(domain->regmap, GPC_PGC_CPU_MAPPING, in imx_gpc_pu_pgc_sw_pxx_req()
143 domain->bits.map, domain->bits.map); in imx_gpc_pu_pgc_sw_pxx_req()
146 ret = regulator_enable(domain->regulator); in imx_gpc_pu_pgc_sw_pxx_req()
148 dev_err(domain->dev, "failed to enable regulator\n"); in imx_gpc_pu_pgc_sw_pxx_req()
154 for (i = 0; i < domain->num_clks; i++) in imx_gpc_pu_pgc_sw_pxx_req()
155 clk_prepare_enable(domain->clk[i]); in imx_gpc_pu_pgc_sw_pxx_req()
158 regmap_update_bits(domain->regmap, GPC_PGC_CTRL(domain->pgc), in imx_gpc_pu_pgc_sw_pxx_req()
161 if (domain->bits.hsk) in imx_gpc_pu_pgc_sw_pxx_req()
[all …]
gpc.c
129 static int imx_pgc_get_clocks(struct device *dev, struct imx_pm_domain *domain) in imx_pgc_get_clocks() argument
142 domain->clk[i] = clk; in imx_pgc_get_clocks()
144 domain->num_clks = i; in imx_pgc_get_clocks()
150 clk_put(domain->clk[i]); in imx_pgc_get_clocks()
155 static void imx_pgc_put_clocks(struct imx_pm_domain *domain) in imx_pgc_put_clocks() argument
159 for (i = domain->num_clks - 1; i >= 0; i--) in imx_pgc_put_clocks()
160 clk_put(domain->clk[i]); in imx_pgc_put_clocks()
163 static int imx_pgc_parse_dt(struct device *dev, struct imx_pm_domain *domain) in imx_pgc_parse_dt() argument
166 domain->supply = devm_regulator_get_optional(dev, "power"); in imx_pgc_parse_dt()
167 if (IS_ERR(domain->supply)) { in imx_pgc_parse_dt()
[all …]
/drivers/dca/
dca-core.c
44 struct dca_domain *domain; in dca_allocate_domain() local
46 domain = kzalloc(sizeof(*domain), GFP_NOWAIT); in dca_allocate_domain()
47 if (!domain) in dca_allocate_domain()
50 INIT_LIST_HEAD(&domain->dca_providers); in dca_allocate_domain()
51 domain->pci_rc = rc; in dca_allocate_domain()
53 return domain; in dca_allocate_domain()
56 static void dca_free_domain(struct dca_domain *domain) in dca_free_domain() argument
58 list_del(&domain->node); in dca_free_domain()
59 kfree(domain); in dca_free_domain()
81 struct dca_domain *domain; in unregister_dca_providers() local
[all …]
/drivers/soc/dove/
pmu.c
143 static int pmu_domain_power_off(struct generic_pm_domain *domain) in pmu_domain_power_off() argument
145 struct pmu_domain *pmu_dom = to_pmu_domain(domain); in pmu_domain_power_off()
177 static int pmu_domain_power_on(struct generic_pm_domain *domain) in pmu_domain_power_on() argument
179 struct pmu_domain *pmu_dom = to_pmu_domain(domain); in pmu_domain_power_on()
211 static void __pmu_domain_register(struct pmu_domain *domain, in __pmu_domain_register() argument
214 unsigned int val = readl_relaxed(domain->pmu->pmu_base + PMU_PWR); in __pmu_domain_register()
216 domain->base.power_off = pmu_domain_power_off; in __pmu_domain_register()
217 domain->base.power_on = pmu_domain_power_on; in __pmu_domain_register()
219 pm_genpd_init(&domain->base, NULL, !(val & domain->pwr_mask)); in __pmu_domain_register()
222 of_genpd_add_provider_simple(np, &domain->base); in __pmu_domain_register()
[all …]
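
The dove pmu.c hit (and zynqmp_pm_domains.c further down) shows the generic power-domain flavour of the same pattern: a driver structure embeds struct generic_pm_domain, fills in the .power_on/.power_off callbacks and registers it with pm_genpd_init(). A minimal sketch, again under hypothetical my_pm_domain names:

#include <linux/kernel.h>
#include <linux/pm_domain.h>
#include <linux/printk.h>

/* Hypothetical wrapper; "base" plays the role of "domain->base" above. */
struct my_pm_domain {
	struct generic_pm_domain base;
	u32 pwr_mask;			/* driver-specific power-control bit(s) */
};

static int my_pm_domain_power_on(struct generic_pm_domain *genpd)
{
	struct my_pm_domain *pd = container_of(genpd, struct my_pm_domain, base);

	/* program pd->pwr_mask into the power controller here */
	pr_debug("powering on domain, mask %#x\n", pd->pwr_mask);
	return 0;
}

static int my_pm_domain_power_off(struct generic_pm_domain *genpd)
{
	struct my_pm_domain *pd = container_of(genpd, struct my_pm_domain, base);

	/* clear pd->pwr_mask in the power controller here */
	pr_debug("powering off domain, mask %#x\n", pd->pwr_mask);
	return 0;
}

static int my_pm_domain_register(struct my_pm_domain *pd, bool is_off)
{
	pd->base.power_on = my_pm_domain_power_on;
	pd->base.power_off = my_pm_domain_power_off;

	/* NULL governor, initial state chosen by the caller, as in __pmu_domain_register() */
	return pm_genpd_init(&pd->base, NULL, is_off);
}

of_genpd_add_provider_simple() (visible in the pmu.c snippet) is what then exposes the registered domain to device-tree consumers.
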
/drivers/firmware/arm_scmi/
perf.c
59 __le32 domain; member
64 __le32 domain; member
75 __le32 domain; member
80 __le32 domain; member
96 __le32 domain; member
185 scmi_perf_domain_attributes_get(const struct scmi_handle *handle, u32 domain, in scmi_perf_domain_attributes_get() argument
193 SCMI_PROTOCOL_PERF, sizeof(domain), in scmi_perf_domain_attributes_get()
198 put_unaligned_le32(domain, t->tx.buf); in scmi_perf_domain_attributes_get()
237 scmi_perf_describe_levels_get(const struct scmi_handle *handle, u32 domain, in scmi_perf_describe_levels_get() argument
257 dom_info->domain = cpu_to_le32(domain); in scmi_perf_describe_levels_get()
[all …]
reset.c
78 scmi_reset_domain_attributes_get(const struct scmi_handle *handle, u32 domain, in scmi_reset_domain_attributes_get() argument
86 SCMI_PROTOCOL_RESET, sizeof(domain), in scmi_reset_domain_attributes_get()
91 put_unaligned_le32(domain, t->tx.buf); in scmi_reset_domain_attributes_get()
117 static char *scmi_reset_name_get(const struct scmi_handle *handle, u32 domain) in scmi_reset_name_get() argument
120 struct reset_dom_info *dom = pi->dom_info + domain; in scmi_reset_name_get()
125 static int scmi_reset_latency_get(const struct scmi_handle *handle, u32 domain) in scmi_reset_latency_get() argument
128 struct reset_dom_info *dom = pi->dom_info + domain; in scmi_reset_latency_get()
133 static int scmi_domain_reset(const struct scmi_handle *handle, u32 domain, in scmi_domain_reset() argument
140 struct reset_dom_info *rdom = pi->dom_info + domain; in scmi_domain_reset()
151 dom->domain_id = cpu_to_le32(domain); in scmi_domain_reset()
[all …]
power.c
36 __le32 domain; member
41 __le32 domain; member
86 scmi_power_domain_attributes_get(const struct scmi_handle *handle, u32 domain, in scmi_power_domain_attributes_get() argument
94 SCMI_PROTOCOL_POWER, sizeof(domain), in scmi_power_domain_attributes_get()
99 put_unaligned_le32(domain, t->tx.buf); in scmi_power_domain_attributes_get()
117 scmi_power_state_set(const struct scmi_handle *handle, u32 domain, u32 state) in scmi_power_state_set() argument
130 st->domain = cpu_to_le32(domain); in scmi_power_state_set()
140 scmi_power_state_get(const struct scmi_handle *handle, u32 domain, u32 *state) in scmi_power_state_get() argument
150 put_unaligned_le32(domain, t->tx.buf); in scmi_power_state_get()
167 static char *scmi_power_name_get(const struct scmi_handle *handle, u32 domain) in scmi_power_name_get() argument
[all …]
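
In the arm_scmi results the word means something different again: a numeric domain identifier that the protocol code serialises into the transmit buffer as a little-endian 32-bit value, either through a __le32 struct member with cpu_to_le32() or directly with put_unaligned_le32(). A small sketch of both marshaling styles seen above; struct my_scmi_set_state is a hypothetical layout, not the real SCMI message format:

#include <linux/types.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

/* Hypothetical two-word message: domain id followed by a payload word. */
struct my_scmi_set_state {
	__le32 domain;
	__le32 state;
};

static void my_fill_set_state(struct my_scmi_set_state *msg, u32 domain, u32 state)
{
	/* explicit endian conversion, as in scmi_power_state_set() above */
	msg->domain = cpu_to_le32(domain);
	msg->state = cpu_to_le32(state);
}

static void my_fill_attributes_request(void *tx_buf, u32 domain)
{
	/* single-word requests write the id straight into the buffer,
	 * as in scmi_power_domain_attributes_get() above */
	put_unaligned_le32(domain, tx_buf);
}
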
/drivers/irqchip/
irq-atmel-aic5.c
88 struct irq_domain *domain = d->domain; in aic5_mask() local
89 struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0); in aic5_mask()
105 struct irq_domain *domain = d->domain; in aic5_unmask() local
106 struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0); in aic5_unmask()
122 struct irq_domain *domain = d->domain; in aic5_retrigger() local
123 struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0); in aic5_retrigger()
136 struct irq_domain *domain = d->domain; in aic5_set_type() local
137 struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0); in aic5_set_type()
157 struct irq_domain *domain = d->domain; in aic5_suspend() local
158 struct irq_domain_chip_generic *dgc = domain->gc; in aic5_suspend()
[all …]
irq-ti-sci-intr.c
66 static int ti_sci_intr_irq_domain_translate(struct irq_domain *domain, in ti_sci_intr_irq_domain_translate() argument
71 struct ti_sci_intr_irq_domain *intr = domain->host_data; in ti_sci_intr_irq_domain_translate()
88 static void ti_sci_intr_irq_domain_free(struct irq_domain *domain, in ti_sci_intr_irq_domain_free() argument
91 struct ti_sci_intr_irq_domain *intr = domain->host_data; in ti_sci_intr_irq_domain_free()
95 parent_data = irq_domain_get_irq_data(domain->parent, virq); in ti_sci_intr_irq_domain_free()
96 data = irq_domain_get_irq_data(domain, virq); in ti_sci_intr_irq_domain_free()
103 irq_domain_free_irqs_parent(domain, virq, 1); in ti_sci_intr_irq_domain_free()
115 static int ti_sci_intr_alloc_gic_irq(struct irq_domain *domain, in ti_sci_intr_alloc_gic_irq() argument
118 struct ti_sci_intr_irq_domain *intr = domain->host_data; in ti_sci_intr_alloc_gic_irq()
131 fwspec.fwnode = domain->parent->fwnode; in ti_sci_intr_alloc_gic_irq()
[all …]
irq-mmp.c
50 struct irq_domain *domain; member
69 struct irq_domain *domain = d->domain; in icu_mask_ack_irq() local
70 struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data; in icu_mask_ack_irq()
93 struct irq_domain *domain = d->domain; in icu_mask_irq() local
94 struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data; in icu_mask_irq()
122 struct irq_domain *domain = d->domain; in icu_unmask_irq() local
123 struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data; in icu_unmask_irq()
150 struct irq_domain *domain; in icu_mux_irq_demux() local
159 domain = icu_data[i].domain; in icu_mux_irq_demux()
160 data = (struct icu_chip_data *)domain->host_data; in icu_mux_irq_demux()
[all …]
irq-ath79-misc.c
34 struct irq_domain *domain = irq_desc_get_handler_data(desc); in ath79_misc_irq_handler() local
36 void __iomem *base = domain->host_data; in ath79_misc_irq_handler()
53 generic_handle_irq(irq_linear_revmap(domain, bit)); in ath79_misc_irq_handler()
118 struct irq_domain *domain, int irq) in ath79_misc_intc_domain_init() argument
120 void __iomem *base = domain->host_data; in ath79_misc_intc_domain_init()
122 ath79_perfcount_irq = irq_create_mapping(domain, ATH79_MISC_PERF_IRQ); in ath79_misc_intc_domain_init()
128 irq_set_chained_handler_and_data(irq, ath79_misc_irq_handler, domain); in ath79_misc_intc_domain_init()
134 struct irq_domain *domain; in ath79_misc_intc_of_init() local
150 domain = irq_domain_add_linear(node, ATH79_MISC_IRQ_COUNT, in ath79_misc_intc_of_init()
152 if (!domain) { in ath79_misc_intc_of_init()
[all …]
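
The irqchip hits use yet another kind of domain, struct irq_domain, which maps hardware interrupt numbers to Linux virqs. The irq-ath79-misc.c snippet above shows the usual linear-domain setup; the sketch below walks the same path (create a linear domain, then demultiplex a chained parent interrupt with irq_linear_revmap()). Names such as my_intc_* and MY_IRQ_COUNT are illustrative only:

#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/irq.h>
#include <linux/irqdesc.h>
#include <linux/irqdomain.h>
#include <linux/irqchip/chained_irq.h>
#include <linux/of.h>

#define MY_IRQ_COUNT	32	/* hypothetical number of hwirqs behind the mux */

static const struct irq_domain_ops my_intc_domain_ops;	/* .map/.xlate as needed */

static void my_intc_irq_handler(struct irq_desc *desc)
{
	/* the domain was stashed as handler data, as in ath79_misc_irq_handler() */
	struct irq_domain *domain = irq_desc_get_handler_data(desc);
	struct irq_chip *chip = irq_desc_get_chip(desc);
	unsigned long pending = 0;	/* read the controller's status register here */
	int bit;

	chained_irq_enter(chip, desc);
	for_each_set_bit(bit, &pending, MY_IRQ_COUNT)
		generic_handle_irq(irq_linear_revmap(domain, bit));
	chained_irq_exit(chip, desc);
}

static int my_intc_init(struct device_node *node, int parent_irq)
{
	struct irq_domain *domain;

	domain = irq_domain_add_linear(node, MY_IRQ_COUNT,
				       &my_intc_domain_ops, NULL);
	if (!domain)
		return -ENOMEM;

	/* route the muxed parent interrupt through the demux handler above */
	irq_set_chained_handler_and_data(parent_irq, my_intc_irq_handler, domain);

	return 0;
}
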
/drivers/media/platform/qcom/venus/
hfi_parser.c
25 cap->domain = VIDC_SESSION_TYPE_DEC; in init_codecs()
32 cap->domain = VIDC_SESSION_TYPE_ENC; in init_codecs()
38 u32 codecs, u32 domain, func cb, void *data, in for_each_codec() argument
46 if (cap->valid && cap->domain == domain) in for_each_codec()
48 if (cap->codec & codecs && cap->domain == domain) in for_each_codec()
63 parse_alloc_mode(struct venus_core *core, u32 codecs, u32 domain, void *data) in parse_alloc_mode() argument
78 codecs, domain, fill_buf_mode, type, 1); in parse_alloc_mode()
94 parse_profile_level(struct venus_core *core, u32 codecs, u32 domain, void *data) in parse_profile_level() argument
105 for_each_codec(core->caps, ARRAY_SIZE(core->caps), codecs, domain, in parse_profile_level()
119 parse_caps(struct venus_core *core, u32 codecs, u32 domain, void *data) in parse_caps() argument
[all …]
/drivers/vfio/
vfio_iommu_type1.c
76 struct iommu_domain *domain; member
661 static long vfio_sync_unpin(struct vfio_dma *dma, struct vfio_domain *domain, in vfio_sync_unpin() argument
668 iommu_tlb_sync(domain->domain, iotlb_gather); in vfio_sync_unpin()
694 static size_t unmap_unpin_fast(struct vfio_domain *domain, in unmap_unpin_fast() argument
705 unmapped = iommu_unmap_fast(domain->domain, *iova, len, in unmap_unpin_fast()
726 *unlocked += vfio_sync_unpin(dma, domain, unmapped_list, in unmap_unpin_fast()
734 static size_t unmap_unpin_slow(struct vfio_domain *domain, in unmap_unpin_slow() argument
739 size_t unmapped = iommu_unmap(domain->domain, *iova, len); in unmap_unpin_slow()
756 struct vfio_domain *domain, *d; in vfio_unmap_unpin() local
775 domain = d = list_first_entry(&iommu->domain_list, in vfio_unmap_unpin()
[all …]
/drivers/gpu/drm/etnaviv/
etnaviv_perfmon.c
19 const struct etnaviv_pm_domain *domain,
40 const struct etnaviv_pm_domain *domain, in perf_reg_read() argument
43 gpu_write(gpu, domain->profile_config, signal->data); in perf_reg_read()
45 return gpu_read(gpu, domain->profile_read); in perf_reg_read()
49 const struct etnaviv_pm_domain *domain, in pipe_reg_read() argument
60 gpu_write(gpu, domain->profile_config, signal->data); in pipe_reg_read()
61 value += gpu_read(gpu, domain->profile_read); in pipe_reg_read()
73 const struct etnaviv_pm_domain *domain, in hi_total_cycle_read() argument
87 const struct etnaviv_pm_domain *domain, in hi_total_idle_cycle_read() argument
427 struct drm_etnaviv_pm_domain *domain) in etnaviv_pm_query_dom() argument
[all …]
/drivers/soc/xilinx/
zynqmp_pm_domains.c
72 static int zynqmp_gpd_power_on(struct generic_pm_domain *domain) in zynqmp_gpd_power_on() argument
80 pd = container_of(domain, struct zynqmp_pm_domain, gpd); in zynqmp_gpd_power_on()
87 __func__, domain->name, pd->node_id, ret); in zynqmp_gpd_power_on()
91 pr_debug("%s() Powered on %s domain\n", __func__, domain->name); in zynqmp_gpd_power_on()
104 static int zynqmp_gpd_power_off(struct generic_pm_domain *domain) in zynqmp_gpd_power_off() argument
115 pd = container_of(domain, struct zynqmp_pm_domain, gpd); in zynqmp_gpd_power_off()
120 __func__, domain->name); in zynqmp_gpd_power_off()
124 list_for_each_entry_safe(pdd, tmp, &domain->dev_list, list_node) { in zynqmp_gpd_power_off()
129 domain->name); in zynqmp_gpd_power_off()
143 __func__, domain->name, pd->node_id, ret); in zynqmp_gpd_power_off()
[all …]
/drivers/gpu/drm/i915/display/
intel_display_power.h
232 #define for_each_power_domain(domain, mask) \ argument
233 for ((domain) = 0; (domain) < POWER_DOMAIN_NUM; (domain)++) \
234 for_each_if(BIT_ULL(domain) & (mask))
273 enum intel_display_power_domain domain);
276 enum intel_display_power_domain domain);
278 enum intel_display_power_domain domain);
280 enum intel_display_power_domain domain);
283 enum intel_display_power_domain domain);
285 enum intel_display_power_domain domain);
287 enum intel_display_power_domain domain,
[all …]
/drivers/gpu/drm/nouveau/nvkm/engine/device/
ctrl.c
74 const struct nvkm_domain *domain; in nvkm_control_mthd_pstate_attr() local
94 domain = clk->domains; in nvkm_control_mthd_pstate_attr()
96 while (domain->name != nv_clk_src_max) { in nvkm_control_mthd_pstate_attr()
97 if (domain->mname && ++j == args->v0.index) in nvkm_control_mthd_pstate_attr()
99 domain++; in nvkm_control_mthd_pstate_attr()
102 if (domain->name == nv_clk_src_max) in nvkm_control_mthd_pstate_attr()
111 lo = pstate->base.domain[domain->name]; in nvkm_control_mthd_pstate_attr()
114 lo = min(lo, cstate->domain[domain->name]); in nvkm_control_mthd_pstate_attr()
115 hi = max(hi, cstate->domain[domain->name]); in nvkm_control_mthd_pstate_attr()
120 lo = max(nvkm_clk_read(clk, domain->name), 0); in nvkm_control_mthd_pstate_attr()
[all …]
