Lines Matching full:v3d
74 /* Short representation (e.g. 33, 41) of the V3D tech version
99 /* virtual address bits from V3D to the MMU. */
102 /* Number of V3D cores. */
161 v3d_has_csd(struct v3d_dev *v3d)
163 	return v3d->ver >= 41;
166 #define v3d_to_pdev(v3d) to_platform_device((v3d)->drm.dev)
170 struct v3d_dev *v3d;
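v3d_has_csd() gates compute shader dispatch (CSD) on the hardware version: per the comment at line 74, 41 is the short form of V3D 4.1. A minimal sketch of how such a check might gate a submit path; the wrapper function and error value below are assumptions, not lines from this file:

/* Hypothetical caller: reject CSD work on pre-4.1 hardware.
 * Only v3d_has_csd() and struct v3d_dev come from the listing above.
 */
static int v3d_example_check_csd(struct v3d_dev *v3d)
{
	if (!v3d_has_csd(v3d))
		return -EINVAL;	/* no compute shader dispatch queue */
	return 0;
}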
200 /* v3d seqno for signaled() test */
211 #define V3D_READ(offset) readl(v3d->hub_regs + offset)
212 #define V3D_WRITE(offset, val) writel(val, v3d->hub_regs + offset)
214 #define V3D_BRIDGE_READ(offset) readl(v3d->bridge_regs + offset)
215 #define V3D_BRIDGE_WRITE(offset, val) writel(val, v3d->bridge_regs + offset)
217 #define V3D_GCA_READ(offset) readl(v3d->gca_regs + offset)
218 #define V3D_GCA_WRITE(offset, val) writel(val, v3d->gca_regs + offset)
220 #define V3D_CORE_READ(core, offset) readl(v3d->core_regs[core] + offset)
221 #define V3D_CORE_WRITE(core, offset, val) writel(val, v3d->core_regs[core] + offset)
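Each of these wrappers expands to a readl()/writel() on a register block pointer pulled from a local variable literally named v3d, so they can only be used where a struct v3d_dev pointer with that name is in scope. A small sketch of a read-modify-write helper built on the per-core accessors; the register offset macro is made up for illustration:

/* Sketch only: V3D_EXAMPLE_REG is a hypothetical register offset standing
 * in for a real entry in the hardware register map.
 */
#define V3D_EXAMPLE_REG 0x0000

static void v3d_example_core_set_bits(struct v3d_dev *v3d, int core, u32 bits)
{
	/* Both macros pick up the local 'v3d' pointer by name. */
	u32 val = V3D_CORE_READ(core, V3D_EXAMPLE_REG);

	V3D_CORE_WRITE(core, V3D_EXAMPLE_REG, val | bits);
}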
228 struct v3d_dev *v3d;

236 /* v3d fence to be signaled by IRQ handler when the job is complete. */
378 struct dma_fence *v3d_fence_create(struct v3d_dev *v3d, enum v3d_queue queue);
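Per the comment at line 236, the fence returned here is signaled from the IRQ handler once the job finishes, so a caller can block on it with the standard dma_fence API. A hedged sketch, assuming V3D_BIN is one of the enum v3d_queue values (it does not appear in this listing) and that v3d_fence_create() returns an ERR_PTR on failure:

/* Sketch: create a fence for one queue and block until the IRQ handler
 * signals it.  The calling context is hypothetical.
 */
static int v3d_example_wait_bin_idle(struct v3d_dev *v3d)
{
	struct dma_fence *fence;
	long ret;

	fence = v3d_fence_create(v3d, V3D_BIN);	/* assumed queue name */
	if (IS_ERR(fence))
		return PTR_ERR(fence);

	ret = dma_fence_wait(fence, true);	/* interruptible wait */
	dma_fence_put(fence);

	return ret;
}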
393 void v3d_reset(struct v3d_dev *v3d);
394 void v3d_invalidate_caches(struct v3d_dev *v3d);
395 void v3d_clean_caches(struct v3d_dev *v3d);
398 int v3d_irq_init(struct v3d_dev *v3d);
399 void v3d_irq_enable(struct v3d_dev *v3d);
400 void v3d_irq_disable(struct v3d_dev *v3d);
401 void v3d_irq_reset(struct v3d_dev *v3d);
406 int v3d_mmu_set_page_table(struct v3d_dev *v3d);
411 int v3d_sched_init(struct v3d_dev *v3d);
412 void v3d_sched_fini(struct v3d_dev *v3d);
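Taken together with the MMU and IRQ entry points above, these give the pieces a device bring-up path would call. One plausible ordering as a hedged sketch; the wrapper function and the exact sequence are assumptions, not the driver's actual probe code:

/* Hypothetical bring-up helper: page table first, then the job
 * schedulers, then interrupts.  Error handling is minimal on purpose.
 */
static int v3d_example_hw_init(struct v3d_dev *v3d)
{
	int ret;

	ret = v3d_mmu_set_page_table(v3d);	/* point the MMU at the page table */
	if (ret)
		return ret;

	ret = v3d_sched_init(v3d);		/* set up the job schedulers */
	if (ret)
		return ret;

	ret = v3d_irq_init(v3d);		/* request and enable interrupts */
	if (ret)
		v3d_sched_fini(v3d);

	return ret;
}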
417 void v3d_perfmon_start(struct v3d_dev *v3d, struct v3d_perfmon *perfmon);
418 void v3d_perfmon_stop(struct v3d_dev *v3d, struct v3d_perfmon *perfmon,
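A hedged sketch of bracketing a job with the perfmon hooks above. The listing cuts the v3d_perfmon_stop() prototype off after its second parameter; the trailing boolean below is an assumed "capture counters on stop" flag, and the job-submission step is left as a placeholder:

/* Sketch: start the performance monitor, run the work being profiled,
 * then stop it.  The third argument to v3d_perfmon_stop() is assumed.
 */
static void v3d_example_profile_job(struct v3d_dev *v3d,
				    struct v3d_perfmon *perfmon)
{
	v3d_perfmon_start(v3d, perfmon);
	/* ... submit and wait for the job being profiled ... */
	v3d_perfmon_stop(v3d, perfmon, true);	/* assumed capture flag */
}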