/drivers/gpu/drm/vc4/
D | vc4_irq.c
     60  struct vc4_dev *vc4 =  in vc4_overflow_mem_work()  local
     67  mutex_lock(&vc4->bin_bo_lock);  in vc4_overflow_mem_work()
     69  if (!vc4->bin_bo)  in vc4_overflow_mem_work()
     72  bo = vc4->bin_bo;  in vc4_overflow_mem_work()
     74  bin_bo_slot = vc4_v3d_get_bin_slot(vc4);  in vc4_overflow_mem_work()
     80  spin_lock_irqsave(&vc4->job_lock, irqflags);  in vc4_overflow_mem_work()
     82  if (vc4->bin_alloc_overflow) {  in vc4_overflow_mem_work()
     89  exec = vc4_first_bin_job(vc4);  in vc4_overflow_mem_work()
     91  exec = vc4_last_render_job(vc4);  in vc4_overflow_mem_work()
     93  exec->bin_slots |= vc4->bin_alloc_overflow;  in vc4_overflow_mem_work()
     [all …]

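The hits above trace the binner-overflow worker handing a fresh chunk of binner memory to whichever job is currently on the hardware. A condensed sketch of that locking pattern, reconstructed only from the lines above and assuming the driver's internal vc4_drv.h types (the error convention for vc4_v3d_get_bin_slot() and the unlock paths are assumptions, not copied code):

    /* Sketch only; not the exact upstream vc4_overflow_mem_work(). */
    static void overflow_mem_work_sketch(struct vc4_dev *vc4)
    {
    	struct vc4_exec_info *exec;
    	unsigned long irqflags;
    	int bin_bo_slot;

    	mutex_lock(&vc4->bin_bo_lock);		/* protects vc4->bin_bo */

    	if (!vc4->bin_bo)
    		goto out;

    	bin_bo_slot = vc4_v3d_get_bin_slot(vc4);
    	if (bin_bo_slot < 0)			/* assumed error convention */
    		goto out;

    	spin_lock_irqsave(&vc4->job_lock, irqflags);	/* protects job lists */

    	if (vc4->bin_alloc_overflow) {
    		/* Charge the previous overflow slot to the job that used it:
    		 * the current bin job if there is one, otherwise the last
    		 * job still in the render queue. */
    		exec = vc4_first_bin_job(vc4);
    		if (!exec)
    			exec = vc4_last_render_job(vc4);
    		if (exec)
    			exec->bin_slots |= vc4->bin_alloc_overflow;
    	}

    	spin_unlock_irqrestore(&vc4->job_lock, irqflags);
    out:
    	mutex_unlock(&vc4->bin_bo_lock);
    }

Note the lock ordering the hits imply: the mutex guarding the bin BO is taken first, and the IRQ-safe job-list spinlock is only taken inside it.
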
D | vc4_bo.c
     40  static void vc4_bo_stats_print(struct drm_printer *p, struct vc4_dev *vc4)  in vc4_bo_stats_print()  argument
     44  for (i = 0; i < vc4->num_labels; i++) {  in vc4_bo_stats_print()
     45  if (!vc4->bo_labels[i].num_allocated)  in vc4_bo_stats_print()
     49  vc4->bo_labels[i].name,  in vc4_bo_stats_print()
     50  vc4->bo_labels[i].size_allocated / 1024,  in vc4_bo_stats_print()
     51  vc4->bo_labels[i].num_allocated);  in vc4_bo_stats_print()
     54  mutex_lock(&vc4->purgeable.lock);  in vc4_bo_stats_print()
     55  if (vc4->purgeable.num)  in vc4_bo_stats_print()
     57  vc4->purgeable.size / 1024, vc4->purgeable.num);  in vc4_bo_stats_print()
     59  if (vc4->purgeable.purged_num)  in vc4_bo_stats_print()
     [all …]

D | vc4_v3d.c
    103  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_v3d_debugfs_ident()  local
    104  int ret = vc4_v3d_pm_get(vc4);  in vc4_v3d_debugfs_ident()
    119  vc4_v3d_pm_put(vc4);  in vc4_v3d_debugfs_ident()
    130  vc4_v3d_pm_get(struct vc4_dev *vc4)  in vc4_v3d_pm_get()  argument
    132  mutex_lock(&vc4->power_lock);  in vc4_v3d_pm_get()
    133  if (vc4->power_refcount++ == 0) {  in vc4_v3d_pm_get()
    134  int ret = pm_runtime_get_sync(&vc4->v3d->pdev->dev);  in vc4_v3d_pm_get()
    137  vc4->power_refcount--;  in vc4_v3d_pm_get()
    138  mutex_unlock(&vc4->power_lock);  in vc4_v3d_pm_get()
    142  mutex_unlock(&vc4->power_lock);  in vc4_v3d_pm_get()
    [all …]

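The vc4_v3d_pm_get() hits show V3D power being reference counted under a mutex: only the 0 -> 1 transition actually resumes the device through pm_runtime_get_sync(), and a failed resume rolls the count back. A minimal sketch of that pattern, assuming the vc4_drv.h field layout shown above (the return-value plumbing is an assumption):

    static int pm_get_sketch(struct vc4_dev *vc4)
    {
    	int ret = 0;

    	mutex_lock(&vc4->power_lock);
    	if (vc4->power_refcount++ == 0) {
    		ret = pm_runtime_get_sync(&vc4->v3d->pdev->dev);
    		if (ret < 0)
    			vc4->power_refcount--;	/* undo: device did not resume */
    	}
    	mutex_unlock(&vc4->power_lock);

    	return ret < 0 ? ret : 0;
    }
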
D | vc4_gem.c
     42  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_queue_hangcheck()  local
     44  mod_timer(&vc4->hangcheck.timer,  in vc4_queue_hangcheck()
     74  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_get_hang_state_ioctl()  local
     79  if (!vc4->v3d) {  in vc4_get_hang_state_ioctl()
     84  spin_lock_irqsave(&vc4->job_lock, irqflags);  in vc4_get_hang_state_ioctl()
     85  kernel_state = vc4->hang_state;  in vc4_get_hang_state_ioctl()
     87  spin_unlock_irqrestore(&vc4->job_lock, irqflags);  in vc4_get_hang_state_ioctl()
     97  spin_unlock_irqrestore(&vc4->job_lock, irqflags);  in vc4_get_hang_state_ioctl()
    101  vc4->hang_state = NULL;  in vc4_get_hang_state_ioctl()
    102  spin_unlock_irqrestore(&vc4->job_lock, irqflags);  in vc4_get_hang_state_ioctl()
    [all …]

D | vc4_kms.c
     67  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_get_ctm_state()  local
     71  ret = drm_modeset_lock(&vc4->ctm_state_lock, state->acquire_ctx);  in vc4_get_ctm_state()
    111  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_ctm_obj_fini()  local
    113  drm_atomic_private_obj_fini(&vc4->ctm_manager);  in vc4_ctm_obj_fini()
    116  static int vc4_ctm_obj_init(struct vc4_dev *vc4)  in vc4_ctm_obj_init()  argument
    120  drm_modeset_lock_init(&vc4->ctm_state_lock);  in vc4_ctm_obj_init()
    126  drm_atomic_private_obj_init(&vc4->base, &vc4->ctm_manager, &ctm_state->base,  in vc4_ctm_obj_init()
    129  return drmm_add_action_or_reset(&vc4->base, vc4_ctm_obj_fini, NULL);  in vc4_ctm_obj_init()
    152  vc4_ctm_commit(struct vc4_dev *vc4, struct drm_atomic_state *state)  in vc4_ctm_commit()  argument
    154  struct vc4_ctm_state *ctm_state = to_vc4_ctm_state(vc4->ctm_manager.state);  in vc4_ctm_commit()
    [all …]

D | vc4_hvs.c
     69  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_hvs_dump_state()  local
     70  struct drm_printer p = drm_info_printer(&vc4->hvs->pdev->dev);  in vc4_hvs_dump_state()
     73  drm_print_regset32(&p, &vc4->hvs->regset);  in vc4_hvs_dump_state()
     79  readl((u32 __iomem *)vc4->hvs->dlist + i + 0),  in vc4_hvs_dump_state()
     80  readl((u32 __iomem *)vc4->hvs->dlist + i + 1),  in vc4_hvs_dump_state()
     81  readl((u32 __iomem *)vc4->hvs->dlist + i + 2),  in vc4_hvs_dump_state()
     82  readl((u32 __iomem *)vc4->hvs->dlist + i + 3),  in vc4_hvs_dump_state()
     90  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_hvs_debugfs_underrun()  local
     93  drm_printf(&p, "%d\n", atomic_read(&vc4->underrun));  in vc4_hvs_debugfs_underrun()
    163  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_hvs_lut_load()  local
    [all …]

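The readl() hits in vc4_hvs_dump_state() come from walking the HVS display list, which sits behind an __iomem mapping and therefore has to be read word by word. A hedged sketch of that dump loop; the four-words-per-row layout matches the hits, while the word count, its alignment, and the printer being supplied by the caller are assumptions:

    /* Illustrative only; assumes dlist_words is a multiple of four. */
    static void dump_dlist_sketch(struct vc4_dev *vc4, struct drm_printer *p,
    			      unsigned int dlist_words)
    {
    	unsigned int i;

    	for (i = 0; i < dlist_words; i += 4) {
    		drm_printf(p, "0x%08x 0x%08x 0x%08x 0x%08x\n",
    			   readl((u32 __iomem *)vc4->hvs->dlist + i + 0),
    			   readl((u32 __iomem *)vc4->hvs->dlist + i + 1),
    			   readl((u32 __iomem *)vc4->hvs->dlist + i + 2),
    			   readl((u32 __iomem *)vc4->hvs->dlist + i + 3));
    	}
    }
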
D | vc4_drv.c
     70  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_get_param_ioctl()  local
     77  if (!vc4->v3d)  in vc4_get_param_ioctl()
     82  ret = vc4_v3d_pm_get(vc4);  in vc4_get_param_ioctl()
     86  vc4_v3d_pm_put(vc4);  in vc4_get_param_ioctl()
     89  ret = vc4_v3d_pm_get(vc4);  in vc4_get_param_ioctl()
     93  vc4_v3d_pm_put(vc4);  in vc4_get_param_ioctl()
     96  ret = vc4_v3d_pm_get(vc4);  in vc4_get_param_ioctl()
    100  vc4_v3d_pm_put(vc4);  in vc4_get_param_ioctl()
    133  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_close()  local
    137  vc4_v3d_bin_bo_put(vc4);  in vc4_close()
    [all …]

D | vc4_perfmon.c
     30  void vc4_perfmon_start(struct vc4_dev *vc4, struct vc4_perfmon *perfmon)  in vc4_perfmon_start()  argument
     35  if (WARN_ON_ONCE(!perfmon || vc4->active_perfmon))  in vc4_perfmon_start()
     44  vc4->active_perfmon = perfmon;  in vc4_perfmon_start()
     47  void vc4_perfmon_stop(struct vc4_dev *vc4, struct vc4_perfmon *perfmon,  in vc4_perfmon_stop()  argument
     52  if (WARN_ON_ONCE(!vc4->active_perfmon ||  in vc4_perfmon_stop()
     53  perfmon != vc4->active_perfmon))  in vc4_perfmon_stop()
     62  vc4->active_perfmon = NULL;  in vc4_perfmon_stop()
    103  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_perfmon_create_ioctl()  local
    110  if (!vc4->v3d) {  in vc4_perfmon_create_ioctl()
    155  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_perfmon_destroy_ioctl()  local
    [all …]

D | vc4_drv.h
    314  struct vc4_dev *vc4;  member
    547  #define V3D_READ(offset) readl(vc4->v3d->regs + offset)
    548  #define V3D_WRITE(offset, val) writel(val, vc4->v3d->regs + offset)
    549  #define HVS_READ(offset) readl(vc4->hvs->regs + offset)
    550  #define HVS_WRITE(offset, val) writel(val, vc4->hvs->regs + offset)
    685  vc4_first_bin_job(struct vc4_dev *vc4)  in vc4_first_bin_job()  argument
    687  return list_first_entry_or_null(&vc4->bin_job_list,  in vc4_first_bin_job()
    692  vc4_first_render_job(struct vc4_dev *vc4)  in vc4_first_render_job()  argument
    694  return list_first_entry_or_null(&vc4->render_job_list,  in vc4_first_render_job()
    699  vc4_last_render_job(struct vc4_dev *vc4)  in vc4_last_render_job()  argument
    [all …]

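Two idioms recur in these header hits: MMIO access through the V3D_*/HVS_* wrappers around readl()/writel() (which expect a vc4 pointer to be in scope), and job lookup through list_first_entry_or_null(), which yields NULL when the corresponding queue is empty. A tiny sketch of the latter; the vc4_exec_info list-member name used here ("head") is an assumption, since the hits truncate that argument:

    /* Sketch: returns true when no bin job is queued. */
    static inline bool bin_queue_empty_sketch(struct vc4_dev *vc4)
    {
    	return list_first_entry_or_null(&vc4->bin_job_list,
    					struct vc4_exec_info, head) == NULL;
    }
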
D | vc4_debugfs.c
     26  struct vc4_dev *vc4 = to_vc4_dev(minor->dev);  in vc4_debugfs_init()  local
     30  minor->debugfs_root, &vc4->load_tracker_enabled);  in vc4_debugfs_init()
     32  list_for_each_entry(entry, &vc4->debugfs_list, link) {  in vc4_debugfs_init()
     63  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_debugfs_add_file()  local
     75  list_add(&entry->link, &vc4->debugfs_list);  in vc4_debugfs_add_file()

D | vc4_crtc.c
     69  vc4_crtc_get_cob_allocation(struct vc4_dev *vc4, unsigned int channel)  in vc4_crtc_get_cob_allocation()  argument
     89  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_crtc_get_scanout_position()  local
    128  cob_size = vc4_crtc_get_cob_allocation(vc4, vc4_crtc_state->assigned_channel);  in vc4_crtc_get_scanout_position()
    213  struct vc4_dev *vc4 = to_vc4_dev(vc4_crtc->base.dev);  in vc4_get_fifo_full_level()  local
    255  if (!vc4->hvs->hvs5)  in vc4_get_fifo_full_level()
    311  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_crtc_config_pv()  local
    394  if (vc4->hvs->hvs5)  in vc4_crtc_config_pv()
    419  struct vc4_dev *vc4 = to_vc4_dev(dev);  in require_hvs_enabled()  local
    647  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_crtc_handle_page_flip()  local
    712  struct vc4_dev *vc4 = to_vc4_dev(dev);  in vc4_async_page_flip_complete()  local
    [all …]

D | Makefile
      5  vc4-y := \
     28  vc4-$(CONFIG_DEBUG_FS) += vc4_debugfs.o
     30  obj-$(CONFIG_DRM_VC4) += vc4.o

D | vc4_plane.c
    195  struct vc4_dev *vc4 = to_vc4_dev(plane->dev);  in vc4_plane_destroy_state()  local
    201  spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);  in vc4_plane_destroy_state()
    203  spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);  in vc4_plane_destroy_state()
    446  struct vc4_dev *vc4 = to_vc4_dev(state->plane->dev);  in vc4_lbm_size()  local
    483  lbm = roundup(lbm, vc4->hvs->hvs5 ? 128 : 64);  in vc4_lbm_size()
    486  lbm /= vc4->hvs->hvs5 ? 4 : 2;  in vc4_lbm_size()
    530  struct vc4_dev *vc4;  in vc4_plane_calc_load()  local
    532  vc4 = to_vc4_dev(state->plane->dev);  in vc4_plane_calc_load()
    533  if (!vc4->load_tracker_available)  in vc4_plane_calc_load()
    585  struct vc4_dev *vc4 = to_vc4_dev(state->plane->dev);  in vc4_plane_allocate_lbm()  local
    [all …]

D | vc4_fence.c
     39  struct vc4_dev *vc4 = to_vc4_dev(f->dev);  in vc4_fence_signaled()  local
     41  return vc4->finished_seqno >= f->seqno;  in vc4_fence_signaled()

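These two lines are the whole completion test: the GPU advances a device-wide finished_seqno, and a fence counts as signalled once that counter has reached the fence's own sequence number. As a sketch, with the field names taken from the hits and only the wrapper name invented for illustration:

    static bool fence_signaled_sketch(struct vc4_fence *f)
    {
    	struct vc4_dev *vc4 = to_vc4_dev(f->dev);

    	/* finished_seqno only moves forward, so >= is sufficient. */
    	return vc4->finished_seqno >= f->seqno;
    }
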
D | vc4_trace.h
     14  #define TRACE_SYSTEM vc4
     59  #define TRACE_INCLUDE_PATH ../../drivers/gpu/drm/vc4

D | vc4_dpi.c
    269  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_dpi_bind()  local
    328  vc4->dpi = dpi;  in vc4_dpi_bind()
    344  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_dpi_unbind()  local
    353  vc4->dpi = NULL;  in vc4_dpi_unbind()

D | vc4_txp.c
    468  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_txp_bind()  local
    520  vc4->txp = txp;  in vc4_txp_bind()
    531  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_txp_unbind()  local
    536  vc4->txp = NULL;  in vc4_txp_unbind()

D | vc4_vec.c
    526  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_vec_bind()  local
    577  vc4->vec = vec;  in vc4_vec_bind()
    594  struct vc4_dev *vc4 = to_vc4_dev(drm);  in vc4_vec_unbind()  local
    601  vc4->vec = NULL;  in vc4_vec_unbind()

D | vc4_validate.c
    352  struct vc4_dev *vc4 = to_vc4_dev(dev);  in validate_tile_binning_config()  local
    382  bin_slot = vc4_v3d_get_bin_slot(vc4);  in validate_tile_binning_config()
    395  bin_addr = vc4->bin_bo->base.paddr + bin_slot * vc4->bin_alloc_size;  in validate_tile_binning_config()
    417  *(uint32_t *)(validated + 4) = (bin_addr + vc4->bin_alloc_size -  in validate_tile_binning_config()

/drivers/net/dsa/b53/
D | b53_common.c
    355  u8 mgmt, vc0, vc1, vc4 = 0, vc5;  in b53_enable_vlan()  local
    362  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4_25, &vc4);  in b53_enable_vlan()
    365  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4_63XX, &vc4);  in b53_enable_vlan()
    368  b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4, &vc4);  in b53_enable_vlan()
    375  vc4 &= ~VC4_ING_VID_CHECK_MASK;  in b53_enable_vlan()
    377  vc4 |= VC4_ING_VID_VIO_DROP << VC4_ING_VID_CHECK_S;  in b53_enable_vlan()
    380  vc4 |= VC4_ING_VID_VIO_FWD << VC4_ING_VID_CHECK_S;  in b53_enable_vlan()
    393  vc4 &= ~VC4_ING_VID_CHECK_MASK;  in b53_enable_vlan()
    397  vc4 |= VC4_ING_VID_VIO_FWD << VC4_ING_VID_CHECK_S;  in b53_enable_vlan()
    399  vc4 |= VC4_ING_VID_VIO_TO_IMP << VC4_ING_VID_CHECK_S;  in b53_enable_vlan()
    [all …]

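Unrelated to the DRM driver, these hits use vc4 as a local byte holding VLAN Control register 4 of the b53 switch: the register is read from a chip-specific offset, the ingress-VID-check field is masked out, and a violation policy (drop, forward, or send to the IMP port) is ORed back in. A hedged sketch of that read-modify-write, using the non-chip-specific offset and the drop policy purely as an example; the write-back through b53_write8() and the b53_priv.h/b53_regs.h headers are assumptions not shown in the hits:

    static void vlan_ctrl4_rmw_sketch(struct b53_device *dev)
    {
    	u8 vc4 = 0;

    	b53_read8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4, &vc4);

    	vc4 &= ~VC4_ING_VID_CHECK_MASK;				/* clear old policy */
    	vc4 |= VC4_ING_VID_VIO_DROP << VC4_ING_VID_CHECK_S;	/* example policy */

    	b53_write8(dev, B53_VLAN_PAGE, B53_VLAN_CTRL4, vc4);
    }
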
/drivers/staging/vc04_services/bcm2835-camera/
D | TODO
      5  vc4 driver can import them. This may involve bringing in the VCSM

/drivers/gpu/drm/
D | Makefile
     76  obj-$(CONFIG_DRM_VC4) += vc4/

D | Kconfig
    358  source "drivers/gpu/drm/vc4/Kconfig"

/drivers/staging/vc04_services/interface/
D | TODO
     17  camera images into vc4 for rendering/display.