/drivers/gpu/drm/vc4/

D | vc4_gem.c
    146  struct vc4_exec_info *exec[2];  in vc4_save_hang_state() local
    158  exec[0] = vc4_first_bin_job(vc4);  in vc4_save_hang_state()
    159  exec[1] = vc4_first_render_job(vc4);  in vc4_save_hang_state()
    160  if (!exec[0] && !exec[1]) {  in vc4_save_hang_state()
    168  if (!exec[i])  in vc4_save_hang_state()
    172  list_for_each_entry(bo, &exec[i]->unref_list, unref_head)  in vc4_save_hang_state()
    174  state->bo_count += exec[i]->bo_count + unref_list_count;  in vc4_save_hang_state()
    187  if (!exec[i])  in vc4_save_hang_state()
    190  for (j = 0; j < exec[i]->bo_count; j++) {  in vc4_save_hang_state()
    191  drm_gem_object_get(&exec[i]->bo[j]->base);  in vc4_save_hang_state()
    [all …]
D | vc4_validate.c
    51   struct vc4_exec_info *exec, \
    106  vc4_use_bo(struct vc4_exec_info *exec, uint32_t hindex)  in vc4_use_bo() argument
    111  if (hindex >= exec->bo_count) {  in vc4_use_bo()
    113  hindex, exec->bo_count);  in vc4_use_bo()
    116  obj = exec->bo[hindex];  in vc4_use_bo()
    129  vc4_use_handle(struct vc4_exec_info *exec, uint32_t gem_handles_packet_index)  in vc4_use_handle() argument
    131  return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]);  in vc4_use_handle()
    135  validate_bin_pos(struct vc4_exec_info *exec, void *untrusted, uint32_t pos)  in validate_bin_pos() argument
    140  return (untrusted - 1 == exec->bin_u + pos);  in validate_bin_pos()
    159  vc4_check_tex_size(struct vc4_exec_info *exec, struct drm_gem_cma_object *fbo,  in vc4_check_tex_size() argument
    [all …]
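The vc4_use_bo() hits above show the validator's basic guard: a handle index taken from untrusted command-stream data is range-checked against exec->bo_count before the BO table is dereferenced. Below is a minimal standalone sketch of that bounds-check pattern; the exec_info and gem_object types and the use_bo() name are invented stand-ins for the kernel structures, not the vc4 API.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical stand-ins for the kernel's vc4_exec_info and GEM object. */
struct gem_object { uint32_t size; };

struct exec_info {
    struct gem_object **bo;   /* table of looked-up buffer objects */
    uint32_t bo_count;        /* number of valid entries in bo[] */
};

/* Return the object for an untrusted handle index, or NULL if it is out
 * of range -- the same shape as the check visible in vc4_use_bo(). */
static struct gem_object *use_bo(struct exec_info *exec, uint32_t hindex)
{
    if (hindex >= exec->bo_count) {
        fprintf(stderr, "bad handle index %u (count %u)\n",
                hindex, exec->bo_count);
        return NULL;
    }
    return exec->bo[hindex];
}

int main(void)
{
    struct gem_object a = { 4096 }, b = { 65536 };
    struct gem_object *table[] = { &a, &b };
    struct exec_info exec = { .bo = table, .bo_count = 2 };

    printf("bo[1] size: %u\n", use_bo(&exec, 1)->size);
    printf("bo[5]: %p\n", (void *)use_bo(&exec, 5));  /* out of range, rejected */
    return 0;
}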
D | vc4_render_cl.c
    99   static uint32_t vc4_full_res_offset(struct vc4_exec_info *exec,  in vc4_full_res_offset() argument
    105  (DIV_ROUND_UP(exec->args->width, 32) * y + x);  in vc4_full_res_offset()
    123  static void emit_tile(struct vc4_exec_info *exec,  in emit_tile() argument
    127  struct drm_vc4_submit_cl *args = exec->args;  in emit_tile()
    139  vc4_full_res_offset(exec, setup->color_read,  in emit_tile()
    155  vc4_full_res_offset(exec, setup->zs_read,  in emit_tile()
    185  rcl_u32(setup, (exec->tile_alloc_offset +  in emit_tile()
    186  (y * exec->bin_tiles_x + x) * 32));  in emit_tile()
    201  vc4_full_res_offset(exec, setup->msaa_color_write,  in emit_tile()
    219  vc4_full_res_offset(exec, setup->msaa_zs_write,  in emit_tile()
    [all …]
D | vc4_irq.c
    64   struct vc4_exec_info *exec;  in vc4_overflow_mem_work() local
    82   exec = vc4_first_bin_job(vc4);  in vc4_overflow_mem_work()
    83   if (!exec)  in vc4_overflow_mem_work()
    84   exec = vc4_last_render_job(vc4);  in vc4_overflow_mem_work()
    85   if (exec) {  in vc4_overflow_mem_work()
    86   exec->bin_slots |= vc4->bin_alloc_overflow;  in vc4_overflow_mem_work()
    107  struct vc4_exec_info *exec = vc4_first_bin_job(vc4);  in vc4_irq_finish_bin_job() local
    109  if (!exec)  in vc4_irq_finish_bin_job()
    112  vc4_move_job_to_render(dev, exec);  in vc4_irq_finish_bin_job()
    120  struct vc4_exec_info *exec = vc4_first_bin_job(vc4);  in vc4_cancel_bin_job() local
    [all …]
D | vc4_drv.h
    553  void vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec);
    603  struct vc4_exec_info *exec);
    606  vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec);
    608  struct drm_gem_cma_object *vc4_use_bo(struct vc4_exec_info *exec,
    611  int vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec);
    613  bool vc4_check_tex_size(struct vc4_exec_info *exec,
D | vc4_v3d.c
    166  struct vc4_exec_info *exec;  in vc4_v3d_get_bin_slot() local
    182  exec = vc4_last_render_job(vc4);  in vc4_v3d_get_bin_slot()
    183  if (exec)  in vc4_v3d_get_bin_slot()
    184  seqno = exec->seqno;  in vc4_v3d_get_bin_slot()
/drivers/gpu/drm/nouveau/nvkm/subdev/timer/

D | base.c
    37   LIST_HEAD(exec);  in nvkm_timer_alarm_trigger()
    54   list_add(&alarm->exec, &exec);  in nvkm_timer_alarm_trigger()
    63   list_for_each_entry_safe(alarm, atemp, &exec, exec) {  in nvkm_timer_alarm_trigger()
    64   list_del(&alarm->exec);  in nvkm_timer_alarm_trigger()
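The hits in nvkm_timer_alarm_trigger() show a collect-then-execute shape: due alarms are first moved onto a local exec list, and only afterwards are their callbacks run from that list with a deletion-safe iterator. The following standalone sketch illustrates the same idea with a hand-rolled singly linked list rather than the kernel's list_head API; types, names, and the lack of locking are all simplifications for illustration (in the driver the collection phase runs under a spinlock).

#include <stdio.h>

/* Hypothetical alarm with an intrusive "exec" link, loosely modelled on
 * the nvkm_alarm exec member above -- this is not the nvkm API. */
struct alarm {
    unsigned long timestamp;
    void (*func)(struct alarm *);
    struct alarm *next;       /* pending list link */
    struct alarm *exec;       /* local execute list link */
};

static struct alarm *pending;

/* Move every alarm that is due onto a local list, then run the callbacks
 * from that local list, fetching the next element before each call so the
 * entry may be freed or re-armed by its own callback. */
static void alarm_trigger(unsigned long now)
{
    struct alarm **pp = &pending, *a, *exec = NULL;

    while ((a = *pp) != NULL) {
        if (a->timestamp <= now) {
            *pp = a->next;    /* unlink from the pending list */
            a->exec = exec;   /* push onto the local execute list */
            exec = a;
        } else {
            pp = &a->next;
        }
    }

    while ((a = exec) != NULL) {
        exec = a->exec;       /* grab the next entry first */
        a->func(a);
    }
}

static void hello(struct alarm *a) { printf("alarm at %lu fired\n", a->timestamp); }

int main(void)
{
    struct alarm a1 = { 10, hello, NULL, NULL }, a2 = { 30, hello, NULL, NULL };
    a1.next = &a2;
    pending = &a1;
    alarm_trigger(20);   /* fires a1 only; a2 stays pending */
    return 0;
}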
/drivers/leds/

D | leds-lp8501.c
    164  u8 exec;  in lp8501_run_engine() local
    182  ret = lp55xx_read(chip, LP8501_REG_ENABLE, &exec);  in lp8501_run_engine()
    189  exec = (exec & ~LP8501_EXEC_ENG1_M) | LP8501_RUN_ENG1;  in lp8501_run_engine()
    194  exec = (exec & ~LP8501_EXEC_ENG2_M) | LP8501_RUN_ENG2;  in lp8501_run_engine()
    199  exec = (exec & ~LP8501_EXEC_ENG3_M) | LP8501_RUN_ENG3;  in lp8501_run_engine()
    205  lp55xx_update_bits(chip, LP8501_REG_ENABLE, LP8501_EXEC_M, exec);  in lp8501_run_engine()
D | leds-lp5521.c
    178  u8 exec;  in lp5521_run_engine() local
    197  ret = lp55xx_read(chip, LP5521_REG_ENABLE, &exec);  in lp5521_run_engine()
    204  exec = (exec & ~LP5521_EXEC_R_M) | LP5521_RUN_R;  in lp5521_run_engine()
    209  exec = (exec & ~LP5521_EXEC_G_M) | LP5521_RUN_G;  in lp5521_run_engine()
    214  exec = (exec & ~LP5521_EXEC_B_M) | LP5521_RUN_B;  in lp5521_run_engine()
    220  lp55xx_update_bits(chip, LP5521_REG_ENABLE, LP5521_EXEC_M, exec);  in lp5521_run_engine()
D | leds-lp5562.c
    160  u8 exec;  in lp5562_run_engine() local
    182  ret = lp55xx_read(chip, LP5562_REG_ENABLE, &exec);  in lp5562_run_engine()
    189  exec = (exec & ~LP5562_EXEC_ENG1_M) | LP5562_RUN_ENG1;  in lp5562_run_engine()
    194  exec = (exec & ~LP5562_EXEC_ENG2_M) | LP5562_RUN_ENG2;  in lp5562_run_engine()
    199  exec = (exec & ~LP5562_EXEC_ENG3_M) | LP5562_RUN_ENG3;  in lp5562_run_engine()
    205  lp55xx_update_bits(chip, LP5562_REG_ENABLE, LP5562_EXEC_M, exec);  in lp5562_run_engine()
D | leds-lp5523.c
    234  u8 exec;  in lp5523_run_engine() local
    252  ret = lp55xx_read(chip, LP5523_REG_ENABLE, &exec);  in lp5523_run_engine()
    259  exec = (exec & ~LP5523_EXEC_ENG1_M) | LP5523_RUN_ENG1;  in lp5523_run_engine()
    264  exec = (exec & ~LP5523_EXEC_ENG2_M) | LP5523_RUN_ENG2;  in lp5523_run_engine()
    269  exec = (exec & ~LP5523_EXEC_ENG3_M) | LP5523_RUN_ENG3;  in lp5523_run_engine()
    275  lp55xx_update_bits(chip, LP5523_REG_ENABLE, LP5523_EXEC_M, exec);  in lp5523_run_engine()
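All four lp55xx-family drivers above follow the same shape: read the ENABLE register into a shadow byte, clear one engine's EXEC field with its mask, OR in that engine's RUN value, and finally write the result back restricted to the overall EXEC mask. The sketch below shows that read-modify-write pattern on a plain byte; the field layout and values are illustrative, not the actual LP55xx register defines, and update_bits() merely mimics the masked-write behaviour of lp55xx_update_bits().

#include <stdio.h>
#include <stdint.h>

/* Illustrative layout: three 2-bit EXEC fields packed into one byte.
 * These defines only mirror the style of the LP55xx headers. */
#define EXEC_ENG1_M  0x30
#define EXEC_ENG2_M  0x0c
#define EXEC_ENG3_M  0x03
#define EXEC_M       (EXEC_ENG1_M | EXEC_ENG2_M | EXEC_ENG3_M)
#define RUN_ENG1     0x20
#define RUN_ENG2     0x08
#define RUN_ENG3     0x02

static uint8_t enable_reg = 0x15;   /* pretend current register contents */

/* Masked write: only the bits covered by mask may change. */
static void update_bits(uint8_t mask, uint8_t val)
{
    enable_reg = (enable_reg & ~mask) | (val & mask);
}

int main(void)
{
    uint8_t exec = enable_reg;                /* read */

    exec = (exec & ~EXEC_ENG2_M) | RUN_ENG2;  /* modify: switch ENG2 to RUN */
    update_bits(EXEC_M, exec);                /* write back, EXEC bits only */

    printf("ENABLE = 0x%02x\n", enable_reg);
    return 0;
}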
/drivers/gpu/drm/i915/

D | i915_gem_execbuffer.c
    194   struct drm_i915_gem_exec_object2 *exec; /** ioctl execobj[] */  member
    250   #define exec_entry(EB, VMA) (&(EB)->exec[(VMA)->exec_flags - (EB)->flags])
    437   entry->handle, (int)(entry - eb->exec));  in eb_validate_vma()
    466   struct drm_i915_gem_exec_object2 *entry = &eb->exec[i];  in eb_add_vma()
    704   u32 handle = eb->exec[i].handle;  in eb_lookup_vmas()
    1519  const unsigned int nreloc = eb->exec[i].relocation_count;  in eb_copy_relocations()
    1528  err = check_relocations(&eb->exec[i]);  in eb_copy_relocations()
    1532  urelocs = u64_to_user_ptr(eb->exec[i].relocs_ptr);  in eb_copy_relocations()
    1577  eb->exec[i].relocs_ptr = (uintptr_t)relocs;  in eb_copy_relocations()
    1585  u64_to_ptr(typeof(*relocs), eb->exec[i].relocs_ptr);  in eb_copy_relocations()
    [all …]
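The exec_entry() macro at line 250 recovers an array index by pointer subtraction: the per-VMA exec_flags pointer points into eb->flags[], so subtracting the array base gives the slot number, which then addresses the parallel eb->exec[] array of execbuffer objects. Here is a small standalone illustration of that parallel-array trick; the buffer and vma types and their fields are invented for the example and are not the i915 structures.

#include <stdio.h>
#include <stdint.h>

struct obj_entry { uint32_t handle; };

struct buffer {
    struct obj_entry *exec;   /* array of N entries */
    unsigned int     *flags;  /* parallel array of N flag words */
};

struct vma {
    unsigned int *exec_flags; /* points at one element of buffer->flags[] */
};

/* Same shape as the macro above: index = flags pointer - flags base,
 * then use that index in the parallel exec[] array. */
#define exec_entry(EB, VMA) (&(EB)->exec[(VMA)->exec_flags - (EB)->flags])

int main(void)
{
    struct obj_entry exec[3] = { {10}, {11}, {12} };
    unsigned int flags[3] = { 0, 0, 0 };
    struct buffer eb = { exec, flags };
    struct vma v = { &flags[2] };   /* this VMA belongs to slot 2 */

    printf("handle = %u\n", exec_entry(&eb, &v)->handle);  /* prints 12 */
    return 0;
}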
/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/

D | mxms.c
    97   bool (*exec)(struct nvkm_mxm *, u8 *, void *), void *info)  in mxms_foreach()
    148  if (mxm->subdev.debug >= NV_DBG_DEBUG && (exec == NULL)) {  in mxms_foreach()
    170  if (!exec(mxm, desc, info))  in mxms_foreach()
D | base.c
    200  bool (*exec)(struct nvkm_mxm *, u8 version);  member
    218  if (shadow->exec(mxm, version)) {  in mxm_shadow()
/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/

D | gm200.c
    118  u32 exec, args;  in gm200_devinit_post() local
    127  ret = pmu_load(init, 0x04, post, &exec, &args);  in gm200_devinit_post()
    152  pmu_exec(init, exec);  in gm200_devinit_post()
/drivers/gpu/drm/nouveau/nvkm/subdev/bus/

D | hwsq.h
    75  hwsq_exec(struct hwsq *ram, bool exec)  in hwsq_exec() argument
    79  ret = nvkm_hwsq_fini(&ram->hwsq, exec);  in hwsq_exec()
D | hwsq.c
    61  nvkm_hwsq_fini(struct nvkm_hwsq **phwsq, bool exec)  in nvkm_hwsq_fini() argument
    70  if (exec)  in nvkm_hwsq_fini()
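hwsq_exec() and nvkm_hwsq_fini() show a build-then-commit idiom: operations are queued into a sequencer object, and the final fini call either submits the queued script (exec == true) or discards it (exec == false), for example after an error. The memx path below (nvkm_memx_fini(), ramfuc_exec()) has the same shape. The sketch that follows illustrates the idiom with a made-up command queue; it is not the nvkm hwsq or memx implementation.

#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>

/* Made-up command queue standing in for struct nvkm_hwsq / nvkm_memx. */
struct script {
    int cmds[16];
    int ncmds;
};

static struct script *script_init(void)
{
    return calloc(1, sizeof(struct script));
}

static void script_wr32(struct script *s, int value)
{
    if (s && s->ncmds < 16)
        s->cmds[s->ncmds++] = value;   /* just record, do not execute yet */
}

/* Commit or discard: mirrors the bool exec parameter of nvkm_hwsq_fini(). */
static int script_fini(struct script **ps, bool exec)
{
    struct script *s = *ps;

    if (!s)
        return -1;
    if (exec) {
        for (int i = 0; i < s->ncmds; i++)
            printf("exec cmd %d\n", s->cmds[i]);
    }
    free(s);
    *ps = NULL;
    return 0;
}

int main(void)
{
    struct script *s = script_init();

    script_wr32(s, 1);
    script_wr32(s, 2);
    return script_fini(&s, true);   /* pass false to abandon the script */
}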
/drivers/gpu/drm/nouveau/nvkm/subdev/bios/

D | dcb.c
    212  int (*exec)(struct nvkm_bios *, void *, int, u16))  in dcb_outp_foreach()
    229  ret = exec(bios, data, idx, outp);  in dcb_outp_foreach()
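dcb_outp_foreach() walks the DCB output table and hands each entry to a caller-supplied exec callback; the ret = exec(...) hit suggests the walk stops once the callback reports a non-zero status. Below is a minimal sketch of that foreach-with-callback shape; the bios type, the table offsets, and the function names are invented, and only the control flow is meant to mirror the listing above.

#include <stdio.h>

/* Invented stand-ins for the BIOS object and an output-table entry. */
struct bios { int nouts; };

/* Walk idx = 0..nouts-1, calling exec for each entry offset; a non-zero
 * return from the callback aborts the walk and is passed to the caller. */
static int outp_foreach(struct bios *bios, void *data,
                        int (*exec)(struct bios *, void *, int, unsigned short))
{
    for (int idx = 0; idx < bios->nouts; idx++) {
        unsigned short outp = 0x100 + 8 * idx;  /* fake table offset */
        int ret = exec(bios, data, idx, outp);
        if (ret)
            return ret;
    }
    return 0;
}

static int print_outp(struct bios *bios, void *data, int idx, unsigned short outp)
{
    (void)bios; (void)data;
    printf("output %d at offset 0x%04x\n", idx, outp);
    return 0;   /* return non-zero to stop the walk */
}

int main(void)
{
    struct bios bios = { .nouts = 3 };
    return outp_foreach(&bios, NULL, print_outp);
}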
/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

D | ramfuc.h
    71  ramfuc_exec(struct ramfuc *ram, bool exec)  in ramfuc_exec() argument
    75  ret = nvkm_memx_fini(&ram->memx, exec);  in ramfuc_exec()
/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/

D | memx.c
    72  nvkm_memx_fini(struct nvkm_memx **pmemx, bool exec)  in nvkm_memx_fini() argument
    88  if (exec) {  in nvkm_memx_fini()
/drivers/gpu/drm/nouveau/include/nvkm/subdev/

D | bus.h
    14  int nvkm_hwsq_fini(struct nvkm_hwsq **, bool exec);
D | pmu.h
    49  int nvkm_memx_fini(struct nvkm_memx **, bool exec);
D | timer.h
    8  struct list_head exec;  member
/drivers/connector/

D | Kconfig
    20  events such as fork, exec, id change (uid, gid, suid, etc), and exit.
/drivers/gpu/drm/nouveau/include/nvkm/subdev/bios/

D | dcb.h
    65  int dcb_outp_foreach(struct nvkm_bios *, void *data, int (*exec)