/drivers/gpu/drm/i915/gt/uc/ |
D | intel_guc.c |
    36   void intel_guc_notify(struct intel_guc *guc)    in intel_guc_notify() argument
    38   struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_notify()
    46   intel_uncore_write(gt->uncore, guc->notify_reg, GUC_SEND_TRIGGER);    in intel_guc_notify()
    49   static inline i915_reg_t guc_send_reg(struct intel_guc *guc, u32 i)    in guc_send_reg() argument
    51   GEM_BUG_ON(!guc->send_regs.base);    in guc_send_reg()
    52   GEM_BUG_ON(!guc->send_regs.count);    in guc_send_reg()
    53   GEM_BUG_ON(i >= guc->send_regs.count);    in guc_send_reg()
    55   return _MMIO(guc->send_regs.base + 4 * i);    in guc_send_reg()
    58   void intel_guc_init_send_regs(struct intel_guc *guc)    in intel_guc_init_send_regs() argument
    60   struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_init_send_regs()
    [all …]
|
D | intel_guc.h |
    47   void (*reset)(struct intel_guc *guc);
    48   void (*enable)(struct intel_guc *guc);
    49   void (*disable)(struct intel_guc *guc);
    102  inline int intel_guc_send(struct intel_guc *guc, const u32 *action, u32 len)    in intel_guc_send() argument
    104  return intel_guc_ct_send(&guc->ct, action, len, NULL, 0, 0);    in intel_guc_send()
    108  inline int intel_guc_send_nb(struct intel_guc *guc, const u32 *action, u32 len,    in intel_guc_send_nb() argument
    111  return intel_guc_ct_send(&guc->ct, action, len, NULL, 0,    in intel_guc_send_nb()
    116  intel_guc_send_and_receive(struct intel_guc *guc, const u32 *action, u32 len,    in intel_guc_send_and_receive() argument
    119  return intel_guc_ct_send(&guc->ct, action, len,    in intel_guc_send_and_receive()
    123  static inline int intel_guc_send_busy_loop(struct intel_guc *guc,    in intel_guc_send_busy_loop() argument
    [all …]
|
D | intel_uc.c |
    120  intel_guc_init_early(&uc->guc);    in intel_uc_init_early()
    133  intel_guc_init_late(&uc->guc);    in intel_uc_init_late()
    149  intel_guc_init_send_regs(&uc->guc);    in intel_uc_init_mmio()
    154  struct intel_guc *guc = &uc->guc;    in __uc_capture_load_err_log() local
    156  if (guc->log.vma && !uc->load_err_log)    in __uc_capture_load_err_log()
    157  uc->load_err_log = i915_gem_object_get(guc->log.vma->obj);    in __uc_capture_load_err_log()
    181  static void guc_clear_mmio_msg(struct intel_guc *guc)    in guc_clear_mmio_msg() argument
    183  intel_uncore_write(guc_to_gt(guc)->uncore, SOFT_SCRATCH(15), 0);    in guc_clear_mmio_msg()
    186  static void guc_get_mmio_msg(struct intel_guc *guc)    in guc_get_mmio_msg() argument
    190  spin_lock_irq(&guc->irq_lock);    in guc_get_mmio_msg()
    [all …]
|
D | intel_guc_ads.c |
    54   static u32 guc_ads_regset_size(struct intel_guc *guc)    in guc_ads_regset_size() argument
    56   GEM_BUG_ON(!guc->ads_regset_size);    in guc_ads_regset_size()
    57   return guc->ads_regset_size;    in guc_ads_regset_size()
    60   static u32 guc_ads_golden_ctxt_size(struct intel_guc *guc)    in guc_ads_golden_ctxt_size() argument
    62   return PAGE_ALIGN(guc->ads_golden_ctxt_size);    in guc_ads_golden_ctxt_size()
    65   static u32 guc_ads_private_data_size(struct intel_guc *guc)    in guc_ads_private_data_size() argument
    67   return PAGE_ALIGN(guc->fw.private_data_size);    in guc_ads_private_data_size()
    70   static u32 guc_ads_regset_offset(struct intel_guc *guc)    in guc_ads_regset_offset() argument
    75   static u32 guc_ads_golden_ctxt_offset(struct intel_guc *guc)    in guc_ads_golden_ctxt_offset() argument
    79   offset = guc_ads_regset_offset(guc) +    in guc_ads_golden_ctxt_offset()
    [all …]
|
D | intel_guc_submission.c |
    267  return &ce->engine->gt->uc.guc;    in ce_to_guc()
    275  static struct guc_lrc_desc *__get_lrc_desc(struct intel_guc *guc, u32 index)    in __get_lrc_desc() argument
    277  struct guc_lrc_desc *base = guc->lrc_desc_pool_vaddr;    in __get_lrc_desc()
    284  static inline struct intel_context *__get_context(struct intel_guc *guc, u32 id)    in __get_context() argument
    286  struct intel_context *ce = xa_load(&guc->context_lookup, id);    in __get_context()
    293  static int guc_lrc_desc_pool_create(struct intel_guc *guc)    in guc_lrc_desc_pool_create() argument
    300  ret = intel_guc_allocate_and_map_vma(guc, size, &guc->lrc_desc_pool,    in guc_lrc_desc_pool_create()
    301  (void **)&guc->lrc_desc_pool_vaddr);    in guc_lrc_desc_pool_create()
    308  static void guc_lrc_desc_pool_destroy(struct intel_guc *guc)    in guc_lrc_desc_pool_destroy() argument
    310  guc->lrc_desc_pool_vaddr = NULL;    in guc_lrc_desc_pool_destroy()
    [all …]
|
D | intel_guc_rc.c |
    10   static bool __guc_rc_supported(struct intel_guc *guc)    in __guc_rc_supported() argument
    13   return guc->submission_supported &&    in __guc_rc_supported()
    14   GRAPHICS_VER(guc_to_gt(guc)->i915) >= 12;    in __guc_rc_supported()
    17   static bool __guc_rc_selected(struct intel_guc *guc)    in __guc_rc_selected() argument
    19   if (!intel_guc_rc_is_supported(guc))    in __guc_rc_selected()
    22   return guc->submission_selected;    in __guc_rc_selected()
    25   void intel_guc_rc_init_early(struct intel_guc *guc)    in intel_guc_rc_init_early() argument
    27   guc->rc_supported = __guc_rc_supported(guc);    in intel_guc_rc_init_early()
    28   guc->rc_selected = __guc_rc_selected(guc);    in intel_guc_rc_init_early()
    31   static int guc_action_control_gucrc(struct intel_guc *guc, bool enable)    in guc_action_control_gucrc() argument
    [all …]
|
D | intel_guc_debugfs.c |
    19   struct intel_guc *guc = m->private;    in guc_info_show() local
    22   if (!intel_guc_is_supported(guc))    in guc_info_show()
    25   intel_guc_load_status(guc, &p);    in guc_info_show()
    27   intel_guc_log_info(&guc->log, &p);    in guc_info_show()
    29   if (!intel_guc_submission_is_used(guc))    in guc_info_show()
    32   intel_guc_ct_print_info(&guc->ct, &p);    in guc_info_show()
    33   intel_guc_submission_print_info(guc, &p);    in guc_info_show()
    34   intel_guc_ads_print_policy_info(guc, &p);    in guc_info_show()
    42   struct intel_guc *guc = m->private;    in guc_registered_contexts_show() local
    45   if (!intel_guc_submission_is_used(guc))    in guc_registered_contexts_show()
    [all …]
|
D | intel_guc_submission.h |
    16   void intel_guc_submission_init_early(struct intel_guc *guc);
    17   int intel_guc_submission_init(struct intel_guc *guc);
    18   void intel_guc_submission_enable(struct intel_guc *guc);
    19   void intel_guc_submission_disable(struct intel_guc *guc);
    20   void intel_guc_submission_fini(struct intel_guc *guc);
    21   int intel_guc_preempt_work_create(struct intel_guc *guc);
    22   void intel_guc_preempt_work_destroy(struct intel_guc *guc);
    24   void intel_guc_submission_print_info(struct intel_guc *guc,
    26   void intel_guc_submission_print_context_info(struct intel_guc *guc,
    34   int intel_guc_wait_for_pending_msg(struct intel_guc *guc,
    [all …]
|
D | intel_guc_rc.h |
    11   void intel_guc_rc_init_early(struct intel_guc *guc);
    13   static inline bool intel_guc_rc_is_supported(struct intel_guc *guc)    in intel_guc_rc_is_supported() argument
    15   return guc->rc_supported;    in intel_guc_rc_is_supported()
    18   static inline bool intel_guc_rc_is_wanted(struct intel_guc *guc)    in intel_guc_rc_is_wanted() argument
    20   return guc->submission_selected && intel_guc_rc_is_supported(guc);    in intel_guc_rc_is_wanted()
    23   static inline bool intel_guc_rc_is_used(struct intel_guc *guc)    in intel_guc_rc_is_used() argument
    25   return intel_guc_submission_is_used(guc) && intel_guc_rc_is_wanted(guc);    in intel_guc_rc_is_used()
    28   int intel_guc_rc_enable(struct intel_guc *guc);
    29   int intel_guc_rc_disable(struct intel_guc *guc);
|
D | intel_guc_slpc.c |
    25   static bool __detect_slpc_supported(struct intel_guc *guc)    in __detect_slpc_supported() argument
    28   return guc->submission_supported &&    in __detect_slpc_supported()
    29   GRAPHICS_VER(guc_to_gt(guc)->i915) >= 12;    in __detect_slpc_supported()
    32   static bool __guc_slpc_selected(struct intel_guc *guc)    in __guc_slpc_selected() argument
    34   if (!intel_guc_slpc_is_supported(guc))    in __guc_slpc_selected()
    37   return guc->submission_selected;    in __guc_slpc_selected()
    42   struct intel_guc *guc = slpc_to_guc(slpc);    in intel_guc_slpc_init_early() local
    44   slpc->supported = __detect_slpc_supported(guc);    in intel_guc_slpc_init_early()
    45   slpc->selected = __guc_slpc_selected(guc);    in intel_guc_slpc_init_early()
    84   struct intel_guc *guc = slpc_to_guc(slpc);    in intel_guc_slpc_init() local
    [all …]
|
D | intel_guc_log.c |
    24   static int guc_action_flush_log_complete(struct intel_guc *guc)    in guc_action_flush_log_complete() argument
    30   return intel_guc_send(guc, action, ARRAY_SIZE(action));    in guc_action_flush_log_complete()
    33   static int guc_action_flush_log(struct intel_guc *guc)    in guc_action_flush_log() argument
    40   return intel_guc_send(guc, action, ARRAY_SIZE(action));    in guc_action_flush_log()
    43   static int guc_action_control_log(struct intel_guc *guc, bool enable,    in guc_action_control_log() argument
    55   return intel_guc_send(guc, action, ARRAY_SIZE(action));    in guc_action_control_log()
    362  struct intel_guc *guc = log_to_guc(log);    in guc_log_relay_create() local
    363  struct drm_i915_private *dev_priv = guc_to_gt(guc)->i915;    in guc_log_relay_create()
    409  struct intel_guc *guc = log_to_guc(log);    in guc_log_capture_logs() local
    410  struct drm_i915_private *dev_priv = guc_to_gt(guc)->i915;    in guc_log_capture_logs()
    [all …]
|
D | intel_guc_slpc.h |
    15   static inline bool intel_guc_slpc_is_supported(struct intel_guc *guc)    in intel_guc_slpc_is_supported() argument
    17   return guc->slpc.supported;    in intel_guc_slpc_is_supported()
    20   static inline bool intel_guc_slpc_is_wanted(struct intel_guc *guc)    in intel_guc_slpc_is_wanted() argument
    22   return guc->slpc.selected;    in intel_guc_slpc_is_wanted()
    25   static inline bool intel_guc_slpc_is_used(struct intel_guc *guc)    in intel_guc_slpc_is_used() argument
    27   return intel_guc_submission_is_used(guc) && intel_guc_slpc_is_wanted(guc);    in intel_guc_slpc_is_used()
|
D | intel_guc_ads.h |
    12   int intel_guc_ads_create(struct intel_guc *guc);
    13   void intel_guc_ads_destroy(struct intel_guc *guc);
    14   void intel_guc_ads_init_late(struct intel_guc *guc);
    15   void intel_guc_ads_reset(struct intel_guc *guc);
    16   void intel_guc_ads_print_policy_info(struct intel_guc *guc,
|
D | intel_uc.h |
    30   struct intel_guc guc;    member
    85   uc_state_checkers(guc, guc);
    87   uc_state_checkers(guc, guc_submission);
    88   uc_state_checkers(guc, guc_slpc);
    89   uc_state_checkers(guc, guc_rc);
    96   return intel_guc_wait_for_idle(&uc->guc, timeout);    in intel_uc_wait_for_idle()
|
D | intel_guc_fw.c |
    131  int intel_guc_fw_upload(struct intel_guc *guc)    in intel_guc_fw_upload() argument
    133  struct intel_gt *gt = guc_to_gt(guc);    in intel_guc_fw_upload()
    144  guc_xfer_rsa(&guc->fw, uncore);    in intel_guc_fw_upload()
    150  ret = intel_uc_fw_upload(&guc->fw, 0x2000, UOS_MOVE);    in intel_guc_fw_upload()
    158  intel_uc_fw_change_status(&guc->fw, INTEL_UC_FIRMWARE_RUNNING);    in intel_guc_fw_upload()
    162  intel_uc_fw_change_status(&guc->fw, INTEL_UC_FIRMWARE_LOAD_FAIL);    in intel_guc_fw_upload()
|
D | intel_huc.c |
    60   struct intel_guc *guc = &gt->uc.guc;    in intel_huc_rsa_data_create() local
    81   vma = intel_guc_allocate_vma(guc, PAGE_SIZE);    in intel_huc_rsa_data_create()
    159  struct intel_guc *guc = &gt->uc.guc;    in intel_huc_auth() local
    171  ret = intel_guc_auth_huc(guc,    in intel_huc_auth()
    172  intel_guc_ggtt_offset(guc, huc->rsa_data));    in intel_huc_auth()
|
D | intel_guc_ct.c |
    159  static int guc_action_register_ct_buffer(struct intel_guc *guc, u32 type,    in guc_action_register_ct_buffer() argument
    176  return intel_guc_send_mmio(guc, request, ARRAY_SIZE(request), NULL, 0);    in guc_action_register_ct_buffer()
    196  static int guc_action_deregister_ct_buffer(struct intel_guc *guc, u32 type)    in guc_action_deregister_ct_buffer() argument
    208  return intel_guc_send_mmio(guc, request, ARRAY_SIZE(request), NULL, 0);    in guc_action_deregister_ct_buffer()
    231  struct intel_guc *guc = ct_to_guc(ct);    in intel_guc_ct_init() local
    240  err = i915_inject_probe_error(guc_to_gt(guc)->i915, -ENXIO);    in intel_guc_ct_init()
    247  err = intel_guc_allocate_and_map_vma(guc, blob_size, &ct->vma, &blob);    in intel_guc_ct_init()
    254  CT_DEBUG(ct, "base=%#x size=%u\n", intel_guc_ggtt_offset(guc, ct->vma), blob_size);    in intel_guc_ct_init()
    304  struct intel_guc *guc = ct_to_guc(ct);    in intel_guc_ct_enable() local
    314  base = intel_guc_ggtt_offset(guc, ct->vma);    in intel_guc_ct_enable()
    [all …]
|
D | intel_guc_fw.h |
    11   int intel_guc_fw_upload(struct intel_guc *guc);
|
D | intel_guc_debugfs.h |
    12   void intel_guc_debugfs_register(struct intel_guc *guc, struct dentry *root);
|
/drivers/gpu/drm/i915/ |
D | intel_wopcm.c |
    224  u32 guc_fw_size = intel_uc_fw_get_upload_size(&gt->uc.guc.fw);    in intel_wopcm_init()
    234  GEM_BUG_ON(wopcm->guc.base);    in intel_wopcm_init()
    235  GEM_BUG_ON(wopcm->guc.size);    in intel_wopcm_init()
    272  wopcm->guc.base = guc_wopcm_base;    in intel_wopcm_init()
    273  wopcm->guc.size = guc_wopcm_size;    in intel_wopcm_init()
    274  GEM_BUG_ON(!wopcm->guc.base);    in intel_wopcm_init()
    275  GEM_BUG_ON(!wopcm->guc.size);    in intel_wopcm_init()
|
D | intel_wopcm.h |
    24   } guc;    member
    39   return wopcm->guc.base;    in intel_wopcm_guc_base()
    54   return wopcm->guc.size;    in intel_wopcm_guc_size()
|
/drivers/gpu/drm/i915/gt/ |
D | intel_gt.h |
    27   static inline struct intel_gt *guc_to_gt(struct intel_guc *guc)    in guc_to_gt() argument
    29   return container_of(guc, struct intel_gt, uc.guc);    in guc_to_gt()
|
D | intel_gt_irq.c |
    17   static void guc_irq_handler(struct intel_guc *guc, u16 iir)    in guc_irq_handler() argument
    20   intel_guc_to_host_event_handler(guc);    in guc_irq_handler()
    62   return guc_irq_handler(&gt->uc.guc, iir);    in gen11_other_irq_handler()
    340  guc_irq_handler(&gt->uc.guc, iir >> 16);    in gen8_gt_irq_handler()
|
D | selftest_slpc.c |
    43   struct intel_guc_slpc *slpc = &gt->uc.guc.slpc;    in live_slpc_clamp_min()
    178  slpc = &gt->uc.guc.slpc;    in live_slpc_clamp_max()
|
/drivers/gpu/drm/i915/selftests/ |
D | intel_scheduler_helpers.c |
    56   err = intel_guc_global_policies_update(&engine->gt->uc.guc);    in intel_selftest_modify_policy()
    75   return intel_guc_global_policies_update(&engine->gt->uc.guc);    in intel_selftest_restore_policy()
|