/drivers/gpu/drm/i915/gvt/ |
D | interrupt.c |
      157  if (i915_mmio_reg_offset(irq->info[i]->reg_base) == reg)  in regbase_to_irq_info()
      335  regbase_to_iir(i915_mmio_reg_offset(info->reg_base)))  in update_upstream_irq()
      337  regbase_to_ier(i915_mmio_reg_offset(info->reg_base)));  in update_upstream_irq()
      364  u32 isr = i915_mmio_reg_offset(up_irq_info->reg_base);  in update_upstream_irq()
      370  i915_mmio_reg_offset(up_irq_info->reg_base));  in update_upstream_irq()
      372  i915_mmio_reg_offset(up_irq_info->reg_base));  in update_upstream_irq()
      453  reg_base = i915_mmio_reg_offset(info->reg_base);  in propagate_event()
      511  if (!(vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ)) &  in gen8_check_pending_irq()
      522  reg_base = i915_mmio_reg_offset(info->reg_base);  in gen8_check_pending_irq()
      528  if (vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ))  in gen8_check_pending_irq()
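All of the gvt/ hits above share one shape: GVT-g traps a guest MMIO access and matches the faulting offset against the raw offset of a known register. Below is a minimal userspace sketch of the lookup at interrupt.c:157; the struct is trimmed to the fields the comparison needs, and the table contents are illustrative assumptions (0x44200 is the real GEN8_MASTER_IRQ offset from i915_reg.h).

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* Trimmed stand-in for the driver's per-block IRQ info struct. */
    struct irq_info {
        const char *name;
        i915_reg_t reg_base; /* base of the ISR/IMR/IIR/IER block */
    };

    /* Match a trapped MMIO offset against each block's register base,
     * mirroring the comparison at interrupt.c:157 above. */
    static const struct irq_info *
    regbase_to_irq_info(const struct irq_info *info, size_t n, uint32_t reg)
    {
        for (size_t i = 0; i < n; i++)
            if (i915_mmio_reg_offset(info[i].reg_base) == reg)
                return &info[i];
        return NULL;
    }

    int main(void)
    {
        const struct irq_info table[] = {
            { "master", _MMIO(0x44200) }, /* GEN8_MASTER_IRQ */
        };
        const struct irq_info *hit = regbase_to_irq_info(table, 1, 0x44200);

        printf("%s\n", hit ? hit->name : "unclaimed");
        return 0;
    }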
|
D | edid.c |
      384  if (offset == i915_mmio_reg_offset(PCH_GMBUS2))  in intel_gvt_i2c_handle_gmbus_read()
      386  else if (offset == i915_mmio_reg_offset(PCH_GMBUS3))  in intel_gvt_i2c_handle_gmbus_read()
      414  if (offset == i915_mmio_reg_offset(PCH_GMBUS0))  in intel_gvt_i2c_handle_gmbus_write()
      416  else if (offset == i915_mmio_reg_offset(PCH_GMBUS1))  in intel_gvt_i2c_handle_gmbus_write()
      418  else if (offset == i915_mmio_reg_offset(PCH_GMBUS2))  in intel_gvt_i2c_handle_gmbus_write()
      420  else if (offset == i915_mmio_reg_offset(PCH_GMBUS3))  in intel_gvt_i2c_handle_gmbus_write()
|
D | scheduler.c |
       92  i915_mmio_reg_offset(EU_PERF_CNTL0),  in sr_oa_regs()
       93  i915_mmio_reg_offset(EU_PERF_CNTL1),  in sr_oa_regs()
       94  i915_mmio_reg_offset(EU_PERF_CNTL2),  in sr_oa_regs()
       95  i915_mmio_reg_offset(EU_PERF_CNTL3),  in sr_oa_regs()
       96  i915_mmio_reg_offset(EU_PERF_CNTL4),  in sr_oa_regs()
       97  i915_mmio_reg_offset(EU_PERF_CNTL5),  in sr_oa_regs()
       98  i915_mmio_reg_offset(EU_PERF_CNTL6),  in sr_oa_regs()
      114  i915_mmio_reg_offset(GEN8_OACTXCONTROL);  in sr_oa_regs()
      274  vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) =  in save_ring_hw_state()
      278  vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) =  in save_ring_hw_state()
      [all …]
|
D | mmio_context.c |
      228  *cs++ = i915_mmio_reg_offset(mmio->reg);  in restore_context_mmio_for_inhibit()
      258  *cs++ = i915_mmio_reg_offset(GEN9_GFX_MOCS(index));  in restore_render_mocs_control_for_inhibit()
      285  *cs++ = i915_mmio_reg_offset(GEN9_LNCFCMOCS(index));  in restore_render_mocs_l3cc_for_inhibit()
      541  i915_mmio_reg_offset(mmio->reg),  in switch_mmio()
|
D | handlers.c |
      167  ((offset - i915_mmio_reg_offset(FENCE_REG_GEN6_LO(0))) >> 3)
      170  (num * 8 + i915_mmio_reg_offset(FENCE_REG_GEN6_LO(0)))
      784  reg_nonpriv != i915_mmio_reg_offset(RING_NOPID(engine->mmio_base))) {  in force_nonpriv_write()
      802  if (offset == i915_mmio_reg_offset(DDI_BUF_CTL(PORT_E)))  in ddi_buf_ctl_mmio_write()
      884  end = i915_mmio_reg_offset(i915_end);  in calc_index()
     1079  if (reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_A)))  in trigger_aux_channel_interrupt()
     1082  reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_B)))  in trigger_aux_channel_interrupt()
     1085  reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_C)))  in trigger_aux_channel_interrupt()
     1088  reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_D)))  in trigger_aux_channel_interrupt()
     1959  offset == i915_mmio_reg_offset(RING_TIMESTAMP(engine->mmio_base)) ||  in mmio_read_from_hw()
      [all …]
|
D | gvt.h |
      466  (*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
      470  (*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
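gvt.h resolves the same offsets against a software copy of the register file rather than real hardware: `vreg` is a per-vGPU shadow, and the macros above turn base-plus-offset into a u32/u64 lvalue. A self-contained sketch of that pattern follows, with the struct cut down and the 2 MiB shadow size assumed for illustration.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* Cut-down stand-in for struct intel_vgpu: vreg shadows the
     * guest-visible register file in plain memory. */
    struct intel_vgpu {
        struct {
            uint8_t *vreg;
        } mmio;
    };

    /* Same shape as the gvt.h macros above: index the shadow by raw
     * offset and reinterpret the bytes as a writable u32 lvalue. */
    #define vgpu_vreg(vgpu, offset) \
        (*(uint32_t *)((vgpu)->mmio.vreg + (offset)))
    #define vgpu_vreg_t(vgpu, reg) \
        vgpu_vreg(vgpu, i915_mmio_reg_offset(reg))

    int main(void)
    {
        struct intel_vgpu vgpu = { .mmio.vreg = calloc(1, 2 << 20) };
        i915_reg_t reg = _MMIO(0x44200); /* GEN8_MASTER_IRQ */

        vgpu_vreg_t(&vgpu, reg) = 0x80000000u; /* emulated guest write */
        printf("0x%08x\n", vgpu_vreg_t(&vgpu, reg));
        free(vgpu.mmio.vreg);
        return 0;
    }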
|
/drivers/gpu/drm/i915/gt/ |
D | selftest_lrc.c |
      304  i915_mmio_reg_offset(RING_START(engine->mmio_base)),  in live_lrc_fixed()
      309  i915_mmio_reg_offset(RING_CTL(engine->mmio_base)),  in live_lrc_fixed()
      314  i915_mmio_reg_offset(RING_HEAD(engine->mmio_base)),  in live_lrc_fixed()
      319  i915_mmio_reg_offset(RING_TAIL(engine->mmio_base)),  in live_lrc_fixed()
      324  i915_mmio_reg_offset(RING_MI_MODE(engine->mmio_base)),  in live_lrc_fixed()
      329  i915_mmio_reg_offset(RING_BBSTATE(engine->mmio_base)),  in live_lrc_fixed()
      334  i915_mmio_reg_offset(RING_BB_PER_CTX_PTR(engine->mmio_base)),  in live_lrc_fixed()
      339  i915_mmio_reg_offset(RING_INDIRECT_CTX(engine->mmio_base)),  in live_lrc_fixed()
      344  i915_mmio_reg_offset(RING_INDIRECT_CTX_OFFSET(engine->mmio_base)),  in live_lrc_fixed()
      349  i915_mmio_reg_offset(RING_CTX_TIMESTAMP(engine->mmio_base)),  in live_lrc_fixed()
      [all …]
|
D | intel_lrc.c |
     1261  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_timestamp_wa()
     1269  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_timestamp_wa()
     1270  *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(0));  in gen12_emit_timestamp_wa()
     1275  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_timestamp_wa()
     1276  *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(0));  in gen12_emit_timestamp_wa()
     1289  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_restore_scratch()
     1305  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_cmd_buf_wa()
     1313  *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0));  in gen12_emit_cmd_buf_wa()
     1314  *cs++ = i915_mmio_reg_offset(RING_CMD_BUF_CCTL(0));  in gen12_emit_cmd_buf_wa()
     1330  *cs++ = i915_mmio_reg_offset(DRAW_WATERMARK);  in dg2_emit_draw_watermark_setting()
      [all …]
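The `*cs++ = i915_mmio_reg_offset(...)` hits throughout gt/ are not CPU accesses at all: the raw offset is emitted into a command buffer so the GPU's command streamer performs the register access itself. A hedged sketch of the most common encoding, MI_LOAD_REGISTER_IMM, is below; the opcode layout matches intel_gpu_commands.h, while the register offset used in main() is only an example (CS GPR0 on the render ring).

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* MI_LOAD_REGISTER_IMM(n): opcode 0x22 in bits 28:23, dword length
     * 2n-1 in the low bits; the body is n (offset, value) pairs. */
    #define MI_INSTR(opcode, flags) (((opcode) << 23) | (flags))
    #define MI_LOAD_REGISTER_IMM(x) MI_INSTR(0x22, 2 * (x) - 1)

    /* Emit one (offset, value) pair the way the call sites above do:
     * the register's raw offset goes straight into the ring. */
    static uint32_t *emit_lri(uint32_t *cs, i915_reg_t reg, uint32_t value)
    {
        *cs++ = MI_LOAD_REGISTER_IMM(1);
        *cs++ = i915_mmio_reg_offset(reg);
        *cs++ = value;
        return cs;
    }

    int main(void)
    {
        uint32_t batch[4], *cs = batch;

        cs = emit_lri(cs, _MMIO(0x2600), 0); /* example: render CS GPR0 */
        for (uint32_t *p = batch; p < cs; p++)
            printf("0x%08x\n", *p);
        return 0;
    }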
|
D | selftest_workarounds.c |
      157  *cs++ = i915_mmio_reg_offset(RING_FORCE_TO_NONPRIV(base, i));  in read_nonprivs()
      184  return i915_mmio_reg_offset(reg);  in get_whitelist_reg()
      465  u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg);  in whitelist_writable_count()
      520  u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg);  in check_dirty_whitelist()
      870  u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg);  in read_whitelisted_registers()
      906  u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg);  in scrub_whitelisted_registers()
      961  u32 offset = i915_mmio_reg_offset(reg);  in find_reg()
      965  i915_mmio_reg_offset(tbl->reg) == offset)  in find_reg()
      989  i915_mmio_reg_offset(reg), a, b);  in result_eq()
     1011  i915_mmio_reg_offset(reg), a);  in result_neq()
      [all …]
|
D | intel_workarounds.c |
      146  unsigned int addr = i915_mmio_reg_offset(wa->reg);  in _wa_add()
      175  if (i915_mmio_reg_offset(wal->list[mid].reg) < addr) {  in _wa_add()
      177  } else if (i915_mmio_reg_offset(wal->list[mid].reg) > addr) {  in _wa_add()
      185  i915_mmio_reg_offset(wa_->reg),  in _wa_add()
      204  GEM_BUG_ON(i915_mmio_reg_offset(wa_[0].reg) ==  in _wa_add()
      205  i915_mmio_reg_offset(wa_[1].reg));  in _wa_add()
      206  if (i915_mmio_reg_offset(wa_[1].reg) >  in _wa_add()
      207  i915_mmio_reg_offset(wa_[0].reg))  in _wa_add()
     1006  *cs++ = i915_mmio_reg_offset(wa->reg);  in intel_engine_emit_ctx_wa()
     1792  name, from, i915_mmio_reg_offset(wa->reg),  in wa_verify()
      [all …]
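intel_workarounds.c keeps its workaround list sorted by register offset, which is what the mid/addr comparisons at lines 175-207 above implement: a binary search on insert, so a second workaround for the same register is found and merged rather than appended. A cut-down sketch of that search is below; the struct fields and table contents are assumptions for illustration.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* Trimmed stand-in for struct i915_wa. */
    struct i915_wa {
        i915_reg_t reg;
        uint32_t clr, set; /* bits to clear and set, unused here */
    };

    /* Binary search a list sorted by register offset, echoing the
     * lo/mid/hi comparisons in _wa_add() above. */
    static struct i915_wa *wa_find(struct i915_wa *list, size_t count,
                                   uint32_t addr)
    {
        size_t lo = 0, hi = count;

        while (lo < hi) {
            size_t mid = lo + (hi - lo) / 2;

            if (i915_mmio_reg_offset(list[mid].reg) < addr)
                lo = mid + 1;
            else if (i915_mmio_reg_offset(list[mid].reg) > addr)
                hi = mid;
            else
                return &list[mid];
        }
        return NULL;
    }

    int main(void)
    {
        struct i915_wa list[] = { /* example offsets, kept sorted */
            { .reg = _MMIO(0x2080) },
            { .reg = _MMIO(0x7004) },
            { .reg = _MMIO(0xe4f0) },
        };

        printf("%s\n", wa_find(list, 3, 0x7004) ? "found" : "missing");
        return 0;
    }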
|
D | intel_ring_submission.c |
      666  *cs++ = i915_mmio_reg_offset(RING_PP_DIR_DCLV(engine->mmio_base));  in load_pd_dir()
      670  *cs++ = i915_mmio_reg_offset(RING_PP_DIR_BASE(engine->mmio_base));  in load_pd_dir()
      675  *cs++ = i915_mmio_reg_offset(RING_PP_DIR_BASE(engine->mmio_base));  in load_pd_dir()
      680  *cs++ = i915_mmio_reg_offset(RING_INSTPM(engine->mmio_base));  in load_pd_dir()
      728  *cs++ = i915_mmio_reg_offset(  in mi_set_context()
      783  *cs++ = i915_mmio_reg_offset(last_reg);  in mi_set_context()
      790  *cs++ = i915_mmio_reg_offset(last_reg);  in mi_set_context()
      825  *cs++ = i915_mmio_reg_offset(GEN7_L3LOG(slice, i));  in remap_l3_slice()
|
D | selftest_rps.c |
      103  *cs++ = i915_mmio_reg_offset(CS_GPR(i));  in create_spin_counter()
      105  *cs++ = i915_mmio_reg_offset(CS_GPR(i)) + 4;  in create_spin_counter()
      110  *cs++ = i915_mmio_reg_offset(CS_GPR(INC));  in create_spin_counter()
      125  *cs++ = i915_mmio_reg_offset(CS_GPR(COUNT));  in create_spin_counter()
      208  i915_mmio_reg_offset(BXT_RP_STATE_CAP),  in show_pstate_limits()
      213  i915_mmio_reg_offset(GEN9_RP_STATE_LIMITS),  in show_pstate_limits()
|
D | gen8_engine_cs.c |
      211  *cs++ = i915_mmio_reg_offset(inv_reg) + gsi_offset;  in gen12_emit_aux_table_inv()
      219  *cs++ = i915_mmio_reg_offset(inv_reg) + gsi_offset;  in gen12_emit_aux_table_inv()
      503  *cs++ = i915_mmio_reg_offset(RING_PREDICATE_RESULT(0));  in __xehp_emit_bb_start()
|
D | selftest_mocs.c |
      152  u32 addr = i915_mmio_reg_offset(GEN9_LNCFCMOCS(0));  in read_l3cc_table()
      197  u32 reg = i915_mmio_reg_offset(GEN9_LNCFCMOCS(0));  in check_l3cc_table()
|
D | gen7_renderclear.c |
      400  batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_0_GEN7));  in emit_batch()
      405  batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_1));  in emit_batch()
|
D | selftest_rc6.c |
      147  *cs++ = i915_mmio_reg_offset(GEN8_RC6_CTX_INFO);  in __live_rc6_ctx()
|
/drivers/gpu/drm/i915/ |
D | i915_reg_defs.h |
      282  #define i915_mmio_reg_offset(r) \  macro
      284  #define i915_mmio_reg_equal(a, b) (i915_mmio_reg_offset(a) == i915_mmio_reg_offset(b))
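This is the definition site: i915_mmio_reg_offset() unwraps the driver's typed register handle (i915_reg_t, a struct around a bare u32) back to its raw offset, so a plain integer can never be passed where a register is expected by mistake. A minimal model of the type and both macros follows; note the real header routes the unwrap through extra compile-time checking that this sketch omits.

    #include <stdint.h>
    #include <stdio.h>

    /* Typed register handle, as in i915_reg_defs.h: wrapping the offset
     * in a single-member struct makes raw integers non-interchangeable. */
    typedef struct { uint32_t reg; } i915_reg_t;

    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })

    /* Unwrap back to the raw u32 offset (the real macro adds checks). */
    #define i915_mmio_reg_offset(r) ((r).reg)
    #define i915_mmio_reg_equal(a, b) \
        (i915_mmio_reg_offset(a) == i915_mmio_reg_offset(b))

    #define GEN8_MASTER_IRQ _MMIO(0x44200) /* offset from i915_reg.h */

    int main(void)
    {
        printf("offset = 0x%05x\n",
               (unsigned)i915_mmio_reg_offset(GEN8_MASTER_IRQ));
        printf("equal  = %d\n",
               i915_mmio_reg_equal(GEN8_MASTER_IRQ, _MMIO(0x44200)));
        return 0;
    }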
|
D | intel_uncore.h |
      329  u32 offset = i915_mmio_reg_offset(reg); \
      339  u32 offset = i915_mmio_reg_offset(reg); \
      517  readl(base + i915_mmio_reg_offset(reg))
      519  writel(value, base + i915_mmio_reg_offset(reg))
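Here the offset finally meets hardware: the raw accessors at lines 517/519 add it to the ioremapped BAR and issue readl()/writel(). Below is a userspace model of that arithmetic, with a plain byte array standing in for the mapping that pci_iomap() would return in the kernel.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* Stand-in for the ioremapped MMIO BAR. */
    static uint8_t mmio_bar[0x100000];

    /* Model of readl()/writel() against the fake BAR: the register's
     * raw offset is simply added to the mapping base. */
    static uint32_t raw_reg_read(uint8_t *base, i915_reg_t reg)
    {
        uint32_t val;

        memcpy(&val, base + i915_mmio_reg_offset(reg), sizeof(val));
        return val;
    }

    static void raw_reg_write(uint8_t *base, i915_reg_t reg, uint32_t value)
    {
        memcpy(base + i915_mmio_reg_offset(reg), &value, sizeof(value));
    }

    int main(void)
    {
        i915_reg_t reg = _MMIO(0x44200); /* GEN8_MASTER_IRQ */

        raw_reg_write(mmio_bar, reg, 0x80000000u);
        printf("0x%08x\n", raw_reg_read(mmio_bar, reg));
        return 0;
    }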
|
D | i915_perf.c |
     1358  *cs++ = i915_mmio_reg_offset(reg);  in __store_reg_to_mem()
     1521  offset = oa_context_image_offset(ce, i915_mmio_reg_offset(reg));  in set_oa_ctx_ctrl_offset()
     1957  *cs++ = i915_mmio_reg_offset(reg) + 4 * d;  in save_restore_register()
     2054  *cs++ = i915_mmio_reg_offset(CS_GPR(START_TS)) + 4;  in alloc_noa_wait()
     2057  *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(base));  in alloc_noa_wait()
     2058  *cs++ = i915_mmio_reg_offset(CS_GPR(START_TS));  in alloc_noa_wait()
     2072  *cs++ = i915_mmio_reg_offset(CS_GPR(NOW_TS)) + 4;  in alloc_noa_wait()
     2075  *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(base));  in alloc_noa_wait()
     2076  *cs++ = i915_mmio_reg_offset(CS_GPR(NOW_TS));  in alloc_noa_wait()
     2095  *cs++ = i915_mmio_reg_offset(CS_GPR(JUMP_PREDICATE));  in alloc_noa_wait()
      [all …]
|
D | i915_ioctl.c |
       55  u32 entry_offset = i915_mmio_reg_offset(entry->offset_ldw);  in i915_reg_read_ioctl()
|
D | intel_device_info.c |
      342  ip_ver_read(i915, i915_mmio_reg_offset(GMD_ID_GRAPHICS),  in intel_ipver_early_init()
      350  ip_ver_read(i915, i915_mmio_reg_offset(GMD_ID_MEDIA),  in intel_ipver_early_init()
|
D | intel_uncore.c |
     1911  i915_mmio_reg_offset(reg)))  in __unclaimed_reg_debug()
     1925  i915_mmio_reg_offset(reg));  in __unclaimed_previous_reg_debug()
     2008  u32 offset = i915_mmio_reg_offset(reg); \
     2065  return __fwtable_reg_read_fw_domains(uncore, i915_mmio_reg_offset(reg));  in fwtable_reg_read_fw_domains()
     2114  u32 offset = i915_mmio_reg_offset(reg); \
     2155  return __fwtable_reg_write_fw_domains(uncore, i915_mmio_reg_offset(reg));  in fwtable_reg_write_fw_domains()
     2225  d->reg_set = uncore->regs + i915_mmio_reg_offset(reg_set) + uncore->gsi_offset;  in __fw_domain_init()
     2226  d->reg_ack = uncore->regs + i915_mmio_reg_offset(reg_ack) + uncore->gsi_offset;  in __fw_domain_init()
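The __fw_domain_init() hits at lines 2225-2226 show the offset arithmetic done once up front: each forcewake domain caches absolute pointers to its wake-request and ack registers, so the wake/ack hot path does no per-access math. A trimmed sketch under the same modeling assumptions as above; the structs are cut down and the register offsets in main() are examples only.

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t reg; } i915_reg_t;
    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })
    #define i915_mmio_reg_offset(r) ((r).reg)

    /* Trimmed stand-ins for struct intel_uncore and a forcewake domain. */
    struct uncore {
        uint8_t *regs;       /* ioremapped MMIO base */
        uint32_t gsi_offset; /* extra offset used on standalone-media GTs */
    };

    struct fw_domain {
        uint8_t *reg_set; /* precomputed pointer to the wake-request reg */
        uint8_t *reg_ack; /* precomputed pointer to the ack reg */
    };

    /* Resolve both registers to absolute pointers once, echoing
     * __fw_domain_init() above. */
    static void fw_domain_init(struct fw_domain *d, struct uncore *uncore,
                               i915_reg_t reg_set, i915_reg_t reg_ack)
    {
        d->reg_set = uncore->regs + i915_mmio_reg_offset(reg_set) +
                     uncore->gsi_offset;
        d->reg_ack = uncore->regs + i915_mmio_reg_offset(reg_ack) +
                     uncore->gsi_offset;
    }

    int main(void)
    {
        static uint8_t bar[0x200000];
        struct uncore uncore = { .regs = bar, .gsi_offset = 0 };
        struct fw_domain d;

        fw_domain_init(&d, &uncore, _MMIO(0xa188), _MMIO(0x130044));
        printf("%p %p\n", (void *)d.reg_set, (void *)d.reg_ack);
        return 0;
    }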
|
/drivers/gpu/drm/i915/display/ |
D | intel_dsb.c |
      127  return prev_opcode == opcode && prev_reg == i915_mmio_reg_offset(reg);  in intel_dsb_prev_ins_is_write()
      173  i915_mmio_reg_offset(reg));  in intel_dsb_reg_write()
      187  i915_mmio_reg_offset(reg);  in intel_dsb_reg_write()
|
/drivers/gpu/drm/i915/gt/uc/ |
D | intel_guc.c |
      182  guc->send_regs.base = i915_mmio_reg_offset(MEDIA_SOFT_SCRATCH(0));  in intel_guc_init_early()
      185  guc->send_regs.base = i915_mmio_reg_offset(GEN11_SOFT_SCRATCH(0));  in intel_guc_init_early()
      195  guc->send_regs.base = i915_mmio_reg_offset(SOFT_SCRATCH(0));  in intel_guc_init_early()
|
D | intel_uc.c |
      415  i915_mmio_reg_offset(DMA_GUC_WOPCM_OFFSET),  in uc_init_wopcm()
      418  i915_mmio_reg_offset(GUC_WOPCM_SIZE),  in uc_init_wopcm()
|