
Searched refs: i915_mmio_reg_offset (Results 1 – 25 of 39), sorted by relevance
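For readers skimming these hits: i915_mmio_reg_offset() is the small helper that unwraps the driver's typed register handle (i915_reg_t) into its raw MMIO byte offset, which is why it shows up wherever a register is compared against an offset, emitted into a command stream, or added to an ioremapped base. A minimal sketch of the definitions involved, assuming the usual i915_reg_t wrapper from the i915 register headers:

    /* Minimal sketch, assuming the usual i915_reg_t wrapper: a register is a
     * 32-bit offset wrapped in a struct for type safety, and
     * i915_mmio_reg_offset() simply unwraps it again.
     */
    typedef struct {
            u32 reg;
    } i915_reg_t;

    #define _MMIO(r) ((const i915_reg_t){ .reg = (r) })

    static inline u32 i915_mmio_reg_offset(i915_reg_t reg)
    {
            return reg.reg;
    }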


/drivers/gpu/drm/i915/gvt/
interrupt.c
154 if (i915_mmio_reg_offset(irq->info[i]->reg_base) == reg) in regbase_to_irq_info()
332 regbase_to_iir(i915_mmio_reg_offset(info->reg_base))) in update_upstream_irq()
334 regbase_to_ier(i915_mmio_reg_offset(info->reg_base))); in update_upstream_irq()
361 u32 isr = i915_mmio_reg_offset(up_irq_info->reg_base); in update_upstream_irq()
367 i915_mmio_reg_offset(up_irq_info->reg_base)); in update_upstream_irq()
369 i915_mmio_reg_offset(up_irq_info->reg_base)); in update_upstream_irq()
415 reg_base = i915_mmio_reg_offset(info->reg_base); in propagate_event()
473 if (!(vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ)) & in gen8_check_pending_irq()
484 reg_base = i915_mmio_reg_offset(info->reg_base); in gen8_check_pending_irq()
490 if (vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ)) in gen8_check_pending_irq()
edid.c
381 if (offset == i915_mmio_reg_offset(PCH_GMBUS2)) in intel_gvt_i2c_handle_gmbus_read()
383 else if (offset == i915_mmio_reg_offset(PCH_GMBUS3)) in intel_gvt_i2c_handle_gmbus_read()
411 if (offset == i915_mmio_reg_offset(PCH_GMBUS0)) in intel_gvt_i2c_handle_gmbus_write()
413 else if (offset == i915_mmio_reg_offset(PCH_GMBUS1)) in intel_gvt_i2c_handle_gmbus_write()
415 else if (offset == i915_mmio_reg_offset(PCH_GMBUS2)) in intel_gvt_i2c_handle_gmbus_write()
417 else if (offset == i915_mmio_reg_offset(PCH_GMBUS3)) in intel_gvt_i2c_handle_gmbus_write()
scheduler.c
88 i915_mmio_reg_offset(EU_PERF_CNTL0), in sr_oa_regs()
89 i915_mmio_reg_offset(EU_PERF_CNTL1), in sr_oa_regs()
90 i915_mmio_reg_offset(EU_PERF_CNTL2), in sr_oa_regs()
91 i915_mmio_reg_offset(EU_PERF_CNTL3), in sr_oa_regs()
92 i915_mmio_reg_offset(EU_PERF_CNTL4), in sr_oa_regs()
93 i915_mmio_reg_offset(EU_PERF_CNTL5), in sr_oa_regs()
94 i915_mmio_reg_offset(EU_PERF_CNTL6), in sr_oa_regs()
110 i915_mmio_reg_offset(GEN8_OACTXCONTROL); in sr_oa_regs()
256 vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) = in save_ring_hw_state()
260 vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) = in save_ring_hw_state()
[all …]
mmio_context.c
222 *cs++ = i915_mmio_reg_offset(mmio->reg); in restore_context_mmio_for_inhibit()
252 *cs++ = i915_mmio_reg_offset(GEN9_GFX_MOCS(index)); in restore_render_mocs_control_for_inhibit()
279 *cs++ = i915_mmio_reg_offset(GEN9_LNCFCMOCS(index)); in restore_render_mocs_l3cc_for_inhibit()
535 i915_mmio_reg_offset(mmio->reg), in switch_mmio()
handlers.c
169 ((offset - i915_mmio_reg_offset(FENCE_REG_GEN6_LO(0))) >> 3)
172 (num * 8 + i915_mmio_reg_offset(FENCE_REG_GEN6_LO(0)))
533 reg_nonpriv != i915_mmio_reg_offset(RING_NOPID(engine->mmio_base))) { in force_nonpriv_write()
551 if (offset == i915_mmio_reg_offset(DDI_BUF_CTL(PORT_E))) in ddi_buf_ctl_mmio_write()
633 end = i915_mmio_reg_offset(i915_end); in calc_index()
828 if (reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_A))) in trigger_aux_channel_interrupt()
831 reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_B))) in trigger_aux_channel_interrupt()
834 reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_C))) in trigger_aux_channel_interrupt()
837 reg == i915_mmio_reg_offset(DP_AUX_CH_CTL(AUX_CH_D))) in trigger_aux_channel_interrupt()
1708 offset == i915_mmio_reg_offset(RING_TIMESTAMP(engine->mmio_base)) || in mmio_read_from_hw()
[all …]
gvt.h
437 (*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
441 (*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
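The two gvt.h hits above are the bodies of GVT-g's virtual-register accessors: the offset indexes into the vGPU's software copy of the register file (vgpu->mmio.vreg). Reconstructed from the fragments shown, with the macro names here being assumptions:

    /* Sketch reconstructed from the gvt.h fragments above; macro names are
     * illustrative. The u32/u64 variants read the vGPU's shadow register
     * block at the register's MMIO offset.
     */
    #define vgpu_vreg_t(vgpu, reg) \
            (*(u32 *)((vgpu)->mmio.vreg + i915_mmio_reg_offset(reg)))
    #define vgpu_vreg64_t(vgpu, reg) \
            (*(u64 *)((vgpu)->mmio.vreg + i915_mmio_reg_offset(reg)))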
/drivers/gpu/drm/i915/gt/
intel_workarounds.c
117 unsigned int addr = i915_mmio_reg_offset(wa->reg); in _wa_add()
145 if (i915_mmio_reg_offset(wal->list[mid].reg) < addr) { in _wa_add()
147 } else if (i915_mmio_reg_offset(wal->list[mid].reg) > addr) { in _wa_add()
154 i915_mmio_reg_offset(wa_->reg), in _wa_add()
173 GEM_BUG_ON(i915_mmio_reg_offset(wa_[0].reg) == in _wa_add()
174 i915_mmio_reg_offset(wa_[1].reg)); in _wa_add()
175 if (i915_mmio_reg_offset(wa_[1].reg) > in _wa_add()
176 i915_mmio_reg_offset(wa_[0].reg)) in _wa_add()
749 *cs++ = i915_mmio_reg_offset(wa->reg); in intel_engine_emit_ctx_wa()
1336 name, from, i915_mmio_reg_offset(wa->reg), in wa_verify()
[all …]
selftest_workarounds.c
159 *cs++ = i915_mmio_reg_offset(RING_FORCE_TO_NONPRIV(base, i)); in read_nonprivs()
186 return i915_mmio_reg_offset(reg); in get_whitelist_reg()
449 u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg); in whitelist_writable_count()
503 u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg); in check_dirty_whitelist()
821 u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg); in read_whitelisted_registers()
860 u32 reg = i915_mmio_reg_offset(engine->whitelist.list[i].reg); in scrub_whitelisted_registers()
919 u32 offset = i915_mmio_reg_offset(reg); in find_reg()
923 i915_mmio_reg_offset(tbl->reg) == offset) in find_reg()
947 i915_mmio_reg_offset(reg), a, b); in result_eq()
969 i915_mmio_reg_offset(reg), a); in result_neq()
[all …]
intel_ring_submission.c
634 *cs++ = i915_mmio_reg_offset(RING_PP_DIR_DCLV(engine->mmio_base)); in load_pd_dir()
638 *cs++ = i915_mmio_reg_offset(RING_PP_DIR_BASE(engine->mmio_base)); in load_pd_dir()
643 *cs++ = i915_mmio_reg_offset(RING_PP_DIR_BASE(engine->mmio_base)); in load_pd_dir()
648 *cs++ = i915_mmio_reg_offset(RING_INSTPM(engine->mmio_base)); in load_pd_dir()
696 *cs++ = i915_mmio_reg_offset( in mi_set_context()
751 *cs++ = i915_mmio_reg_offset(last_reg); in mi_set_context()
758 *cs++ = i915_mmio_reg_offset(last_reg); in mi_set_context()
792 *cs++ = i915_mmio_reg_offset(GEN7_L3LOG(slice, i)); in remap_l3_slice()
selftest_rps.c
99 *cs++ = i915_mmio_reg_offset(CS_GPR(i)); in create_spin_counter()
101 *cs++ = i915_mmio_reg_offset(CS_GPR(i)) + 4; in create_spin_counter()
106 *cs++ = i915_mmio_reg_offset(CS_GPR(INC)); in create_spin_counter()
121 *cs++ = i915_mmio_reg_offset(CS_GPR(COUNT)); in create_spin_counter()
201 i915_mmio_reg_offset(BXT_RP_STATE_CAP), in show_pstate_limits()
206 i915_mmio_reg_offset(GEN9_RP_STATE_LIMITS), in show_pstate_limits()
intel_lrc.c
3339 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_timestamp_wa()
3347 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_timestamp_wa()
3348 *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(0)); in gen12_emit_timestamp_wa()
3353 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_timestamp_wa()
3354 *cs++ = i915_mmio_reg_offset(RING_CTX_TIMESTAMP(0)); in gen12_emit_timestamp_wa()
3367 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_restore_scratch()
3383 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_cmd_buf_wa()
3391 *cs++ = i915_mmio_reg_offset(GEN8_RING_CS_GPR(0, 0)); in gen12_emit_cmd_buf_wa()
3392 *cs++ = i915_mmio_reg_offset(RING_CMD_BUF_CCTL(0)); in gen12_emit_cmd_buf_wa()
3641 *cs++ = i915_mmio_reg_offset(GEN8_RING_PDP_UDW(base, i)); in emit_pdps()
[all …]
selftest_mocs.c
172 u32 addr = i915_mmio_reg_offset(GEN9_LNCFCMOCS(0)); in read_l3cc_table()
211 u32 reg = i915_mmio_reg_offset(GEN9_LNCFCMOCS(0)); in check_l3cc_table()
gen7_renderclear.c
399 batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_0_GEN7)); in emit_batch()
404 batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_1)); in emit_batch()
selftest_lrc.c
1091 *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(rq->engine->mmio_base)); in create_rewinder()
4901 i915_mmio_reg_offset(RING_START(engine->mmio_base)), in live_lrc_fixed()
4906 i915_mmio_reg_offset(RING_CTL(engine->mmio_base)), in live_lrc_fixed()
4911 i915_mmio_reg_offset(RING_HEAD(engine->mmio_base)), in live_lrc_fixed()
4916 i915_mmio_reg_offset(RING_TAIL(engine->mmio_base)), in live_lrc_fixed()
4921 i915_mmio_reg_offset(RING_MI_MODE(engine->mmio_base)), in live_lrc_fixed()
4926 i915_mmio_reg_offset(RING_BBSTATE(engine->mmio_base)), in live_lrc_fixed()
4931 i915_mmio_reg_offset(RING_BB_PER_CTX_PTR(engine->mmio_base)), in live_lrc_fixed()
4936 i915_mmio_reg_offset(RING_INDIRECT_CTX(engine->mmio_base)), in live_lrc_fixed()
4941 i915_mmio_reg_offset(RING_INDIRECT_CTX_OFFSET(engine->mmio_base)), in live_lrc_fixed()
[all …]
selftest_rc6.c
139 *cs++ = i915_mmio_reg_offset(GEN8_RC6_CTX_INFO); in __live_rc6_ctx()
intel_rc6.c
732 i = (i915_mmio_reg_offset(reg) - in intel_rc6_residency_ns()
733 i915_mmio_reg_offset(GEN6_GT_GFX_RC6_LOCKED)) / sizeof(u32); in intel_rc6_residency_ns()
intel_gt.c
745 if (!i915_mmio_reg_offset(rb.reg)) in intel_gt_invalidate_tlbs()
768 if (!i915_mmio_reg_offset(rb.reg)) in intel_gt_invalidate_tlbs()
selftest_engine_cs.c
55 *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(rq->engine->mmio_base)); in write_timestamp()
/drivers/gpu/drm/i915/
i915_perf.c
1618 *cs++ = i915_mmio_reg_offset(reg) + 4 * d; in save_restore_register()
1689 *cs++ = i915_mmio_reg_offset(CS_GPR(START_TS)) + 4; in alloc_noa_wait()
1692 *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(base)); in alloc_noa_wait()
1693 *cs++ = i915_mmio_reg_offset(CS_GPR(START_TS)); in alloc_noa_wait()
1707 *cs++ = i915_mmio_reg_offset(CS_GPR(NOW_TS)) + 4; in alloc_noa_wait()
1710 *cs++ = i915_mmio_reg_offset(RING_TIMESTAMP(base)); in alloc_noa_wait()
1711 *cs++ = i915_mmio_reg_offset(CS_GPR(NOW_TS)); in alloc_noa_wait()
1730 *cs++ = i915_mmio_reg_offset(CS_GPR(JUMP_PREDICATE)); in alloc_noa_wait()
1731 *cs++ = i915_mmio_reg_offset(MI_PREDICATE_RESULT_1); in alloc_noa_wait()
1749 *cs++ = i915_mmio_reg_offset(CS_GPR(DELTA_TARGET)); in alloc_noa_wait()
[all …]
intel_uncore.h
272 return read##s__(uncore->regs + i915_mmio_reg_offset(reg)); \
279 write##s__(val, uncore->regs + i915_mmio_reg_offset(reg)); \
421 readl(base + i915_mmio_reg_offset(reg))
423 writel(value, base + i915_mmio_reg_offset(reg))
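The intel_uncore.h hits illustrate the core access pattern: the byte offset returned by i915_mmio_reg_offset() is added to the ioremapped MMIO base and handed to readl()/writel(). A hedged sketch of that pattern follows; the helper names below are placeholders, not necessarily the exact macros in the header:

    /* Illustrative helpers following the readl()/writel() pattern visible at
     * lines 421/423 above; names are placeholders.
     */
    static inline u32 raw_reg_read(void __iomem *base, i915_reg_t reg)
    {
            return readl(base + i915_mmio_reg_offset(reg));
    }

    static inline void raw_reg_write(void __iomem *base, i915_reg_t reg,
                                     u32 value)
    {
            writel(value, base + i915_mmio_reg_offset(reg));
    }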
intel_uncore.c
986 u32 offset = i915_mmio_reg_offset(*reg); in mmio_reg_cmp()
1204 i915_mmio_reg_offset(reg))) in __unclaimed_reg_debug()
1283 u32 offset = i915_mmio_reg_offset(reg); \
1338 return __##func##_reg_read_fw_domains(uncore, i915_mmio_reg_offset(reg)); \
1392 u32 offset = i915_mmio_reg_offset(reg); \
1431 return __##func##_reg_write_fw_domains(uncore, i915_mmio_reg_offset(reg)); \
1506 d->reg_set = uncore->regs + i915_mmio_reg_offset(reg_set); in __fw_domain_init()
1507 d->reg_ack = uncore->regs + i915_mmio_reg_offset(reg_ack); in __fw_domain_init()
1964 u32 entry_offset = i915_mmio_reg_offset(entry->offset_ldw); in i915_reg_read_ioctl()
/drivers/gpu/drm/i915/display/
intel_dsb.c
128 if (reg_val != i915_mmio_reg_offset(reg)) { in intel_dsb_indexed_reg_write()
140 i915_mmio_reg_offset(reg); in intel_dsb_indexed_reg_write()
193 i915_mmio_reg_offset(reg); in intel_dsb_reg_write()
/drivers/gpu/drm/i915/gt/uc/
intel_uc.c
390 i915_mmio_reg_offset(DMA_GUC_WOPCM_OFFSET), in uc_init_wopcm()
393 i915_mmio_reg_offset(GUC_WOPCM_SIZE), in uc_init_wopcm()
intel_guc.c
65 i915_mmio_reg_offset(GEN11_SOFT_SCRATCH(0)); in intel_guc_init_send_regs()
68 guc->send_regs.base = i915_mmio_reg_offset(SOFT_SCRATCH(0)); in intel_guc_init_send_regs()
/drivers/gpu/drm/i915/selftests/
intel_uncore.c
79 u32 offset = i915_mmio_reg_offset(*reg); in intel_shadow_table_check()
