/drivers/gpu/drm/radeon/ |
D | radeon_bios.c |
    256 bus_cntl = RREG32(R600_BUS_CNTL); in ni_read_disabled_bios()
    257 d1vga_control = RREG32(AVIVO_D1VGA_CONTROL); in ni_read_disabled_bios()
    258 d2vga_control = RREG32(AVIVO_D2VGA_CONTROL); in ni_read_disabled_bios()
    259 vga_render_control = RREG32(AVIVO_VGA_RENDER_CONTROL); in ni_read_disabled_bios()
    260 rom_cntl = RREG32(R600_ROM_CNTL); in ni_read_disabled_bios()
    302 viph_control = RREG32(RADEON_VIPH_CONTROL); in r700_read_disabled_bios()
    303 bus_cntl = RREG32(R600_BUS_CNTL); in r700_read_disabled_bios()
    304 d1vga_control = RREG32(AVIVO_D1VGA_CONTROL); in r700_read_disabled_bios()
    305 d2vga_control = RREG32(AVIVO_D2VGA_CONTROL); in r700_read_disabled_bios()
    306 vga_render_control = RREG32(AVIVO_VGA_RENDER_CONTROL); in r700_read_disabled_bios()
    [all …]
|
D | radeon_legacy_encoders.c |
    57 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); in radeon_legacy_lvds_update()
    85 disp_pwr_man = RREG32(RADEON_DISP_PWR_MAN); in radeon_legacy_lvds_update()
    88 lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL); in radeon_legacy_lvds_update()
    93 lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL); in radeon_legacy_lvds_update()
    188 lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL); in radeon_legacy_lvds_mode_set()
    191 lvds_ss_gen_cntl = RREG32(RADEON_LVDS_SS_GEN_CNTL); in radeon_legacy_lvds_mode_set()
    198 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); in radeon_legacy_lvds_mode_set()
    209 lvds_gen_cntl = RREG32(RADEON_LVDS_GEN_CNTL); in radeon_legacy_lvds_mode_set()
    279 backlight_level = (RREG32(RADEON_LVDS_GEN_CNTL) >> in radeon_legacy_get_backlight_level()
    353 backlight_level = (RREG32(RADEON_LVDS_GEN_CNTL) >> in radeon_legacy_backlight_get_brightness()
    [all …]
|
D | radeon_i2c.c |
    129 temp = RREG32(rec->mask_clk_reg); in pre_xfer()
    135 temp = RREG32(rec->a_clk_reg) & ~rec->a_clk_mask; in pre_xfer()
    138 temp = RREG32(rec->a_data_reg) & ~rec->a_data_mask; in pre_xfer()
    142 temp = RREG32(rec->en_clk_reg) & ~rec->en_clk_mask; in pre_xfer()
    145 temp = RREG32(rec->en_data_reg) & ~rec->en_data_mask; in pre_xfer()
    149 temp = RREG32(rec->mask_clk_reg) | rec->mask_clk_mask; in pre_xfer()
    151 temp = RREG32(rec->mask_clk_reg); in pre_xfer()
    153 temp = RREG32(rec->mask_data_reg) | rec->mask_data_mask; in pre_xfer()
    155 temp = RREG32(rec->mask_data_reg); in pre_xfer()
    168 temp = RREG32(rec->mask_clk_reg) & ~rec->mask_clk_mask; in post_xfer()
    [all …]
|
D | vce_v2_0.c |
    43 tmp = RREG32(VCE_CLOCK_GATING_B); in vce_v2_0_set_sw_cg()
    47 tmp = RREG32(VCE_UENC_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    51 tmp = RREG32(VCE_UENC_REG_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    57 tmp = RREG32(VCE_CLOCK_GATING_B); in vce_v2_0_set_sw_cg()
    62 tmp = RREG32(VCE_UENC_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    67 tmp = RREG32(VCE_UENC_REG_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    77 tmp = RREG32(VCE_CLOCK_GATING_B); in vce_v2_0_set_dyn_cg()
    87 orig = tmp = RREG32(VCE_UENC_CLOCK_GATING); in vce_v2_0_set_dyn_cg()
    93 orig = tmp = RREG32(VCE_UENC_REG_CLOCK_GATING); in vce_v2_0_set_dyn_cg()
    130 tmp = RREG32(VCE_CLOCK_GATING_A); in vce_v2_0_init_cg()
    [all …]
|
D | rs600.c |
    58 if (RREG32(AVIVO_D1CRTC_STATUS + crtc_offsets[crtc]) & AVIVO_D1CRTC_V_BLANK) in avivo_is_in_vblank()
    68 pos1 = RREG32(AVIVO_D1CRTC_STATUS_POSITION + crtc_offsets[crtc]); in avivo_is_counter_moving()
    69 pos2 = RREG32(AVIVO_D1CRTC_STATUS_POSITION + crtc_offsets[crtc]); in avivo_is_counter_moving()
    92 if (!(RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[crtc]) & AVIVO_CRTC_EN)) in avivo_wait_for_vblank()
    116 u32 tmp = RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset); in rs600_page_flip()
    133 if (RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & AVIVO_D1GRPH_SURFACE_UPDATE_PENDING) in rs600_page_flip()
    149 return !!(RREG32(AVIVO_D1GRPH_UPDATE + radeon_crtc->crtc_offset) & in rs600_page_flip_pending()
    227 tmp = RREG32(voltage->gpio.reg); in rs600_pm_misc()
    236 tmp = RREG32(voltage->gpio.reg); in rs600_pm_misc()
    322 tmp = RREG32(AVIVO_D1CRTC_CONTROL + radeon_crtc->crtc_offset); in rs600_pm_prepare()
    [all …]
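The avivo_is_counter_moving() hits above read AVIVO_D1CRTC_STATUS_POSITION twice in a row: a single sample cannot tell a parked counter from a moving one, so the helper compares two samples before anything spins waiting for vblank. Below is a minimal user-space sketch of that double-read check; RREG32 is stubbed as a function over a fake register file and the CRTC_STATUS_POSITION offset is invented for the example, so this models the idiom rather than the real MMIO path.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Fake register file so the sketch runs in user space; the real driver
 * reads the device's MMIO aperture through its RREG32 macro. */
static uint32_t fake_regs[0x100];
static bool crtc_running;

/* Hypothetical offset standing in for AVIVO_D1CRTC_STATUS_POSITION. */
#define CRTC_STATUS_POSITION 0x10

static uint32_t RREG32(uint32_t reg)
{
	/* Emulate a scanning CRTC: the position advances between reads. */
	if (reg == CRTC_STATUS_POSITION && crtc_running)
		fake_regs[reg]++;
	return fake_regs[reg];
}

/*
 * Same shape as avivo_is_counter_moving(): sample the position register
 * twice; identical samples mean the counter is stalled (e.g. the CRTC is
 * disabled), so callers must not spin waiting for a vblank that never comes.
 */
static bool crtc_counter_moving(void)
{
	uint32_t pos1 = RREG32(CRTC_STATUS_POSITION);
	uint32_t pos2 = RREG32(CRTC_STATUS_POSITION);

	return pos1 != pos2;
}

int main(void)
{
	crtc_running = false;
	printf("stalled CRTC -> moving: %d\n", crtc_counter_moving()); /* 0 */

	crtc_running = true;
	printf("running CRTC -> moving: %d\n", crtc_counter_moving()); /* 1 */
	return 0;
}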
|
D | vce_v1_0.c |
    63 return RREG32(VCE_RB_RPTR); in vce_v1_0_get_rptr()
    65 return RREG32(VCE_RB_RPTR2); in vce_v1_0_get_rptr()
    80 return RREG32(VCE_RB_WPTR); in vce_v1_0_get_wptr()
    82 return RREG32(VCE_RB_WPTR2); in vce_v1_0_get_wptr()
    107 tmp = RREG32(VCE_CLOCK_GATING_A); in vce_v1_0_enable_mgcg()
    111 tmp = RREG32(VCE_UENC_CLOCK_GATING); in vce_v1_0_enable_mgcg()
    116 tmp = RREG32(VCE_UENC_REG_CLOCK_GATING); in vce_v1_0_enable_mgcg()
    120 tmp = RREG32(VCE_CLOCK_GATING_A); in vce_v1_0_enable_mgcg()
    124 tmp = RREG32(VCE_UENC_CLOCK_GATING); in vce_v1_0_enable_mgcg()
    129 tmp = RREG32(VCE_UENC_REG_CLOCK_GATING); in vce_v1_0_enable_mgcg()
    [all …]
|
D | r600.c |
    121 r = RREG32(R600_RCU_DATA); in r600_rcu_rreg()
    143 r = RREG32(R600_UVD_CTX_DATA); in r600_uvd_ctx_rreg()
    177 *val = RREG32(reg); in r600_get_allowed_info_register()
    346 u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >> in rv6xx_get_temp()
    791 if (RREG32(GRBM_STATUS) & GUI_ACTIVE) in r600_gui_idle()
    805 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE) in r600_hpd_sense()
    809 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE) in r600_hpd_sense()
    813 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE) in r600_hpd_sense()
    817 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE) in r600_hpd_sense()
    822 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE) in r600_hpd_sense()
    [all …]
|
D | rv730_dpm.c |
    203 RREG32(CG_SPLL_FUNC_CNTL); in rv730_read_clock_registers()
    205 RREG32(CG_SPLL_FUNC_CNTL_2); in rv730_read_clock_registers()
    207 RREG32(CG_SPLL_FUNC_CNTL_3); in rv730_read_clock_registers()
    209 RREG32(CG_SPLL_SPREAD_SPECTRUM); in rv730_read_clock_registers()
    211 RREG32(CG_SPLL_SPREAD_SPECTRUM_2); in rv730_read_clock_registers()
    214 RREG32(TCI_MCLK_PWRMGT_CNTL); in rv730_read_clock_registers()
    216 RREG32(TCI_DLL_CNTL); in rv730_read_clock_registers()
    218 RREG32(CG_MPLL_FUNC_CNTL); in rv730_read_clock_registers()
    220 RREG32(CG_MPLL_FUNC_CNTL_2); in rv730_read_clock_registers()
    222 RREG32(CG_MPLL_FUNC_CNTL_3); in rv730_read_clock_registers()
    [all …]
|
D | evergreen.c |
    48 r = RREG32(EVERGREEN_CG_IND_DATA); in eg_cg_rreg()
    70 r = RREG32(EVERGREEN_PIF_PHY0_DATA); in eg_pif_phy0_rreg()
    92 r = RREG32(EVERGREEN_PIF_PHY1_DATA); in eg_pif_phy1_rreg()
    1099 *val = RREG32(reg); in evergreen_get_allowed_info_register()
    1151 if (RREG32(status_reg) & DCLK_STATUS) in sumo_set_uvd_clock()
    1164 u32 cg_scratch = RREG32(CG_SCRATCH1); in sumo_set_uvd_clocks()
    1345 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK) in dce4_is_in_vblank()
    1355 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]); in dce4_is_counter_moving()
    1356 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]); in dce4_is_counter_moving()
    1379 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN)) in dce4_wait_for_vblank()
    [all …]
|
D | r100.c |
    74 if (RREG32(RADEON_CRTC_STATUS) & RADEON_CRTC_VBLANK_CUR) in r100_is_in_vblank()
    79 if (RREG32(RADEON_CRTC2_STATUS) & RADEON_CRTC2_VBLANK_CUR) in r100_is_in_vblank()
    91 vline1 = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL; in r100_is_counter_moving()
    92 vline2 = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL; in r100_is_counter_moving()
    94 vline1 = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL; in r100_is_counter_moving()
    95 vline2 = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL; in r100_is_counter_moving()
    119 if (!(RREG32(RADEON_CRTC_GEN_CNTL) & RADEON_CRTC_EN)) in r100_wait_for_vblank()
    122 if (!(RREG32(RADEON_CRTC2_GEN_CNTL) & RADEON_CRTC2_EN)) in r100_wait_for_vblank()
    168 if (RREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset) & RADEON_CRTC_OFFSET__GUI_TRIG_OFFSET) in r100_page_flip()
    194 return !!(RREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset) & in r100_page_flip_pending()
    [all …]
|
D | rs400.c |
    153 tmp = RREG32(RADEON_BUS_CNTL) & ~RS600_BUS_MASTER_DIS; in rs400_gart_enable()
    157 tmp = RREG32(RADEON_BUS_CNTL) & ~RADEON_BUS_MASTER_DIS; in rs400_gart_enable()
    244 tmp = RREG32(RADEON_MC_STATUS); in rs400_mc_wait_for_idle()
    259 "programming pipes. Bad things might happen. %08x\n", RREG32(RADEON_MC_STATUS)); in rs400_gpu_init()
    273 base = (RREG32(RADEON_NB_TOM) & 0xffff) << 16; in rs400_mc_init()
    287 r = RREG32(RS480_NB_MC_DATA); in rs400_mc_rreg()
    312 tmp = RREG32(RADEON_HOST_PATH_CNTL); in rs400_debugfs_gart_info()
    314 tmp = RREG32(RADEON_BUS_CNTL); in rs400_debugfs_gart_info()
    327 tmp = RREG32(RS690_HDP_FB_LOCATION); in rs400_debugfs_gart_info()
    330 tmp = RREG32(RADEON_AGP_BASE); in rs400_debugfs_gart_info()
    [all …]
|
D | cik.c |
    172 *val = RREG32(reg); in cik_get_allowed_info_register()
    189 r = RREG32(CIK_DIDT_IND_DATA); in cik_didt_rreg()
    251 (void)RREG32(PCIE_INDEX); in cik_pciep_rreg()
    252 r = RREG32(PCIE_DATA); in cik_pciep_rreg()
    263 (void)RREG32(PCIE_INDEX); in cik_pciep_wreg()
    265 (void)RREG32(PCIE_DATA); in cik_pciep_wreg()
    1912 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK; in ci_mc_load_microcode()
    1930 tmp = RREG32(MC_SEQ_MISC0); in ci_mc_load_microcode()
    1953 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0) in ci_mc_load_microcode()
    1958 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1) in ci_mc_load_microcode()
    [all …]
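ci_mc_load_microcode() above (like si_mc_load_microcode() and the gmc_v6/7/8 loaders later in this list) re-reads MC_SEQ_TRAIN_WAKEUP_CNTL until a TRAIN_DONE bit appears. A rough user-space model of that bounded status poll is sketched below; the register read is stubbed so the program terminates, the bit value and timeout are arbitrary, and the real driver inserts a small delay between samples, which the stub only notes in a comment.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stubbed status register: reports the "done" bit after a few reads so
 * the sketch terminates; real code reads MC_SEQ_TRAIN_WAKEUP_CNTL via MMIO. */
#define TRAIN_DONE_BIT   (1u << 0)	/* stand-in for TRAIN_DONE_D0 */

static uint32_t read_train_wakeup_cntl(void)
{
	static int calls;

	return (++calls >= 5) ? TRAIN_DONE_BIT : 0;
}

/*
 * Bounded poll, same shape as the memory-controller microcode loaders:
 * re-read the status register up to 'timeout' times, bailing out early
 * once the done bit is observed. Returns true on success.
 */
static bool wait_for_training_done(unsigned int timeout)
{
	unsigned int i;

	for (i = 0; i < timeout; i++) {
		if (read_train_wakeup_cntl() & TRAIN_DONE_BIT)
			return true;
		/* the driver would udelay(1) here between samples */
	}
	return false;
}

int main(void)
{
	printf("trained: %d\n", wait_for_training_done(100)); /* 1 */
	return 0;
}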
|
D | si.c |
    1296 *val = RREG32(reg); in si_get_allowed_info_register()
    1319 tmp = RREG32(CG_CLKPIN_CNTL_2); in si_get_xclk()
    1323 tmp = RREG32(CG_CLKPIN_CNTL); in si_get_xclk()
    1336 temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >> in si_get_temp()
    1599 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK; in si_mc_load_microcode()
    1631 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0) in si_mc_load_microcode()
    1636 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1) in si_mc_load_microcode()
    1749 if (((RREG32(MC_SEQ_MISC0) & 0xff000000) >> 24) == 0x58) in si_init_microcode()
    1992 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) & in dce6_line_buffer_adjust()
    2014 u32 tmp = RREG32(MC_SHARED_CHMAP); in si_get_number_of_dram_channels()
    [all …]
|
/drivers/gpu/drm/amd/amdgpu/ |
D | gmc_v8_0.c |
    170 blackout = RREG32(mmMC_SHARED_BLACKOUT_CNTL); in gmc_v8_0_mc_stop()
    189 tmp = RREG32(mmMC_SHARED_BLACKOUT_CNTL); in gmc_v8_0_mc_resume()
    290 running = REG_GET_FIELD(RREG32(mmMC_SEQ_SUP_CNTL), MC_SEQ_SUP_CNTL, RUN); in gmc_v8_0_mc_load_microcode()
    313 if (REG_GET_FIELD(RREG32(mmMC_SEQ_TRAIN_WAKEUP_CNTL), in gmc_v8_0_mc_load_microcode()
    319 if (REG_GET_FIELD(RREG32(mmMC_SEQ_TRAIN_WAKEUP_CNTL), in gmc_v8_0_mc_load_microcode()
    398 tmp = RREG32(mmHDP_MISC_CNTL); in gmc_v8_0_mc_program()
    402 tmp = RREG32(mmHDP_HOST_PATH_CNTL); in gmc_v8_0_mc_program()
    421 tmp = RREG32(mmMC_ARB_RAMCFG); in gmc_v8_0_mc_init()
    427 tmp = RREG32(mmMC_SHARED_CHMAP); in gmc_v8_0_mc_init()
    463 adev->mc.mc_vram_size = RREG32(mmCONFIG_MEMSIZE) * 1024ULL * 1024ULL; in gmc_v8_0_mc_init()
    [all …]
|
D | gmc_v7_0.c |
    87 blackout = RREG32(mmMC_SHARED_BLACKOUT_CNTL); in gmc_v7_0_mc_stop()
    106 tmp = RREG32(mmMC_SHARED_BLACKOUT_CNTL); in gmc_v7_0_mc_resume()
    203 running = REG_GET_FIELD(RREG32(mmMC_SEQ_SUP_CNTL), MC_SEQ_SUP_CNTL, RUN); in gmc_v7_0_mc_load_microcode()
    226 if (REG_GET_FIELD(RREG32(mmMC_SEQ_TRAIN_WAKEUP_CNTL), in gmc_v7_0_mc_load_microcode()
    232 if (REG_GET_FIELD(RREG32(mmMC_SEQ_TRAIN_WAKEUP_CNTL), in gmc_v7_0_mc_load_microcode()
    311 tmp = RREG32(mmHDP_MISC_CNTL); in gmc_v7_0_mc_program()
    315 tmp = RREG32(mmHDP_HOST_PATH_CNTL); in gmc_v7_0_mc_program()
    334 tmp = RREG32(mmMC_ARB_RAMCFG); in gmc_v7_0_mc_init()
    340 tmp = RREG32(mmMC_SHARED_CHMAP); in gmc_v7_0_mc_init()
    376 adev->mc.mc_vram_size = RREG32(mmCONFIG_MEMSIZE) * 1024ULL * 1024ULL; in gmc_v7_0_mc_init()
    [all …]
|
D | amdgpu_i2c.c |
    50 temp = RREG32(rec->mask_clk_reg); in amdgpu_i2c_pre_xfer()
    56 temp = RREG32(rec->a_clk_reg) & ~rec->a_clk_mask; in amdgpu_i2c_pre_xfer()
    59 temp = RREG32(rec->a_data_reg) & ~rec->a_data_mask; in amdgpu_i2c_pre_xfer()
    63 temp = RREG32(rec->en_clk_reg) & ~rec->en_clk_mask; in amdgpu_i2c_pre_xfer()
    66 temp = RREG32(rec->en_data_reg) & ~rec->en_data_mask; in amdgpu_i2c_pre_xfer()
    70 temp = RREG32(rec->mask_clk_reg) | rec->mask_clk_mask; in amdgpu_i2c_pre_xfer()
    72 temp = RREG32(rec->mask_clk_reg); in amdgpu_i2c_pre_xfer()
    74 temp = RREG32(rec->mask_data_reg) | rec->mask_data_mask; in amdgpu_i2c_pre_xfer()
    76 temp = RREG32(rec->mask_data_reg); in amdgpu_i2c_pre_xfer()
    89 temp = RREG32(rec->mask_clk_reg) & ~rec->mask_clk_mask; in amdgpu_i2c_post_xfer()
    [all …]
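Nearly every amdgpu_i2c_pre_xfer() hit above (and the radeon_i2c.c pre_xfer() hits earlier) is one half of a read-modify-write: read a GPIO control register, clear or set only this line's mask bits, and write the result back on the line just outside the excerpt. A small sketch of that pattern follows, with RREG32/WREG32 stubbed over an array; the gpio_line struct, register offset, and mask are invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Fake register file; the real driver's RREG32/WREG32 go through MMIO. */
static uint32_t fake_regs[0x40];
#define RREG32(reg)     (fake_regs[(reg)])
#define WREG32(reg, v)  (fake_regs[(reg)] = (v))

/* Hypothetical GPIO line descriptor, loosely modelled on the clk/data
 * register+mask pairs kept in the driver's i2c record. */
struct gpio_line {
	uint32_t reg;
	uint32_t mask;
};

/* Read-modify-write: clear only this line's bits, leaving the other
 * fields in the register untouched (the shape of the & ~mask hits). */
static void gpio_clear_bits(const struct gpio_line *line)
{
	uint32_t temp = RREG32(line->reg) & ~line->mask;

	WREG32(line->reg, temp);
}

/* Counterpart for the | mask hits: set only this line's bits. */
static void gpio_set_bits(const struct gpio_line *line)
{
	uint32_t temp = RREG32(line->reg) | line->mask;

	WREG32(line->reg, temp);
}

int main(void)
{
	struct gpio_line clk = { .reg = 0x10, .mask = 0x2 };

	WREG32(clk.reg, 0xff);
	gpio_clear_bits(&clk);
	printf("after clear: 0x%02x\n", (unsigned)RREG32(clk.reg)); /* 0xfd */
	gpio_set_bits(&clk);
	printf("after set:   0x%02x\n", (unsigned)RREG32(clk.reg)); /* 0xff */
	return 0;
}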
|
D | vce_v3_0.c |
    81 return RREG32(mmVCE_RB_RPTR); in vce_v3_0_ring_get_rptr()
    83 return RREG32(mmVCE_RB_RPTR2); in vce_v3_0_ring_get_rptr()
    85 return RREG32(mmVCE_RB_RPTR3); in vce_v3_0_ring_get_rptr()
    100 return RREG32(mmVCE_RB_WPTR); in vce_v3_0_ring_get_wptr()
    102 return RREG32(mmVCE_RB_WPTR2); in vce_v3_0_ring_get_wptr()
    104 return RREG32(mmVCE_RB_WPTR3); in vce_v3_0_ring_get_wptr()
    145 data = RREG32(mmVCE_CLOCK_GATING_B); in vce_v3_0_set_vce_sw_clock_gating()
    150 data = RREG32(mmVCE_UENC_CLOCK_GATING); in vce_v3_0_set_vce_sw_clock_gating()
    155 data = RREG32(mmVCE_UENC_CLOCK_GATING_2); in vce_v3_0_set_vce_sw_clock_gating()
    160 data = RREG32(mmVCE_UENC_REG_CLOCK_GATING); in vce_v3_0_set_vce_sw_clock_gating()
    [all …]
|
D | cik_ih.c |
    60 u32 ih_cntl = RREG32(mmIH_CNTL); in cik_ih_enable_interrupts()
    61 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in cik_ih_enable_interrupts()
    79 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in cik_ih_disable_interrupts()
    80 u32 ih_cntl = RREG32(mmIH_CNTL); in cik_ih_disable_interrupts()
    115 interrupt_cntl = RREG32(mmINTERRUPT_CNTL); in cik_ih_irq_init()
    201 tmp = RREG32(mmIH_RB_CNTL); in cik_ih_get_wptr()
    350 u32 tmp = RREG32(mmSRBM_STATUS); in cik_ih_is_idle()
    366 tmp = RREG32(mmSRBM_STATUS) & SRBM_STATUS__IH_BUSY_MASK; in cik_ih_wait_for_idle()
    379 u32 tmp = RREG32(mmSRBM_STATUS); in cik_ih_soft_reset()
    385 tmp = RREG32(mmSRBM_SOFT_RESET); in cik_ih_soft_reset()
    [all …]
|
D | iceland_ih.c |
    60 u32 ih_cntl = RREG32(mmIH_CNTL); in iceland_ih_enable_interrupts()
    61 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in iceland_ih_enable_interrupts()
    79 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in iceland_ih_disable_interrupts()
    80 u32 ih_cntl = RREG32(mmIH_CNTL); in iceland_ih_disable_interrupts()
    115 interrupt_cntl = RREG32(mmINTERRUPT_CNTL); in iceland_ih_irq_init()
    147 ih_cntl = RREG32(mmIH_CNTL); in iceland_ih_irq_init()
    203 tmp = RREG32(mmIH_RB_CNTL); in iceland_ih_get_wptr()
    329 u32 tmp = RREG32(mmSRBM_STATUS); in iceland_ih_is_idle()
    345 tmp = RREG32(mmSRBM_STATUS); in iceland_ih_wait_for_idle()
    357 u32 tmp = RREG32(mmSRBM_STATUS); in iceland_ih_soft_reset()
    [all …]
|
D | cz_ih.c |
    60 u32 ih_cntl = RREG32(mmIH_CNTL); in cz_ih_enable_interrupts()
    61 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in cz_ih_enable_interrupts()
    79 u32 ih_rb_cntl = RREG32(mmIH_RB_CNTL); in cz_ih_disable_interrupts()
    80 u32 ih_cntl = RREG32(mmIH_CNTL); in cz_ih_disable_interrupts()
    115 interrupt_cntl = RREG32(mmINTERRUPT_CNTL); in cz_ih_irq_init()
    147 ih_cntl = RREG32(mmIH_CNTL); in cz_ih_irq_init()
    203 tmp = RREG32(mmIH_RB_CNTL); in cz_ih_get_wptr()
    329 u32 tmp = RREG32(mmSRBM_STATUS); in cz_ih_is_idle()
    345 tmp = RREG32(mmSRBM_STATUS); in cz_ih_wait_for_idle()
    357 u32 tmp = RREG32(mmSRBM_STATUS); in cz_ih_soft_reset()
    [all …]
|
D | gmc_v6_0.c |
    60 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL); in gmc_v6_0_mc_stop()
    80 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL); in gmc_v6_0_mc_resume()
    161 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK; in gmc_v6_0_mc_load_microcode()
    186 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0) in gmc_v6_0_mc_load_microcode()
    191 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1) in gmc_v6_0_mc_load_microcode()
    270 tmp = RREG32(MC_ARB_RAMCFG); in gmc_v6_0_mc_init()
    278 tmp = RREG32(MC_SHARED_CHMAP); in gmc_v6_0_mc_init()
    314 adev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in gmc_v6_0_mc_init()
    315 adev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in gmc_v6_0_mc_init()
    360 tmp = RREG32(VM_CONTEXT1_CNTL); in gmc_v6_0_set_fault_enable_default()
    [all …]
|
D | si_ih.c |
    33 u32 ih_cntl = RREG32(IH_CNTL); in si_ih_enable_interrupts()
    34 u32 ih_rb_cntl = RREG32(IH_RB_CNTL); in si_ih_enable_interrupts()
    45 u32 ih_rb_cntl = RREG32(IH_RB_CNTL); in si_ih_disable_interrupts()
    46 u32 ih_cntl = RREG32(IH_CNTL); in si_ih_disable_interrupts()
    66 interrupt_cntl = RREG32(INTERRUPT_CNTL); in si_ih_irq_init()
    114 tmp = RREG32(IH_RB_CNTL); in si_ih_get_wptr()
    209 u32 tmp = RREG32(SRBM_STATUS); in si_ih_is_idle()
    235 u32 tmp = RREG32(SRBM_STATUS); in si_ih_soft_reset()
    241 tmp = RREG32(SRBM_SOFT_RESET); in si_ih_soft_reset()
    245 tmp = RREG32(SRBM_SOFT_RESET); in si_ih_soft_reset()
    [all …]
|
D | vce_v2_0.c |
    61 return RREG32(mmVCE_RB_RPTR); in vce_v2_0_ring_get_rptr()
    63 return RREG32(mmVCE_RB_RPTR2); in vce_v2_0_ring_get_rptr()
    78 return RREG32(mmVCE_RB_WPTR); in vce_v2_0_ring_get_wptr()
    80 return RREG32(mmVCE_RB_WPTR2); in vce_v2_0_ring_get_wptr()
    106 uint32_t status = RREG32(mmVCE_LMI_STATUS); in vce_v2_0_lmi_clean()
    123 uint32_t status = RREG32(mmVCE_STATUS); in vce_v2_0_firmware_loaded()
    320 tmp = RREG32(mmVCE_CLOCK_GATING_B); in vce_v2_0_set_sw_cg()
    324 tmp = RREG32(mmVCE_UENC_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    328 tmp = RREG32(mmVCE_UENC_REG_CLOCK_GATING); in vce_v2_0_set_sw_cg()
    334 tmp = RREG32(mmVCE_CLOCK_GATING_B); in vce_v2_0_set_sw_cg()
    [all …]
|
D | si.c |
    890 (void)RREG32(AMDGPU_PCIE_INDEX); in si_pcie_rreg()
    891 r = RREG32(AMDGPU_PCIE_DATA); in si_pcie_rreg()
    902 (void)RREG32(AMDGPU_PCIE_INDEX); in si_pcie_wreg()
    904 (void)RREG32(AMDGPU_PCIE_DATA); in si_pcie_wreg()
    915 (void)RREG32(PCIE_PORT_INDEX); in si_pciep_rreg()
    916 r = RREG32(PCIE_PORT_DATA); in si_pciep_rreg()
    927 (void)RREG32(PCIE_PORT_INDEX); in si_pciep_wreg()
    929 (void)RREG32(PCIE_PORT_DATA); in si_pciep_wreg()
    940 r = RREG32(SMC_IND_DATA_0); in si_smc_rreg()
    1006 val = RREG32(reg_offset); in si_read_indexed_register()
    [all …]
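The si_pcie_rreg()/si_pcie_wreg() hits above (and cik_pciep_rreg()/cik_pciep_wreg() in the radeon list) show the index/data window idiom: write the target offset into an index register, do a throwaway read of the index register so the write is posted, then access the data register, flushing a data write with another read. The sketch below models that sequence in user space; the window helpers and offsets are stand-ins rather than the real AMDGPU_PCIE_INDEX/DATA path, and the spinlock the driver holds around the pair is only mentioned in a comment.

#include <stdint.h>
#include <stdio.h>

/* Fake banked register space reached through an index/data window.
 * Real hardware exposes only the two window registers over MMIO. */
static uint32_t pcie_space[0x100];
static uint32_t index_reg;

/* Hypothetical window accessors standing in for RREG32/WREG32 on the
 * index and data registers. */
static uint32_t rreg32_index(void)       { return index_reg; }
static void     wreg32_index(uint32_t v) { index_reg = v; }
static uint32_t rreg32_data(void)        { return pcie_space[index_reg]; }
static void     wreg32_data(uint32_t v)  { pcie_space[index_reg] = v; }

/*
 * Same shape as si_pcie_rreg()/si_pcie_wreg(): select the target register
 * through the index window, issue a throwaway read so the index write is
 * posted before the data access, then touch the data window. The driver
 * additionally takes a spinlock around the sequence, because the shared
 * index/data pair is global state.
 */
static uint32_t pcie_rreg(uint32_t reg)
{
	uint32_t r;

	wreg32_index(reg);
	(void)rreg32_index();	/* posting read: flush the index write */
	r = rreg32_data();
	return r;
}

static void pcie_wreg(uint32_t reg, uint32_t v)
{
	wreg32_index(reg);
	(void)rreg32_index();
	wreg32_data(v);
	(void)rreg32_data();	/* posting read: flush the data write */
}

int main(void)
{
	pcie_wreg(0x20, 0xdeadbeef);
	printf("0x%08x\n", (unsigned)pcie_rreg(0x20));
	return 0;
}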
|
D | dce_v10_0.c |
    175 r = RREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset); in dce_v10_0_audio_endpt_rreg()
    194 if (RREG32(mmCRTC_STATUS + crtc_offsets[crtc]) & in dce_v10_0_is_in_vblank()
    205 pos1 = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]); in dce_v10_0_is_counter_moving()
    206 pos2 = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]); in dce_v10_0_is_counter_moving()
    229 if (!(RREG32(mmCRTC_CONTROL + crtc_offsets[crtc]) & CRTC_CONTROL__CRTC_MASTER_EN_MASK)) in dce_v10_0_vblank_wait()
    257 return RREG32(mmCRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]); in dce_v10_0_vblank_get_counter()
    295 tmp = RREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset); in dce_v10_0_page_flip()
    306 RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset); in dce_v10_0_page_flip()
    315 *vbl = RREG32(mmCRTC_V_BLANK_START_END + crtc_offsets[crtc]); in dce_v10_0_crtc_get_scanoutpos()
    316 *position = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]); in dce_v10_0_crtc_get_scanoutpos()
    [all …]
|