/drivers/gpu/drm/radeon/
D | rv740_dpm.c
  125  struct atom_clock_dividers dividers;  in rv740_populate_sclk_value() local
  138  engine_clock, false, &dividers);  in rv740_populate_sclk_value()
  142  reference_divider = 1 + dividers.ref_div;  in rv740_populate_sclk_value()
  144  tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;  in rv740_populate_sclk_value()
  149  spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);  in rv740_populate_sclk_value()
  150  spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in rv740_populate_sclk_value()
  161  u32 vco_freq = engine_clock * dividers.post_div;  in rv740_populate_sclk_value()
  200  struct atom_clock_dividers dividers;  in rv740_populate_mclk_value() local
  206  memory_clock, false, &dividers);  in rv740_populate_mclk_value()
  210  ibias = rv770_map_clkf_to_ibias(rdev, dividers.whole_fb_div);  in rv740_populate_mclk_value()
  [all …]
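The rv740 sclk path above recomputes the feedback divider from the ATOM-supplied ref_div and post_div instead of using fb_div directly. A minimal sketch of that arithmetic (kernel-style fragment; it assumes the 16384 factor is a 2^14 fixed-point scale so the fractional feedback divider can be split out, and that reference_clock is in the same 10 kHz units as engine_clock):

/* Sketch, not the driver's exact code: derive the SPLL feedback divider in
 * 2^14 fixed point from the ATOM dividers, then merge the divider fields. */
u32 reference_divider = 1 + dividers.ref_div;
u64 tmp = (u64)engine_clock * reference_divider * dividers.post_div * 16384;
u32 fbdiv, vco_freq;

do_div(tmp, reference_clock);		/* reference_clock assumed, in 10 kHz */
fbdiv = (u32)tmp;			/* integer part sits above bit 14     */

spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);	/* macros as in the hits */
spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);

vco_freq = engine_clock * dividers.post_div;	/* feeds spread-spectrum setup */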
D | rv730_dpm.c
  45  struct atom_clock_dividers dividers;  in rv730_populate_sclk_value() local
  58  engine_clock, false, &dividers);  in rv730_populate_sclk_value()
  62  reference_divider = 1 + dividers.ref_div;  in rv730_populate_sclk_value()
  64  if (dividers.enable_post_div)  in rv730_populate_sclk_value()
  65  post_divider = ((dividers.post_div >> 4) & 0xf) +  in rv730_populate_sclk_value()
  66  (dividers.post_div & 0xf) + 2;  in rv730_populate_sclk_value()
  75  if (dividers.enable_post_div)  in rv730_populate_sclk_value()
  80  spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);  in rv730_populate_sclk_value()
  81  spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);  in rv730_populate_sclk_value()
  82  spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);  in rv730_populate_sclk_value()
  [all …]
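rv730 packs its post divider into two 4-bit halves (the SPLL_HILEN/SPLL_LOLEN fields); the effective divider is their sum plus two, and it only applies when enable_post_div is set. A small helper-style sketch of that decode, assuming the disabled case means divide-by-one (the helper name is made up for illustration):

/* Sketch: decode the rv730-style nibble-encoded post divider. */
static u32 rv730_effective_post_div(const struct atom_clock_dividers *d)
{
	if (!d->enable_post_div)
		return 1;			/* post divider bypassed */

	return ((d->post_div >> 4) & 0xf) +	/* SPLL_HILEN half */
	       (d->post_div & 0xf) + 2;		/* SPLL_LOLEN half */
}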
D | rv6xx_dpm.c
  143  struct atom_clock_dividers dividers;  in rv6xx_convert_clock_to_stepping() local
  146  clock, false, &dividers);  in rv6xx_convert_clock_to_stepping()
  150  if (dividers.enable_post_div)  in rv6xx_convert_clock_to_stepping()
  151  step->post_divider = 2 + (dividers.post_div & 0xF) + (dividers.post_div >> 4);  in rv6xx_convert_clock_to_stepping()
  527  struct atom_clock_dividers *dividers,  in rv6xx_calculate_vco_frequency() argument
  530  return ref_clock * ((dividers->fb_div & ~1) << fb_divider_scale) /  in rv6xx_calculate_vco_frequency()
  531  (dividers->ref_div + 1);  in rv6xx_calculate_vco_frequency()
  554  struct atom_clock_dividers dividers;  in rv6xx_program_engine_spread_spectrum() local
  561  …if (radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, clock, false, &dividers) == 0) {  in rv6xx_program_engine_spread_spectrum()
  562  vco_freq = rv6xx_calculate_vco_frequency(ref_clk, &dividers,  in rv6xx_program_engine_spread_spectrum()
  [all …]
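rv6xx derives the VCO frequency directly from the ATOM dividers; rv6xx_calculate_vco_frequency() above is essentially a one-liner. Restated as a standalone sketch (why the low bit of fb_div is masked off is not visible in the hits, so it is carried over without interpretation):

/* Sketch of the rv6xx VCO calculation shown above:
 *   vco = ref_clock * ((fb_div & ~1) << fb_divider_scale) / (ref_div + 1) */
static u32 rv6xx_vco_freq(u32 ref_clock, const struct atom_clock_dividers *d,
			  u32 fb_divider_scale)
{
	return ref_clock * ((d->fb_div & ~1) << fb_divider_scale) /
	       (d->ref_div + 1);
}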
D | rv770_dpm.c
  319  struct atom_clock_dividers *dividers,  in rv770_calculate_fractional_mpll_feedback_divider() argument
  331  post_divider = dividers->post_div;  in rv770_calculate_fractional_mpll_feedback_divider()
  332  reference_divider = dividers->ref_div;  in rv770_calculate_fractional_mpll_feedback_divider()
  401  struct atom_clock_dividers dividers;  in rv770_populate_mclk_value() local
  409  memory_clock, false, &dividers);  in rv770_populate_mclk_value()
  413  if ((dividers.ref_div < 1) || (dividers.ref_div > 5))  in rv770_populate_mclk_value()
  418  &dividers, &clkf, &clkfrac);  in rv770_populate_mclk_value()
  420  ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);  in rv770_populate_mclk_value()
  431  mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);  in rv770_populate_mclk_value()
  437  if (dividers.vco_mode)  in rv770_populate_mclk_value()
  [all …]
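rv770's MCLK path splits the MPLL feedback divider into an integer part (CLKF) and a small fractional part (CLKFRAC), and rejects ref_div values outside the encodable 1..5 range. The sketch below only illustrates the integer/fraction split; the fraction is assumed to be kept in eighths and the helper name is invented, so treat it as the idea rather than the driver's exact computation:

/* Sketch: split an MPLL feedback divider into CLKF (integer) and CLKFRAC
 * (fractional, assumed to be in 1/8ths).  Clocks assumed in 10 kHz. */
static void split_mpll_fb_div(u32 memory_clock, u32 reference_clock,
			      const struct atom_clock_dividers *d,
			      u32 *clkf, u32 *clkfrac)
{
	u32 fb_div8 = (8 * memory_clock * d->ref_div * d->post_div) /
		      reference_clock;

	*clkf = fb_div8 / 8;		/* programmed via CLKF()    */
	*clkfrac = fb_div8 % 8;		/* programmed via CLKFRAC() */
}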
D | rs780_dpm.c
  77  struct atom_clock_dividers dividers;  in rs780_initialize_dpm_power_state() local
  82  default_state->sclk_low, false, &dividers);  in rs780_initialize_dpm_power_state()
  86  r600_engine_clock_entry_set_reference_divider(rdev, 0, dividers.ref_div);  in rs780_initialize_dpm_power_state()
  87  r600_engine_clock_entry_set_feedback_divider(rdev, 0, dividers.fb_div);  in rs780_initialize_dpm_power_state()
  88  r600_engine_clock_entry_set_post_divider(rdev, 0, dividers.post_div);  in rs780_initialize_dpm_power_state()
  90  if (dividers.enable_post_div)  in rs780_initialize_dpm_power_state()
  1010  struct atom_clock_dividers dividers;  in rs780_dpm_force_performance_level() local
  1021  ps->sclk_high, false, &dividers);  in rs780_dpm_force_performance_level()
  1025  rs780_force_fbdiv(rdev, dividers.fb_div);  in rs780_dpm_force_performance_level()
  1028  ps->sclk_low, false, &dividers);  in rs780_dpm_force_performance_level()
  [all …]
D | cypress_dpm.c
  494  struct atom_clock_dividers dividers;  in cypress_populate_mclk_value() local
  501  memory_clock, strobe_mode, &dividers);  in cypress_populate_mclk_value()
  509  dividers.post_div = 1;  in cypress_populate_mclk_value()
  512  ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);  in cypress_populate_mclk_value()
  519  mpll_ad_func_cntl |= CLKR(dividers.ref_div);  in cypress_populate_mclk_value()
  520  mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);  in cypress_populate_mclk_value()
  521  mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);  in cypress_populate_mclk_value()
  522  mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);  in cypress_populate_mclk_value()
  525  if (dividers.vco_mode)  in cypress_populate_mclk_value()
  536  mpll_dq_func_cntl |= CLKR(dividers.ref_div);  in cypress_populate_mclk_value()
  [all …]
D | radeon_atombios.c
  2788  struct atom_clock_dividers *dividers)  in radeon_atom_get_clock_dividers() argument
  2795  memset(dividers, 0, sizeof(struct atom_clock_dividers));  in radeon_atom_get_clock_dividers()
  2808  dividers->post_div = args.v1.ucPostDiv;  in radeon_atom_get_clock_dividers()
  2809  dividers->fb_div = args.v1.ucFbDiv;  in radeon_atom_get_clock_dividers()
  2810  dividers->enable_post_div = true;  in radeon_atom_get_clock_dividers()
  2822  dividers->post_div = args.v2.ucPostDiv;  in radeon_atom_get_clock_dividers()
  2823  dividers->fb_div = le16_to_cpu(args.v2.usFbDiv);  in radeon_atom_get_clock_dividers()
  2824  dividers->ref_div = args.v2.ucAction;  in radeon_atom_get_clock_dividers()
  2826  dividers->enable_post_div = (le32_to_cpu(args.v2.ulClock) & (1 << 24)) ?  in radeon_atom_get_clock_dividers()
  2828  dividers->vco_mode = (le32_to_cpu(args.v2.ulClock) & (1 << 25)) ? 1 : 0;  in radeon_atom_get_clock_dividers()
  [all …]
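All of the callers in this listing follow the same contract with radeon_atom_get_clock_dividers(): pass a PLL compute type and a target clock (in 10 kHz units), check the return code before using the output, and rely on the callee zeroing the struct (the memset at line 2795) so unset fields read as 0. A minimal hedged example of that calling pattern (the post-divider consumer is a made-up helper standing in for whatever register or SMC table field the real callers program):

/* Sketch of the common caller pattern seen throughout this listing. */
struct atom_clock_dividers dividers;
int ret;

ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
				     engine_clock /* 10 kHz */, false,
				     &dividers);
if (ret)
	return ret;			/* ATOM table lookup/exec failed */

if (dividers.enable_post_div)		/* older parts report a validity flag */
	program_post_divider(dividers.post_div);	/* hypothetical helper */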
D | ni_dpm.c
  2004  struct atom_clock_dividers dividers;  in ni_calculate_sclk_params() local
  2018  engine_clock, false, &dividers);  in ni_calculate_sclk_params()
  2022  reference_divider = 1 + dividers.ref_div;  in ni_calculate_sclk_params()
  2025  tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;  in ni_calculate_sclk_params()
  2030  spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);  in ni_calculate_sclk_params()
  2031  spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in ni_calculate_sclk_params()
  2042  u32 vco_freq = engine_clock * dividers.post_div;  in ni_calculate_sclk_params()
  2177  struct atom_clock_dividers dividers;  in ni_populate_mclk_value() local
  2184  memory_clock, strobe_mode, &dividers);  in ni_populate_mclk_value()
  2192  dividers.post_div = 1;  in ni_populate_mclk_value()
  [all …]
D | kv_dpm.c
  537  struct atom_clock_dividers dividers;  in kv_set_divider_value() local
  541  sclk, false, &dividers);  in kv_set_divider_value()
  545  pi->graphics_level[index].SclkDid = (u8)dividers.post_div;  in kv_set_divider_value()
  822  struct atom_clock_dividers dividers;  in kv_populate_uvd_table() local
  845  table->entries[i].vclk, false, &dividers);  in kv_populate_uvd_table()
  848  pi->uvd_level[i].VclkDivider = (u8)dividers.post_div;  in kv_populate_uvd_table()
  851  table->entries[i].dclk, false, &dividers);  in kv_populate_uvd_table()
  854  pi->uvd_level[i].DclkDivider = (u8)dividers.post_div;  in kv_populate_uvd_table()
  895  struct atom_clock_dividers dividers;  in kv_populate_vce_table() local
  913  table->entries[i].evclk, false, &dividers);  in kv_populate_vce_table()
  [all …]
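On the SMU-driven APUs (kv here, and trinity/sumo/ci below) only the post divider is consumed: it is narrowed to 8 bits and dropped into the firmware's per-level table or a divider register field. A sketch of that pattern, with the field name taken from the kv hits above; the PLL-type argument is not visible in the truncated hits, so COMPUTE_ENGINE_PLL_PARAM is an assumption:

/* Sketch: newer DPM code only needs the post divider, as an 8-bit divider ID. */
struct atom_clock_dividers dividers;
int ret;

ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
				     sclk, false, &dividers);
if (ret)
	return ret;

pi->graphics_level[index].SclkDid = (u8)dividers.post_div;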
D | trinity_dpm.c
  365  struct atom_clock_dividers dividers;  in trinity_gfx_powergating_initialize() local
  372  25000, false, &dividers);  in trinity_gfx_powergating_initialize()
  380  value |= PDS_DIV(dividers.post_div);  in trinity_gfx_powergating_initialize()
  584  struct atom_clock_dividers dividers;  in trinity_set_divider_value() local
  590  sclk, false, &dividers);  in trinity_set_divider_value()
  596  value |= CLK_DIVIDER(dividers.post_div);  in trinity_set_divider_value()
  600  sclk/2, false, &dividers);  in trinity_set_divider_value()
  606  value |= PD_SCLK_DIVIDER(dividers.post_div);  in trinity_set_divider_value()
D | ci_dpm.c
  2185  struct atom_clock_dividers dividers;  in ci_populate_smc_uvd_level() local
  2202  table->UvdLevel[count].VclkFrequency, false, &dividers);  in ci_populate_smc_uvd_level()
  2206  table->UvdLevel[count].VclkDivider = (u8)dividers.post_divider;  in ci_populate_smc_uvd_level()
  2210  table->UvdLevel[count].DclkFrequency, false, &dividers);  in ci_populate_smc_uvd_level()
  2214  table->UvdLevel[count].DclkDivider = (u8)dividers.post_divider;  in ci_populate_smc_uvd_level()
  2228  struct atom_clock_dividers dividers;  in ci_populate_smc_vce_level() local
  2243  table->VceLevel[count].Frequency, false, &dividers);  in ci_populate_smc_vce_level()
  2247  table->VceLevel[count].Divider = (u8)dividers.post_divider;  in ci_populate_smc_vce_level()
  2261  struct atom_clock_dividers dividers;  in ci_populate_smc_acp_level() local
  2276  table->AcpLevel[count].Frequency, false, &dividers);  in ci_populate_smc_acp_level()
  [all …]
D | sumo_dpm.c
  552  struct atom_clock_dividers dividers;  in sumo_program_power_level() local
  556  pl->sclk, false, &dividers);  in sumo_program_power_level()
  560  sumo_set_divider_value(rdev, index, dividers.post_div);  in sumo_program_power_level()
  787  struct atom_clock_dividers dividers;  in sumo_program_acpi_power_level() local
  792  false, &dividers);  in sumo_program_acpi_power_level()
  796  WREG32_P(CG_ACPI_CNTL, SCLK_ACPI_DIV(dividers.post_div), ~SCLK_ACPI_DIV_MASK);  in sumo_program_acpi_power_level()
D | si_dpm.c
  4767  struct atom_clock_dividers dividers;  in si_calculate_sclk_params() local
  4781  engine_clock, false, &dividers);  in si_calculate_sclk_params()
  4785  reference_divider = 1 + dividers.ref_div;  in si_calculate_sclk_params()
  4787  tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;  in si_calculate_sclk_params()
  4792  spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);  in si_calculate_sclk_params()
  4793  spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);  in si_calculate_sclk_params()
  4804  u32 vco_freq = engine_clock * dividers.post_div;  in si_calculate_sclk_params()
  6475  struct atom_clock_dividers dividers;  in si_dpm_init() local
  6539  0, false, &dividers);  in si_dpm_init()
  6541  pi->ref_div = dividers.ref_div + 1;  in si_dpm_init()
D | btc_dpm.c
  2554  struct atom_clock_dividers dividers;  in btc_dpm_init() local
  2604  0, false, &dividers);  in btc_dpm_init()
  2606  pi->ref_div = dividers.ref_div + 1;  in btc_dpm_init()
D | cik.c
  9521  struct atom_clock_dividers dividers;  in cik_set_uvd_clock() local
  9525  clock, false, &dividers);  in cik_set_uvd_clock()
  9531  tmp |= dividers.post_divider;  in cik_set_uvd_clock()
  9560  struct atom_clock_dividers dividers;  in cik_set_vce_clocks() local
  9564  ecclk, false, &dividers);  in cik_set_vce_clocks()
  9578  tmp |= dividers.post_divider;  in cik_set_vce_clocks()
D | evergreen.c
  1043  struct atom_clock_dividers dividers;  in sumo_set_uvd_clock() local
  1046  clock, false, &dividers);  in sumo_set_uvd_clock()
  1050  WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));  in sumo_set_uvd_clock()
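The UVD/VCE clock setters in cik.c and evergreen.c use the post divider alone: evergreen's sumo_set_uvd_clock() merges it into the clock control register under a mask (WREG32_P keeps the bits covered by the mask and replaces the rest), while the cik.c variant reads post_divider, the CI-specific field. A hedged restatement of the evergreen step, including the preceding divider lookup; the PLL-type argument is assumed to be the same engine-PLL parameter used elsewhere in the listing:

/* Sketch of the sumo_set_uvd_clock() flow shown above: look up dividers for
 * the requested clock, then merge the post divider into cntl_reg. */
struct atom_clock_dividers dividers;
int r;

r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
				   clock, false, &dividers);
if (r)
	return r;

WREG32_P(cntl_reg, dividers.post_div,
	 ~(DCLK_DIR_CNTL_EN | DCLK_DIVIDER_MASK));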
D | radeon.h | 289 struct atom_clock_dividers *dividers);
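The prototype above takes a struct atom_clock_dividers out-parameter. Pieced together from the fields the callers in this listing actually touch, its shape is roughly the sketch below; field order and bit widths are assumptions, and fields not referenced here are omitted (the authoritative definition is in radeon.h):

/* Rough reconstruction from the references above -- not the real layout. */
struct atom_clock_dividers {
	u32 post_div;		/* SPLL/MPLL post divider (pre-CI parts)      */
	union {
		struct {
			u32 frac_fb_div : 14;	/* CLKFRAC on 7xx/evergreen   */
			u32 whole_fb_div : 12;	/* CLKF                       */
			u32 reserved : 6;
		};
		u32 fb_div;	/* raw feedback divider (r6xx/rs780 paths)    */
	};
	u32 ref_div;		/* reference divider                          */
	bool enable_post_div;	/* post divider valid/enabled                 */
	u32 vco_mode;		/* MPLL VCO mode bit (7xx/evergreen/ni)       */
	u32 post_divider;	/* 8-bit post divider used by the CI/cik code */
};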