
Searched refs:entries (Results 1 – 25 of 510) sorted by relevance


/drivers/gpu/drm/amd/pm/powerplay/hwmgr/
smu_helper.c
224 vvalue = vol_table->entries[i].value; in phm_trim_voltage_table()
228 if (vvalue == table->entries[j].value) { in phm_trim_voltage_table()
235 table->entries[table->count].value = vvalue; in phm_trim_voltage_table()
236 table->entries[table->count].smio_low = in phm_trim_voltage_table()
237 vol_table->entries[i].smio_low; in phm_trim_voltage_table()
265 vol_table->entries[i].value = dep_table->entries[i].mvdd; in phm_get_svi2_mvdd_voltage_table()
266 vol_table->entries[i].smio_low = 0; in phm_get_svi2_mvdd_voltage_table()
293 vol_table->entries[i].value = dep_table->entries[i].vddci; in phm_get_svi2_vddci_voltage_table()
294 vol_table->entries[i].smio_low = 0; in phm_get_svi2_vddci_voltage_table()
321 vol_table->entries[i].value = lookup_table->entries[i].us_vdd; in phm_get_svi2_vdd_voltage_table()
[all …]
vega10_processpptables.c
320 (ATOM_Vega10_GFXCLK_Dependency_Record_V2 *)gfxclk_dep_table->entries; in init_over_drive_limits()
354 mm_table = kzalloc(struct_size(mm_table, entries, mm_dependency_table->ucNumEntries), in get_mm_clock_voltage_table()
362 mm_dependency_record = &mm_dependency_table->entries[i]; in get_mm_clock_voltage_table()
363 mm_table->entries[i].vddcInd = mm_dependency_record->ucVddcInd; in get_mm_clock_voltage_table()
364 mm_table->entries[i].samclock = in get_mm_clock_voltage_table()
366 mm_table->entries[i].eclk = le32_to_cpu(mm_dependency_record->ulEClk); in get_mm_clock_voltage_table()
367 mm_table->entries[i].vclk = le32_to_cpu(mm_dependency_record->ulVClk); in get_mm_clock_voltage_table()
368 mm_table->entries[i].dclk = le32_to_cpu(mm_dependency_record->ulDClk); in get_mm_clock_voltage_table()
577 clk_table = kzalloc(struct_size(clk_table, entries, clk_dep_table->ucNumEntries), in get_socclk_voltage_dependency_table()
585 clk_table->entries[i].vddInd = in get_socclk_voltage_dependency_table()
[all …]
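
The kzalloc(struct_size(...)) hits in this file (and again in processpptables.c, smu8_hwmgr.c and smu10_hwmgr.c below) all allocate a table whose trailing entries[] is a C99 flexible array member, with the element count supplied at allocation time. A minimal userspace sketch of that layout, assuming made-up names (sample_table, sample_entry) and calloc() standing in for kzalloc()/struct_size():

#include <stdlib.h>

struct sample_entry {
	unsigned long clk;
	unsigned short vddInd;
};

struct sample_table {
	unsigned int count;
	struct sample_entry entries[];	/* flexible array member, sized at alloc time */
};

static struct sample_table *sample_table_alloc(unsigned int n)
{
	/* struct_size(t, entries, n) in the kernel computes this same
	 * sizeof(*t) + n * sizeof(t->entries[0]), with overflow checking */
	struct sample_table *t = calloc(1, sizeof(*t) + n * sizeof(t->entries[0]));

	if (t)
		t->count = n;
	return t;
}
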
smu8_hwmgr.c
79 if (clock <= ptable->entries[i].ecclk) in smu8_get_eclk_level()
87 if (clock >= ptable->entries[i].ecclk) in smu8_get_eclk_level()
110 if (clock <= table->entries[i].clk) in smu8_get_sclk_level()
118 if (clock >= table->entries[i].clk) in smu8_get_sclk_level()
140 if (clock <= ptable->entries[i].vclk) in smu8_get_uvd_level()
148 if (clock >= ptable->entries[i].vclk) in smu8_get_uvd_level()
265 table->sclk = dep_table->entries[dep_table->count-1].clk; in smu8_construct_max_power_limits_table()
267 (uint16_t)dep_table->entries[dep_table->count-1].v); in smu8_construct_max_power_limits_table()
279 table_clk_vlt = kzalloc(struct_size(table_clk_vlt, entries, 8), in smu8_init_dynamic_state_adjustment_rule_settings()
288 table_clk_vlt->entries[0].clk = PP_DAL_POWERLEVEL_0; in smu8_init_dynamic_state_adjustment_rule_settings()
[all …]
processpptables.c
385 dep_table = kzalloc(struct_size(dep_table, entries, table->ucNumEntries), in get_clock_voltage_dependency_table()
393 dep_table->entries[i].clk = in get_clock_voltage_dependency_table()
394 ((unsigned long)table->entries[i].ucClockHigh << 16) | in get_clock_voltage_dependency_table()
395 le16_to_cpu(table->entries[i].usClockLow); in get_clock_voltage_dependency_table()
396 dep_table->entries[i].v = in get_clock_voltage_dependency_table()
397 (unsigned long)le16_to_cpu(table->entries[i].usVoltage); in get_clock_voltage_dependency_table()
419 clock_table->values[i] = (unsigned long)table->entries[i].clk; in get_valid_clk()
430 limits->sclk = ((unsigned long)table->entries[0].ucSclkHigh << 16) | in get_clock_voltage_limit()
431 le16_to_cpu(table->entries[0].usSclkLow); in get_clock_voltage_limit()
432 limits->mclk = ((unsigned long)table->entries[0].ucMclkHigh << 16) | in get_clock_voltage_limit()
[all …]
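
The get_clock_voltage_dependency_table() and get_clock_voltage_limit() hits rebuild 24-bit clock values that the ATOM BIOS table stores as a little-endian 16-bit low word plus an 8-bit high byte. A self-contained sketch of that reconstruction, assuming an illustrative record layout, with le16toh() standing in for the kernel's le16_to_cpu():

#define _DEFAULT_SOURCE
#include <endian.h>
#include <stdint.h>

struct atom_clk_record {
	uint16_t usClockLow;	/* little-endian low 16 bits */
	uint8_t  ucClockHigh;	/* high 8 bits */
} __attribute__((packed));	/* packed to mirror the on-disk table layout */

static unsigned long atom_clock(const struct atom_clk_record *r)
{
	/* same shape as (ucClockHigh << 16) | le16_to_cpu(usClockLow) above */
	return ((unsigned long)r->ucClockHigh << 16) | le16toh(r->usClockLow);
}
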
vega10_hwmgr.c
328 od_lookup_table->entries[i].us_vdd = vddc_lookup_table->entries[i].us_vdd; in vega10_odn_initial_default_setting()
343 odn_table->max_vddc = dep_table[0]->entries[dep_table[0]->count - 1].vddc; in vega10_odn_initial_default_setting()
345 odn_table->min_vddc = dep_table[0]->entries[0].vddc; in vega10_odn_initial_default_setting()
348 …od_table[2]->entries[i].clk = hwmgr->platform_descriptor.overdriveLimit.memoryClock > od_table[2]-… in vega10_odn_initial_default_setting()
350 od_table[2]->entries[i].clk; in vega10_odn_initial_default_setting()
351 od_table[2]->entries[i].vddc = odn_table->max_vddc > od_table[2]->entries[i].vddc ? in vega10_odn_initial_default_setting()
353 od_table[2]->entries[i].vddc; in vega10_odn_initial_default_setting()
531 voltage_id = table_info->vdd_dep_on_socclk->entries[entry_id].vddInd; in vega10_get_socclk_for_voltage_evv()
532 if (lookup_table->entries[voltage_id].us_vdd == virtual_voltage_id) in vega10_get_socclk_for_voltage_evv()
540 *socclk = table_info->vdd_dep_on_socclk->entries[entry_id].clk; in vega10_get_socclk_for_voltage_evv()
[all …]
smu7_hwmgr.c
302 voltage_table->entries[i].value = in phm_get_svi2_voltage_table_v0()
303 voltage_dependency_table->entries[i].v; in phm_get_svi2_voltage_table_v0()
304 voltage_table->entries[i].smio_low = 0; in phm_get_svi2_voltage_table_v0()
676 pcie_table->entries[i].gen_speed), in smu7_setup_default_pcie_table()
678 pcie_table->entries[i].lane_width)); in smu7_setup_default_pcie_table()
809 allowed_vdd_sclk_table->entries[i].clk) { in smu7_setup_dpm_tables_v0()
811 allowed_vdd_sclk_table->entries[i].clk; in smu7_setup_dpm_tables_v0()
823 allowed_vdd_mclk_table->entries[i].clk) { in smu7_setup_dpm_tables_v0()
825 allowed_vdd_mclk_table->entries[i].clk; in smu7_setup_dpm_tables_v0()
833 data->dpm_table.vddc_table.dpm_levels[i].value = allowed_vdd_mclk_table->entries[i].v; in smu7_setup_dpm_tables_v0()
[all …]
smu10_hwmgr.c
136 table_clk_vlt = kzalloc(struct_size(table_clk_vlt, entries, count), in smu10_init_dynamic_state_adjustment_rule_settings()
145 table_clk_vlt->entries[0].clk = PP_DAL_POWERLEVEL_0; in smu10_init_dynamic_state_adjustment_rule_settings()
146 table_clk_vlt->entries[0].v = 0; in smu10_init_dynamic_state_adjustment_rule_settings()
147 table_clk_vlt->entries[1].clk = PP_DAL_POWERLEVEL_1; in smu10_init_dynamic_state_adjustment_rule_settings()
148 table_clk_vlt->entries[1].v = 1; in smu10_init_dynamic_state_adjustment_rule_settings()
149 table_clk_vlt->entries[2].clk = PP_DAL_POWERLEVEL_2; in smu10_init_dynamic_state_adjustment_rule_settings()
150 table_clk_vlt->entries[2].v = 2; in smu10_init_dynamic_state_adjustment_rule_settings()
151 table_clk_vlt->entries[3].clk = PP_DAL_POWERLEVEL_3; in smu10_init_dynamic_state_adjustment_rule_settings()
152 table_clk_vlt->entries[3].v = 3; in smu10_init_dynamic_state_adjustment_rule_settings()
153 table_clk_vlt->entries[4].clk = PP_DAL_POWERLEVEL_4; in smu10_init_dynamic_state_adjustment_rule_settings()
[all …]
/drivers/net/ethernet/netronome/nfp/nfpcore/
nfp_nsp_eth.c
247 union eth_table_entry *entries; in __nfp_eth_read_ports() local
251 entries = kzalloc(NSP_ETH_TABLE_SIZE, GFP_KERNEL); in __nfp_eth_read_ports()
252 if (!entries) in __nfp_eth_read_ports()
255 ret = nfp_nsp_read_eth_table(nsp, entries, NSP_ETH_TABLE_SIZE); in __nfp_eth_read_ports()
262 if (entries[i].port & NSP_ETH_PORT_LANES_MASK) in __nfp_eth_read_ports()
281 if (entries[i].port & NSP_ETH_PORT_LANES_MASK) in __nfp_eth_read_ports()
282 nfp_eth_port_translate(nsp, &entries[i], i, in __nfp_eth_read_ports()
289 kfree(entries); in __nfp_eth_read_ports()
294 kfree(entries); in __nfp_eth_read_ports()
300 union eth_table_entry *entries; in nfp_eth_config_start() local
[all …]
/drivers/net/dsa/sja1105/
sja1105_vl.c
27 if (list_empty(&gating_cfg->entries)) { in sja1105_insert_gate_entry()
28 list_add(&e->list, &gating_cfg->entries); in sja1105_insert_gate_entry()
32 list_for_each_entry(p, &gating_cfg->entries, list) { in sja1105_insert_gate_entry()
65 list_for_each_entry(e, &gating_cfg->entries, list) { in sja1105_gating_cfg_time_to_interval()
70 if (prev == &gating_cfg->entries) in sja1105_gating_cfg_time_to_interval()
76 last_e = list_last_entry(&gating_cfg->entries, in sja1105_gating_cfg_time_to_interval()
85 list_for_each_entry_safe(e, n, &gating_cfg->entries, list) { in sja1105_free_gating_config()
144 u8 gate_state = rule->vl.entries[i].gate_state; in sja1105_compose_gating_subschedule()
157 time += rule->vl.entries[i].interval; in sja1105_compose_gating_subschedule()
340 kfree(table->entries); in sja1105_init_virtual_links()
[all …]
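
sja1105_insert_gate_entry() above keeps the gating entries on a kernel list_head and inserts each new entry in interval (time) order. A rough sketch of that sorted insertion using a plain singly linked list instead of the list_head API; the names gate_entry and insert_gate_entry are illustrative:

#include <stdlib.h>

struct gate_entry {
	unsigned long interval;		/* activation time within the cycle */
	unsigned char gate_state;
	struct gate_entry *next;
};

/* Insert a new entry keeping the list sorted by interval; returns 0 or -1. */
static int insert_gate_entry(struct gate_entry **head, unsigned long interval,
			     unsigned char gate_state)
{
	struct gate_entry *e = calloc(1, sizeof(*e));
	struct gate_entry **p;

	if (!e)
		return -1;
	e->interval = interval;
	e->gate_state = gate_state;

	for (p = head; *p && (*p)->interval < interval; p = &(*p)->next)
		;		/* stop at the first entry with a larger interval */
	e->next = *p;
	*p = e;
	return 0;
}
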
sja1105_tas.c
49 if (!list_empty(&gating_cfg->entries)) { in sja1105_tas_set_runtime_params()
185 kfree(table->entries); in sja1105_init_scheduling()
192 kfree(table->entries); in sja1105_init_scheduling()
199 kfree(table->entries); in sja1105_init_scheduling()
206 kfree(table->entries); in sja1105_init_scheduling()
218 if (!list_empty(&gating_cfg->entries)) { in sja1105_init_scheduling()
231 table->entries = kcalloc(num_entries, table->ops->unpacked_entry_size, in sja1105_init_scheduling()
233 if (!table->entries) in sja1105_init_scheduling()
236 schedule = table->entries; in sja1105_init_scheduling()
240 table->entries = kcalloc(SJA1105_MAX_SCHEDULE_ENTRY_POINTS_PARAMS_COUNT, in sja1105_init_scheduling()
[all …]
/drivers/misc/vmw_vmci/
vmci_handle_array.c
68 array->entries[array->size] = handle; in vmci_handle_arr_append_entry()
84 if (vmci_handle_is_equal(array->entries[i], entry_handle)) { in vmci_handle_arr_remove_entry()
85 handle = array->entries[i]; in vmci_handle_arr_remove_entry()
87 array->entries[i] = array->entries[array->size]; in vmci_handle_arr_remove_entry()
88 array->entries[array->size] = VMCI_INVALID_HANDLE; in vmci_handle_arr_remove_entry()
105 handle = array->entries[array->size]; in vmci_handle_arr_remove_tail()
106 array->entries[array->size] = VMCI_INVALID_HANDLE; in vmci_handle_arr_remove_tail()
121 return array->entries[index]; in vmci_handle_arr_get_entry()
130 if (vmci_handle_is_equal(array->entries[i], entry_handle)) in vmci_handle_arr_has_entry()
143 return array->entries; in vmci_handle_arr_get_handles()
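
vmci_handle_arr_remove_entry() above removes without preserving order: the matching slot is overwritten by the last live element and the vacated tail slot is poisoned with VMCI_INVALID_HANDLE, so removal is O(1) after the search. A sketch of that swap-with-last removal, assuming plain int handles instead of the VMCI handle type:

#include <stddef.h>

#define HANDLE_INVALID (-1)

struct handle_array {
	size_t size;		/* number of live entries */
	int entries[64];
};

/* Remove the first occurrence of value; returns 1 if it was present. */
static int handle_array_remove(struct handle_array *a, int value)
{
	for (size_t i = 0; i < a->size; i++) {
		if (a->entries[i] == value) {
			a->size--;
			a->entries[i] = a->entries[a->size];	/* move last entry down */
			a->entries[a->size] = HANDLE_INVALID;	/* poison freed slot */
			return 1;
		}
	}
	return 0;
}
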
/drivers/gpu/drm/i915/display/
intel_ddi_buf_trans.c
29 .entries = _hsw_ddi_translations_dp,
46 .entries = _hsw_ddi_translations_fdi,
67 .entries = _hsw_ddi_translations_hdmi,
85 .entries = _bdw_ddi_translations_edp,
102 .entries = _bdw_ddi_translations_dp,
119 .entries = _bdw_ddi_translations_fdi,
138 .entries = _bdw_ddi_translations_hdmi,
157 .entries = _skl_ddi_translations_dp,
175 .entries = _skl_u_ddi_translations_dp,
193 .entries = _skl_y_ddi_translations_dp,
[all …]
/drivers/gpu/drm/radeon/
r600_dpm.c
828 radeon_table->entries = kzalloc(size, GFP_KERNEL); in r600_parse_clk_voltage_dep_table()
829 if (!radeon_table->entries) in r600_parse_clk_voltage_dep_table()
832 entry = &atom_table->entries[0]; in r600_parse_clk_voltage_dep_table()
834 radeon_table->entries[i].clk = le16_to_cpu(entry->usClockLow) | in r600_parse_clk_voltage_dep_table()
836 radeon_table->entries[i].v = le16_to_cpu(entry->usVoltage); in r600_parse_clk_voltage_dep_table()
938 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); in r600_parse_extended_power_table()
949 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); in r600_parse_extended_power_table()
950 kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries); in r600_parse_extended_power_table()
961 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); in r600_parse_extended_power_table()
962 kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries); in r600_parse_extended_power_table()
[all …]
kv_dpm.c
404 return vddc_sclk_table->entries[vid_2bit].v; in kv_convert_vid2_to_vid7()
406 return vddc_sclk_table->entries[vddc_sclk_table->count - 1].v; in kv_convert_vid2_to_vid7()
409 if (vid_mapping_table->entries[i].vid_2bit == vid_2bit) in kv_convert_vid2_to_vid7()
410 return vid_mapping_table->entries[i].vid_7bit; in kv_convert_vid2_to_vid7()
412 return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit; in kv_convert_vid2_to_vid7()
426 if (vddc_sclk_table->entries[i].v == vid_7bit) in kv_convert_vid7_to_vid2()
432 if (vid_mapping_table->entries[i].vid_7bit == vid_7bit) in kv_convert_vid7_to_vid2()
433 return vid_mapping_table->entries[i].vid_2bit; in kv_convert_vid7_to_vid2()
436 return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit; in kv_convert_vid7_to_vid2()
566 if (table->entries[i].clk == pi->boot_pl.sclk) in kv_program_bootup_state()
[all …]
/drivers/media/dvb-frontends/
dvb-pll.c
66 } entries[]; member
79 .entries = {
102 .entries = {
125 .entries = {
143 .entries = {
158 .entries = {
180 .entries = {
194 .entries = {
220 .entries = {
253 .entries = {
[all …]
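
dvb-pll.c ends its descriptor struct with an entries[] flexible array member and fills it with designated initializers in each static const table; statically initializing a flexible array member is a GCC/Clang extension rather than standard C. A reduced sketch with made-up names and values:

#include <stdint.h>

struct pll_desc {
	const char *name;
	uint32_t min, max;
	struct {
		uint32_t limit;		/* upper frequency bound for this band */
		uint32_t stepsize;
	} entries[];			/* flexible array member */
};

/* Static initialization of entries[] relies on a compiler extension. */
static const struct pll_desc sample_pll = {
	.name = "sample-pll",
	.min = 45000000,
	.max = 860000000,
	.entries = {
		{ 150000000, 62500 },
		{ 450000000, 62500 },
		{ 999999999, 62500 },
	},
};
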
/drivers/net/ethernet/mellanox/mlx4/
port.c
68 table->entries[i] = 0; in mlx4_init_mac_table()
82 table->entries[i] = 0; in mlx4_init_vlan_table()
105 if (index < 0 || index >= table->max || !table->entries[index]) { in validate_index()
120 (MLX4_MAC_MASK & be64_to_cpu(table->entries[i]))) in find_index()
128 __be64 *entries) in mlx4_set_port_mac_table() argument
138 memcpy(mailbox->buf, entries, MLX4_MAC_TABLE_SIZE); in mlx4_set_port_mac_table()
160 if (mac == (MLX4_MAC_MASK & be64_to_cpu(table->entries[i]))) { in mlx4_find_cached_mac()
216 if (((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(table->entries[i])))) in __mlx4_register_mac()
218 if (((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(dup_table->entries[i])))) in __mlx4_register_mac()
243 ((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(table->entries[index_at_dup_port])))) in __mlx4_register_mac()
[all …]
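
The mlx4 port.c hits compare a MAC address against a table of big-endian 64-bit entries, masking both sides to the low 48 bits first. A sketch of that lookup, with be64toh() standing in for be64_to_cpu() and an illustrative function name:

#define _DEFAULT_SOURCE
#include <endian.h>
#include <stdint.h>

#define MAC_MASK 0xffffffffffffULL	/* low 48 bits of the 64-bit entry */

/* Find a MAC in a table of big-endian 64-bit entries; returns index or -1. */
static int find_cached_mac(const uint64_t *entries_be, int count, uint64_t mac)
{
	for (int i = 0; i < count; i++)
		if ((MAC_MASK & mac) == (MAC_MASK & be64toh(entries_be[i])))
			return i;
	return -1;
}
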
/drivers/net/wireless/intersil/p54/
eeprom.c
81 size_t entries; member
154 if ((!list->entries) || (!list->band_channel_num[band])) in p54_generate_band()
172 (i < list->entries); i++) { in p54_generate_band()
243 for (i = list->entries; i >= 0; i--) { in p54_update_channel_param()
250 if ((i < 0) && (list->entries < list->max_entries)) { in p54_update_channel_param()
259 i = list->entries++; in p54_update_channel_param()
328 if ((priv->iq_autocal_len != priv->curve_data->entries) || in p54_generate_channel_lists()
329 (priv->iq_autocal_len != priv->output_limit->entries)) in p54_generate_channel_lists()
334 max_channel_num = max_t(unsigned int, priv->output_limit->entries, in p54_generate_channel_lists()
337 priv->curve_data->entries); in p54_generate_channel_lists()
[all …]
/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c
239 amdgpu_table->entries = kzalloc(size, GFP_KERNEL); in amdgpu_parse_clk_voltage_dep_table()
240 if (!amdgpu_table->entries) in amdgpu_parse_clk_voltage_dep_table()
243 entry = &atom_table->entries[0]; in amdgpu_parse_clk_voltage_dep_table()
245 amdgpu_table->entries[i].clk = le16_to_cpu(entry->usClockLow) | in amdgpu_parse_clk_voltage_dep_table()
247 amdgpu_table->entries[i].v = le16_to_cpu(entry->usVoltage); in amdgpu_parse_clk_voltage_dep_table()
386 le16_to_cpu(clk_v->entries[0].usSclkLow) | in amdgpu_parse_extended_power_table()
387 (clk_v->entries[0].ucSclkHigh << 16); in amdgpu_parse_extended_power_table()
389 le16_to_cpu(clk_v->entries[0].usMclkLow) | in amdgpu_parse_extended_power_table()
390 (clk_v->entries[0].ucMclkHigh << 16); in amdgpu_parse_extended_power_table()
392 le16_to_cpu(clk_v->entries[0].usVddc); in amdgpu_parse_extended_power_table()
[all …]
/drivers/crypto/qat/qat_common/
adf_isr.c
41 pci_dev_info->msix_entries.entries[i].entry = i; in adf_enable_msix()
43 pci_dev_info->msix_entries.entries[0].entry = in adf_enable_msix()
48 pci_dev_info->msix_entries.entries, in adf_enable_msix()
142 struct msix_entry *msixe = pci_dev_info->msix_entries.entries; in adf_request_irqs()
190 struct msix_entry *msixe = pci_dev_info->msix_entries.entries; in adf_free_irqs()
208 struct msix_entry *entries; in adf_isr_alloc_msix_entry_table() local
216 entries = kcalloc_node(msix_num_entries, sizeof(*entries), in adf_isr_alloc_msix_entry_table()
218 if (!entries) in adf_isr_alloc_msix_entry_table()
223 kfree(entries); in adf_isr_alloc_msix_entry_table()
232 accel_dev->accel_pci_dev.msix_entries.entries = entries; in adf_isr_alloc_msix_entry_table()
[all …]
/drivers/acpi/apei/
erst.c
371 ERST_TAB_ENTRY(erst_tab), erst_tab->entries); in erst_exec_ctx_init()
429 u64 *entries; member
478 u64 *entries; in __erst_record_id_cache_add_one() local
499 entries = erst_record_id_cache.entries; in __erst_record_id_cache_add_one()
501 if (entries[i] == id) in __erst_record_id_cache_add_one()
519 new_entries = kvmalloc_array(new_size, sizeof(entries[0]), in __erst_record_id_cache_add_one()
523 memcpy(new_entries, entries, in __erst_record_id_cache_add_one()
524 erst_record_id_cache.len * sizeof(entries[0])); in __erst_record_id_cache_add_one()
525 kvfree(entries); in __erst_record_id_cache_add_one()
526 erst_record_id_cache.entries = entries = new_entries; in __erst_record_id_cache_add_one()
[all …]
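
__erst_record_id_cache_add_one() above grows its id cache by allocating a larger array with kvmalloc_array(), copying the old contents across, freeing the old buffer and swapping the pointer. A userspace sketch of the same grow-and-copy strategy (malloc/free stand in for kvmalloc_array()/kvfree(); the struct, names and doubling policy here are illustrative):

#include <stdlib.h>
#include <string.h>
#include <stdint.h>

struct id_cache {
	uint64_t *entries;
	size_t len;	/* entries in use */
	size_t size;	/* entries allocated */
};

static int id_cache_add(struct id_cache *c, uint64_t id)
{
	for (size_t i = 0; i < c->len; i++)
		if (c->entries[i] == id)
			return 0;	/* already cached */

	if (c->len >= c->size) {
		size_t new_size = c->size ? c->size * 2 : 16;
		uint64_t *new_entries = malloc(new_size * sizeof(c->entries[0]));

		if (!new_entries)
			return -1;
		if (c->len)
			memcpy(new_entries, c->entries, c->len * sizeof(c->entries[0]));
		free(c->entries);
		c->entries = new_entries;
		c->size = new_size;
	}
	c->entries[c->len++] = id;
	return 0;
}
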
/drivers/acpi/
acpi_watchdog.c
30 const struct acpi_wdat_entry *entries; in acpi_watchdog_uses_rtc() local
33 entries = (struct acpi_wdat_entry *)(wdat + 1); in acpi_watchdog_uses_rtc()
34 for (i = 0; i < wdat->entries; i++) { in acpi_watchdog_uses_rtc()
37 gas = &entries[i].register_region; in acpi_watchdog_uses_rtc()
104 const struct acpi_wdat_entry *entries; in acpi_watchdog_init() local
130 entries = (struct acpi_wdat_entry *)(wdat + 1); in acpi_watchdog_init()
131 for (i = 0; i < wdat->entries; i++) { in acpi_watchdog_init()
137 gas = &entries[i].register_region; in acpi_watchdog_init()
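
Both acpi_watchdog functions above find the WDAT action entries by pointing just past the table header ((wdat + 1)) and walking wdat->entries records from there. A sketch of that header-followed-by-records walk, assuming a simplified illustrative layout:

#include <stdint.h>

/* Illustrative layout: a header followed directly by `entries` records. */
struct wdat_header {
	uint32_t entries;	/* number of records that follow the header */
};

struct wdat_entry {
	uint32_t action;
	uint32_t flags;
};

static void walk_entries(const struct wdat_header *hdr,
			 void (*visit)(const struct wdat_entry *))
{
	/* (hdr + 1) points just past the header, where the records start */
	const struct wdat_entry *e = (const struct wdat_entry *)(hdr + 1);

	for (uint32_t i = 0; i < hdr->entries; i++)
		visit(&e[i]);
}
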
/drivers/infiniband/core/
uverbs_std_types_device.c
272 struct ib_uverbs_gid_entry *entries, in copy_gid_entries_to_user() argument
281 if (user_entry_size == sizeof(*entries)) { in copy_gid_entries_to_user()
284 entries, sizeof(*entries) * num_entries); in copy_gid_entries_to_user()
288 copy_len = min_t(size_t, user_entry_size, sizeof(*entries)); in copy_gid_entries_to_user()
295 if (copy_to_user(user_entries, entries, copy_len)) in copy_gid_entries_to_user()
298 if (user_entry_size > sizeof(*entries)) { in copy_gid_entries_to_user()
299 if (clear_user(user_entries + sizeof(*entries), in copy_gid_entries_to_user()
300 user_entry_size - sizeof(*entries))) in copy_gid_entries_to_user()
304 entries++; in copy_gid_entries_to_user()
315 struct ib_uverbs_gid_entry *entries; in UVERBS_HANDLER() local
[all …]
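
copy_gid_entries_to_user() above handles a userspace record size that may differ from the kernel's: it copies min(user_entry_size, sizeof(*entries)) per record and zeroes any extra user tail, which keeps the ABI extensible. A sketch of that per-record copy, with memcpy()/memset() standing in for copy_to_user()/clear_user() and illustrative names:

#include <string.h>
#include <stddef.h>

/* Copy n fixed-size kernel records into user records of a possibly
 * different size: copy the smaller of the two sizes and zero any tail. */
static void copy_records(void *user_buf, size_t user_entry_size,
			 const void *krecs, size_t kentry_size, size_t n)
{
	size_t copy_len = user_entry_size < kentry_size ? user_entry_size : kentry_size;
	unsigned char *dst = user_buf;
	const unsigned char *src = krecs;

	for (size_t i = 0; i < n; i++) {
		memcpy(dst, src, copy_len);
		if (user_entry_size > kentry_size)
			memset(dst + kentry_size, 0, user_entry_size - kentry_size);
		dst += user_entry_size;
		src += kentry_size;
	}
}
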
/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/
dcn31_clk_mgr.c
333 .entries = {
370 .entries = {
418 if (!bw_params->wm_table.entries[i].valid) in dcn31_build_watermark_ranges()
421 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst; in dcn31_build_watermark_ranges()
422 table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type; in dcn31_build_watermark_ranges()
433 bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1; in dcn31_build_watermark_ranges()
436 bw_params->clk_table.entries[i].dcfclk_mhz; in dcn31_build_watermark_ranges()
584 bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[j].FClk; in dcn31_clk_mgr_helper_populate_bw_params()
585 bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[j].MemClk; in dcn31_clk_mgr_helper_populate_bw_params()
586 bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[j].Voltage; in dcn31_clk_mgr_helper_populate_bw_params()
[all …]
/drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/
rn_clk_mgr.c
467 if (!bw_params->wm_table.entries[i].valid) in build_watermark_ranges()
470 ranges->reader_wm_sets[num_valid_sets].wm_inst = bw_params->wm_table.entries[i].wm_inst; in build_watermark_ranges()
471 ranges->reader_wm_sets[num_valid_sets].wm_type = bw_params->wm_table.entries[i].wm_type; in build_watermark_ranges()
482 …ranges->reader_wm_sets[num_valid_sets].min_drain_clk_mhz = bw_params->clk_table.entries[i - 1].dcf… in build_watermark_ranges()
484 …ranges->reader_wm_sets[num_valid_sets].max_drain_clk_mhz = bw_params->clk_table.entries[i].dcfclk_… in build_watermark_ranges()
582 .entries = {
619 .entries = {
656 .entries = {
693 .entries = {
730 .entries = {
[all …]
/drivers/gpu/drm/amd/pm/powerplay/
kv_dpm.c
81 return vddc_sclk_table->entries[vid_2bit].v; in kv_convert_vid2_to_vid7()
83 return vddc_sclk_table->entries[vddc_sclk_table->count - 1].v; in kv_convert_vid2_to_vid7()
86 if (vid_mapping_table->entries[i].vid_2bit == vid_2bit) in kv_convert_vid2_to_vid7()
87 return vid_mapping_table->entries[i].vid_7bit; in kv_convert_vid2_to_vid7()
89 return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit; in kv_convert_vid2_to_vid7()
103 if (vddc_sclk_table->entries[i].v == vid_7bit) in kv_convert_vid7_to_vid2()
109 if (vid_mapping_table->entries[i].vid_7bit == vid_7bit) in kv_convert_vid7_to_vid2()
110 return vid_mapping_table->entries[i].vid_2bit; in kv_convert_vid7_to_vid2()
113 return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit; in kv_convert_vid7_to_vid2()
146 sclk_voltage_mapping_table->entries[n].sclk_frequency = in sumo_construct_sclk_voltage_mapping_table()
[all …]
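
Both kv_dpm.c copies (radeon above and this powerplay one) do the same bounded lookup: scan the mapping table for a matching 2-bit VID and fall back to the last entry when nothing matches, so the conversion always yields a usable value. A compact sketch; the struct names mirror the fields visible in the hits but are otherwise illustrative:

#include <stdint.h>

struct vid_mapping_entry {
	uint32_t vid_2bit;
	uint32_t vid_7bit;
};

struct vid_mapping_table {
	uint32_t num_entries;		/* assumed to be at least 1 */
	struct vid_mapping_entry entries[8];
};

/* Return the 7-bit VID for a 2-bit VID, falling back to the last entry. */
static uint32_t vid2_to_vid7(const struct vid_mapping_table *t, uint32_t vid_2bit)
{
	for (uint32_t i = 0; i < t->num_entries; i++)
		if (t->entries[i].vid_2bit == vid_2bit)
			return t->entries[i].vid_7bit;

	return t->entries[t->num_entries - 1].vid_7bit;
}
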
