
Searched refs:table (Results 1 – 25 of 372) sorted by relevance


/drivers/net/ethernet/mellanox/mlx4/
icm.c
253 int mlx4_table_get(struct mlx4_dev *dev, struct mlx4_icm_table *table, u32 obj, in mlx4_table_get() argument
256 u32 i = (obj & (table->num_obj - 1)) / in mlx4_table_get()
257 (MLX4_TABLE_CHUNK_SIZE / table->obj_size); in mlx4_table_get()
260 mutex_lock(&table->mutex); in mlx4_table_get()
262 if (table->icm[i]) { in mlx4_table_get()
263 ++table->icm[i]->refcount; in mlx4_table_get()
267 table->icm[i] = mlx4_alloc_icm(dev, MLX4_TABLE_CHUNK_SIZE >> PAGE_SHIFT, in mlx4_table_get()
268 (table->lowmem ? gfp : GFP_HIGHUSER) | in mlx4_table_get()
269 __GFP_NOWARN, table->coherent); in mlx4_table_get()
270 if (!table->icm[i]) { in mlx4_table_get()
[all …]
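
The mlx4_table_get() excerpt above shows a common ICM pattern: a large object table is carved into fixed-size chunks that are allocated lazily and reference-counted under a mutex, so the first user of a chunk pays for the allocation and later users only bump the count. A minimal userspace sketch of the same idea (all names hypothetical; a pthread mutex and calloc() stand in for the kernel mutex and mlx4_alloc_icm()):

    #include <pthread.h>
    #include <stdlib.h>

    #define CHUNK_OBJS 256            /* objects covered by one lazily allocated chunk */
    #define MAX_CHUNKS 1024           /* sketch only: fixed upper bound on chunk slots */

    struct chunk { int refcount; void *mem; };

    struct icm_table {
        pthread_mutex_t mutex;
        unsigned int num_obj;         /* power of two, as in the mlx4 code */
        size_t obj_size;
        struct chunk *chunks[MAX_CHUNKS];   /* NULL until the first user arrives */
    };

    /* Take a reference on the chunk backing object `obj`, allocating it on demand. */
    static int table_get(struct icm_table *t, unsigned int obj)
    {
        unsigned int i = (obj & (t->num_obj - 1)) / CHUNK_OBJS;
        int ret = 0;

        pthread_mutex_lock(&t->mutex);
        if (t->chunks[i]) {
            ++t->chunks[i]->refcount;              /* already present: just bump the count */
            goto out;
        }
        t->chunks[i] = calloc(1, sizeof(*t->chunks[i]));
        if (t->chunks[i] && !(t->chunks[i]->mem = calloc(CHUNK_OBJS, t->obj_size))) {
            free(t->chunks[i]);
            t->chunks[i] = NULL;
        }
        if (!t->chunks[i]) {
            ret = -1;                              /* the driver returns -ENOMEM here */
            goto out;
        }
        t->chunks[i]->refcount = 1;
    out:
        pthread_mutex_unlock(&t->mutex);
        return ret;
    }
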
port.c
52 void mlx4_init_mac_table(struct mlx4_dev *dev, struct mlx4_mac_table *table) in mlx4_init_mac_table() argument
56 mutex_init(&table->mutex); in mlx4_init_mac_table()
58 table->entries[i] = 0; in mlx4_init_mac_table()
59 table->refs[i] = 0; in mlx4_init_mac_table()
61 table->max = 1 << dev->caps.log_num_macs; in mlx4_init_mac_table()
62 table->total = 0; in mlx4_init_mac_table()
65 void mlx4_init_vlan_table(struct mlx4_dev *dev, struct mlx4_vlan_table *table) in mlx4_init_vlan_table() argument
69 mutex_init(&table->mutex); in mlx4_init_vlan_table()
71 table->entries[i] = 0; in mlx4_init_vlan_table()
72 table->refs[i] = 0; in mlx4_init_vlan_table()
[all …]
/drivers/net/wireless/iwlwifi/mvm/
utils.c
413 struct iwl_umac_error_event_table table; in iwl_mvm_dump_umac_error_log() local
427 iwl_trans_read_mem_bytes(trans, base, &table, sizeof(table)); in iwl_mvm_dump_umac_error_log()
429 if (ERROR_START_OFFSET <= table.valid * ERROR_ELEM_SIZE) { in iwl_mvm_dump_umac_error_log()
432 mvm->status, table.valid); in iwl_mvm_dump_umac_error_log()
435 IWL_ERR(mvm, "0x%08X | %-28s\n", table.error_id, in iwl_mvm_dump_umac_error_log()
436 desc_lookup(table.error_id)); in iwl_mvm_dump_umac_error_log()
437 IWL_ERR(mvm, "0x%08X | umac branchlink1\n", table.blink1); in iwl_mvm_dump_umac_error_log()
438 IWL_ERR(mvm, "0x%08X | umac branchlink2\n", table.blink2); in iwl_mvm_dump_umac_error_log()
439 IWL_ERR(mvm, "0x%08X | umac interruptlink1\n", table.ilink1); in iwl_mvm_dump_umac_error_log()
440 IWL_ERR(mvm, "0x%08X | umac interruptlink2\n", table.ilink2); in iwl_mvm_dump_umac_error_log()
[all …]
/drivers/media/i2c/
ks0127.c
221 u8 *table = reg_defaults; in init_reg_defaults() local
227 table[KS_CMDA] = 0x2c; /* VSE=0, CCIR 601, autodetect standard */ in init_reg_defaults()
228 table[KS_CMDB] = 0x12; /* VALIGN=0, AGC control and input */ in init_reg_defaults()
229 table[KS_CMDC] = 0x00; /* Test options */ in init_reg_defaults()
231 table[KS_CMDD] = 0x01; in init_reg_defaults()
232 table[KS_HAVB] = 0x00; /* HAV Start Control */ in init_reg_defaults()
233 table[KS_HAVE] = 0x00; /* HAV End Control */ in init_reg_defaults()
234 table[KS_HS1B] = 0x10; /* HS1 Start Control */ in init_reg_defaults()
235 table[KS_HS1E] = 0x00; /* HS1 End Control */ in init_reg_defaults()
236 table[KS_HS2B] = 0x00; /* HS2 Start Control */ in init_reg_defaults()
[all …]
/drivers/infiniband/hw/mthca/
mthca_memfree.c
222 int mthca_table_get(struct mthca_dev *dev, struct mthca_icm_table *table, int obj) in mthca_table_get() argument
224 int i = (obj & (table->num_obj - 1)) * table->obj_size / MTHCA_TABLE_CHUNK_SIZE; in mthca_table_get()
227 mutex_lock(&table->mutex); in mthca_table_get()
229 if (table->icm[i]) { in mthca_table_get()
230 ++table->icm[i]->refcount; in mthca_table_get()
234 table->icm[i] = mthca_alloc_icm(dev, MTHCA_TABLE_CHUNK_SIZE >> PAGE_SHIFT, in mthca_table_get()
235 (table->lowmem ? GFP_KERNEL : GFP_HIGHUSER) | in mthca_table_get()
236 __GFP_NOWARN, table->coherent); in mthca_table_get()
237 if (!table->icm[i]) { in mthca_table_get()
242 if (mthca_MAP_ICM(dev, table->icm[i], in mthca_table_get()
[all …]
/drivers/gpu/drm/radeon/
rv730_dpm.c
230 RV770_SMC_STATETABLE *table) in rv730_populate_smc_acpi_state() argument
242 table->ACPIState = table->initialState; in rv730_populate_smc_acpi_state()
243 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC; in rv730_populate_smc_acpi_state()
247 &table->ACPIState.levels[0].vddc); in rv730_populate_smc_acpi_state()
248 table->ACPIState.levels[0].gen2PCIE = pi->pcie_gen2 ? in rv730_populate_smc_acpi_state()
250 table->ACPIState.levels[0].gen2XSP = in rv730_populate_smc_acpi_state()
254 &table->ACPIState.levels[0].vddc); in rv730_populate_smc_acpi_state()
255 table->ACPIState.levels[0].gen2PCIE = 0; in rv730_populate_smc_acpi_state()
297 table->ACPIState.levels[0].mclk.mclk730.vMPLL_FUNC_CNTL = cpu_to_be32(mpll_func_cntl); in rv730_populate_smc_acpi_state()
298 table->ACPIState.levels[0].mclk.mclk730.vMPLL_FUNC_CNTL2 = cpu_to_be32(mpll_func_cntl_2); in rv730_populate_smc_acpi_state()
[all …]
ci_dpm.c
890 SMU7_Discrete_DpmTable *table = &pi->smc_state_table; in ci_init_fps_limits() local
896 table->FpsHighT = cpu_to_be16(tmp); in ci_init_fps_limits()
899 table->FpsLowT = cpu_to_be16(tmp); in ci_init_fps_limits()
1781 SMU7_Discrete_DpmTable *table) in ci_populate_smc_vddc_table() argument
1786 table->VddcLevelCount = pi->vddc_voltage_table.count; in ci_populate_smc_vddc_table()
1787 for (count = 0; count < table->VddcLevelCount; count++) { in ci_populate_smc_vddc_table()
1790 &table->VddcLevel[count]); in ci_populate_smc_vddc_table()
1793 table->VddcLevel[count].Smio |= in ci_populate_smc_vddc_table()
1796 table->VddcLevel[count].Smio = 0; in ci_populate_smc_vddc_table()
1798 table->VddcLevelCount = cpu_to_be32(table->VddcLevelCount); in ci_populate_smc_vddc_table()
[all …]
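
The ci_dpm.c excerpt builds the VDDC level table in host byte order and only converts the count with cpu_to_be32() at the end, because the SMU firmware expects big-endian fields. A small sketch of that host-to-big-endian fix-up step, assuming glibc's htobe16()/htobe32() as stand-ins for the kernel's cpu_to_be*() helpers (the struct layout and names here are illustrative, not the real SMU7 layout):

    #include <endian.h>    /* htobe16()/htobe32() on glibc; the kernel uses cpu_to_be*() */
    #include <stdint.h>

    struct fw_voltage_level { uint16_t voltage; uint32_t smio; };

    struct fw_voltage_table {
        uint32_t level_count;
        struct fw_voltage_level levels[8];
    };

    /* Convert a table built in host byte order into the big-endian layout the
     * firmware expects; the count is converted last, mirroring how the excerpt
     * converts VddcLevelCount only after the per-level loop. */
    static void voltage_table_to_be(struct fw_voltage_table *t, uint32_t count)
    {
        for (uint32_t i = 0; i < count; i++) {
            t->levels[i].voltage = htobe16(t->levels[i].voltage);
            t->levels[i].smio    = htobe32(t->levels[i].smio);
        }
        t->level_count = htobe32(count);
    }
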
btc_dpm.c
1174 …tc_get_max_clock_from_voltage_dependency_table(struct radeon_clock_voltage_dependency_table *table, in btc_get_max_clock_from_voltage_dependency_table() argument
1179 if ((table == NULL) || (table->count == 0)) { in btc_get_max_clock_from_voltage_dependency_table()
1184 for (i = 0; i < table->count; i++) { in btc_get_max_clock_from_voltage_dependency_table()
1185 if (clock < table->entries[i].clk) in btc_get_max_clock_from_voltage_dependency_table()
1186 clock = table->entries[i].clk; in btc_get_max_clock_from_voltage_dependency_table()
1191 void btc_apply_voltage_dependency_rules(struct radeon_clock_voltage_dependency_table *table, in btc_apply_voltage_dependency_rules() argument
1196 if ((table == NULL) || (table->count == 0)) in btc_apply_voltage_dependency_rules()
1199 for (i= 0; i < table->count; i++) { in btc_apply_voltage_dependency_rules()
1200 if (clock <= table->entries[i].clk) { in btc_apply_voltage_dependency_rules()
1201 if (*voltage < table->entries[i].v) in btc_apply_voltage_dependency_rules()
[all …]
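
btc_apply_voltage_dependency_rules() walks a clock/voltage dependency table and raises the requested voltage to what the target clock needs; in the full driver the result is also clamped to a caller-supplied maximum. A compact sketch of that rule with hypothetical names, assuming entries sorted by ascending clock:

    #include <stdint.h>
    #include <stddef.h>

    struct clk_volt_dep { uint32_t clk; uint16_t v; };

    /* Raise *voltage so it satisfies the dependency table for `clock`, then
     * clamp it to `max_voltage`. Entries are assumed sorted by ascending clk. */
    static void apply_voltage_dependency(const struct clk_volt_dep *table, size_t count,
                                         uint32_t clock, uint16_t max_voltage,
                                         uint16_t *voltage)
    {
        if (!table || count == 0)
            return;

        for (size_t i = 0; i < count; i++) {
            if (clock <= table[i].clk) {           /* first entry fast enough for `clock` */
                if (*voltage < table[i].v)
                    *voltage = table[i].v;         /* bump to the required voltage */
                break;
            }
        }
        if (*voltage > max_voltage)
            *voltage = max_voltage;                /* never exceed the allowed maximum */
    }
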
cypress_dpm.c
404 struct atom_voltage_table *table, in cypress_populate_voltage_value() argument
409 for (i = 0; i < table->count; i++) { in cypress_populate_voltage_value()
410 if (value <= table->entries[i].value) { in cypress_populate_voltage_value()
412 voltage->value = cpu_to_be16(table->entries[i].value); in cypress_populate_voltage_value()
417 if (i == table->count) in cypress_populate_voltage_value()
1236 RV770_SMC_STATETABLE *table) in cypress_populate_smc_initial_state() argument
1243 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = in cypress_populate_smc_initial_state()
1245 table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = in cypress_populate_smc_initial_state()
1247 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = in cypress_populate_smc_initial_state()
1249 table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = in cypress_populate_smc_initial_state()
[all …]
rv740_dpm.c
316 RV770_SMC_STATETABLE *table) in rv740_populate_smc_acpi_state() argument
329 table->ACPIState = table->initialState; in rv740_populate_smc_acpi_state()
331 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC; in rv740_populate_smc_acpi_state()
335 &table->ACPIState.levels[0].vddc); in rv740_populate_smc_acpi_state()
336 table->ACPIState.levels[0].gen2PCIE = in rv740_populate_smc_acpi_state()
339 table->ACPIState.levels[0].gen2XSP = in rv740_populate_smc_acpi_state()
343 &table->ACPIState.levels[0].vddc); in rv740_populate_smc_acpi_state()
344 table->ACPIState.levels[0].gen2PCIE = 0; in rv740_populate_smc_acpi_state()
374 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl); in rv740_populate_smc_acpi_state()
375 table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2); in rv740_populate_smc_acpi_state()
[all …]
/drivers/clk/ti/
divider.c
32 static unsigned int _get_table_maxdiv(const struct clk_div_table *table) in _get_table_maxdiv() argument
37 for (clkt = table; clkt->div; clkt++) in _get_table_maxdiv()
49 if (divider->table) in _get_maxdiv()
50 return _get_table_maxdiv(divider->table); in _get_maxdiv()
54 static unsigned int _get_table_div(const struct clk_div_table *table, in _get_table_div() argument
59 for (clkt = table; clkt->div; clkt++) in _get_table_div()
71 if (divider->table) in _get_div()
72 return _get_table_div(divider->table, val); in _get_div()
76 static unsigned int _get_table_val(const struct clk_div_table *table, in _get_table_val() argument
81 for (clkt = table; clkt->div; clkt++) in _get_table_val()
[all …]
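
Both divider implementations (this TI copy and drivers/clk/clk-divider.c further down) walk a clk_div_table, an array of {register value, divider} pairs terminated by an entry whose .div is zero, to translate between the two. A self-contained sketch of that lookup under the same zero-terminated convention:

    #include <stdio.h>

    struct clk_div_table { unsigned int val; unsigned int div; };   /* .div == 0 ends the table */

    static unsigned int get_table_maxdiv(const struct clk_div_table *table)
    {
        unsigned int maxdiv = 0;

        for (const struct clk_div_table *clkt = table; clkt->div; clkt++)
            if (clkt->div > maxdiv)
                maxdiv = clkt->div;
        return maxdiv;
    }

    /* Map a register field value to its divider; 0 means "not in the table". */
    static unsigned int get_table_div(const struct clk_div_table *table, unsigned int val)
    {
        for (const struct clk_div_table *clkt = table; clkt->div; clkt++)
            if (clkt->val == val)
                return clkt->div;
        return 0;
    }

    int main(void)
    {
        static const struct clk_div_table tbl[] = {
            { .val = 0, .div = 1 }, { .val = 1, .div = 2 },
            { .val = 2, .div = 4 }, { .div = 0 },                    /* sentinel */
        };

        printf("maxdiv=%u, div for val 2 = %u\n", get_table_maxdiv(tbl), get_table_div(tbl, 2));
        return 0;
    }
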
/drivers/net/ethernet/sfc/
farch.c
1876 struct efx_farch_filter_table table[EFX_FARCH_FILTER_TABLE_COUNT]; member
1881 struct efx_farch_filter_table *table,
1930 struct efx_farch_filter_table *table; in efx_farch_filter_push_rx_config() local
1935 table = &state->table[EFX_FARCH_FILTER_TABLE_RX_IP]; in efx_farch_filter_push_rx_config()
1937 table->search_limit[EFX_FARCH_FILTER_TCP_FULL] + in efx_farch_filter_push_rx_config()
1940 table->search_limit[EFX_FARCH_FILTER_TCP_WILD] + in efx_farch_filter_push_rx_config()
1943 table->search_limit[EFX_FARCH_FILTER_UDP_FULL] + in efx_farch_filter_push_rx_config()
1946 table->search_limit[EFX_FARCH_FILTER_UDP_WILD] + in efx_farch_filter_push_rx_config()
1949 table = &state->table[EFX_FARCH_FILTER_TABLE_RX_MAC]; in efx_farch_filter_push_rx_config()
1950 if (table->size) { in efx_farch_filter_push_rx_config()
[all …]
/drivers/cpufreq/
freq_table.c
22 struct cpufreq_frequency_table *table) in cpufreq_frequency_table_cpuinfo() argument
29 cpufreq_for_each_valid_entry(pos, table) { in cpufreq_frequency_table_cpuinfo()
36 pr_debug("table entry %u: %u kHz\n", (int)(pos - table), freq); in cpufreq_frequency_table_cpuinfo()
55 struct cpufreq_frequency_table *table) in cpufreq_frequency_table_verify() argument
66 cpufreq_for_each_valid_entry(pos, table) { in cpufreq_frequency_table_verify()
96 struct cpufreq_frequency_table *table = in cpufreq_generic_frequency_table_verify() local
98 if (!table) in cpufreq_generic_frequency_table_verify()
101 return cpufreq_frequency_table_verify(policy, table); in cpufreq_generic_frequency_table_verify()
106 struct cpufreq_frequency_table *table, in cpufreq_frequency_table_target() argument
135 cpufreq_for_each_valid_entry(pos, table) { in cpufreq_frequency_table_target()
[all …]
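
freq_table.c scans a frequency table terminated by CPUFREQ_TABLE_END, skipping rows marked CPUFREQ_ENTRY_INVALID, to derive the policy's minimum and maximum. A runnable sketch of that scan using stand-in constants rather than the real cpufreq macros:

    #include <stdio.h>

    #define FREQ_TABLE_END     (~0u)     /* stand-in for CPUFREQ_TABLE_END */
    #define FREQ_ENTRY_INVALID (~1u)     /* stand-in for CPUFREQ_ENTRY_INVALID */

    struct freq_entry { unsigned int driver_data; unsigned int frequency; };

    /* Walk the valid entries and report the lowest and highest frequency (kHz). */
    static void table_cpuinfo(const struct freq_entry *table,
                              unsigned int *min_khz, unsigned int *max_khz)
    {
        *min_khz = ~0u;
        *max_khz = 0;
        for (const struct freq_entry *pos = table; pos->frequency != FREQ_TABLE_END; pos++) {
            if (pos->frequency == FREQ_ENTRY_INVALID)
                continue;                          /* skip holes, like the kernel iterator */
            if (pos->frequency < *min_khz)
                *min_khz = pos->frequency;
            if (pos->frequency > *max_khz)
                *max_khz = pos->frequency;
        }
    }

    int main(void)
    {
        const struct freq_entry tbl[] = {
            { 0, 1000000 }, { 1, FREQ_ENTRY_INVALID }, { 2, 500000 }, { 3, FREQ_TABLE_END },
        };
        unsigned int lo, hi;

        table_cpuinfo(tbl, &lo, &hi);
        printf("min=%u kHz, max=%u kHz\n", lo, hi);
        return 0;
    }
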
ppc-corenet-cpufreq.c
32 struct cpufreq_frequency_table *table; member
107 struct cpufreq_frequency_table table; in freq_table_sort() local
122 table.driver_data = freq_table[i].driver_data; in freq_table_sort()
123 table.frequency = freq_table[i].frequency; in freq_table_sort()
126 freq_table[ind].driver_data = table.driver_data; in freq_table_sort()
127 freq_table[ind].frequency = table.frequency; in freq_table_sort()
138 struct cpufreq_frequency_table *table; in corenet_cpufreq_cpu_init() local
166 table = kcalloc(count + 1, sizeof(*table), GFP_KERNEL); in corenet_cpufreq_cpu_init()
167 if (!table) { in corenet_cpufreq_cpu_init()
185 table[i].frequency = CPUFREQ_ENTRY_INVALID; in corenet_cpufreq_cpu_init()
[all …]
sparc-us2e-cpufreq.c
25 struct cpufreq_frequency_table table[6]; member
280 struct cpufreq_frequency_table *table = in us2e_freq_cpu_init() local
281 &us2e_freq_table[cpu].table[0]; in us2e_freq_cpu_init()
283 table[0].driver_data = 0; in us2e_freq_cpu_init()
284 table[0].frequency = clock_tick / 1; in us2e_freq_cpu_init()
285 table[1].driver_data = 1; in us2e_freq_cpu_init()
286 table[1].frequency = clock_tick / 2; in us2e_freq_cpu_init()
287 table[2].driver_data = 2; in us2e_freq_cpu_init()
288 table[2].frequency = clock_tick / 4; in us2e_freq_cpu_init()
289 table[3].driver_data = 3; in us2e_freq_cpu_init()
[all …]
/drivers/clk/
clk-divider.c
35 static unsigned int _get_table_maxdiv(const struct clk_div_table *table) in _get_table_maxdiv() argument
40 for (clkt = table; clkt->div; clkt++) in _get_table_maxdiv()
46 static unsigned int _get_table_mindiv(const struct clk_div_table *table) in _get_table_mindiv() argument
51 for (clkt = table; clkt->div; clkt++) in _get_table_mindiv()
63 if (divider->table) in _get_maxdiv()
64 return _get_table_maxdiv(divider->table); in _get_maxdiv()
68 static unsigned int _get_table_div(const struct clk_div_table *table, in _get_table_div() argument
73 for (clkt = table; clkt->div; clkt++) in _get_table_div()
85 if (divider->table) in _get_div()
86 return _get_table_div(divider->table, val); in _get_div()
[all …]
/drivers/acpi/acpica/
utfileio.c
59 acpi_ut_check_text_mode_corruption(u8 *table,
64 struct acpi_table_header **table, u32 *table_length);
83 acpi_ut_check_text_mode_corruption(u8 *table, u32 table_length, u32 file_length) in acpi_ut_check_text_mode_corruption() argument
97 if (table[i] == 0x0A) { in acpi_ut_check_text_mode_corruption()
98 if (table[i - 1] != 0x0D) { in acpi_ut_check_text_mode_corruption()
146 struct acpi_table_header **table, u32 *table_length) in acpi_ut_read_table() argument
232 *table = acpi_os_allocate((size_t) file_size); in acpi_ut_read_table()
233 if (!*table) { in acpi_ut_read_table()
243 actual = fread(*table, 1, (size_t) file_size, fp); in acpi_ut_read_table()
249 status = acpi_tb_verify_checksum((void *)*table, in acpi_ut_read_table()
[all …]
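
acpi_ut_check_text_mode_corruption() looks for LF (0x0A) bytes that are not preceded by CR (0x0D): if every LF in the image is part of a CR/LF pair, the binary table was probably mangled by a text-mode file transfer. A small sketch of that check (function name hypothetical):

    #include <stdint.h>
    #include <stddef.h>

    /* Return nonzero if the table image looks like a binary file that went through
     * a CRLF-translating (text mode) copy: at least one CR/LF pair and no LF that
     * stands on its own. */
    static int looks_text_mode_corrupted(const uint8_t *table, size_t length)
    {
        size_t pairs = 0, bare_lf = 0;

        for (size_t i = 1; i < length; i++) {
            if (table[i] != 0x0A)
                continue;
            if (table[i - 1] == 0x0D)
                pairs++;              /* CR/LF pair, consistent with text-mode damage */
            else
                bare_lf++;            /* lone LF: the image was not translated */
        }
        return pairs > 0 && bare_lf == 0;
    }
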
tbutils.c
273 struct acpi_table_header *table; in acpi_tb_parse_root_table() local
319 table = acpi_os_map_memory(address, sizeof(struct acpi_table_header)); in acpi_tb_parse_root_table()
320 if (!table) { in acpi_tb_parse_root_table()
324 acpi_tb_print_table_header(address, table); in acpi_tb_parse_root_table()
330 length = table->length; in acpi_tb_parse_root_table()
331 acpi_os_unmap_memory(table, sizeof(struct acpi_table_header)); in acpi_tb_parse_root_table()
340 table = acpi_os_map_memory(address, length); in acpi_tb_parse_root_table()
341 if (!table) { in acpi_tb_parse_root_table()
347 status = acpi_tb_verify_checksum(table, length); in acpi_tb_parse_root_table()
349 acpi_os_unmap_memory(table, length); in acpi_tb_parse_root_table()
[all …]
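
acpi_tb_parse_root_table() maps only the fixed-size header first, reads the table's declared length, unmaps the header, then maps the full table and verifies its checksum (ACPI checksums are defined so that all bytes of the table sum to zero modulo 256). A userspace sketch of that two-step read against a file, with an illustrative header layout and hypothetical names:

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    /* Illustrative header only: the real acpi_table_header is 36 bytes, but the
     * 32-bit length at offset 4 is what matters here. */
    struct table_header { char signature[4]; uint32_t length; };

    /* All bytes of a well-formed ACPI table sum to 0 (mod 256). */
    static int checksum_ok(const uint8_t *buf, uint32_t len)
    {
        uint8_t sum = 0;

        for (uint32_t i = 0; i < len; i++)
            sum = (uint8_t)(sum + buf[i]);
        return sum == 0;
    }

    static uint8_t *read_table(FILE *fp)
    {
        struct table_header hdr;
        uint8_t *table;

        /* Step 1: read only the header to learn the full table length. */
        if (fread(&hdr, sizeof(hdr), 1, fp) != 1 || hdr.length < sizeof(hdr))
            return NULL;

        /* Step 2: go back and read the whole table in one piece, then verify it. */
        table = malloc(hdr.length);
        if (!table)
            return NULL;
        rewind(fp);
        if (fread(table, 1, hdr.length, fp) != hdr.length || !checksum_ok(table, hdr.length)) {
            free(table);
            return NULL;
        }
        return table;
    }
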
/drivers/net/ethernet/mellanox/mlx5/core/
srq.c
43 struct mlx5_srq_table *table = &dev->priv.srq_table; in mlx5_srq_event() local
46 spin_lock(&table->lock); in mlx5_srq_event()
48 srq = radix_tree_lookup(&table->tree, srqn); in mlx5_srq_event()
52 spin_unlock(&table->lock); in mlx5_srq_event()
67 struct mlx5_srq_table *table = &dev->priv.srq_table; in mlx5_core_get_srq() local
70 spin_lock(&table->lock); in mlx5_core_get_srq()
72 srq = radix_tree_lookup(&table->tree, srqn); in mlx5_core_get_srq()
76 spin_unlock(&table->lock); in mlx5_core_get_srq()
86 struct mlx5_srq_table *table = &dev->priv.srq_table; in mlx5_core_create_srq() local
105 spin_lock_irq(&table->lock); in mlx5_core_create_srq()
[all …]
cq.c
45 struct mlx5_cq_table *table = &dev->priv.cq_table; in mlx5_cq_completion() local
47 spin_lock(&table->lock); in mlx5_cq_completion()
48 cq = radix_tree_lookup(&table->tree, cqn); in mlx5_cq_completion()
51 spin_unlock(&table->lock); in mlx5_cq_completion()
68 struct mlx5_cq_table *table = &dev->priv.cq_table; in mlx5_cq_event() local
71 spin_lock(&table->lock); in mlx5_cq_event()
73 cq = radix_tree_lookup(&table->tree, cqn); in mlx5_cq_event()
77 spin_unlock(&table->lock); in mlx5_cq_event()
95 struct mlx5_cq_table *table = &dev->priv.cq_table; in mlx5_core_create_cq() local
115 spin_lock_irq(&table->lock); in mlx5_core_create_cq()
[all …]
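
Both mlx5 excerpts (srq.c and cq.c) resolve an object number to a driver structure through a per-device table: a radix tree protected by a spinlock, with a reference typically taken on the found object before the lock is dropped so it cannot vanish under the caller. A minimal userspace analogue, using a trivial chained hash table and a pthread mutex as stand-ins for the radix tree and spinlock (all names hypothetical):

    #include <pthread.h>
    #include <stddef.h>

    #define BUCKETS 64

    struct tracked_obj {
        unsigned int id;
        int refcount;
        struct tracked_obj *next;      /* hash-chain link */
    };

    struct id_table {
        pthread_mutex_t lock;          /* the driver uses a spinlock here */
        struct tracked_obj *buckets[BUCKETS];
    };

    /* Look up `id` and take a reference while still holding the lock, so the
     * object cannot be freed between the lookup and its use by the caller. */
    static struct tracked_obj *id_table_get(struct id_table *t, unsigned int id)
    {
        struct tracked_obj *o;

        pthread_mutex_lock(&t->lock);
        for (o = t->buckets[id % BUCKETS]; o; o = o->next)
            if (o->id == id)
                break;
        if (o)
            o->refcount++;
        pthread_mutex_unlock(&t->lock);
        return o;                      /* caller drops the reference when done */
    }
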
/drivers/staging/android/ion/
ion_chunk_heap.c
43 struct sg_table *table; in ion_chunk_heap_allocate() local
58 table = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in ion_chunk_heap_allocate()
59 if (!table) in ion_chunk_heap_allocate()
61 ret = sg_alloc_table(table, num_chunks, GFP_KERNEL); in ion_chunk_heap_allocate()
63 kfree(table); in ion_chunk_heap_allocate()
67 sg = table->sgl; in ion_chunk_heap_allocate()
78 buffer->priv_virt = table; in ion_chunk_heap_allocate()
82 sg = table->sgl; in ion_chunk_heap_allocate()
88 sg_free_table(table); in ion_chunk_heap_allocate()
89 kfree(table); in ion_chunk_heap_allocate()
[all …]
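
ion_chunk_heap_allocate() allocates an sg_table, then one chunk per scatterlist entry, and on failure frees whatever it already allocated before releasing the table itself. A plain-C sketch of that unwind-on-error shape, with malloc()/calloc() standing in for sg_alloc_table() and the chunk allocator:

    #include <stdlib.h>

    struct chunk_buffer {
        size_t num_chunks;
        void **chunks;                 /* stands in for the scatterlist entries */
    };

    static struct chunk_buffer *chunk_buffer_alloc(size_t num_chunks, size_t chunk_size)
    {
        struct chunk_buffer *buf = malloc(sizeof(*buf));

        if (!buf)
            return NULL;
        buf->num_chunks = num_chunks;
        buf->chunks = calloc(num_chunks, sizeof(*buf->chunks));
        if (!buf->chunks)
            goto err_table;

        for (size_t i = 0; i < num_chunks; i++) {
            buf->chunks[i] = malloc(chunk_size);
            if (!buf->chunks[i])
                goto err_chunks;       /* unwind everything allocated so far */
        }
        return buf;

    err_chunks:
        for (size_t i = 0; i < num_chunks && buf->chunks[i]; i++)
            free(buf->chunks[i]);
        free(buf->chunks);
    err_table:
        free(buf);
        return NULL;
    }
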
ion_carveout_heap.c
63 struct sg_table *table = buffer->priv_virt; in ion_carveout_heap_phys() local
64 struct page *page = sg_page(table->sgl); in ion_carveout_heap_phys()
77 struct sg_table *table; in ion_carveout_heap_allocate() local
84 table = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in ion_carveout_heap_allocate()
85 if (!table) in ion_carveout_heap_allocate()
87 ret = sg_alloc_table(table, 1, GFP_KERNEL); in ion_carveout_heap_allocate()
97 sg_set_page(table->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0); in ion_carveout_heap_allocate()
98 buffer->priv_virt = table; in ion_carveout_heap_allocate()
103 sg_free_table(table); in ion_carveout_heap_allocate()
105 kfree(table); in ion_carveout_heap_allocate()
[all …]
ion_system_heap.c
130 struct sg_table *table; in ion_system_heap_allocate() local
155 table = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in ion_system_heap_allocate()
156 if (!table) in ion_system_heap_allocate()
159 if (sg_alloc_table(table, i, GFP_KERNEL)) in ion_system_heap_allocate()
162 sg = table->sgl; in ion_system_heap_allocate()
169 buffer->priv_virt = table; in ion_system_heap_allocate()
173 kfree(table); in ion_system_heap_allocate()
185 struct sg_table *table = buffer->sg_table; in ion_system_heap_free() local
195 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
197 sg_free_table(table); in ion_system_heap_free()
[all …]
/drivers/gpu/drm/
drm_hashtab.c
46 ht->table = NULL; in drm_ht_create()
47 if (size <= PAGE_SIZE / sizeof(*ht->table)) in drm_ht_create()
48 ht->table = kcalloc(size, sizeof(*ht->table), GFP_KERNEL); in drm_ht_create()
50 ht->table = vzalloc(size*sizeof(*ht->table)); in drm_ht_create()
51 if (!ht->table) { in drm_ht_create()
68 h_list = &ht->table[hashed_key]; in drm_ht_verbose_list()
81 h_list = &ht->table[hashed_key]; in drm_ht_find_key()
99 h_list = &ht->table[hashed_key]; in drm_ht_find_key_rcu()
118 h_list = &ht->table[hashed_key]; in drm_ht_insert_item()
200 if (ht->table) { in drm_ht_remove()
[all …]
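
drm_ht_create() sizes a bucket array of 1 << order entries, using kcalloc() when it fits in a page and vzalloc() otherwise, and lookups hash the key and walk that bucket's list. A short userspace sketch of the structure, where calloc() replaces both kernel allocators and a simple multiplicative hash stands in for the kernel's hashing:

    #include <stdlib.h>

    struct ht_item { unsigned long key; struct ht_item *next; };

    struct open_hash {
        unsigned int order;            /* the table has 1 << order buckets */
        struct ht_item **table;
    };

    static int ht_create(struct open_hash *ht, unsigned int order)
    {
        size_t size = (size_t)1 << order;

        /* The kernel picks kcalloc() when the bucket array fits in a page and
         * vzalloc() otherwise; userspace only has calloc(). */
        ht->order = order;
        ht->table = calloc(size, sizeof(*ht->table));
        return ht->table ? 0 : -1;
    }

    /* Hash the key into a bucket and walk its chain, like drm_ht_find_key(). */
    static struct ht_item *ht_find_key(const struct open_hash *ht, unsigned long key)
    {
        unsigned long hashed = (key * 2654435761UL) & ((1UL << ht->order) - 1);

        for (struct ht_item *item = ht->table[hashed]; item; item = item->next)
            if (item->key == key)
                return item;
        return NULL;
    }
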
/drivers/video/adf/
adf_memblock.c
30 struct sg_table *table; in adf_memblock_map() local
33 table = kzalloc(sizeof(*table), GFP_KERNEL); in adf_memblock_map()
34 if (!table) in adf_memblock_map()
37 ret = sg_alloc_table(table, 1, GFP_KERNEL); in adf_memblock_map()
41 sg_set_page(table->sgl, page, attach->dmabuf->size, 0); in adf_memblock_map()
43 nents = dma_map_sg(attach->dev, table->sgl, 1, direction); in adf_memblock_map()
49 return table; in adf_memblock_map()
52 sg_free_table(table); in adf_memblock_map()
54 kfree(table); in adf_memblock_map()
59 struct sg_table *table, enum dma_data_direction direction) in adf_memblock_unmap() argument
[all …]
