/drivers/net/ethernet/freescale/ |
D | gianfar_ethtool.c |
    829  static void gfar_set_mask(u32 mask, struct filer_table *tab)    in gfar_set_mask() argument
    831  tab->fe[tab->index].ctrl = RQFCR_AND | RQFCR_PID_MASK | RQFCR_CMP_EXACT;    in gfar_set_mask()
    832  tab->fe[tab->index].prop = mask;    in gfar_set_mask()
    833  tab->index++;    in gfar_set_mask()
    837  static void gfar_set_parse_bits(u32 value, u32 mask, struct filer_table *tab)    in gfar_set_parse_bits() argument
    839  gfar_set_mask(mask, tab);    in gfar_set_parse_bits()
    840  tab->fe[tab->index].ctrl = RQFCR_CMP_EXACT | RQFCR_PID_PARSE |    in gfar_set_parse_bits()
    842  tab->fe[tab->index].prop = value;    in gfar_set_parse_bits()
    843  tab->index++;    in gfar_set_parse_bits()
    847  struct filer_table *tab)    in gfar_set_general_attribute() argument
    [all …]
|
/drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/ |
D | phy_lcn.c |
    925  struct phytbl_info tab;    in wlc_lcnphy_common_read_table() local
    926  tab.tbl_id = tbl_id;    in wlc_lcnphy_common_read_table()
    927  tab.tbl_ptr = tbl_ptr;    in wlc_lcnphy_common_read_table()
    928  tab.tbl_len = tbl_len;    in wlc_lcnphy_common_read_table()
    929  tab.tbl_width = tbl_width;    in wlc_lcnphy_common_read_table()
    930  tab.tbl_offset = tbl_offset;    in wlc_lcnphy_common_read_table()
    931  wlc_lcnphy_read_table(pi, &tab);    in wlc_lcnphy_common_read_table()
    940  struct phytbl_info tab;    in wlc_lcnphy_common_write_table() local
    941  tab.tbl_id = tbl_id;    in wlc_lcnphy_common_write_table()
    942  tab.tbl_ptr = tbl_ptr;    in wlc_lcnphy_common_write_table()
    [all …]
|
/drivers/net/ethernet/chelsio/cxgb4/ |
D | sched.c |
    50   e = &s->tab[p->u.params.class];    in t4_sched_class_fw_cmd()
    129  end = &s->tab[s->sched_size];    in t4_sched_entry_lookup()
    130  for (e = &s->tab[0]; e != end; ++e) {    in t4_sched_entry_lookup()
    182  return qe ? &pi->sched_tbl->tab[qe->param.class] : NULL;    in cxgb4_sched_queue_lookup()
    206  e = &pi->sched_tbl->tab[qe->param.class];    in t4_sched_queue_unbind()
    244  e = &s->tab[qe->param.class];    in t4_sched_queue_bind()
    277  e = &pi->sched_tbl->tab[fe->param.class];    in t4_sched_flowc_unbind()
    309  e = &s->tab[fe->param.class];    in t4_sched_flowc_bind()
    488  end = &s->tab[s->sched_size];    in t4_sched_class_lookup()
    489  for (e = &s->tab[0]; e != end; ++e) {    in t4_sched_class_lookup()
    [all …]
|
/drivers/hid/ |
D | hid-debug.c |
    520  static void tab(int n, struct seq_file *f) {    in tab() function
    528  tab(n, f);    in hid_dump_field()
    533  tab(n, f);    in hid_dump_field()
    538  tab(n, f);    in hid_dump_field()
    542  tab(n, f); seq_printf(f, "Usage(%d)\n", field->maxusage);    in hid_dump_field()
    544  tab(n+2, f); hid_resolv_usage(field->usage[j].hid, f); seq_printf(f, "\n");    in hid_dump_field()
    547  tab(n, f); seq_printf(f, "Logical Minimum(%d)\n", field->logical_minimum);    in hid_dump_field()
    548  tab(n, f); seq_printf(f, "Logical Maximum(%d)\n", field->logical_maximum);    in hid_dump_field()
    551  tab(n, f); seq_printf(f, "Physical Minimum(%d)\n", field->physical_minimum);    in hid_dump_field()
    552  tab(n, f); seq_printf(f, "Physical Maximum(%d)\n", field->physical_maximum);    in hid_dump_field()
    [all …]
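The tab() helper that hid_dump_field() calls before each seq_printf() line is simply an indentation primitive for the seq_file dump. A minimal sketch of that pattern, with an assumed body (the exact implementation in hid-debug.c may differ):

#include <linux/seq_file.h>

/* Emit n leading spaces so nested report fields line up in the dump. */
static void tab(int n, struct seq_file *f)
{
	seq_printf(f, "%*s", n, "");
}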
|
/drivers/scsi/aic7xxx/aicasm/ |
D | Makefile |
    70  mv $(<:.y=).tab.c $(OUTDIR)/$(<:.y=.c)
    71  mv $(<:.y=).tab.h $(OUTDIR)/$(<:.y=.h)
    79  mv $(<:.y=).tab.c $(OUTDIR)/$(<:.y=.c)
    80  mv $(<:.y=).tab.h $(OUTDIR)/$(<:.y=.h)
|
/drivers/gpu/drm/amd/pm/inc/ |
D | amdgpu_smu.h |
    1323  #define TAB_MAP(tab) \    argument
    1324  [SMU_TABLE_##tab] = {1, TABLE_##tab}
    1326  #define TAB_MAP_VALID(tab) \    argument
    1327  [SMU_TABLE_##tab] = {1, TABLE_##tab}
    1329  #define TAB_MAP_INVALID(tab) \    argument
    1330  [SMU_TABLE_##tab] = {0, TABLE_##tab}
    1332  #define PWR_MAP(tab) \    argument
    1333  [SMU_POWER_SOURCE_##tab] = {1, POWER_SOURCE_##tab}
|
/drivers/media/dvb-frontends/ |
D | cxd2820r_c.c |
    20   struct reg_val_mask tab[] = {    in cxd2820r_set_frontend_c() local
    48   ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_set_frontend_c()
    301  static const struct reg_val_mask tab[] = {    in cxd2820r_sleep_c() local
    313  ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_sleep_c()
|
D | cxd2820r_t.c |
    30   struct reg_val_mask tab[] = {    in cxd2820r_set_frontend_t() local
    73   ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_set_frontend_t()
    395  static struct reg_val_mask tab[] = {    in cxd2820r_sleep_t() local
    407  ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_sleep_t()
|
D | cxd2820r_t2.c |
    26   struct reg_val_mask tab[] = {    in cxd2820r_set_frontend_t2() local
    91   ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_set_frontend_t2()
    389  static const struct reg_val_mask tab[] = {    in cxd2820r_sleep_t2() local
    400  ret = cxd2820r_wr_reg_val_mask_tab(priv, tab, ARRAY_SIZE(tab));    in cxd2820r_sleep_t2()
|
D | af9013.c |
    836  const struct af9013_reg_mask_val *tab;    in af9013_init() local
    894  tab = demod_init_tab;    in af9013_init()
    896  ret = regmap_update_bits(state->regmap, tab[i].reg, tab[i].mask,    in af9013_init()
    897  tab[i].val);    in af9013_init()
    907  tab = tuner_init_tab_mxl5003d;    in af9013_init()
    913  tab = tuner_init_tab_mxl5005;    in af9013_init()
    917  tab = tuner_init_tab_env77h11d5;    in af9013_init()
    921  tab = tuner_init_tab_mt2060;    in af9013_init()
    925  tab = tuner_init_tab_mc44s803;    in af9013_init()
    930  tab = tuner_init_tab_qt1010;    in af9013_init()
    [all …]
|
D | m88rs2000.c |
    373  struct inittab *tab)    in m88rs2000_tab_set() argument
    377  if (tab == NULL)    in m88rs2000_tab_set()
    381  switch (tab[i].cmd) {    in m88rs2000_tab_set()
    383  ret = m88rs2000_writereg(state, tab[i].reg,    in m88rs2000_tab_set()
    384  tab[i].val);    in m88rs2000_tab_set()
    387  if (tab[i].reg > 0)    in m88rs2000_tab_set()
    388  mdelay(tab[i].reg);    in m88rs2000_tab_set()
    391  if (tab[i].reg == 0xaa && tab[i].val == 0xff)    in m88rs2000_tab_set()
|
D | af9033.c |
    33   const struct reg_val *tab, int tab_len)    in af9033_wr_reg_val_tab() argument
    48   buf[j] = tab[i].val;    in af9033_wr_reg_val_tab()
    50   if (i == tab_len - 1 || tab[i].reg != tab[i + 1].reg - 1) {    in af9033_wr_reg_val_tab()
    51   ret = regmap_bulk_write(dev->regmap, tab[i].reg - j,    in af9033_wr_reg_val_tab()
    77   struct reg_val_mask tab[] = {    in af9033_init() local
    144  for (i = 0; i < ARRAY_SIZE(tab); i++) {    in af9033_init()
    145  ret = regmap_update_bits(dev->regmap, tab[i].reg, tab[i].mask,    in af9033_init()
    146  tab[i].val);    in af9033_init()
|
D | stb0899_drv.c |
    922  static int stb0899_table_lookup(const struct stb0899_tab *tab, int max, int val)    in stb0899_table_lookup() argument
    927  if (val < tab[min].read)    in stb0899_table_lookup()
    928  res = tab[min].real;    in stb0899_table_lookup()
    929  else if (val >= tab[max].read)    in stb0899_table_lookup()
    930  res = tab[max].real;    in stb0899_table_lookup()
    934  if (val >= tab[min].read && val < tab[med].read)    in stb0899_table_lookup()
    939  res = ((val - tab[min].read) *    in stb0899_table_lookup()
    940  (tab[max].real - tab[min].real) /    in stb0899_table_lookup()
    941  (tab[max].read - tab[min].read)) +    in stb0899_table_lookup()
    942  tab[min].real;    in stb0899_table_lookup()
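The stb0899_table_lookup() hits show a clamped lookup in a table of (read, real) calibration pairs with linear interpolation between the two bracketing entries. A minimal self-contained sketch of that pattern, with illustrative names and an assumed binary-search narrowing step (the excerpt does not show the driver's full loop):

struct lut_entry {
	int read;	/* raw value read from the chip */
	int real;	/* calibrated value it corresponds to */
};

static int lut_lookup(const struct lut_entry *tab, int max, int val)
{
	int min = 0, med;

	/* Clamp values outside the table range. */
	if (val < tab[min].read)
		return tab[min].real;
	if (val >= tab[max].read)
		return tab[max].real;

	/* Narrow [min, max] until the two entries bracket val. */
	while (max - min > 1) {
		med = (min + max) / 2;
		if (val >= tab[min].read && val < tab[med].read)
			max = med;
		else
			min = med;
	}

	/* Linear interpolation between the bracketing entries. */
	return ((val - tab[min].read) *
		(tab[max].real - tab[min].real) /
		(tab[max].read - tab[min].read)) +
	       tab[min].real;
}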
|
D | tda10071.c |
    761   struct tda10071_reg_val_mask tab[] = {    in tda10071_init() local
    822   for (i = 0; i < ARRAY_SIZE(tab); i++) {    in tda10071_init()
    823   ret = tda10071_wr_reg_mask(dev, tab[i].reg,    in tda10071_init()
    824   tab[i].val, tab[i].mask);    in tda10071_init()
    1044  struct tda10071_reg_val_mask tab[] = {    in tda10071_sleep() local
    1070  for (i = 0; i < ARRAY_SIZE(tab); i++) {    in tda10071_sleep()
    1071  ret = tda10071_wr_reg_mask(dev, tab[i].reg, tab[i].val,    in tda10071_sleep()
    1072  tab[i].mask);    in tda10071_sleep()
|
D | tda10023.c |
    98   static void tda10023_writetab(struct tda10023_state* state, u8* tab)    in tda10023_writetab() argument
    102  r=*tab++;    in tda10023_writetab()
    103  m=*tab++;    in tda10023_writetab()
    104  v=*tab++;    in tda10023_writetab()
|
D | rtl2830.c |
    53  struct rtl2830_reg_val_mask tab[] = {    in rtl2830_init() local
    92  for (i = 0; i < ARRAY_SIZE(tab); i++) {    in rtl2830_init()
    93  ret = rtl2830_update_bits(client, tab[i].reg, tab[i].mask,    in rtl2830_init()
    94  tab[i].val);    in rtl2830_init()
|
D | cxd2820r_core.c |
    13  const struct reg_val_mask *tab, int tab_len)    in cxd2820r_wr_reg_val_mask_tab() argument
    23  if ((tab[i].reg >> 16) & 0x1)    in cxd2820r_wr_reg_val_mask_tab()
    28  reg = (tab[i].reg >> 0) & 0xffff;    in cxd2820r_wr_reg_val_mask_tab()
    29  val = tab[i].val;    in cxd2820r_wr_reg_val_mask_tab()
    30  mask = tab[i].mask;    in cxd2820r_wr_reg_val_mask_tab()
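cxd2820r_wr_reg_val_mask_tab() and the similar helpers in af9033, rtl2830 and tda10071 above all follow the same table-driven init/sleep pattern: walk a static array of {reg, val, mask} entries and apply each one as a masked register update. A minimal sketch of that pattern on top of regmap, with illustrative struct and function names (cxd2820r itself dispatches to its own register accessors based on a bank flag in the upper bits of .reg rather than calling regmap directly):

#include <linux/regmap.h>
#include <linux/types.h>

struct reg_val_mask {
	u32 reg;
	u8 val;
	u8 mask;
};

static int wr_reg_val_mask_tab(struct regmap *map,
			       const struct reg_val_mask *tab, int tab_len)
{
	int i, ret;

	for (i = 0; i < tab_len; i++) {
		/* Only the bits set in .mask are changed in the register. */
		ret = regmap_update_bits(map, tab[i].reg, tab[i].mask,
					 tab[i].val);
		if (ret)
			return ret;
	}

	return 0;
}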
|
/drivers/staging/media/hantro/ |
D | hantro_jpeg.c |
    270  const unsigned char *tab, int scale)    in jpeg_scale_quant_table() argument
    275  file_q_tab[i] = jpeg_scale_qp(tab[zigzag[i]], scale);    in jpeg_scale_quant_table()
    276  reordered_q_tab[i] = jpeg_scale_qp(tab[hw_reorder[i]], scale);    in jpeg_scale_quant_table()
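jpeg_scale_quant_table() above produces two scaled copies of a standard quantization table: one in JPEG zigzag order for the stream header (file_q_tab) and one in the codec's hardware order (reordered_q_tab). A sketch of the per-entry scaling step, assuming the common libjpeg-style percentage scale and clamp (the driver's actual jpeg_scale_qp() formula is not visible in the excerpt):

/* Scale one coefficient by a quality-derived percentage and clamp it
 * to the 1..255 range allowed for baseline JPEG tables (assumed). */
static unsigned char jpeg_scale_qp(unsigned char qp, int scale)
{
	int temp = (qp * scale + 50) / 100;	/* round to nearest */

	if (temp < 1)
		temp = 1;
	if (temp > 255)
		temp = 255;
	return temp;
}

/* Build the header copy in zigzag order; the hardware copy is built
 * the same way using the codec's own reorder map. */
static void scale_quant_table(unsigned char *file_q_tab,
			      const unsigned char *tab,
			      const unsigned char *zigzag, int scale)
{
	int i;

	for (i = 0; i < 64; i++)
		file_q_tab[i] = jpeg_scale_qp(tab[zigzag[i]], scale);
}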
|
/drivers/media/usb/tm6000/ |
D | tm6000-core.c |
    557  struct reg_init *tab;    in tm6000_init() local
    580  tab = tm6010_init_tab;    in tm6000_init()
    583  tab = tm6000_init_tab;    in tm6000_init()
    589  rc = tm6000_set_reg(dev, tab[i].req, tab[i].reg, tab[i].val);    in tm6000_init()
    593  tab[i].req, tab[i].reg, tab[i].val);    in tm6000_init()
|
/drivers/video/fbdev/core/ |
D | cfbimgblt.c |
    224  const u32 *tab = NULL;    in fast_imageblit() local
    229  tab = fb_be_math(p) ? cfb_tab8_be : cfb_tab8_le;    in fast_imageblit()
    232  tab = fb_be_math(p) ? cfb_tab16_be : cfb_tab16_le;    in fast_imageblit()
    236  tab = cfb_tab32;    in fast_imageblit()
    256  end_mask = tab[(*src >> shift) & bit_mask];    in fast_imageblit()
|
D | sysimgblt.c |
    194  const u32 *tab;    in fast_imageblit() local
    201  tab = fb_be_math(p) ? cfb_tab8_be : cfb_tab8_le;    in fast_imageblit()
    205  tab = fb_be_math(p) ? cfb_tab16_be : cfb_tab16_le;    in fast_imageblit()
    209  tab = cfb_tab32;    in fast_imageblit()
    228  colortab[i] = (tab[i] & eorx) ^ bgx;    in fast_imageblit()
|
/drivers/net/wireless/mediatek/mt76/mt76x0/ |
D | init.c |
    83  #define RANDOM_WRITE(dev, tab) \    argument
    85  tab, ARRAY_SIZE(tab))
|
/drivers/video/fbdev/aty/ |
D | mach64_gx.c |
    90   } tab[3] = {    in aty_set_dac_514() local
    113  aty_st_514(0x04, tab[i].pixel_dly, par); /* Horizontal Sync Control */    in aty_set_dac_514()
    116  aty_st_514(0x71, tab[i].misc2_cntl, par); /* Misc Control 2 */    in aty_set_dac_514()
    117  aty_st_514(0x0a, tab[i].pixel_rep, par); /* Pixel Format */    in aty_set_dac_514()
    118  aty_st_514(tab[i].pixel_cntl_index, tab[i].pixel_cntl_v1, par);    in aty_set_dac_514()
|
/drivers/gpu/drm/amd/pm/powerplay/hwmgr/ |
D | smu7_hwmgr.c |
    2600  struct phm_clock_voltage_dependency_table *tab)    in smu7_patch_vddc() argument
    2605  if (tab)    in smu7_patch_vddc()
    2606  for (i = 0; i < tab->count; i++)    in smu7_patch_vddc()
    2607  smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,    in smu7_patch_vddc()
    2614  struct phm_clock_voltage_dependency_table *tab)    in smu7_patch_vddci() argument
    2619  if (tab)    in smu7_patch_vddci()
    2620  for (i = 0; i < tab->count; i++)    in smu7_patch_vddci()
    2621  smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,    in smu7_patch_vddci()
    2628  struct phm_vce_clock_voltage_dependency_table *tab)    in smu7_patch_vce_vddc() argument
    2633  if (tab)    in smu7_patch_vce_vddc()
    [all …]
|
/drivers/gpu/drm/amd/pm/powerplay/smumgr/ |
D | iceland_smumgr.c |
    531  pp_atomctrl_voltage_table_entry *tab, uint16_t *hi,    in iceland_get_std_voltage_value_sidd() argument
    536  *hi = tab->value * VOLTAGE_SCALE;    in iceland_get_std_voltage_value_sidd()
    537  *lo = tab->value * VOLTAGE_SCALE;    in iceland_get_std_voltage_value_sidd()
    555  if (tab->value == hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {    in iceland_get_std_voltage_value_sidd()
    575  if (tab->value <= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {    in iceland_get_std_voltage_value_sidd()
    597  pp_atomctrl_voltage_table_entry *tab,    in iceland_populate_smc_voltage_table() argument
    602  result = iceland_get_std_voltage_value_sidd(hwmgr, tab,    in iceland_populate_smc_voltage_table()
    606  smc_voltage_tab->StdVoltageHiSidd = tab->value * VOLTAGE_SCALE;    in iceland_populate_smc_voltage_table()
    607  smc_voltage_tab->StdVoltageLoSidd = tab->value * VOLTAGE_SCALE;    in iceland_populate_smc_voltage_table()
    610  smc_voltage_tab->Voltage = PP_HOST_TO_SMC_US(tab->value * VOLTAGE_SCALE);    in iceland_populate_smc_voltage_table()
    [all …]
|