/drivers/net/phy/ |
D | phylink.c |
    127  static int phylink_validate(struct phylink *pl, unsigned long *supported,    in phylink_validate() argument
    130  pl->ops->validate(pl->netdev, supported, state);    in phylink_validate()
    135  static int phylink_parse_fixedlink(struct phylink *pl, struct device_node *np)    in phylink_parse_fixedlink() argument
    148  pl->link_config.speed = speed;    in phylink_parse_fixedlink()
    149  pl->link_config.duplex = DUPLEX_HALF;    in phylink_parse_fixedlink()
    152  pl->link_config.duplex = DUPLEX_FULL;    in phylink_parse_fixedlink()
    157  pl->link_config.pause |= MLO_PAUSE_SYM;    in phylink_parse_fixedlink()
    159  pl->link_config.pause |= MLO_PAUSE_ASYM;    in phylink_parse_fixedlink()
    167  pl->link_gpio = desc;    in phylink_parse_fixedlink()
    178  netdev_err(pl->netdev, "broken fixed-link?\n");    in phylink_parse_fixedlink()
    [all …]
|
/drivers/staging/lustre/lustre/ldlm/ |
D | ldlm_pool.c |
    211  static void ldlm_pool_recalc_stats(struct ldlm_pool *pl)    in ldlm_pool_recalc_stats() argument
    213  int grant_plan = pl->pl_grant_plan;    in ldlm_pool_recalc_stats()
    214  __u64 slv = pl->pl_server_lock_volume;    in ldlm_pool_recalc_stats()
    215  int granted = atomic_read(&pl->pl_granted);    in ldlm_pool_recalc_stats()
    216  int grant_rate = atomic_read(&pl->pl_grant_rate);    in ldlm_pool_recalc_stats()
    217  int cancel_rate = atomic_read(&pl->pl_cancel_rate);    in ldlm_pool_recalc_stats()
    219  lprocfs_counter_add(pl->pl_stats, LDLM_POOL_SLV_STAT,    in ldlm_pool_recalc_stats()
    221  lprocfs_counter_add(pl->pl_stats, LDLM_POOL_GRANTED_STAT,    in ldlm_pool_recalc_stats()
    223  lprocfs_counter_add(pl->pl_stats, LDLM_POOL_GRANT_RATE_STAT,    in ldlm_pool_recalc_stats()
    225  lprocfs_counter_add(pl->pl_stats, LDLM_POOL_GRANT_PLAN_STAT,    in ldlm_pool_recalc_stats()
    [all …]
|
D | ldlm_internal.h |
    187  __u64 ldlm_pool_get_slv(struct ldlm_pool *pl);
    188  void ldlm_pool_set_clv(struct ldlm_pool *pl, __u64 clv);
    189  __u32 ldlm_pool_get_lvf(struct ldlm_pool *pl);
    229  struct ldlm_pool *pl = container_of(kobj, struct ldlm_pool, \
    233  spin_lock(&pl->pl_lock); \
    234  tmp = pl->pl_##var; \
    235  spin_unlock(&pl->pl_lock); \
    247  struct ldlm_pool *pl = container_of(kobj, struct ldlm_pool, \
    257  spin_lock(&pl->pl_lock); \
    258  LDLM_POOL_SYSFS_SET_##type(pl->pl_##var, tmp); \
    [all …]
|
/drivers/staging/irda/net/ |
D | parameters.c |
    85   p.pl = 0;    in irda_insert_no_value()
    91   irda_param_pack(buf, "bb", p.pi, p.pl);    in irda_insert_no_value()
    112  irda_param_unpack(buf, "bb", &p.pi, &p.pl);    in irda_extract_no_value()
    134  p.pl = type & PV_MASK; /* The integer type codes the length as well */    in irda_insert_integer()
    147  if (p.pl == 0) {    in irda_insert_integer()
    150  p.pl = 1;    in irda_insert_integer()
    153  p.pl = 2;    in irda_insert_integer()
    156  p.pl = 4; /* Default length */    in irda_insert_integer()
    160  if (len < (2+p.pl)) {    in irda_insert_integer()
    166  p.pi, p.pl, p.pv.i);    in irda_insert_integer()
    [all …]
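The parameters.c excerpt above inserts IrDA parameters as PI/PL/PV triplets: the value length (pl) is derived from the type code and the write is refused if the remaining buffer cannot hold the 2-byte header plus the value. The following standalone C sketch shows that general type-length-value encoding pattern; the struct and function names are illustrative, not the IrDA API.

#include <stddef.h>
#include <stdint.h>

/* Illustrative TLV triplet: identifier, length, value (up to 4 bytes). */
struct tlv_param {
    uint8_t  pi;   /* parameter identifier */
    uint8_t  pl;   /* parameter length in bytes: 1, 2 or 4 */
    uint32_t pv;   /* parameter value */
};

/*
 * Append one parameter to buf.  Returns the number of bytes written,
 * or -1 if the remaining space cannot hold the 2-byte header plus value.
 */
static int tlv_insert(uint8_t *buf, size_t len, const struct tlv_param *p)
{
    size_t i;

    if (len < (size_t)(2 + p->pl))
        return -1;

    buf[0] = p->pi;
    buf[1] = p->pl;
    /* Store the value most-significant byte first, as link-layer protocols commonly do. */
    for (i = 0; i < p->pl; i++)
        buf[2 + i] = (uint8_t)(p->pv >> (8 * (p->pl - 1 - i)));

    return 2 + p->pl;
}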
|
/drivers/nvdimm/ |
D | core.c |
    452  struct nd_poison *pl;    in badblocks_populate() local
    457  list_for_each_entry(pl, poison_list, list) {    in badblocks_populate()
    458  u64 pl_end = pl->start + pl->length - 1;    in badblocks_populate()
    463  if (pl->start > res->end)    in badblocks_populate()
    466  if (pl->start >= res->start) {    in badblocks_populate()
    467  u64 start = pl->start;    in badblocks_populate()
    471  len = pl->length;    in badblocks_populate()
    474  - pl->start;    in badblocks_populate()
    479  if (pl->start < res->start) {    in badblocks_populate()
    483  len = pl->start + pl->length - res->start;    in badblocks_populate()
    [all …]
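The badblocks_populate() excerpt above clips each poison record against the resource window before recording it as bad blocks. A minimal sketch of that interval-intersection arithmetic, using plain integers and made-up names rather than the nvdimm structures:

#include <stdint.h>

/*
 * Clamp the range [start, start + length - 1] against [win_start, win_end].
 * Returns the length of the overlap (0 if the ranges do not intersect)
 * and stores the clipped start in *out_start.
 */
static uint64_t clip_range(uint64_t start, uint64_t length,
                           uint64_t win_start, uint64_t win_end,
                           uint64_t *out_start)
{
    uint64_t end = start + length - 1;   /* inclusive end, as in the excerpt */

    if (length == 0 || start > win_end || end < win_start)
        return 0;

    if (start < win_start)
        start = win_start;
    if (end > win_end)
        end = win_end;

    *out_start = start;
    return end - start + 1;
}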
|
/drivers/md/ |
D | dm-kcopyd.c |
    197  struct page_list *pl;    in alloc_pl() local
    199  pl = kmalloc(sizeof(*pl), gfp);    in alloc_pl()
    200  if (!pl)    in alloc_pl()
    203  pl->page = alloc_page(gfp);    in alloc_pl()
    204  if (!pl->page) {    in alloc_pl()
    205  kfree(pl);    in alloc_pl()
    209  return pl;    in alloc_pl()
    212  static void free_pl(struct page_list *pl)    in free_pl() argument
    214  __free_page(pl->page);    in free_pl()
    215  kfree(pl);    in free_pl()
    [all …]
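alloc_pl()/free_pl() above follow the common two-stage allocation pattern: allocate the wrapper node, then the page it owns, and unwind the wrapper if the second step fails. A generic userspace sketch of the same pattern, with malloc standing in for kmalloc/alloc_page and illustrative names:

#include <stdlib.h>

struct page_node {
    void *page;               /* the buffer this node owns */
    struct page_node *next;   /* simple singly linked chain */
};

static struct page_node *alloc_node(size_t page_size)
{
    struct page_node *pn = malloc(sizeof(*pn));

    if (!pn)
        return NULL;

    pn->page = malloc(page_size);
    if (!pn->page) {
        free(pn);             /* unwind the first allocation on failure */
        return NULL;
    }
    pn->next = NULL;
    return pn;
}

static void free_node(struct page_node *pn)
{
    free(pn->page);
    free(pn);
}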
|
D | dm-io.c |
    183  struct page_list *pl = (struct page_list *) dp->context_ptr;    in list_get_page() local
    185  *p = pl->page;    in list_get_page()
    192  struct page_list *pl = (struct page_list *) dp->context_ptr;    in list_next_page() local
    193  dp->context_ptr = pl->next;    in list_next_page()
    197  static void list_dp_init(struct dpages *dp, struct page_list *pl, unsigned offset)    in list_dp_init() argument
    202  dp->context_ptr = pl;    in list_dp_init()
    502  list_dp_init(dp, io_req->mem.ptr.pl, io_req->mem.offset);    in dp_init()
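dm-io's dpages abstraction above walks a page_list through callbacks: one returns the current page, the other advances context_ptr to the next node. A compact sketch of that callback-driven iterator with illustrative names; the per-page offset handling of the real code is omitted:

struct page_node {
    void *page;
    struct page_node *next;
};

struct dpages_iter {
    void *(*get_page)(struct dpages_iter *dp);
    void  (*next_page)(struct dpages_iter *dp);
    void  *context_ptr;       /* current position, here a struct page_node * */
};

static void *list_get_page(struct dpages_iter *dp)
{
    struct page_node *pn = dp->context_ptr;

    return pn->page;
}

static void list_next_page(struct dpages_iter *dp)
{
    struct page_node *pn = dp->context_ptr;

    dp->context_ptr = pn->next;
}

static void list_iter_init(struct dpages_iter *dp, struct page_node *head)
{
    dp->get_page    = list_get_page;
    dp->next_page   = list_next_page;
    dp->context_ptr = head;
}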
|
/drivers/gpu/drm/radeon/ |
D | rv770_dpm.c |
    228  struct rv7xx_pl *pl)    in rv770_get_seq_value() argument
    230  return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?    in rv770_get_seq_value()
    615  struct rv7xx_pl *pl,    in rv770_convert_power_level_to_smc() argument
    623  ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;    in rv770_convert_power_level_to_smc()
    624  level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;    in rv770_convert_power_level_to_smc()
    625  level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;    in rv770_convert_power_level_to_smc()
    629  ret = rv740_populate_sclk_value(rdev, pl->sclk,    in rv770_convert_power_level_to_smc()
    632  ret = rv730_populate_sclk_value(rdev, pl->sclk,    in rv770_convert_power_level_to_smc()
    635  ret = rv770_populate_sclk_value(rdev, pl->sclk,    in rv770_convert_power_level_to_smc()
    642  if (pl->mclk <= pi->mclk_strobe_mode_threshold)    in rv770_convert_power_level_to_smc()
    [all …]
|
D | rv6xx_dpm.c |
    1824  struct rv6xx_pl *pl;    in rv6xx_parse_pplib_clock_info() local
    1828  pl = &ps->low;    in rv6xx_parse_pplib_clock_info()
    1831  pl = &ps->medium;    in rv6xx_parse_pplib_clock_info()
    1835  pl = &ps->high;    in rv6xx_parse_pplib_clock_info()
    1844  pl->mclk = mclk;    in rv6xx_parse_pplib_clock_info()
    1845  pl->sclk = sclk;    in rv6xx_parse_pplib_clock_info()
    1846  pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);    in rv6xx_parse_pplib_clock_info()
    1847  pl->flags = le32_to_cpu(clock_info->r600.ulFlags);    in rv6xx_parse_pplib_clock_info()
    1850  if (pl->vddc == 0xff01) {    in rv6xx_parse_pplib_clock_info()
    1852  pl->vddc = vddc;    in rv6xx_parse_pplib_clock_info()
    [all …]
|
D | btc_dpm.c |
    1270  struct rv7xx_pl *pl)    in btc_adjust_clock_combinations() argument
    1273  if ((pl->mclk == 0) || (pl->sclk == 0))    in btc_adjust_clock_combinations()
    1276  if (pl->mclk == pl->sclk)    in btc_adjust_clock_combinations()
    1279  if (pl->mclk > pl->sclk) {    in btc_adjust_clock_combinations()
    1280  if (((pl->mclk + (pl->sclk - 1)) / pl->sclk) > rdev->pm.dpm.dyn_state.mclk_sclk_ratio)    in btc_adjust_clock_combinations()
    1281  pl->sclk = btc_get_valid_sclk(rdev,    in btc_adjust_clock_combinations()
    1283  (pl->mclk +    in btc_adjust_clock_combinations()
    1287  if ((pl->sclk - pl->mclk) > rdev->pm.dpm.dyn_state.sclk_mclk_delta)    in btc_adjust_clock_combinations()
    1288  pl->mclk = btc_get_valid_mclk(rdev,    in btc_adjust_clock_combinations()
    1290  pl->sclk -    in btc_adjust_clock_combinations()
    [all …]
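btc_adjust_clock_combinations() above keeps the memory and engine clocks within a maximum ratio and a maximum delta; the expression (mclk + (sclk - 1)) / sclk is the usual integer ceiling division. A small sketch of that clamping logic with plain integers; the driver's valid-clock table lookups are replaced by direct assignment, so this is an approximation of the idea, not the radeon code:

#include <stdint.h>

/* Integer ceiling of a / b for b > 0: adding b - 1 before dividing rounds up. */
static uint32_t div_round_up(uint32_t a, uint32_t b)
{
    return (a + (b - 1)) / b;
}

/* Keep mclk/sclk within max_ratio and sclk - mclk within max_delta. */
static void adjust_clocks(uint32_t *sclk, uint32_t *mclk,
                          uint32_t max_ratio, uint32_t max_delta)
{
    if (*sclk == 0 || *mclk == 0 || *sclk == *mclk)
        return;

    if (*mclk > *sclk) {
        if (div_round_up(*mclk, *sclk) > max_ratio)
            *sclk = div_round_up(*mclk, max_ratio);   /* raise sclk to restore the ratio */
    } else {
        if (*sclk - *mclk > max_delta)
            *mclk = *sclk - max_delta;                /* raise mclk to restore the delta */
    }
}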
|
D | ni_dpm.c |
    1614  struct rv7xx_pl *pl,    in ni_populate_memory_timing_parameters() argument
    1621  (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);    in ni_populate_memory_timing_parameters()
    1624  radeon_atom_set_engine_dram_timings(rdev, pl->sclk, pl->mclk);    in ni_populate_memory_timing_parameters()
    2308  struct rv7xx_pl *pl,    in ni_convert_power_level_to_smc() argument
    2320  ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;    in ni_convert_power_level_to_smc()
    2322  ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);    in ni_convert_power_level_to_smc()
    2328  (pl->mclk <= pi->mclk_stutter_mode_threshold) &&    in ni_convert_power_level_to_smc()
    2335  if (pl->mclk > pi->mclk_edc_enable_threshold)    in ni_convert_power_level_to_smc()
    2337  if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)    in ni_convert_power_level_to_smc()
    2340  level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);    in ni_convert_power_level_to_smc()
    [all …]
|
D | sumo_dpm.c |
    548   struct sumo_pl *pl, u32 index)    in sumo_program_power_level() argument
    556   pl->sclk, false, &dividers);    in sumo_program_power_level()
    562   sumo_set_vid(rdev, index, pl->vddc_index);    in sumo_program_power_level()
    564   if (pl->ss_divider_index == 0 || pl->ds_divider_index == 0) {    in sumo_program_power_level()
    568   sumo_set_ss_dividers(rdev, index, pl->ss_divider_index);    in sumo_program_power_level()
    569   sumo_set_ds_dividers(rdev, index, pl->ds_divider_index);    in sumo_program_power_level()
    575   sumo_set_allos_gnb_slow(rdev, index, pl->allow_gnb_slow);    in sumo_program_power_level()
    578   sumo_set_tdp_limit(rdev, index, pl->sclk_dpm_tdp_limit);    in sumo_program_power_level()
    1436  struct sumo_pl *pl = &ps->levels[index];    in sumo_parse_pplib_clock_info() local
    1441  pl->sclk = sclk;    in sumo_parse_pplib_clock_info()
    [all …]
|
D | trinity_dpm.c |
    714   struct trinity_pl *pl, u32 index)    in trinity_program_power_level() argument
    721   trinity_set_divider_value(rdev, index, pl->sclk);    in trinity_program_power_level()
    722   trinity_set_vid(rdev, index, pl->vddc_index);    in trinity_program_power_level()
    723   trinity_set_ss_dividers(rdev, index, pl->ss_divider_index);    in trinity_program_power_level()
    724   trinity_set_ds_dividers(rdev, index, pl->ds_divider_index);    in trinity_program_power_level()
    725   trinity_set_allos_gnb_slow(rdev, index, pl->allow_gnb_slow);    in trinity_program_power_level()
    726   trinity_set_force_nbp_state(rdev, index, pl->force_nbp_state);    in trinity_program_power_level()
    727   trinity_set_display_wm(rdev, index, pl->display_wm);    in trinity_program_power_level()
    728   trinity_set_vce_wm(rdev, index, pl->vce_wm);    in trinity_program_power_level()
    1712  struct trinity_pl *pl = &ps->levels[index];    in trinity_parse_pplib_clock_info() local
    [all …]
|
D | si_dpm.c |
    1754  struct rv7xx_pl *pl,
    4292  struct rv7xx_pl *pl,    in si_populate_memory_timing_parameters() argument
    4300  (u8)si_calculate_memory_refresh_rate(rdev, pl->sclk);    in si_populate_memory_timing_parameters()
    4303  pl->sclk,    in si_populate_memory_timing_parameters()
    4304  pl->mclk);    in si_populate_memory_timing_parameters()
    4637  ret = si_convert_power_level_to_smc(rdev, &ulv->pl,    in si_populate_ulv_state()
    4666  ret = si_populate_memory_timing_parameters(rdev, &ulv->pl,    in si_program_ulv_memory_timing_parameters()
    4757  if (ulv->supported && ulv->pl.vddc) {    in si_init_smc_table()
    4976  struct rv7xx_pl *pl,    in si_convert_power_level_to_smc() argument
    4991  level->gen2PCIE = (u8)pl->pcie_gen;    in si_convert_power_level_to_smc()
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/subdev/clk/ |
D | gk20a.c |
    36  static u32 pl_to_div(u32 pl)    in pl_to_div() argument
    38  if (pl >= ARRAY_SIZE(_pl_to_div))    in pl_to_div()
    41  return _pl_to_div[pl];    in pl_to_div()
    46  u32 pl;    in div_to_pl() local
    48  for (pl = 0; pl < ARRAY_SIZE(_pl_to_div) - 1; pl++) {    in div_to_pl()
    49  if (_pl_to_div[pl] >= div)    in div_to_pl()
    50  return pl;    in div_to_pl()
    73  pll->pl = (val >> GPCPLL_COEFF_P_SHIFT) & MASK(GPCPLL_COEFF_P_WIDTH);    in gk20a_pllg_read_mnp()
    84  val |= (pll->pl & MASK(GPCPLL_COEFF_P_WIDTH)) << GPCPLL_COEFF_P_SHIFT;    in gk20a_pllg_write_mnp()
    95  divider = pll->m * clk->pl_to_div(pll->pl);    in gk20a_pllg_calc_rate()
    [all …]
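gk20a.c above converts between the PLL post-divider field (pl) and an actual divider through a small lookup table, with a bounds check in one direction and a linear search for the inverse. A self-contained sketch of that table-plus-inverse-search pattern; the table contents and the out-of-range fallback here are illustrative, not the GK20A values:

#include <stdint.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Illustrative divider table indexed by the encoded field. */
static const uint32_t pl_to_div_table[] = { 1, 2, 3, 4, 5, 6, 8, 10, 12, 16 };

static uint32_t pl_to_div(uint32_t pl)
{
    if (pl >= ARRAY_SIZE(pl_to_div_table))
        return 1;                               /* out of range: fall back */
    return pl_to_div_table[pl];
}

/* Inverse mapping: smallest encoding whose divider covers the requested one. */
static uint32_t div_to_pl(uint32_t div)
{
    uint32_t pl;

    for (pl = 0; pl < ARRAY_SIZE(pl_to_div_table) - 1; pl++) {
        if (pl_to_div_table[pl] >= div)
            return pl;
    }
    return ARRAY_SIZE(pl_to_div_table) - 1;     /* clamp to the last entry */
}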
|
/drivers/target/iscsi/ |
D | iscsi_target_parameters.c |
    205  struct iscsi_param_list *pl;    in iscsi_create_default_params() local
    207  pl = kzalloc(sizeof(struct iscsi_param_list), GFP_KERNEL);    in iscsi_create_default_params()
    208  if (!pl) {    in iscsi_create_default_params()
    213  INIT_LIST_HEAD(&pl->param_list);    in iscsi_create_default_params()
    214  INIT_LIST_HEAD(&pl->extra_response_list);    in iscsi_create_default_params()
    227  param = iscsi_set_default_param(pl, AUTHMETHOD, INITIAL_AUTHMETHOD,    in iscsi_create_default_params()
    233  param = iscsi_set_default_param(pl, HEADERDIGEST, INITIAL_HEADERDIGEST,    in iscsi_create_default_params()
    239  param = iscsi_set_default_param(pl, DATADIGEST, INITIAL_DATADIGEST,    in iscsi_create_default_params()
    245  param = iscsi_set_default_param(pl, MAXCONNECTIONS,    in iscsi_create_default_params()
    252  param = iscsi_set_default_param(pl, SENDTARGETS, INITIAL_SENDTARGETS,    in iscsi_create_default_params()
    [all …]
|
/drivers/phy/ |
D | phy-core.c |
    70   struct phy_lookup *pl;    in phy_create_lookup() local
    75   pl = kzalloc(sizeof(*pl), GFP_KERNEL);    in phy_create_lookup()
    76   if (!pl)    in phy_create_lookup()
    79   pl->dev_id = dev_id;    in phy_create_lookup()
    80   pl->con_id = con_id;    in phy_create_lookup()
    81   pl->phy = phy;    in phy_create_lookup()
    84   list_add_tail(&pl->node, &phys);    in phy_create_lookup()
    102  struct phy_lookup *pl;    in phy_remove_lookup() local
    108  list_for_each_entry(pl, &phys, node)    in phy_remove_lookup()
    109  if (pl->phy == phy && !strcmp(pl->dev_id, dev_id) &&    in phy_remove_lookup()
    [all …]
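phy-core.c above registers PHY lookups by appending a small record to a global list under a lock and later matching on the phy pointer plus the dev_id/con_id strings. A userspace sketch of that registry pattern, using pthreads and a hand-rolled singly linked list; the names and locking primitives are illustrative and not the kernel API:

#include <pthread.h>
#include <stdlib.h>
#include <string.h>

struct phy;                                     /* opaque device handle */

struct phy_lookup {
    const char *dev_id;
    const char *con_id;
    struct phy *phy;
    struct phy_lookup *next;
};

static struct phy_lookup *lookups;
static pthread_mutex_t lookup_lock = PTHREAD_MUTEX_INITIALIZER;

static int phy_register_lookup(struct phy *phy, const char *con_id,
                               const char *dev_id)
{
    struct phy_lookup *pl = calloc(1, sizeof(*pl));

    if (!pl)
        return -1;

    pl->dev_id = dev_id;
    pl->con_id = con_id;
    pl->phy = phy;

    pthread_mutex_lock(&lookup_lock);
    pl->next = lookups;                         /* push onto the global registry */
    lookups = pl;
    pthread_mutex_unlock(&lookup_lock);
    return 0;
}

static void phy_unregister_lookup(struct phy *phy, const char *con_id,
                                  const char *dev_id)
{
    struct phy_lookup **pp, *pl;

    pthread_mutex_lock(&lookup_lock);
    for (pp = &lookups; (pl = *pp) != NULL; pp = &pl->next) {
        if (pl->phy == phy && !strcmp(pl->dev_id, dev_id) &&
            !strcmp(pl->con_id, con_id)) {
            *pp = pl->next;                     /* unlink the matching record */
            free(pl);
            break;
        }
    }
    pthread_mutex_unlock(&lookup_lock);
}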
|
/drivers/misc/cxl/ |
D | file.c |
    365  struct cxl_event_afu_driver_reserved *pl)    in afu_driver_event_copy() argument
    368  if (!pl) {    in afu_driver_event_copy()
    369  ctx->afu_driver_ops->event_delivered(ctx, pl, -EINVAL);    in afu_driver_event_copy()
    374  event->header.size += pl->data_size;    in afu_driver_event_copy()
    376  ctx->afu_driver_ops->event_delivered(ctx, pl, -EINVAL);    in afu_driver_event_copy()
    382  ctx->afu_driver_ops->event_delivered(ctx, pl, -EFAULT);    in afu_driver_event_copy()
    388  if (copy_to_user(buf, &pl->data, pl->data_size)) {    in afu_driver_event_copy()
    389  ctx->afu_driver_ops->event_delivered(ctx, pl, -EFAULT);    in afu_driver_event_copy()
    393  ctx->afu_driver_ops->event_delivered(ctx, pl, 0); /* Success */    in afu_driver_event_copy()
    401  struct cxl_event_afu_driver_reserved *pl = NULL;    in afu_read() local
    [all …]
|
/drivers/hwtracing/stm/ |
D | dummy_stm.c |
    30  u64 pl = 0;    in dummy_stm_packet() local
    33  pl = *(u64 *)payload;    in dummy_stm_packet()
    36  pl &= (1ull << (size * 8)) - 1;    in dummy_stm_packet()
    38  packet, size, pl);    in dummy_stm_packet()
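dummy_stm_packet() above loads the packet payload into a u64 and masks it down to the declared packet size. A short sketch of that byte-count masking; the explicit size < 8 guard is spelled out here because a 64-bit shift by 64 is undefined in C (the surrounding kernel lines that handle that case are elided from the excerpt):

#include <stdint.h>

/* Keep only the low 'size' bytes of a payload word (size <= 8). */
static uint64_t mask_payload(uint64_t pl, unsigned int size)
{
    if (size < 8)                     /* a shift by 64 bits would be undefined */
        pl &= (1ull << (size * 8)) - 1;

    return pl;
}

For example, mask_payload(0x1122334455667788ull, 2) yields 0x7788.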
|
/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_dmabuf.c |
    289  struct ttm_place pl;    in vmw_bo_pin_reserved() local
    306  pl.fpfn = 0;    in vmw_bo_pin_reserved()
    307  pl.lpfn = 0;    in vmw_bo_pin_reserved()
    308  pl.flags = TTM_PL_FLAG_VRAM | VMW_PL_FLAG_GMR | VMW_PL_FLAG_MOB    in vmw_bo_pin_reserved()
    311  pl.flags |= TTM_PL_FLAG_NO_EVICT;    in vmw_bo_pin_reserved()
    315  placement.placement = &pl;    in vmw_bo_pin_reserved()
|
/drivers/atm/ |
D | nicstar.c |
    2468  pool_levels pl;    in ns_ioctl() local
    2476  (pl.buftype, &((pool_levels __user *) arg)->buftype))    in ns_ioctl()
    2478  switch (pl.buftype) {    in ns_ioctl()
    2480  pl.count =    in ns_ioctl()
    2482  pl.level.min = card->sbnr.min;    in ns_ioctl()
    2483  pl.level.init = card->sbnr.init;    in ns_ioctl()
    2484  pl.level.max = card->sbnr.max;    in ns_ioctl()
    2488  pl.count =    in ns_ioctl()
    2490  pl.level.min = card->lbnr.min;    in ns_ioctl()
    2491  pl.level.init = card->lbnr.init;    in ns_ioctl()
    [all …]
|
/drivers/gpu/drm/amd/amdgpu/ |
D | si_dpm.c |
    1843  struct rv7xx_pl *pl,
    3286  struct rv7xx_pl *pl)    in btc_adjust_clock_combinations() argument
    3289  if ((pl->mclk == 0) || (pl->sclk == 0))    in btc_adjust_clock_combinations()
    3292  if (pl->mclk == pl->sclk)    in btc_adjust_clock_combinations()
    3295  if (pl->mclk > pl->sclk) {    in btc_adjust_clock_combinations()
    3296  if (((pl->mclk + (pl->sclk - 1)) / pl->sclk) > adev->pm.dpm.dyn_state.mclk_sclk_ratio)    in btc_adjust_clock_combinations()
    3297  pl->sclk = btc_get_valid_sclk(adev,    in btc_adjust_clock_combinations()
    3299  (pl->mclk +    in btc_adjust_clock_combinations()
    3303  if ((pl->sclk - pl->mclk) > adev->pm.dpm.dyn_state.sclk_mclk_delta)    in btc_adjust_clock_combinations()
    3304  pl->mclk = btc_get_valid_mclk(adev,    in btc_adjust_clock_combinations()
    [all …]
|
/drivers/s390/scsi/ |
D | zfcp_dbf.c |
    43   struct zfcp_dbf_pay *pl = &dbf->pay_buf;    in zfcp_dbf_pl_write() local
    47   memset(pl, 0, sizeof(*pl));    in zfcp_dbf_pl_write()
    48   pl->fsf_req_id = req_id;    in zfcp_dbf_pl_write()
    49   memcpy(pl->area, area, ZFCP_DBF_TAG_LEN);    in zfcp_dbf_pl_write()
    54   memcpy(pl->data, data + offset, rec_length);    in zfcp_dbf_pl_write()
    55   debug_event(dbf->pay, 1, pl, zfcp_dbf_plen(rec_length));    in zfcp_dbf_pl_write()
    58   pl->counter++;    in zfcp_dbf_pl_write()
    191  void **pl)    in zfcp_dbf_hba_def_err() argument
    202  if (!pl)    in zfcp_dbf_hba_def_err()
    214  while (payload->counter < scount && (char *)pl[payload->counter]) {    in zfcp_dbf_hba_def_err()
    [all …]
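zfcp_dbf_pl_write() above splits a payload across fixed-size trace records, copying one chunk per record and bumping a counter as it goes. A standalone sketch of that chunking loop; the emit callback stands in for debug_event() and the record size is illustrative:

#include <stddef.h>
#include <string.h>

#define REC_DATA_LEN 256                  /* illustrative per-record capacity */

struct pay_record {
    unsigned int counter;                 /* index of this chunk within the payload */
    char data[REC_DATA_LEN];
};

typedef void (*record_fn)(const struct pay_record *rec, size_t used);

/* Emit 'length' bytes of 'data' as a sequence of fixed-size records. */
static void write_payload(const char *data, size_t length, record_fn emit)
{
    struct pay_record rec;
    size_t offset = 0;

    memset(&rec, 0, sizeof(rec));
    rec.counter = 0;
    while (offset < length) {
        size_t chunk = length - offset;

        if (chunk > REC_DATA_LEN)
            chunk = REC_DATA_LEN;
        memcpy(rec.data, data + offset, chunk);
        emit(&rec, chunk);
        offset += chunk;
        rec.counter++;
    }
}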
|
/drivers/usb/serial/ |
D | Makefile-keyspan_pda_fw |
    15  %_fw.h: %.hex ezusb_convert.pl
    16  perl ezusb_convert.pl $* < $< > $@
|
/drivers/crypto/vmx/ |
D | Makefile |
    16  $(obj)/aesp8-ppc.S: $(src)/aesp8-ppc.pl FORCE
    19  $(obj)/ghashp8-ppc.S: $(src)/ghashp8-ppc.pl FORCE
|