
Searched refs:weight (Results 1 – 9 of 9) sorted by relevance

/kernel/events/
hw_breakpoint.c
164 fetch_this_slot(struct bp_busy_slots *slots, int weight) in fetch_this_slot() argument
166 slots->pinned += weight; in fetch_this_slot()
173 enum bp_type_idx type, int weight) in toggle_bp_task_slot() argument
179 new_idx = old_idx + weight; in toggle_bp_task_slot()
192 int weight) in toggle_bp_slot() argument
198 weight = -weight; in toggle_bp_slot()
202 get_bp_info(bp->cpu, type)->cpu_pinned += weight; in toggle_bp_slot()
208 toggle_bp_task_slot(bp, cpu, type, weight); in toggle_bp_slot()
281 int weight; in __reserve_bp_slot() local
294 weight = hw_breakpoint_weight(bp); in __reserve_bp_slot()
[all …]
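The hw_breakpoint.c hits above are the debug-register slot accounting: a breakpoint's weight is the number of slots it consumes, added to the pinned counters on enable and subtracted again on disable by negating the weight first (toggle_bp_slot()). A minimal user-space sketch of that enable/disable pattern, using made-up names (bp_slots, toggle_slot) rather than the kernel's structures:

    /* Illustrative sketch only; mirrors the negate-on-disable trick of toggle_bp_slot(). */
    #include <stdio.h>

    struct bp_slots {
        int cpu_pinned;     /* slots taken by CPU-pinned breakpoints */
        int task_pinned;    /* slots taken by task-pinned breakpoints */
    };

    static void toggle_slot(struct bp_slots *s, int weight, int enable, int cpu_pinned)
    {
        if (!enable)
            weight = -weight;           /* releasing subtracts the same weight */
        if (cpu_pinned)
            s->cpu_pinned += weight;
        else
            s->task_pinned += weight;
    }

    int main(void)
    {
        struct bp_slots s = { 0, 0 };

        toggle_slot(&s, 1, 1, 1);       /* enable a CPU-pinned breakpoint of weight 1 */
        toggle_slot(&s, 1, 0, 1);       /* disable it again */
        printf("cpu_pinned=%d task_pinned=%d\n", s.cpu_pinned, s.task_pinned);
        return 0;
    }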
core.c
1890 size += sizeof(data->weight); in __perf_event_header_size()
7026 perf_output_put(handle, data->weight); in perf_output_sample()
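The events/core.c hits show the optional sample weight being counted into the record size and later written to the output buffer. A toy sketch of that variable-length record pattern, assuming invented flag and struct names (SAMPLE_*, sample_size) rather than the real PERF_SAMPLE_* ABI:

    #include <stdio.h>
    #include <stdint.h>

    #define SAMPLE_IP      (1U << 0)
    #define SAMPLE_WEIGHT  (1U << 1)

    /* like __perf_event_header_size(): record size depends on which fields are enabled */
    static size_t sample_size(unsigned int sample_type)
    {
        size_t size = 0;

        if (sample_type & SAMPLE_IP)
            size += sizeof(uint64_t);
        if (sample_type & SAMPLE_WEIGHT)
            size += sizeof(uint64_t);   /* the "size += sizeof(data->weight)" step */
        return size;
    }

    int main(void)
    {
        printf("payload: %zu bytes with weight, %zu bytes without\n",
               sample_size(SAMPLE_IP | SAMPLE_WEIGHT),
               sample_size(SAMPLE_IP));
        return 0;
    }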
/kernel/
scftorture.c
190 static void scf_sel_add(unsigned long weight, int prim, bool wait) in scf_sel_add() argument
197 if (!weight || in scf_sel_add()
199 WARN_ON_ONCE(0 - 100000 * weight <= 100000 * scf_sel_totweight) || in scf_sel_add()
202 scf_sel_totweight += weight; in scf_sel_add()
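scf_sel_add() accumulates per-primitive weights into scf_sel_totweight, which later drives a weighted random choice of which primitive to exercise. A rough user-space sketch of that cumulative-weight selection, with illustrative names rather than scftorture.c's own:

    #include <stdio.h>
    #include <stdlib.h>
    #include <time.h>

    struct selector {
        unsigned long weight_cumulative;  /* running total up to and including this entry */
        int prim;                         /* which primitive this entry selects */
    };

    static struct selector sel_array[8];
    static int sel_len;
    static unsigned long sel_totweight;

    static void sel_add(unsigned long weight, int prim)
    {
        if (!weight || sel_len >= 8)      /* mirrors the early-return guards above */
            return;
        sel_totweight += weight;
        sel_array[sel_len].weight_cumulative = sel_totweight;
        sel_array[sel_len].prim = prim;
        sel_len++;
    }

    static int sel_pick(void)
    {
        unsigned long r = (unsigned long)rand() % sel_totweight;
        int i;

        for (i = 0; i < sel_len; i++)
            if (r < sel_array[i].weight_cumulative)
                return sel_array[i].prim;
        return -1;
    }

    int main(void)
    {
        srand((unsigned)time(NULL));
        sel_add(3, 0);   /* primitive 0 picked ~3/4 of the time */
        sel_add(1, 1);   /* primitive 1 picked ~1/4 of the time */
        printf("picked primitive %d\n", sel_pick());
        return 0;
    }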
/kernel/sched/
fair.c
140 lw->weight += inc; in update_load_add()
146 lw->weight -= dec; in update_load_sub()
152 lw->weight = w; in update_load_set()
213 w = scale_load_down(lw->weight); in __update_inv_weight()
235 static u64 __calc_delta(u64 delta_exec, unsigned long weight, struct load_weight *lw) in __calc_delta() argument
237 u64 fact = scale_load_down(weight); in __calc_delta()
681 if (unlikely(se->load.weight != NICE_0_LOAD)) in calc_delta_fair()
729 update_load_add(&lw, se->load.weight); in sched_slice()
732 slice = __calc_delta(slice, se->load.weight, load); in sched_slice()
772 sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
[all …]
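The fair.c hits are the core CFS weight arithmetic: __calc_delta() scales delta_exec by weight / lw->weight, and calc_delta_fair() applies it with NICE_0_LOAD, so heavier entities accrue vruntime more slowly and therefore receive proportionally more CPU time. A simplified sketch of that scaling, taking NICE_0_LOAD as its scale_load_down() value of 1024 and dividing directly instead of using the kernel's precomputed inverse weight (__update_inv_weight()):

    #include <stdio.h>
    #include <stdint.h>

    #define NICE_0_LOAD 1024ULL     /* scale_load_down() value of the nice-0 weight */

    /* shape of __calc_delta(): delta_exec * weight / lw->weight */
    static uint64_t calc_delta(uint64_t delta_exec, uint64_t weight, uint64_t lw_weight)
    {
        return delta_exec * weight / lw_weight;
    }

    int main(void)
    {
        uint64_t delta_exec = 1000000;  /* 1 ms of real runtime, in ns */

        /* calc_delta_fair(): a nice-0 entity's vruntime advances at wall-clock rate */
        printf("weight 1024: vruntime += %llu ns\n",
               (unsigned long long)calc_delta(delta_exec, NICE_0_LOAD, 1024));
        /* a heavier entity (weight 2048) accrues vruntime half as fast */
        printf("weight 2048: vruntime += %llu ns\n",
               (unsigned long long)calc_delta(delta_exec, NICE_0_LOAD, 2048));
        return 0;
    }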
core.c
894 load->weight = scale_load(WEIGHT_IDLEPRIO); in set_load_weight()
906 load->weight = scale_load(sched_prio_to_weight[prio]); in set_load_weight()
8708 u64 weight = scale_load_down(tg->shares); in cpu_weight_read_u64() local
8710 return DIV_ROUND_CLOSEST_ULL(weight * CGROUP_WEIGHT_DFL, 1024); in cpu_weight_read_u64()
8714 struct cftype *cft, u64 weight) in cpu_weight_write_u64() argument
8723 if (weight < CGROUP_WEIGHT_MIN || weight > CGROUP_WEIGHT_MAX) in cpu_weight_write_u64()
8726 weight = DIV_ROUND_CLOSEST_ULL(weight * 1024, CGROUP_WEIGHT_DFL); in cpu_weight_write_u64()
8728 return sched_group_set_shares(css_tg(css), scale_load(weight)); in cpu_weight_write_u64()
8734 unsigned long weight = scale_load_down(css_tg(css)->shares); in cpu_weight_nice_read_s64() local
8740 delta = abs(sched_prio_to_weight[prio] - weight); in cpu_weight_nice_read_s64()
[all …]
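cpu_weight_read_u64()/cpu_weight_write_u64() convert between the cgroup v2 cpu.weight range [CGROUP_WEIGHT_MIN, CGROUP_WEIGHT_MAX] with default CGROUP_WEIGHT_DFL and the scheduler's internal shares, where the default maps to 1024. A small sketch of that round-trip, assuming the usual values 1, 100 and 10000 for the CGROUP_WEIGHT_* constants and local helper names:

    #include <stdio.h>
    #include <stdint.h>

    #define CGROUP_WEIGHT_MIN 1ULL
    #define CGROUP_WEIGHT_DFL 100ULL
    #define CGROUP_WEIGHT_MAX 10000ULL

    /* round-to-nearest division, like DIV_ROUND_CLOSEST_ULL() */
    static uint64_t div_round_closest(uint64_t x, uint64_t d)
    {
        return (x + d / 2) / d;
    }

    static uint64_t weight_to_shares(uint64_t weight)   /* cpu_weight_write_u64() direction */
    {
        return div_round_closest(weight * 1024, CGROUP_WEIGHT_DFL);
    }

    static uint64_t shares_to_weight(uint64_t shares)   /* cpu_weight_read_u64() direction */
    {
        return div_round_closest(shares * CGROUP_WEIGHT_DFL, 1024);
    }

    int main(void)
    {
        uint64_t w;

        for (w = CGROUP_WEIGHT_MIN; w <= CGROUP_WEIGHT_MAX; w *= 10)
            printf("cpu.weight %5llu -> shares %6llu -> cpu.weight %5llu\n",
                   (unsigned long long)w,
                   (unsigned long long)weight_to_shares(w),
                   (unsigned long long)shares_to_weight(weight_to_shares(w)));
        return 0;
    }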
debug.c
458 P(se->load.weight); in print_cfs_group_stats()
599 SEQ_printf(m, " .%-30s: %ld\n", "load", cfs_rq->load.weight); in print_cfs_rq()
1000 P(se.load.weight); in proc_sched_show_task()
pelt.c
387 scale_load_down(cfs_rq->load.weight), in __update_load_avg_cfs_rq()
rt.c
691 int i, weight; in do_balance_runtime() local
694 weight = cpumask_weight(rd->span); in do_balance_runtime()
720 diff = div_u64((u64)diff, weight); in do_balance_runtime()
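In do_balance_runtime() the weight is simply cpumask_weight(rd->span), i.e. the number of CPUs in the root domain, and the runtime borrowed from a neighbour is divided evenly by it. A tiny sketch of that popcount-then-divide step, using a plain unsigned long as a stand-in for the cpumask:

    #include <stdio.h>

    /* stand-in for cpumask_weight(): count the set bits */
    static int mask_weight(unsigned long mask)
    {
        int w = 0;

        while (mask) {
            w += mask & 1;
            mask >>= 1;
        }
        return w;
    }

    int main(void)
    {
        unsigned long span = 0xfUL;             /* CPUs 0-3 in this root domain */
        unsigned long long diff = 1000000;      /* runtime available to borrow, ns */
        int weight = mask_weight(span);

        printf("span weight = %d, per-CPU share = %llu ns\n",
               weight, diff / weight);
        return 0;
    }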
sched.h
765 return scale_load_down(se->load.weight); in se_weight()