
Searched refs:weight (Results 1 – 9 of 9) sorted by relevance

/kernel/events/
hw_breakpoint.c
409 toggle_bp_slot(struct perf_event *bp, bool enable, enum bp_type_idx type, int weight) in toggle_bp_slot() argument
414 weight = -weight; in toggle_bp_slot()
424 bp_slots_histogram_add(&cpu_pinned[type], info->cpu_pinned, weight); in toggle_bp_slot()
425 info->cpu_pinned += weight; in toggle_bp_slot()
478 bp_slots_histogram_add(&tsk_pinned_all[type], next_tsk_pinned, weight); in toggle_bp_slot()
487 next_tsk_pinned, weight); in toggle_bp_slot()
494 next_tsk_pinned + hw_breakpoint_weight(bp), weight); in toggle_bp_slot()
511 next_tsk_pinned, weight); in toggle_bp_slot()
596 int weight; in __reserve_bp_slot() local
609 weight = hw_breakpoint_weight(bp); in __reserve_bp_slot()
[all …]
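The hw_breakpoint.c hits show one accounting pattern: when a breakpoint slot is released, its weight is negated so the same accumulation path handles both reserve and release. A minimal user-space sketch of that idea follows; the names (toggle_slot, cpu_pinned) are illustrative, not the kernel's.

#include <stdio.h>

/* Illustrative only: a per-CPU count of pinned breakpoint slots. */
static int cpu_pinned;

/* Add (enable) or subtract (disable) a breakpoint's slot weight. */
static void toggle_slot(int weight, int enable)
{
	if (!enable)
		weight = -weight;	/* one code path covers both directions */
	cpu_pinned += weight;
}

int main(void)
{
	toggle_slot(1, 1);			/* reserve one slot */
	toggle_slot(1, 0);			/* release it again */
	printf("cpu_pinned = %d\n", cpu_pinned);	/* back to 0 */
	return 0;
}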
core.c
1860 size += sizeof(data->weight.full); in __perf_event_header_size()
7227 perf_output_put(handle, data->weight.full); in perf_output_sample()
7542 data->weight.full = 0; in perf_prepare_sample()
/kernel/
scftorture.c
204 static void scf_sel_add(unsigned long weight, int prim, bool wait) in scf_sel_add() argument
211 if (!weight || in scf_sel_add()
213 WARN_ON_ONCE(0 - 100000 * weight <= 100000 * scf_sel_totweight) || in scf_sel_add()
216 scf_sel_totweight += weight; in scf_sel_add()
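These scftorture.c hits build a table of weighted operation types: each scf_sel_add() call accumulates into scf_sel_totweight, and a later random draw against the running totals selects a primitive. A small sketch of cumulative-weight selection under that assumption; the names and scaling below are illustrative, not scftorture's.

#include <stdio.h>
#include <stdlib.h>

struct selector {
	unsigned long cumweight;	/* running total up to and including this entry */
	const char *name;
};

static struct selector sel[8];
static unsigned long totweight;
static int nsel;

/* Append an entry; zero-weight entries contribute nothing and are skipped. */
static void sel_add(unsigned long weight, const char *name)
{
	if (!weight)
		return;
	totweight += weight;
	sel[nsel].cumweight = totweight;
	sel[nsel].name = name;
	nsel++;
}

/* Draw uniformly in [0, totweight) and return the first entry whose
 * cumulative weight exceeds the draw. */
static const char *sel_pick(void)
{
	unsigned long r = (unsigned long)rand() % totweight;
	for (int i = 0; i < nsel; i++)
		if (r < sel[i].cumweight)
			return sel[i].name;
	return sel[nsel - 1].name;
}

int main(void)
{
	sel_add(60, "single");
	sel_add(30, "many");
	sel_add(10, "all");
	for (int i = 0; i < 5; i++)
		printf("%s\n", sel_pick());
	return 0;
}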
/kernel/sched/
fair.c
220 lw->weight += inc; in update_load_add()
226 lw->weight -= dec; in update_load_sub()
232 lw->weight = w; in update_load_set()
293 w = scale_load_down(lw->weight); in __update_inv_weight()
315 static u64 __calc_delta(u64 delta_exec, unsigned long weight, struct load_weight *lw) in __calc_delta() argument
317 u64 fact = scale_load_down(weight); in __calc_delta()
705 if (unlikely(se->load.weight != NICE_0_LOAD)) in calc_delta_fair()
758 update_load_add(&lw, se->load.weight); in sched_slice()
761 slice = __calc_delta(slice, se->load.weight, load); in sched_slice()
807 sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
[all …]
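The fair.c hits are CFS load-weight arithmetic; __calc_delta() computes delta_exec * weight / lw->weight, using a cached fixed-point inverse of lw->weight so the hot path needs only a multiply and shift. A minimal user-space sketch of that idea, with illustrative names and without the kernel's overflow handling (which shifts the factors down when they grow too large):

#include <stdint.h>
#include <stdio.h>

#define WMULT_SHIFT 32

struct load_weight {
	unsigned long weight;
	uint32_t inv_weight;	/* ~= 2^32 / weight, cached */
};

/* Cache the fixed-point inverse so later scaling avoids a runtime division. */
static void update_inv_weight(struct load_weight *lw)
{
	lw->inv_weight = (uint32_t)((1ULL << WMULT_SHIFT) / lw->weight);
}

/* Approximate delta_exec * weight / lw->weight; fine for small sketch inputs,
 * but a real implementation must guard the 64-bit product against overflow. */
static uint64_t calc_delta(uint64_t delta_exec, unsigned long weight,
			   const struct load_weight *lw)
{
	return (delta_exec * weight * lw->inv_weight) >> WMULT_SHIFT;
}

int main(void)
{
	struct load_weight lw = { .weight = 3072 };	/* e.g. three nice-0 tasks queued */

	update_inv_weight(&lw);
	/* One nice-0 task (weight 1024) gets roughly a third of a 6 ms slice. */
	printf("%llu ns\n", (unsigned long long)calc_delta(6000000, 1024, &lw));
	return 0;
}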
debug.c
483 P(se->load.weight); in print_cfs_group_stats()
630 SEQ_printf(m, " .%-30s: %ld\n", "load", cfs_rq->load.weight); in print_cfs_rq()
1028 P(se.load.weight); in proc_sched_show_task()
pelt.c
330 scale_load_down(cfs_rq->load.weight), in __update_load_avg_cfs_rq()
core.c
1299 load->weight = scale_load(WEIGHT_IDLEPRIO); in set_load_weight()
1311 load->weight = scale_load(sched_prio_to_weight[prio]); in set_load_weight()
11270 u64 weight = scale_load_down(tg->shares); in cpu_weight_read_u64() local
11272 return DIV_ROUND_CLOSEST_ULL(weight * CGROUP_WEIGHT_DFL, 1024); in cpu_weight_read_u64()
11276 struct cftype *cft, u64 weight) in cpu_weight_write_u64() argument
11285 if (weight < CGROUP_WEIGHT_MIN || weight > CGROUP_WEIGHT_MAX) in cpu_weight_write_u64()
11288 weight = DIV_ROUND_CLOSEST_ULL(weight * 1024, CGROUP_WEIGHT_DFL); in cpu_weight_write_u64()
11290 return sched_group_set_shares(css_tg(css), scale_load(weight)); in cpu_weight_write_u64()
11296 unsigned long weight = scale_load_down(css_tg(css)->shares); in cpu_weight_nice_read_s64() local
11302 delta = abs(sched_prio_to_weight[prio] - weight); in cpu_weight_nice_read_s64()
[all …]
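The core.c hits around cpu_weight_read_u64()/cpu_weight_write_u64() convert between the cgroup-v2 cpu.weight value (range 1..10000, default 100) and CFS shares, where the default weight maps to 1024 shares. A minimal sketch of that round-trip conversion; the helper names are illustrative, and the constants are as defined in include/linux/cgroup.h:

#include <stdio.h>

#define CGROUP_WEIGHT_MIN	1
#define CGROUP_WEIGHT_DFL	100
#define CGROUP_WEIGHT_MAX	10000

/* Round-to-nearest unsigned division, in the spirit of DIV_ROUND_CLOSEST_ULL. */
static unsigned long long div_round_closest(unsigned long long x, unsigned long long d)
{
	return (x + d / 2) / d;
}

/* cpu.weight -> CFS shares: the default weight of 100 maps to 1024 shares. */
static unsigned long long weight_to_shares(unsigned long long weight)
{
	return div_round_closest(weight * 1024, CGROUP_WEIGHT_DFL);
}

/* CFS shares -> cpu.weight, the inverse mapping used on the read side. */
static unsigned long long shares_to_weight(unsigned long long shares)
{
	return div_round_closest(shares * CGROUP_WEIGHT_DFL, 1024);
}

int main(void)
{
	printf("weight 100   -> %llu shares\n", weight_to_shares(100));	/* 1024 */
	printf("weight 10000 -> %llu shares\n", weight_to_shares(10000));	/* 102400 */
	printf("shares 2048  -> weight %llu\n", shares_to_weight(2048));	/* 200 */
	return 0;
}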
rt.c
738 int i, weight; in do_balance_runtime() local
741 weight = cpumask_weight(rd->span); in do_balance_runtime()
767 diff = div_u64((u64)diff, weight); in do_balance_runtime()
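In the rt.c hit, "weight" means something different: cpumask_weight() counts the set bits in the root-domain span, i.e. the number of CPUs, and the borrowable runtime difference is divided evenly among them. A small sketch with a plain popcount standing in for cpumask_weight():

#include <stdint.h>
#include <stdio.h>

/* Stand-in for cpumask_weight(): number of set bits in a CPU mask word. */
static int mask_weight(uint64_t mask)
{
	int w = 0;

	for (; mask; mask &= mask - 1)
		w++;
	return w;
}

int main(void)
{
	uint64_t span = 0xf;		/* 4 CPUs in the root domain */
	uint64_t diff = 20000000;	/* 20 ms of spare runtime */
	int weight = mask_weight(span);

	printf("each CPU may borrow up to %llu ns\n",
	       (unsigned long long)(diff / weight));
	return 0;
}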
sched.h
806 return scale_load_down(se->load.weight); in se_weight()