
Searched refs: load (Results 1 – 12 of 12, sorted by relevance)

/kernel/sched/
pelt.c
105 unsigned long load, unsigned long runnable, int running) in accumulate_sum() argument
126 if (load) { in accumulate_sum()
143 if (load) in accumulate_sum()
144 sa->load_sum += load * contrib; in accumulate_sum()
183 unsigned long load, unsigned long runnable, int running) in ___update_load_sum() argument
220 if (!load) in ___update_load_sum()
230 if (!accumulate_sum(delta, sa, load, runnable, running)) in ___update_load_sum()
262 ___update_load_avg(struct sched_avg *sa, unsigned long load) in ___update_load_avg() argument
269 sa->load_avg = div_u64(load * sa->load_sum, divider); in ___update_load_avg()
330 scale_load_down(cfs_rq->load.weight), in __update_load_avg_cfs_rq()
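
For context on the pelt.c hits above: a minimal user-space sketch of a PELT-style decayed load sum, loosely modeled on accumulate_sum() and ___update_load_avg(). The 32-period half-life over ~1ms windows matches the PELT design; the floating-point math and single-segment accumulation are simplifying assumptions for illustration only.

#include <stdio.h>
#include <math.h>

#define HALF_LIFE 32    /* load halves every 32 periods, per PELT's design */

struct sched_avg_sketch {
	double load_sum;
	double load_avg;
};

static void accumulate(struct sched_avg_sketch *sa, unsigned int periods,
		       double load)
{
	double y = pow(0.5, 1.0 / HALF_LIFE);   /* per-period decay factor */

	/* Decay history for the elapsed periods, then add this window. */
	sa->load_sum *= pow(y, periods);
	if (load)
		sa->load_sum += load;

	/* Normalize by the geometric-series maximum, cf. ___update_load_avg(). */
	sa->load_avg = sa->load_sum * (1.0 - y);
}

int main(void)
{
	struct sched_avg_sketch sa = { 0, 0 };

	for (int i = 0; i < 128; i++)
		accumulate(&sa, 1, 1024.0);     /* fully loaded windows */
	printf("load_avg after 128 busy periods: %.1f (converging to 1024)\n",
	       sa.load_avg);
	return 0;
}

In steady state the normalized average converges to the per-window load, which is what makes load_avg values comparable across entities regardless of how long they have been running.
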
fair.c
705 if (unlikely(se->load.weight != NICE_0_LOAD)) in calc_delta_fair()
706 delta = __calc_delta(delta, NICE_0_LOAD, &se->load); in calc_delta_fair()
748 struct load_weight *load; in sched_slice() local
753 load = &qcfs_rq->load; in sched_slice()
756 lw = qcfs_rq->load; in sched_slice()
758 update_load_add(&lw, se->load.weight); in sched_slice()
759 load = &lw; in sched_slice()
761 slice = __calc_delta(slice, se->load.weight, load); in sched_slice()
807 sa->load_avg = scale_load_down(se->load.weight); in init_entity_runnable_average()
863 sa->util_avg = cfs_rq->avg.util_avg * se->load.weight; in post_init_entity_util_avg()
[all …]
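
The fair.c hits revolve around __calc_delta(), which scales a runtime delta by NICE_0_LOAD/weight without dividing on the hot path. A sketch of that multiply-shift trick; the precomputed 2^32/weight inverse mirrors the kernel's inv_weight, while the overflow guarding of the real implementation is omitted.

#include <stdio.h>
#include <stdint.h>

#define NICE_0_LOAD 1024ULL

struct load_weight {
	unsigned long weight;
	uint32_t inv_weight;    /* ~2^32 / weight, precomputed */
};

static void update_inv_weight(struct load_weight *lw)
{
	lw->inv_weight = (uint32_t)(0xffffffffu / lw->weight);
}

/* delta * weight / lw->weight, done as multiply-then-shift */
static uint64_t calc_delta(uint64_t delta, unsigned long weight,
			   const struct load_weight *lw)
{
	return (delta * weight * lw->inv_weight) >> 32;
}

int main(void)
{
	struct load_weight lw = { .weight = 2048 }; /* ~2x the default weight */

	update_inv_weight(&lw);
	/* 6ms of wall-clock runtime counts as only ~3ms of vruntime. */
	printf("vruntime delta: %llu ns\n",
	       (unsigned long long)calc_delta(6000000ULL, NICE_0_LOAD, &lw));
	return 0;
}

This is why calc_delta_fair() at line 705 can skip the call entirely when the weight is exactly NICE_0_LOAD: the scale factor is then 1.
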
loadavg.c
156 calc_load_n(unsigned long load, unsigned long exp, in calc_load_n() argument
159 return calc_load(load, fixed_power_int(exp, FSHIFT, n), active); in calc_load_n()
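
The loadavg.c hits compute the classic 1/5/15-minute averages as a fixed-point exponential moving average; calc_load_n() folds n missed updates at once by raising the decay factor to the n-th power. A runnable sketch using the kernel's FSHIFT and one-minute EXP_1 constants; the extra rounding inside the real fixed_power_int() is omitted.

#include <stdio.h>

#define FSHIFT  11
#define FIXED_1 (1UL << FSHIFT)
#define EXP_1   1884            /* 1/exp(5s/1min) in FSHIFT fixed point */

/* avg = avg * exp + active * (1 - exp), all in fixed point */
static unsigned long calc_load(unsigned long load, unsigned long exp,
			       unsigned long active)
{
	unsigned long newload = load * exp + active * (FIXED_1 - exp);

	if (active >= load)
		newload += FIXED_1 - 1;     /* round up toward activity */
	return newload >> FSHIFT;
}

/* x^n in fixed point, by repeated squaring (cf. fixed_power_int()) */
static unsigned long fixed_power(unsigned long x, unsigned int n)
{
	unsigned long result = FIXED_1;

	while (n) {
		if (n & 1)
			result = (result * x) >> FSHIFT;
		x = (x * x) >> FSHIFT;
		n >>= 1;
	}
	return result;
}

int main(void)
{
	unsigned long avg = 0, active = 3 * FIXED_1;    /* 3 runnable tasks */

	/* One 5s tick, then 11 missed ticks folded in, as calc_load_n() does. */
	avg = calc_load(avg, EXP_1, active);
	avg = calc_load(avg, fixed_power(EXP_1, 11), active);
	printf("loadavg after ~1 min: %lu.%02lu\n",
	       avg >> FSHIFT, ((avg & (FIXED_1 - 1)) * 100) >> FSHIFT);
	return 0;
}
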
debug.c
483 P(se->load.weight); in print_cfs_group_stats()
630 SEQ_printf(m, " .%-30s: %ld\n", "load", cfs_rq->load.weight); in print_cfs_rq()
1028 P(se.load.weight); in proc_sched_show_task()
sched.h
562 struct load_weight load; member
806 return scale_load_down(se->load.weight); in se_weight()
core.c
1293 struct load_weight *load = &p->se.load; in set_load_weight() local
1299 load->weight = scale_load(WEIGHT_IDLEPRIO); in set_load_weight()
1300 load->inv_weight = WMULT_IDLEPRIO; in set_load_weight()
1311 load->weight = scale_load(sched_prio_to_weight[prio]); in set_load_weight()
1312 load->inv_weight = sched_prio_to_wmult[prio]; in set_load_weight()
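
The sched.h and core.c hits show the struct load_weight member and how set_load_weight() fills it from the task's priority. A sketch using a truncated excerpt of the kernel's sched_prio_to_weight[] table; the SCHED_IDLE special case at lines 1299–1300 and the precomputed sched_prio_to_wmult[] table are simplified away here, with inv_weight recomputed instead.

#include <stdio.h>
#include <stdint.h>

/* sched_prio_to_weight[] excerpt: each nice step is worth ~1.25x */
static const unsigned int prio_to_weight[] = {
	/* nice -2 */ 1586,
	/* nice -1 */ 1277,
	/* nice  0 */ 1024,
	/* nice  1 */  820,
	/* nice  2 */  655,
};

struct load_weight {
	unsigned long weight;
	uint32_t inv_weight;
};

static void set_load_weight(struct load_weight *lw, int nice)
{
	lw->weight = prio_to_weight[nice + 2];
	lw->inv_weight = (uint32_t)(0xffffffffu / lw->weight);
}

int main(void)
{
	struct load_weight lw;

	for (int nice = -2; nice <= 2; nice++) {
		set_load_weight(&lw, nice);
		printf("nice %+d -> weight %lu\n", nice, lw.weight);
	}
	return 0;
}
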
/kernel/bpf/preload/
bpf_preload_kern.c
70 static int __init load(void) in load() function
86 late_initcall(load);
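
The bpf_preload hit is the initcall pattern: an __init routine named load() registered with late_initcall() so it runs near the end of boot (or at module init time when built as a module). A minimal sketch of that pattern, buildable as an out-of-tree module with the usual one-line Kbuild makefile; the message text is illustrative.

#include <linux/init.h>
#include <linux/module.h>
#include <linux/printk.h>

static int __init load(void)
{
	pr_info("preload sketch: running at late_initcall time\n");
	return 0;       /* a nonzero return would fail the init */
}

late_initcall(load);
MODULE_LICENSE("GPL");
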
/kernel/module/
Kconfig
109 Check modules for valid signatures upon load: the signature
142 - Allows other modules to load if they don't violate the access to
165 possible to load a signed module containing the algorithm to check
214 Please note that the tool used to load modules needs to support the
258 load pinning security policy is enabled.
285 userspace can still load modules explicitly).
/kernel/
Kconfig.preempt
47 under load.
63 system is under load, at the cost of slightly lower throughput
kexec_file.c
70 if (!image->fops || !image->fops->load) in kexec_image_load_default()
73 return image->fops->load(image, image->kernel_buf, in kexec_image_load_default()
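
The kexec_file.c hit is an ops-table dispatch: the core checks that the image type supplies a ->load callback before invoking it. A user-space sketch of the same pattern; the types and the ELF handler below are simplified stand-ins for the kernel's kexec_file_ops.

#include <stdio.h>
#include <stddef.h>
#include <errno.h>

struct kimage;

struct image_ops {
	const char *name;
	void *(*load)(struct kimage *image, char *buf, size_t len);
};

struct kimage {
	const struct image_ops *fops;
	char *kernel_buf;
	size_t kernel_buf_len;
};

static void *elf_load(struct kimage *image, char *buf, size_t len)
{
	printf("loading %zu-byte image via %s handler\n",
	       len, image->fops->name);
	return buf;     /* would return the parsed image data */
}

static const struct image_ops elf_ops = { .name = "elf", .load = elf_load };

static void *image_load_default(struct kimage *image)
{
	/* Guard against a missing handler, as the kernel code does. */
	if (!image->fops || !image->fops->load)
		return (void *)(long)-ENOEXEC;  /* cf. ERR_PTR(-ENOEXEC) */

	return image->fops->load(image, image->kernel_buf,
				 image->kernel_buf_len);
}

int main(void)
{
	char payload[64] = { 0 };
	struct kimage image = {
		.fops = &elf_ops,
		.kernel_buf = payload,
		.kernel_buf_len = sizeof(payload),
	};

	image_load_default(&image);
	return 0;
}
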
/kernel/rcu/
Kconfig
286 to avoid starvation by heavy SCHED_OTHER background load.
/kernel/trace/
Kconfig
891 boot up or module load. With this option, they will not be freed, as