
Searched refs: new_mask (Results 1 – 8 of 8) sorted by relevance

/kernel/
compat.c
111 unsigned len, struct cpumask *new_mask) in compat_get_user_cpu_mask() argument
116 memset(new_mask, 0, cpumask_size()); in compat_get_user_cpu_mask()
120 k = cpumask_bits(new_mask); in compat_get_user_cpu_mask()
128 cpumask_var_t new_mask; in COMPAT_SYSCALL_DEFINE3() local
131 if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) in COMPAT_SYSCALL_DEFINE3()
134 retval = compat_get_user_cpu_mask(user_mask_ptr, len, new_mask); in COMPAT_SYSCALL_DEFINE3()
138 retval = sched_setaffinity(pid, new_mask); in COMPAT_SYSCALL_DEFINE3()
140 free_cpumask_var(new_mask); in COMPAT_SYSCALL_DEFINE3()
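
The compat.c hits are the 32-bit compat path of sched_setaffinity(): a kernel cpumask is allocated, zeroed, filled from the user-supplied bitmap, handed to sched_setaffinity(), and freed. A minimal sketch of that alloc/fill/apply/free pattern (the function below is illustrative, not the kernel's code; it takes an in-kernel mask instead of a user pointer):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/sched.h>

/* Illustrative only: apply a caller-supplied mask to a task, mirroring the
 * alloc -> fill -> sched_setaffinity -> free sequence seen in compat.c. */
static long apply_affinity(pid_t pid, const struct cpumask *requested)
{
        cpumask_var_t new_mask;
        long retval;

        if (!alloc_cpumask_var(&new_mask, GFP_KERNEL))
                return -ENOMEM;

        cpumask_copy(new_mask, requested);      /* compat.c instead converts a compat user bitmap */
        retval = sched_setaffinity(pid, new_mask);
        free_cpumask_var(new_mask);
        return retval;
}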
/kernel/trace/
trace_boot.c
70 cpumask_var_t new_mask; in trace_boot_set_instance_options() local
72 if (alloc_cpumask_var(&new_mask, GFP_KERNEL)) { in trace_boot_set_instance_options()
73 if (cpumask_parse(p, new_mask) < 0 || in trace_boot_set_instance_options()
74 tracing_set_cpumask(tr, new_mask) < 0) in trace_boot_set_instance_options()
76 free_cpumask_var(new_mask); in trace_boot_set_instance_options()
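
trace_boot.c shows the usual string-to-cpumask idiom: allocate a temporary mask, cpumask_parse() the boot-config value, pass the result to tracing_set_cpumask(), then free the temporary. A hedged sketch of the same parse-and-apply shape (the consumer callback is an assumption for illustration):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>

/* Illustrative: parse a hex bitmap string such as "f" (CPUs 0-3) into a
 * temporary mask, hand it to a consumer, then release the temporary. */
static int set_mask_from_string(const char *p,
                                int (*consume)(const struct cpumask *mask))
{
        cpumask_var_t new_mask;
        int err = -ENOMEM;

        if (alloc_cpumask_var(&new_mask, GFP_KERNEL)) {
                err = cpumask_parse(p, new_mask);
                if (!err)
                        err = consume(new_mask);
                free_cpumask_var(new_mask);
        }
        return err;
}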
/kernel/time/
sched_clock.c
155 u64 res, wrap, new_mask, new_epoch, cyc, ns; in sched_clock_register() local
170 new_mask = CLOCKSOURCE_MASK(bits); in sched_clock_register()
174 wrap = clocks_calc_max_nsecs(new_mult, new_shift, 0, new_mask, NULL); in sched_clock_register()
186 rd.sched_clock_mask = new_mask; in sched_clock_register()
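
In sched_clock_register(), new_mask is not a cpumask at all but the counter-width mask: CLOCKSOURCE_MASK(bits) builds an all-ones mask of the given width, and clocks_calc_max_nsecs() converts mult/shift/mask into the time until the counter wraps. A sketch of that calculation, assuming a mult/shift pair has already been derived (e.g. via clocks_calc_mult_shift()):

#include <linux/clocksource.h>
#include <linux/types.h>

/* Illustrative: how long can a free-running counter of 'bits' width run,
 * at the given mult/shift scaling, before it wraps around? */
static u64 counter_wrap_ns(int bits, u32 mult, u32 shift)
{
        u64 new_mask = CLOCKSOURCE_MASK(bits);  /* e.g. bits = 56 -> 0x00ffffffffffffff */

        /* maxadj = 0 and max_cycles = NULL, as in sched_clock_register() */
        return clocks_calc_max_nsecs(mult, shift, 0, new_mask, NULL);
}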
/kernel/sched/
core.c
1942 void set_cpus_allowed_common(struct task_struct *p, const struct cpumask *new_mask) in set_cpus_allowed_common() argument
1944 cpumask_copy(&p->cpus_mask, new_mask); in set_cpus_allowed_common()
1945 p->nr_cpus_allowed = cpumask_weight(new_mask); in set_cpus_allowed_common()
1946 trace_android_rvh_set_cpus_allowed_comm(p, new_mask); in set_cpus_allowed_common()
1949 void do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask) in do_set_cpus_allowed() argument
1970 p->sched_class->set_cpus_allowed(p, new_mask); in do_set_cpus_allowed()
1982 const struct cpumask *new_mask, in __set_cpus_allowed_ptr_locked() argument
1999 } else if (!cpumask_subset(new_mask, cpu_allowed_mask)) { in __set_cpus_allowed_ptr_locked()
2013 if (cpumask_equal(&p->cpus_mask, new_mask)) in __set_cpus_allowed_ptr_locked()
2021 dest_cpu = cpumask_any_and_distribute(cpu_valid_mask, new_mask); in __set_cpus_allowed_ptr_locked()
[all …]
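
core.c is where new_mask actually becomes the task's affinity: set_cpus_allowed_common() copies the mask into p->cpus_mask and recomputes nr_cpus_allowed, while __set_cpus_allowed_ptr_locked() first checks that the request stays within the CPUs the task may use, short-circuits if nothing changes, and then picks a destination CPU out of the intersection. A simplified sketch of that validate-then-update flow (locking, migration and vendor hooks omitted; not the real function bodies):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/sched.h>

/* Illustrative: the essential checks around updating a task's cpumask. */
static int sketch_set_cpus_allowed(struct task_struct *p,
                                   const struct cpumask *new_mask,
                                   const struct cpumask *cpu_valid_mask)
{
        unsigned int dest_cpu;

        /* Reject masks that reach outside the CPUs this task may ever use. */
        if (!cpumask_subset(new_mask, cpu_valid_mask))
                return -EINVAL;

        /* Nothing to do if the affinity is unchanged. */
        if (cpumask_equal(&p->cpus_mask, new_mask))
                return 0;

        /* Mirror set_cpus_allowed_common(): store the mask, recount CPUs. */
        cpumask_copy(&p->cpus_mask, new_mask);
        p->nr_cpus_allowed = cpumask_weight(new_mask);

        /* Spread tasks across the permitted CPUs when a migration is needed. */
        dest_cpu = cpumask_any_and_distribute(cpu_valid_mask, new_mask);
        if (dest_cpu >= nr_cpu_ids)
                return -EINVAL;
        return 0;
}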
deadline.c
2314 const struct cpumask *new_mask) in set_cpus_allowed_dl() argument
2329 if (!cpumask_intersects(src_rd->span, new_mask)) { in set_cpus_allowed_dl()
2343 set_cpus_allowed_common(p, new_mask); in set_cpus_allowed_dl()
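
The deadline-class override adds one concern on top of the common path: if new_mask no longer intersects the old root domain's span, the task's reserved bandwidth must be moved before the mask is switched, after which it falls through to set_cpus_allowed_common(). The shape of that check (a sketch, not the real DL bandwidth bookkeeping):

#include <linux/cpumask.h>
#include <linux/sched.h>

/* Declared in kernel/sched/sched.h, as the sched.h match below shows. */
extern void set_cpus_allowed_common(struct task_struct *p, const struct cpumask *new_mask);

/* Illustrative: do extra work only when the new mask leaves the old
 * domain entirely, then defer to the common update. */
static void sketch_set_cpus_allowed_dl(struct task_struct *p,
                                       const struct cpumask *old_span,
                                       const struct cpumask *new_mask)
{
        if (!cpumask_intersects(old_span, new_mask)) {
                /* deadline.c migrates the task's DL bandwidth between
                 * root domains here before the mask changes. */
        }

        set_cpus_allowed_common(p, new_mask);
}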
sched.h
1956 extern void set_cpus_allowed_common(struct task_struct *p, const struct cpumask *new_mask);
/kernel/cgroup/
cpuset.c
1100 const struct cpumask *new_mask) in update_cpus_allowed() argument
1104 trace_android_rvh_update_cpus_allowed(p, cs->cpus_requested, new_mask, &ret); in update_cpus_allowed()
1108 return set_cpus_allowed_ptr(p, new_mask); in update_cpus_allowed()
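
cpuset's update_cpus_allowed() is a thin wrapper: an Android vendor hook gets the first chance to act, otherwise the request goes to the standard set_cpus_allowed_ptr() API. A minimal sketch of calling that API directly (the helper and the choice of CPUs 0 and 1 are made up for illustration):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/sched.h>

/* Illustrative: pin a task to CPUs 0 and 1 via the standard affinity API,
 * which is what update_cpus_allowed() falls back to. */
static int pin_task_to_first_two_cpus(struct task_struct *p)
{
        cpumask_var_t new_mask;
        int ret;

        if (!alloc_cpumask_var(&new_mask, GFP_KERNEL))
                return -ENOMEM;

        cpumask_clear(new_mask);
        cpumask_set_cpu(0, new_mask);
        cpumask_set_cpu(1, new_mask);

        ret = set_cpus_allowed_ptr(p, new_mask);
        free_cpumask_var(new_mask);
        return ret;
}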
/kernel/locking/
lockdep.c
4495 unsigned int new_mask, ret = 1; in mark_lock() local
4505 new_mask = 1 << new_bit; in mark_lock()
4511 if (likely(hlock_class(this)->usage_mask & new_mask)) in mark_lock()
4519 if (unlikely(hlock_class(this)->usage_mask & new_mask)) in mark_lock()
4525 hlock_class(this)->usage_mask |= new_mask; in mark_lock()
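
In lockdep.c the name is reused for a plain bit mask rather than a cpumask: mark_lock() builds new_mask = 1 << new_bit, takes an unlocked fast path if the usage bit is already recorded, re-checks under the graph lock, and only then ORs the bit in. The same check / lock / re-check / set shape in miniature (a generic sketch, not lockdep itself):

#include <linux/compiler.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(usage_lock);

/* Illustrative: record a usage bit exactly once, with a lock-free fast path
 * for the common "already recorded" case, as mark_lock() does. */
static void mark_usage_bit(unsigned int *usage_mask, unsigned int new_bit)
{
        unsigned int new_mask = 1U << new_bit;

        if (likely(*usage_mask & new_mask))     /* fast path: bit already set */
                return;

        spin_lock(&usage_lock);
        if (unlikely(*usage_mask & new_mask)) { /* raced with another setter */
                spin_unlock(&usage_lock);
                return;
        }
        *usage_mask |= new_mask;                /* first time this usage is seen */
        spin_unlock(&usage_lock);
}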