
Searched refs: val (results 1 – 25 of 82), sorted by relevance


/kernel/locking/
qspinlock.c
195 atomic_andnot(_Q_PENDING_VAL, &lock->val); in clear_pending()
206 atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val); in clear_pending_set_locked()
221 u32 old, new, val = atomic_read(&lock->val); in xchg_tail() local
224 new = (val & _Q_LOCKED_PENDING_MASK) | tail; in xchg_tail()
230 old = atomic_cmpxchg_relaxed(&lock->val, val, new); in xchg_tail()
231 if (old == val) in xchg_tail()
234 val = old; in xchg_tail()
250 return atomic_fetch_or_acquire(_Q_PENDING_VAL, &lock->val); in queued_fetch_set_pending_acquire()
314 void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val) in queued_spin_lock_slowpath() argument
334 if (val == _Q_PENDING_VAL) { in queued_spin_lock_slowpath()
[all …]
qspinlock_paravirt.h
88 int val = atomic_read(&lock->val); in pv_hybrid_queued_unfair_trylock() local
90 if (!(val & _Q_LOCKED_PENDING_MASK) && in pv_hybrid_queued_unfair_trylock()
95 if (!(val & _Q_TAIL_MASK) || (val & _Q_PENDING_MASK)) in pv_hybrid_queued_unfair_trylock()
128 atomic_or(_Q_PENDING_VAL, &lock->val); in set_pending()
133 int val = atomic_read(&lock->val); in trylock_clear_pending() local
138 if (val & _Q_LOCKED_MASK) in trylock_clear_pending()
144 old = val; in trylock_clear_pending()
145 new = (val & ~_Q_PENDING_MASK) | _Q_LOCKED_VAL; in trylock_clear_pending()
146 val = atomic_cmpxchg_acquire(&lock->val, old, new); in trylock_clear_pending()
148 if (val == old) in trylock_clear_pending()
[all …]
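
The xchg_tail() lines above show a classic compare-and-swap retry loop: recompute the new lock word from the last observed value, attempt the exchange, and retry with the freshly returned value on failure. Below is a minimal userspace sketch of that pattern using C11 atomics; swap_tail(), LOCKED_PENDING_MASK and the bit layout are invented for illustration and are not the kernel's definitions.

```c
/*
 * Minimal userspace sketch (not kernel code) of the retry loop in
 * xchg_tail(): keep the locked/pending bits, replace the tail bits,
 * and retry with the value observed on failure.
 */
#include <stdatomic.h>
#include <stdint.h>

#define LOCKED_PENDING_MASK 0x0000ffffu   /* hypothetical bit layout */

static uint32_t swap_tail(_Atomic uint32_t *lock, uint32_t tail)
{
    uint32_t old = atomic_load_explicit(lock, memory_order_relaxed);

    for (;;) {
        uint32_t new = (old & LOCKED_PENDING_MASK) | tail;

        /* On failure, 'old' is refreshed with the current word. */
        if (atomic_compare_exchange_weak_explicit(lock, &old, new,
                                                  memory_order_relaxed,
                                                  memory_order_relaxed))
            return old;   /* previous word, as xchg_tail() returns it */
    }
}
```

A weak compare-exchange may fail spuriously, which is harmless inside a retry loop and often cheaper than the strong form.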
/kernel/trace/
trace_probe_tmpl.h
7 fetch_store_raw(unsigned long val, struct fetch_insn *code, void *buf) in fetch_store_raw() argument
11 *(u8 *)buf = (u8)val; in fetch_store_raw()
14 *(u16 *)buf = (u16)val; in fetch_store_raw()
17 *(u32 *)buf = (u32)val; in fetch_store_raw()
21 *(u64 *)buf = (u64)val; in fetch_store_raw()
24 *(unsigned long *)buf = val; in fetch_store_raw()
72 process_fetch_insn_bottom(struct fetch_insn *code, unsigned long val, in process_fetch_insn_bottom() argument
78 unsigned long lval = val; in process_fetch_insn_bottom()
84 lval = val; in process_fetch_insn_bottom()
85 ret = probe_mem_read(&val, (void *)val + code->offset, in process_fetch_insn_bottom()
[all …]
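
fetch_store_raw() above dispatches on the operand size recorded in the fetch instruction and stores val into the output buffer at that width. A simplified sketch of the same dispatch follows; store_raw() and struct insn are made-up stand-ins for the kernel types.

```c
/* Sketch of the size-dispatched store in fetch_store_raw(). */
#include <stdint.h>

struct insn { int size; };

static void store_raw(unsigned long val, const struct insn *code, void *buf)
{
    switch (code->size) {
    case 1:  *(uint8_t *)buf  = (uint8_t)val;  break;
    case 2:  *(uint16_t *)buf = (uint16_t)val; break;
    case 4:  *(uint32_t *)buf = (uint32_t)val; break;
    case 8:  *(uint64_t *)buf = (uint64_t)val; break;
    default: *(unsigned long *)buf = val;      break;
    }
}
```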
trace_hwlat.c
413 u64 val; in hwlat_read() local
422 val = *entry; in hwlat_read()
424 len = snprintf(buf, sizeof(buf), "%llu\n", val); in hwlat_read()
448 u64 val; in hwlat_width_write() local
451 err = kstrtoull_from_user(ubuf, cnt, 10, &val); in hwlat_width_write()
456 if (val < hwlat_data.sample_window) in hwlat_width_write()
457 hwlat_data.sample_width = val; in hwlat_width_write()
487 u64 val; in hwlat_window_write() local
490 err = kstrtoull_from_user(ubuf, cnt, 10, &val); in hwlat_window_write()
495 if (hwlat_data.sample_width < val) in hwlat_window_write()
[all …]
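
The hwlat write handlers above follow a common tracefs pattern: parse an unsigned decimal from the user buffer, then accept it only if it stays consistent with the paired setting (the sample width must be smaller than the sample window). A hedged userspace sketch of that validation, with set_sample_width() and struct hwlat_cfg invented for the example:

```c
/*
 * Userspace sketch of the hwlat_width_write() validation pattern:
 * parse a decimal value and accept it only while it is smaller than
 * the paired sample window.
 */
#include <errno.h>
#include <stdlib.h>

struct hwlat_cfg {
    unsigned long long sample_window;  /* total sampling period */
    unsigned long long sample_width;   /* busy time inside the period */
};

static int set_sample_width(struct hwlat_cfg *cfg, const char *buf)
{
    char *end;
    unsigned long long val;

    errno = 0;
    val = strtoull(buf, &end, 10);
    if (errno || end == buf)
        return -EINVAL;

    /* The width must fit inside the sampling window. */
    if (val >= cfg->sample_window)
        return -EINVAL;

    cfg->sample_width = val;
    return 0;
}
```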
trace_branch.c
31 probe_likely_condition(struct ftrace_likely_data *f, int val, int expect) in probe_likely_condition() argument
83 entry->correct = val == expect; in probe_likely_condition()
94 void trace_likely_condition(struct ftrace_likely_data *f, int val, int expect) in trace_likely_condition() argument
99 probe_likely_condition(f, val, expect); in trace_likely_condition()
200 void trace_likely_condition(struct ftrace_likely_data *f, int val, int expect) in trace_likely_condition() argument
205 void ftrace_likely_update(struct ftrace_likely_data *f, int val, in ftrace_likely_update() argument
213 val = expect; in ftrace_likely_update()
221 trace_likely_condition(f, val, expect); in ftrace_likely_update()
224 if (val == expect) in ftrace_likely_update()
trace_events_filter.c
596 type val = (type)pred->val; \
597 return *addr < val; \
602 type val = (type)pred->val; \
603 return *addr <= val; \
608 type val = (type)pred->val; \
609 return *addr > val; \
614 type val = (type)pred->val; \
615 return *addr >= val; \
620 type val = (type)pred->val; \
621 return !!(*addr & val); \
[all …]
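
The repeated "type val = (type)pred->val; return *addr OP val;" bodies above are generated by a macro that stamps out one typed predicate per comparison operator. A simplified sketch of that expansion pattern; DEFINE_CMP_FN and the reduced struct filter_pred are illustrative, not the kernel's definitions.

```c
/* Sketch of the macro expansion behind the predicate bodies above. */
#include <stdint.h>

struct filter_pred { uint64_t val; };

#define DEFINE_CMP_FN(name, type, OP)                           \
static int filter_pred_##name(struct filter_pred *pred,         \
                              const void *event)                \
{                                                               \
    const type *addr = event;                                   \
    type val = (type)pred->val;                                 \
    return *addr OP val;                                        \
}

DEFINE_CMP_FN(lt_s32, int32_t, <)
DEFINE_CMP_FN(le_s32, int32_t, <=)
DEFINE_CMP_FN(gt_s32, int32_t, >)
```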
tracing_map.c
521 struct tracing_map_elt *val; in __tracing_map_insert() local
534 val = READ_ONCE(entry->val); in __tracing_map_insert()
535 if (val && in __tracing_map_insert()
536 keys_match(key, val->key, map->key_size)) { in __tracing_map_insert()
539 return val; in __tracing_map_insert()
540 } else if (unlikely(!val)) { in __tracing_map_insert()
582 WRITE_ONCE(entry->val, elt); in __tracing_map_insert()
585 return entry->val; in __tracing_map_insert()
1092 if (!entry->key || !entry->val) in tracing_map_sort_entries()
1095 entries[n_entries] = create_sort_entry(entry->val->key, in tracing_map_sort_entries()
[all …]
trace.c
508 unsigned long val; in trace_pid_write() local
565 if (kstrtoul(parser.buffer, 0, &val)) in trace_pid_write()
567 if (val >= pid_list->pid_max) in trace_pid_write()
570 pid = (pid_t)val; in trace_pid_write()
1034 static void set_buffer_entries(struct trace_buffer *buf, unsigned long val);
1852 type->flags->val = 0; in register_tracer()
2004 static struct saved_cmdlines_buffer *allocate_cmdlines_buffer(unsigned int val) in allocate_cmdlines_buffer() argument
2012 orig_size = sizeof(*s) + val * TASK_COMM_LEN; in allocate_cmdlines_buffer()
2024 val = (size - sizeof(*s)) / TASK_COMM_LEN; in allocate_cmdlines_buffer()
2025 s->cmdline_num = val; in allocate_cmdlines_buffer()
[all …]
/kernel/
params.c
41 char val[]; member
58 return p->val; in kmalloc_parameter()
68 if (p->val == param) { in maybe_kfree_parameter()
116 char *val, in parse_one() argument
123 int (*handle_unknown)(char *param, char *val, in parse_one()
136 if (!val && in parse_one()
143 err = params[i].ops->set(val, &params[i]); in parse_one()
152 pr_debug("doing %s: %s='%s'\n", doing, param, val); in parse_one()
153 return handle_unknown(param, val, doing, arg); in parse_one()
168 int (*unknown)(char *param, char *val, in parse_args() argument
[all …]
notifier.c
80 unsigned long val, void *v, in notifier_call_chain() argument
98 ret = nb->notifier_call(nb, val, v); in notifier_call_chain()
182 unsigned long val, void *v, in __atomic_notifier_call_chain() argument
188 ret = notifier_call_chain(&nh->head, val, v, nr_to_call, nr_calls); in __atomic_notifier_call_chain()
196 unsigned long val, void *v) in atomic_notifier_call_chain() argument
198 return __atomic_notifier_call_chain(nh, val, v, -1, NULL); in atomic_notifier_call_chain()
310 unsigned long val, void *v, in __blocking_notifier_call_chain() argument
322 ret = notifier_call_chain(&nh->head, val, v, nr_to_call, in __blocking_notifier_call_chain()
331 unsigned long val, void *v) in blocking_notifier_call_chain() argument
333 return __blocking_notifier_call_chain(nh, val, v, -1, NULL); in blocking_notifier_call_chain()
[all …]
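
notifier_call_chain() above walks a singly linked list of callbacks, handing each the same (val, v) pair and stopping early when a callback requests it. A minimal userspace sketch of that walk follows; the nr_to_call/nr_calls bookkeeping of the kernel version is omitted, and call_chain() plus the constant values are illustrative.

```c
/*
 * Userspace sketch of the notifier_call_chain() walk: every callback
 * receives the same (val, v) pair and the walk stops early when a
 * callback sets the stop bit.
 */
#define NOTIFY_OK        0x0001
#define NOTIFY_STOP_MASK 0x8000

struct notifier_block {
    int (*notifier_call)(struct notifier_block *nb,
                         unsigned long val, void *v);
    struct notifier_block *next;
};

static int call_chain(struct notifier_block *nb, unsigned long val, void *v)
{
    int ret = NOTIFY_OK;

    while (nb) {
        ret = nb->notifier_call(nb, val, v);
        if (ret & NOTIFY_STOP_MASK)   /* a callback vetoed: stop here */
            break;
        nb = nb->next;
    }
    return ret;
}
```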
sysctl.c
2253 unsigned long *val, bool *neg, in proc_get_long() argument
2277 if (strtoul_lenient(p, &p, 0, val)) in proc_get_long()
2311 static int proc_put_long(void __user **buf, size_t *size, unsigned long val, in proc_put_long() argument
2317 sprintf(p, "%s%lu", neg ? "-" : "", val); in proc_put_long()
2356 int val = *valp; in do_proc_dointvec_conv() local
2357 if (val < 0) { in do_proc_dointvec_conv()
2359 *lvalp = -(unsigned long)val; in do_proc_dointvec_conv()
2362 *lvalp = (unsigned long)val; in do_proc_dointvec_conv()
2377 unsigned int val = *valp; in do_proc_douintvec_conv() local
2378 *lvalp = (unsigned long)val; in do_proc_douintvec_conv()
[all …]
fail_function.c
107 static int fei_retval_set(void *data, u64 val) in fei_retval_set() argument
110 unsigned long retv = (unsigned long)val; in fei_retval_set()
126 val) != retv) in fei_retval_set()
130 attr->retval = val; in fei_retval_set()
137 static int fei_retval_get(void *data, u64 *val) in fei_retval_get() argument
147 *val = attr->retval; in fei_retval_get()
auditfilter.c
221 switch(audit_classify_arch(arch->val)) { in audit_match_signal()
414 if ((f->val != 0) && (f->val != 1)) in audit_field_valid()
418 if (f->val & ~15) in audit_field_valid()
422 if (f->val & ~S_IFMT) in audit_field_valid()
426 if (f->val > AUDIT_MAX_FIELD_COMPARE) in audit_field_valid()
430 if (f->val >= AF_MAX) in audit_field_valid()
503 f->val = f_val; in audit_data_to_entry()
560 f->val = f_val; in audit_data_to_entry()
594 f->val = f_val; in audit_data_to_entry()
679 if (krule->pflags & AUDIT_LOGINUID_LEGACY && !f->val) { in audit_krule_to_data()
[all …]
auditsc.c
176 static int audit_match_filetype(struct audit_context *ctx, int val) in audit_match_filetype() argument
179 umode_t mode = (umode_t)val; in audit_match_filetype()
360 switch (f->val) { in audit_field_compare()
461 result = audit_comparator(pid, f->op, f->val); in audit_filter_rules()
467 result = audit_comparator(ctx->ppid, f->op, f->val); in audit_filter_rules()
515 result = audit_comparator(sessionid, f->op, f->val); in audit_filter_rules()
518 result = audit_comparator(tsk->personality, f->op, f->val); in audit_filter_rules()
522 result = audit_comparator(ctx->arch, f->op, f->val); in audit_filter_rules()
527 result = audit_comparator(ctx->return_code, f->op, f->val); in audit_filter_rules()
531 if (f->val) in audit_filter_rules()
[all …]
futex.c
2765 static int futex_wait_setup(u32 __user *uaddr, u32 val, unsigned int flags, in futex_wait_setup() argument
2813 if (uval != val) { in futex_wait_setup()
2824 static int futex_wait(u32 __user *uaddr, unsigned int flags, u32 val, in futex_wait() argument
2844 ret = futex_wait_setup(uaddr, val, flags, &q, &hb); in futex_wait()
2873 restart->futex.val = val; in futex_wait()
2901 restart->futex.val, tp, restart->futex.bitset); in futex_wait_restart()
3318 u32 val, ktime_t *abs_time, u32 bitset, in futex_wait_requeue_pi() argument
3358 ret = futex_wait_setup(uaddr, val, flags, &q, &hb); in futex_wait_requeue_pi()
3858 long do_futex(u32 __user *uaddr, int op, u32 val, ktime_t *timeout, in do_futex() argument
3888 return futex_wait(uaddr, flags, val, timeout, val3); in do_futex()
[all …]
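
futex_wait_setup() above enforces the futex contract: the kernel only queues the waiter if the user-space word still holds the expected value, otherwise the wait returns immediately. A small userspace sketch of the same contract as seen from the caller's side; futex_wait_word() is a made-up helper, and the code is Linux-specific (raw syscall).

```c
/*
 * FUTEX_WAIT only sleeps if *uaddr still equals the expected value,
 * otherwise it comes back with EAGAIN.
 */
#include <errno.h>
#include <linux/futex.h>
#include <stdint.h>
#include <sys/syscall.h>
#include <unistd.h>

static int futex_wait_word(uint32_t *uaddr, uint32_t expected)
{
    long ret = syscall(SYS_futex, uaddr, FUTEX_WAIT, expected, NULL, NULL, 0);

    if (ret == -1 && errno == EAGAIN)
        return 0;   /* value already changed: nothing to wait for */
    return (int)ret;
}
```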
/kernel/power/
main.c
83 int __pm_notifier_call_chain(unsigned long val, int nr_to_call, int *nr_calls) in __pm_notifier_call_chain() argument
87 ret = __blocking_notifier_call_chain(&pm_chain_head, val, NULL, in __pm_notifier_call_chain()
92 int pm_notifier_call_chain(unsigned long val) in pm_notifier_call_chain() argument
94 return __pm_notifier_call_chain(val, -1, NULL); in pm_notifier_call_chain()
109 unsigned long val; in pm_async_store() local
111 if (kstrtoul(buf, 10, &val)) in pm_async_store()
114 if (val > 1) in pm_async_store()
117 pm_async_enabled = val; in pm_async_store()
452 unsigned long val; in pm_print_times_store() local
454 if (kstrtoul(buf, 10, &val)) in pm_print_times_store()
[all …]
/kernel/sched/
cpuacct.c
134 static void cpuacct_cpuusage_write(struct cpuacct *ca, int cpu, u64 val) in cpuacct_cpuusage_write() argument
147 cpuusage->usages[i] = val; in cpuacct_cpuusage_write()
186 u64 val) in cpuusage_write() argument
194 if (val) in cpuusage_write()
272 s64 val[CPUACCT_STAT_NSTATS]; in cpuacct_stats_show() local
276 memset(val, 0, sizeof(val)); in cpuacct_stats_show()
280 val[CPUACCT_STAT_USER] += cpustat[CPUTIME_USER]; in cpuacct_stats_show()
281 val[CPUACCT_STAT_USER] += cpustat[CPUTIME_NICE]; in cpuacct_stats_show()
282 val[CPUACCT_STAT_SYSTEM] += cpustat[CPUTIME_SYSTEM]; in cpuacct_stats_show()
283 val[CPUACCT_STAT_SYSTEM] += cpustat[CPUTIME_IRQ]; in cpuacct_stats_show()
[all …]
stats.h
38 #define __schedstat_set(var, val) do { var = (val); } while (0) argument
39 #define schedstat_set(var, val) do { if (schedstat_enabled()) { var = (val); } } while (0) argument
52 # define __schedstat_set(var, val) do { } while (0) argument
53 # define schedstat_set(var, val) do { } while (0) argument
pelt.c
37 static u64 decay_load(u64 val, u64 n) in decay_load() argument
55 val >>= local_n / LOAD_AVG_PERIOD; in decay_load()
59 val = mul_u64_u32_shr(val, runnable_avg_yN_inv[local_n], 32); in decay_load()
60 return val; in decay_load()
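
decay_load() above applies a geometric decay: shift right once per full period (each full period halves the contribution), then scale the remaining fraction with a precomputed fixed-point table. A rough floating-point sketch of the same arithmetic, assuming a period length of 32 (the value LOAD_AVG_PERIOD has in the kernel source) and using pow() in place of the runnable_avg_yN_inv[] table; decay_approx() is invented for the example.

```c
/* Rough sketch of the arithmetic in decay_load(). */
#include <math.h>
#include <stdint.h>

#define PERIOD 32   /* assumed LOAD_AVG_PERIOD */

static uint64_t decay_approx(uint64_t val, unsigned int n)
{
    val >>= n / PERIOD;                 /* whole half-lives */
    /* remaining fraction: y^(n % PERIOD) with y = 2^(-1/PERIOD) */
    return (uint64_t)(val * pow(0.5, (double)(n % PERIOD) / PERIOD));
}
```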
clock.c
302 u64 *ptr, old_val, val; in sched_clock_remote() local
344 val = this_clock; in sched_clock_remote()
351 val = remote_clock; in sched_clock_remote()
354 if (cmpxchg64(ptr, old_val, val) != old_val) in sched_clock_remote()
357 return val; in sched_clock_remote()
/kernel/debug/kdb/
kdb_main.c
371 unsigned long val; in kdbgetintenv() local
374 diag = kdbgetulenv(match, &val); in kdbgetintenv()
376 *value = (int) val; in kdbgetintenv()
393 unsigned long val; in kdbgetularg() local
395 val = simple_strtoul(arg, &endp, 0); in kdbgetularg()
402 val = simple_strtoul(arg, &endp, 16); in kdbgetularg()
407 *value = val; in kdbgetularg()
415 u64 val; in kdbgetu64arg() local
417 val = simple_strtoull(arg, &endp, 0); in kdbgetu64arg()
421 val = simple_strtoull(arg, &endp, 16); in kdbgetu64arg()
[all …]
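
kdbgetularg() above parses with automatic base detection and, if nothing was consumed, retries the argument as bare hex. A userspace sketch of that fallback; parse_ularg() is a made-up name and -EINVAL stands in for the KDB error code.

```c
/* Sketch of the parse-then-retry-as-hex fallback in kdbgetularg(). */
#include <errno.h>
#include <stdlib.h>

static int parse_ularg(const char *arg, unsigned long *value)
{
    char *endp;
    unsigned long val = strtoul(arg, &endp, 0);

    if (endp == arg) {
        /* e.g. "deadbeef" without a 0x prefix: try plain hex */
        val = strtoul(arg, &endp, 16);
        if (endp == arg)
            return -EINVAL;
    }
    *value = val;
    return 0;
}
```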
/kernel/cgroup/
cpuset.c
77 int val; /* most recent output value */ member
1857 static int update_relax_domain_level(struct cpuset *cs, s64 val) in update_relax_domain_level() argument
1860 if (val < -1 || val >= sched_domain_level_max) in update_relax_domain_level()
1864 if (val != cs->relax_domain_level) { in update_relax_domain_level()
1865 cs->relax_domain_level = val; in update_relax_domain_level()
1950 static int update_prstate(struct cpuset *cs, int val) in update_prstate() argument
1956 if ((val != 0) && (val != 1)) in update_prstate()
1958 if (val == cs->partition_root_state) in update_prstate()
1965 if (val && cs->partition_root_state) in update_prstate()
2080 fmp->val = 0; in fmeter_init()
[all …]
/kernel/time/
itimer.c
50 u64 val, interval; in get_cpu_itimer() local
55 val = it->expires; in get_cpu_itimer()
57 if (val) { in get_cpu_itimer()
63 if (val < t) in get_cpu_itimer()
65 val = TICK_NSEC; in get_cpu_itimer()
67 val -= t; in get_cpu_itimer()
72 value->it_value = ns_to_timeval(val); in get_cpu_itimer()
/kernel/dma/
remap.c
186 unsigned long val; in dma_alloc_from_pool() local
194 val = gen_pool_alloc(atomic_pool, size); in dma_alloc_from_pool()
195 if (val) { in dma_alloc_from_pool()
196 phys_addr_t phys = gen_pool_virt_to_phys(atomic_pool, val); in dma_alloc_from_pool()
199 ptr = (void *)val; in dma_alloc_from_pool()
/kernel/irq/
autoprobe.c
117 unsigned int probe_irq_mask(unsigned long val) in probe_irq_mask() argument
136 return mask & val; in probe_irq_mask()
157 int probe_irq_off(unsigned long val) in probe_irq_off() argument
