
Searched defs:l (Results 1 – 24 of 24) sorted by relevance

/kernel/bpf/
bpf_lru_list.c 52 static void bpf_lru_list_count_inc(struct bpf_lru_list *l, in bpf_lru_list_count_inc()
59 static void bpf_lru_list_count_dec(struct bpf_lru_list *l, in bpf_lru_list_count_dec()
66 static void __bpf_lru_node_move_to_free(struct bpf_lru_list *l, in __bpf_lru_node_move_to_free()
87 static void __bpf_lru_node_move_in(struct bpf_lru_list *l, in __bpf_lru_node_move_in()
105 static void __bpf_lru_node_move(struct bpf_lru_list *l, in __bpf_lru_node_move()
129 static bool bpf_lru_list_inactive_low(const struct bpf_lru_list *l) in bpf_lru_list_inactive_low()
145 struct bpf_lru_list *l) in __bpf_lru_list_rotate_active()
172 struct bpf_lru_list *l) in __bpf_lru_list_rotate_inactive()
212 struct bpf_lru_list *l, in __bpf_lru_list_shrink_inactive()
242 static void __bpf_lru_list_rotate(struct bpf_lru *lru, struct bpf_lru_list *l) in __bpf_lru_list_rotate()
[all …]
hashtab.c 79 static inline void htab_elem_set_ptr(struct htab_elem *l, u32 key_size, in htab_elem_set_ptr()
85 static inline void __percpu *htab_elem_get_ptr(struct htab_elem *l, u32 key_size) in htab_elem_get_ptr()
90 static void *fd_htab_map_get_ptr(const struct bpf_map *map, struct htab_elem *l) in fd_htab_map_get_ptr()
123 struct htab_elem *l; in prealloc_lru_pop() local
205 struct pcpu_freelist_node *l; in alloc_extra_elems() local
429 struct htab_elem *l; in lookup_elem_raw() local
447 struct htab_elem *l; in lookup_nulls_elem_raw() local
469 struct htab_elem *l; in __htab_map_lookup_elem() local
488 struct htab_elem *l = __htab_map_lookup_elem(map, key); in htab_map_lookup_elem() local
525 struct htab_elem *l = __htab_map_lookup_elem(map, key); in __htab_lru_map_lookup_elem() local
[all …]
helpers.c 235 arch_spinlock_t *l = (void *)lock; in __bpf_spin_lock() local
249 arch_spinlock_t *l = (void *)lock; in __bpf_spin_unlock() local
258 atomic_t *l = (void *)lock; in __bpf_spin_lock() local
268 atomic_t *l = (void *)lock; in __bpf_spin_unlock() local
offload.c 42 struct rhash_head l; member
/kernel/locking/
rtmutex.h 14 #define rt_mutex_deadlock_check(l) (0) argument
17 #define debug_rt_mutex_lock(l) do { } while (0) argument
18 #define debug_rt_mutex_proxy_lock(l,p) do { } while (0) argument
19 #define debug_rt_mutex_proxy_unlock(l) do { } while (0) argument
20 #define debug_rt_mutex_unlock(l) do { } while (0) argument
22 #define debug_rt_mutex_deadlock(d, a ,l) do { } while (0) argument
mcs_spinlock.h 32 #define arch_mcs_spin_lock_contended(l) \ argument
44 #define arch_mcs_spin_unlock_contended(l) \ argument
spinlock.c 49 # define arch_read_relax(l) cpu_relax() argument
52 # define arch_write_relax(l) cpu_relax() argument
55 # define arch_spin_relax(l) cpu_relax() argument
rtmutex.c 144 # define rt_mutex_cmpxchg_relaxed(l,c,n) (cmpxchg_relaxed(&l->owner, c, n) == c) argument
145 # define rt_mutex_cmpxchg_acquire(l,c,n) (cmpxchg_acquire(&l->owner, c, n) == c) argument
146 # define rt_mutex_cmpxchg_release(l,c,n) (cmpxchg_release(&l->owner, c, n) == c) argument
205 # define rt_mutex_cmpxchg_relaxed(l,c,n) (0) argument
206 # define rt_mutex_cmpxchg_acquire(l,c,n) (0) argument
207 # define rt_mutex_cmpxchg_release(l,c,n) (0) argument
qspinlock_paravirt.h 80 #define queued_spin_trylock(l) pv_hybrid_queued_unfair_trylock(l) argument
lockdep_proc.c 357 static int lock_stat_cmp(const void *l, const void *r) in lock_stat_cmp()
/kernel/cgroup/
cgroup-v1.c 206 struct cgroup_pidlist *l, *tmp_l; in cgroup1_pidlist_destroy_all() local
220 struct cgroup_pidlist *l = container_of(dwork, struct cgroup_pidlist, in cgroup_pidlist_destroy_work_fn() local
288 struct cgroup_pidlist *l; in cgroup_pidlist_find() local
309 struct cgroup_pidlist *l; in cgroup_pidlist_find_create() local
342 struct cgroup_pidlist *l; in pidlist_array_load() local
406 struct cgroup_pidlist *l; in cgroup_pidlist_start() local
460 struct cgroup_pidlist *l = ctx->procs1.pidlist; in cgroup_pidlist_stop() local
472 struct cgroup_pidlist *l = ctx->procs1.pidlist; in cgroup_pidlist_next() local
cgroup.c 4540 struct list_head *l; in css_task_iter_next_css_set() local
/kernel/sched/
cpudeadline.c 28 int l, r, largest; in cpudl_heapify_down() local
/kernel/
audit.h 301 #define audit_to_watch(k, p, l, o) (-EINVAL) argument
302 #define audit_add_watch(k, l) (-EINVAL) argument
307 #define audit_alloc_mark(k, p, l) (ERR_PTR(-EINVAL)) argument
resource.c 92 loff_t l = 0; in r_start() local
1541 loff_t l; in iomem_map_sanity_check() local
1591 loff_t l; in iomem_is_exclusive() local
module.c 1188 size_t l = 0; in module_flags_taint() local
1257 size_t l; in show_taint() local
/kernel/trace/
trace_stat.c 219 struct stat_node *l = container_of(v, struct stat_node, node); in stat_seq_show() local
trace_branch.c 301 int l; in annotate_branch_stat_show() local
trace_events.c 920 loff_t l; in t_start() local
953 loff_t l; in s_start() local
1266 loff_t l = 0; in f_start() local
ftrace.c 3201 loff_t l; in t_probe_start() local
3276 loff_t l; in t_mod_start() local
3367 loff_t l = *pos; /* t_probe_start() must use original pos */ in t_next() local
3404 loff_t l; in t_start() local
trace.c 470 loff_t l = 0; in trace_pid_start() local
3505 loff_t l = 0; in s_start() local
4428 loff_t l = 0; in t_start() local
5213 loff_t l = 0; in saved_cmdlines_start() local
5373 loff_t l = 0; in eval_map_start() local
/kernel/irq/
affinity.c 107 static int ncpus_cmp_func(const void *l, const void *r) in ncpus_cmp_func()
/kernel/printk/
printk.c 3222 size_t l = 0; in kmsg_dump_get_line_nolock() local
3308 size_t l = 0; in kmsg_dump_get_buffer() local
/kernel/events/
uprobes.c 622 static int match_uprobe(struct uprobe *l, struct uprobe *r) in match_uprobe()