/kernel/bpf/

D | bpf_lru_list.c
      52  static void bpf_lru_list_count_inc(struct bpf_lru_list *l,   in bpf_lru_list_count_inc()
      59  static void bpf_lru_list_count_dec(struct bpf_lru_list *l,   in bpf_lru_list_count_dec()
      66  static void __bpf_lru_node_move_to_free(struct bpf_lru_list *l,   in __bpf_lru_node_move_to_free()
      87  static void __bpf_lru_node_move_in(struct bpf_lru_list *l,   in __bpf_lru_node_move_in()
     105  static void __bpf_lru_node_move(struct bpf_lru_list *l,   in __bpf_lru_node_move()
     129  static bool bpf_lru_list_inactive_low(const struct bpf_lru_list *l)   in bpf_lru_list_inactive_low()
     145  struct bpf_lru_list *l)   in __bpf_lru_list_rotate_active()
     172  struct bpf_lru_list *l)   in __bpf_lru_list_rotate_inactive()
     212  struct bpf_lru_list *l,   in __bpf_lru_list_shrink_inactive()
     242  static void __bpf_lru_list_rotate(struct bpf_lru *lru, struct bpf_lru_list *l)   in __bpf_lru_list_rotate()
     [all …]
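
The bpf_lru_list.c helpers above mostly shuffle nodes between the LRU's internal lists (active, inactive, free). As a rough illustration of the pattern that __bpf_lru_node_move() and friends build on, here is a minimal userspace sketch of the list_move() idiom on a circular doubly linked list; all names below are invented for the example, not the kernel's.

    /* Toy sketch of the list_move() pattern behind __bpf_lru_node_move():
     * unlink a node from whichever list it is on, then splice it at the
     * head of another.  Illustrative only. */
    #include <stdio.h>

    struct node { struct node *prev, *next; int id; };

    static void list_init(struct node *head) { head->prev = head->next = head; }

    static void list_del(struct node *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
    }

    static void list_add(struct node *n, struct node *head)
    {
        n->next = head->next;
        n->prev = head;
        head->next->prev = n;
        head->next = n;
    }

    /* A "move" is just delete-from-current plus add-to-target. */
    static void list_move(struct node *n, struct node *head)
    {
        list_del(n);
        list_add(n, head);
    }

    int main(void)
    {
        struct node active, inactive, e = { .id = 1 };

        list_init(&active);
        list_init(&inactive);
        list_add(&e, &active);
        list_move(&e, &inactive);   /* e migrates from active to inactive */
        printf("inactive head -> id %d\n", inactive.next->id);
        return 0;
    }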

D | hashtab.c
      79  static inline void htab_elem_set_ptr(struct htab_elem *l, u32 key_size,   in htab_elem_set_ptr()
      85  static inline void __percpu *htab_elem_get_ptr(struct htab_elem *l, u32 key_size)   in htab_elem_get_ptr()
      90  static void *fd_htab_map_get_ptr(const struct bpf_map *map, struct htab_elem *l)   in fd_htab_map_get_ptr()
     123  struct htab_elem *l;   in prealloc_lru_pop()  (local)
     205  struct pcpu_freelist_node *l;   in alloc_extra_elems()  (local)
     429  struct htab_elem *l;   in lookup_elem_raw()  (local)
     447  struct htab_elem *l;   in lookup_nulls_elem_raw()  (local)
     469  struct htab_elem *l;   in __htab_map_lookup_elem()  (local)
     488  struct htab_elem *l = __htab_map_lookup_elem(map, key);   in htab_map_lookup_elem()  (local)
     525  struct htab_elem *l = __htab_map_lookup_elem(map, key);   in __htab_lru_map_lookup_elem()  (local)
     [all …]
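
The htab_elem_set_ptr()/htab_elem_get_ptr() pair suggests a layout in which a per-CPU value pointer is stored immediately after the variable-length key inside the element. A minimal sketch of that store-a-pointer-after-the-key layout, with invented names; it uses memcpy to sidestep alignment, whereas the kernel instead keeps the slot aligned by rounding the key size.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical element: a header plus a flexible key, with a value
     * pointer tucked in right after the key bytes. */
    struct elem { size_t hash; char key[]; };

    static void elem_set_ptr(struct elem *l, size_t key_size, void *p)
    {
        memcpy(l->key + key_size, &p, sizeof(p));
    }

    static void *elem_get_ptr(struct elem *l, size_t key_size)
    {
        void *p;

        memcpy(&p, l->key + key_size, sizeof(p));
        return p;
    }

    int main(void)
    {
        size_t key_size = 5;
        struct elem *e = malloc(sizeof(*e) + key_size + sizeof(void *));
        int value = 42;

        memcpy(e->key, "hello", key_size);
        elem_set_ptr(e, key_size, &value);
        printf("value = %d\n", *(int *)elem_get_ptr(e, key_size));
        free(e);
        return 0;
    }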

D | helpers.c
     235  arch_spinlock_t *l = (void *)lock;   in __bpf_spin_lock()  (local)
     249  arch_spinlock_t *l = (void *)lock;   in __bpf_spin_unlock()  (local)
     258  atomic_t *l = (void *)lock;   in __bpf_spin_lock()  (local)
     268  atomic_t *l = (void *)lock;   in __bpf_spin_unlock()  (local)
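
The two pairs of locals above come from two builds of the same functions: when arch_spinlock_t fits in the 4-byte BPF lock word, __bpf_spin_lock() reuses it, otherwise it falls back to a bare atomic_t. A userspace sketch of what such an atomic test-and-set fallback can look like, written with C11 atomics rather than the kernel's atomic_t API; names are invented.

    #include <stdatomic.h>
    #include <stdio.h>

    typedef struct { atomic_int v; } toy_spinlock;

    static void toy_spin_lock(toy_spinlock *l)
    {
        int expected = 0;

        /* Flip 0 -> 1 with acquire ordering; spin until we win. */
        while (!atomic_compare_exchange_weak_explicit(&l->v, &expected, 1,
                                                      memory_order_acquire,
                                                      memory_order_relaxed))
            expected = 0;   /* CAS rewrites 'expected' on failure */
    }

    static void toy_spin_unlock(toy_spinlock *l)
    {
        atomic_store_explicit(&l->v, 0, memory_order_release);
    }

    int main(void)
    {
        toy_spinlock l = { 0 };

        toy_spin_lock(&l);
        puts("locked");
        toy_spin_unlock(&l);
        puts("unlocked");
        return 0;
    }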

D | offload.c
      42  struct rhash_head l;   (member)

/kernel/locking/

D | rtmutex.h
      14  #define rt_mutex_deadlock_check(l) (0)   (argument)
      17  #define debug_rt_mutex_lock(l) do { } while (0)   (argument)
      18  #define debug_rt_mutex_proxy_lock(l,p) do { } while (0)   (argument)
      19  #define debug_rt_mutex_proxy_unlock(l) do { } while (0)   (argument)
      20  #define debug_rt_mutex_unlock(l) do { } while (0)   (argument)
      22  #define debug_rt_mutex_deadlock(d, a ,l) do { } while (0)   (argument)

D | mcs_spinlock.h
      32  #define arch_mcs_spin_lock_contended(l) \   (argument)
      44  #define arch_mcs_spin_unlock_contended(l) \   (argument)
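
In an MCS lock each contending CPU spins on a flag in its own queue node instead of on the shared lock word, and the outgoing owner hands over by setting that flag. A toy C11 rendition of what the generic arch_mcs_spin_lock_contended()/arch_mcs_spin_unlock_contended() pair boils down to; the types are invented, and the real macros additionally insert cpu_relax() and let architectures override the whole thing.

    #include <stdatomic.h>
    #include <stdio.h>

    struct mcs_node { atomic_int locked; };

    /* Waiter side: spin until a predecessor grants us the lock.  The
     * acquire load pairs with the release store in mcs_grant(). */
    static void mcs_wait(struct mcs_node *n)
    {
        while (!atomic_load_explicit(&n->locked, memory_order_acquire))
            ;   /* the kernel would cpu_relax() in this loop */
    }

    /* Owner side: hand the lock to the next waiter in the queue. */
    static void mcs_grant(struct mcs_node *n)
    {
        atomic_store_explicit(&n->locked, 1, memory_order_release);
    }

    int main(void)
    {
        struct mcs_node n = { 1 };  /* pretend the lock was just granted */

        mcs_wait(&n);
        puts("acquired");
        return 0;
    }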

D | spinlock.c
      49  # define arch_read_relax(l) cpu_relax()   (argument)
      52  # define arch_write_relax(l) cpu_relax()   (argument)
      55  # define arch_spin_relax(l) cpu_relax()   (argument)

D | rtmutex.c
     144  # define rt_mutex_cmpxchg_relaxed(l,c,n) (cmpxchg_relaxed(&l->owner, c, n) == c)   (argument)
     145  # define rt_mutex_cmpxchg_acquire(l,c,n) (cmpxchg_acquire(&l->owner, c, n) == c)   (argument)
     146  # define rt_mutex_cmpxchg_release(l,c,n) (cmpxchg_release(&l->owner, c, n) == c)   (argument)
     205  # define rt_mutex_cmpxchg_relaxed(l,c,n) (0)   (argument)
     206  # define rt_mutex_cmpxchg_acquire(l,c,n) (0)   (argument)
     207  # define rt_mutex_cmpxchg_release(l,c,n) (0)   (argument)
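
The rt_mutex_cmpxchg_*() macros at 144-146 wrap cmpxchg on the mutex's owner field; that is what enables the uncontended fast path, which acquires the lock by atomically swinging owner from NULL to the current task. The (0) variants at 205-207 simply force the slow path when no suitable cmpxchg is available. A hedged userspace sketch of that fast path, with invented names and C11 atomics standing in for cmpxchg_acquire():

    #include <stdatomic.h>
    #include <stdio.h>

    struct toy_rtmutex { _Atomic(void *) owner; };

    /* Roughly: rt_mutex_cmpxchg_acquire(l, NULL, me).  Returns nonzero
     * when we took the lock without contention. */
    static int try_fast_lock(struct toy_rtmutex *l, void *me)
    {
        void *expected = NULL;

        return atomic_compare_exchange_strong_explicit(
                &l->owner, &expected, me,
                memory_order_acquire, memory_order_relaxed);
    }

    int main(void)
    {
        struct toy_rtmutex m = { NULL };
        int task;   /* stand-in for a task_struct pointer */

        printf("first try:  %d\n", try_fast_lock(&m, &task));  /* 1 */
        printf("second try: %d\n", try_fast_lock(&m, &task));  /* 0 */
        return 0;
    }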

D | qspinlock_paravirt.h
      80  #define queued_spin_trylock(l) pv_hybrid_queued_unfair_trylock(l)   (argument)

D | lockdep_proc.c
     357  static int lock_stat_cmp(const void *l, const void *r)   in lock_stat_cmp()
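
lock_stat_cmp() is a three-way comparator handed to the kernel's sort(), the same contract qsort() uses: return negative, zero, or positive according to the ordering of the two elements. An illustrative standalone example; the stat layout and the sort key below are invented.

    #include <stdio.h>
    #include <stdlib.h>

    struct stat_row { unsigned long contentions; };

    /* Sort descending by contention count.  Comparing instead of
     * subtracting avoids overflow on large unsigned values. */
    static int stat_cmp(const void *l, const void *r)
    {
        const struct stat_row *sl = l, *sr = r;

        if (sl->contentions < sr->contentions) return 1;
        if (sl->contentions > sr->contentions) return -1;
        return 0;
    }

    int main(void)
    {
        struct stat_row v[] = { { 3 }, { 9 }, { 1 } };

        qsort(v, 3, sizeof(v[0]), stat_cmp);
        printf("%lu %lu %lu\n", v[0].contentions, v[1].contentions,
               v[2].contentions);   /* 9 3 1 */
        return 0;
    }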

/kernel/cgroup/

D | cgroup-v1.c
     206  struct cgroup_pidlist *l, *tmp_l;   in cgroup1_pidlist_destroy_all()  (local)
     220  struct cgroup_pidlist *l = container_of(dwork, struct cgroup_pidlist,   in cgroup_pidlist_destroy_work_fn()  (local)
     288  struct cgroup_pidlist *l;   in cgroup_pidlist_find()  (local)
     309  struct cgroup_pidlist *l;   in cgroup_pidlist_find_create()  (local)
     342  struct cgroup_pidlist *l;   in pidlist_array_load()  (local)
     406  struct cgroup_pidlist *l;   in cgroup_pidlist_start()  (local)
     460  struct cgroup_pidlist *l = ctx->procs1.pidlist;   in cgroup_pidlist_stop()  (local)
     472  struct cgroup_pidlist *l = ctx->procs1.pidlist;   in cgroup_pidlist_next()  (local)

D | cgroup.c
    4540  struct list_head *l;   in css_task_iter_next_css_set()  (local)

/kernel/sched/

D | cpudeadline.c
      28  int l, r, largest;   in cpudl_heapify_down()  (local)
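
The l, r, largest trio in cpudl_heapify_down() is the classic binary max-heap sift-down: compare a node with its two children, swap with the larger child, and repeat until the heap property holds. A self-contained toy version over a plain int array; the real cpudl heap orders CPUs by deadline and also maintains a cpu-to-position index, which this sketch omits.

    #include <stdio.h>

    static void swap_int(int *a, int *b) { int t = *a; *a = *b; *b = t; }

    static void heapify_down(int *h, int n, int i)
    {
        for (;;) {
            int l = 2 * i + 1, r = 2 * i + 2, largest = i;

            if (l < n && h[l] > h[largest]) largest = l;
            if (r < n && h[r] > h[largest]) largest = r;
            if (largest == i)
                break;              /* heap property restored */
            swap_int(&h[i], &h[largest]);
            i = largest;            /* keep sinking the old root */
        }
    }

    int main(void)
    {
        int h[] = { 1, 9, 8, 4, 5 };    /* root violates the heap */

        heapify_down(h, 5, 0);
        printf("%d %d %d %d %d\n", h[0], h[1], h[2], h[3], h[4]);
        return 0;   /* prints: 9 5 8 4 1 */
    }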

/kernel/

D | audit.h
     301  #define audit_to_watch(k, p, l, o) (-EINVAL)   (argument)
     302  #define audit_add_watch(k, l) (-EINVAL)   (argument)
     307  #define audit_alloc_mark(k, p, l) (ERR_PTR(-EINVAL))   (argument)

D | resource.c
      92  loff_t l = 0;   in r_start()  (local)
    1541  loff_t l;   in iomem_map_sanity_check()  (local)
    1591  loff_t l;   in iomem_is_exclusive()  (local)
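
The 'loff_t l' local in r_start(), like the ones in the tracing files further down, follows the seq_file ->start() idiom: re-walk the data structure from its head, counting with l, until offset *pos is reached, and return NULL when the iteration is exhausted. A toy version of the pattern over a singly linked list, with invented names and long long standing in for loff_t.

    #include <stdio.h>

    struct entry { struct entry *next; int val; };

    /* seq_file-style start: skip *pos entries from the head. */
    static struct entry *toy_start(struct entry *head, long long *pos)
    {
        long long l = 0;
        struct entry *p = head;

        while (p && l < *pos) {
            p = p->next;
            l++;
        }
        return p;   /* NULL signals the end of the sequence */
    }

    int main(void)
    {
        struct entry c = { NULL, 3 }, b = { &c, 2 }, a = { &b, 1 };
        long long pos = 2;
        struct entry *p = toy_start(&a, &pos);

        printf("entry at pos 2: %d\n", p ? p->val : -1);   /* 3 */
        return 0;
    }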

D | module.c
    1188  size_t l = 0;   in module_flags_taint()  (local)
    1257  size_t l;   in show_taint()  (local)
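
In module_flags_taint() the size_t l is a running length: taint flag characters are appended to a caller-supplied buffer one by one, and l, handed back to the caller, marks where the string ends. A sketch of the same shape with an entirely invented flag set.

    #include <stdio.h>

    /* Hypothetical flags; the kernel builds its string from its own
     * taint table instead. */
    #define FLAG_P 0x1
    #define FLAG_O 0x2
    #define FLAG_E 0x4

    static size_t flags_to_string(unsigned int flags, char *buf)
    {
        size_t l = 0;

        if (flags & FLAG_P) buf[l++] = 'P';
        if (flags & FLAG_O) buf[l++] = 'O';
        if (flags & FLAG_E) buf[l++] = 'E';
        buf[l] = '\0';
        return l;               /* number of characters written */
    }

    int main(void)
    {
        char buf[8];
        size_t l = flags_to_string(FLAG_P | FLAG_E, buf);

        printf("%s (len %zu)\n", buf, l);   /* PE (len 2) */
        return 0;
    }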

/kernel/trace/

D | trace_stat.c
     219  struct stat_node *l = container_of(v, struct stat_node, node);   in stat_seq_show()  (local)

D | trace_branch.c
     301  int l;   in annotate_branch_stat_show()  (local)

D | trace_events.c
     920  loff_t l;   in t_start()  (local)
     953  loff_t l;   in s_start()  (local)
    1266  loff_t l = 0;   in f_start()  (local)

D | ftrace.c
    3201  loff_t l;   in t_probe_start()  (local)
    3276  loff_t l;   in t_mod_start()  (local)
    3367  loff_t l = *pos; /* t_probe_start() must use original pos */   in t_next()  (local)
    3404  loff_t l;   in t_start()  (local)

D | trace.c
     470  loff_t l = 0;   in trace_pid_start()  (local)
    3505  loff_t l = 0;   in s_start()  (local)
    4428  loff_t l = 0;   in t_start()  (local)
    5213  loff_t l = 0;   in saved_cmdlines_start()  (local)
    5373  loff_t l = 0;   in eval_map_start()  (local)

/kernel/irq/

D | affinity.c
     107  static int ncpus_cmp_func(const void *l, const void *r)   in ncpus_cmp_func()

/kernel/printk/

D | printk.c
    3222  size_t l = 0;   in kmsg_dump_get_line_nolock()  (local)
    3308  size_t l = 0;   in kmsg_dump_get_buffer()  (local)

/kernel/events/

D | uprobes.c
     622  static int match_uprobe(struct uprobe *l, struct uprobe *r)   in match_uprobe()
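
match_uprobe(l, r) is a three-way comparison used to order uprobes in a tree; the usual shape for such a function is "compare the major key, then the minor key, return negative/zero/positive". A standalone sketch with invented field names; treat the details as illustrative rather than as the kernel's exact layout.

    #include <stdio.h>

    struct toy_uprobe { unsigned long inode_no, offset; };

    static int match(const struct toy_uprobe *l, const struct toy_uprobe *r)
    {
        if (l->inode_no != r->inode_no)
            return l->inode_no < r->inode_no ? -1 : 1;
        if (l->offset != r->offset)
            return l->offset < r->offset ? -1 : 1;
        return 0;   /* same probe location */
    }

    int main(void)
    {
        struct toy_uprobe a = { 10, 0x40 }, b = { 10, 0x80 };

        printf("match = %d\n", match(&a, &b));   /* -1 */
        return 0;
    }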