Home
last modified time | relevance | path

Searched refs:array (Results 1 – 16 of 16) sorted by relevance

/kernel/bpf/
arraymap.c:21 static void bpf_array_free_percpu(struct bpf_array *array) in bpf_array_free_percpu() argument
25 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_free_percpu()
26 free_percpu(array->pptrs[i]); in bpf_array_free_percpu()
31 static int bpf_array_alloc_percpu(struct bpf_array *array) in bpf_array_alloc_percpu() argument
36 for (i = 0; i < array->map.max_entries; i++) { in bpf_array_alloc_percpu()
37 ptr = __alloc_percpu_gfp(array->elem_size, 8, in bpf_array_alloc_percpu()
40 bpf_array_free_percpu(array); in bpf_array_alloc_percpu()
43 array->pptrs[i] = ptr; in bpf_array_alloc_percpu()
89 struct bpf_array *array; in array_map_alloc() local
114 array_size = sizeof(*array); in array_map_alloc()
[all …]
reuseport_array.c:54 struct reuseport_array *array = reuseport_array(map); in reuseport_array_lookup_elem() local
57 if (unlikely(index >= array->map.max_entries)) in reuseport_array_lookup_elem()
60 return rcu_dereference(array->ptrs[index]); in reuseport_array_lookup_elem()
66 struct reuseport_array *array = reuseport_array(map); in reuseport_array_delete_elem() local
74 if (!rcu_access_pointer(array->ptrs[index])) in reuseport_array_delete_elem()
79 sk = rcu_dereference_protected(array->ptrs[index], in reuseport_array_delete_elem()
84 RCU_INIT_POINTER(array->ptrs[index], NULL); in reuseport_array_delete_elem()
98 struct reuseport_array *array = reuseport_array(map); in reuseport_array_free() local
129 sk = rcu_dereference(array->ptrs[i]); in reuseport_array_free()
139 RCU_INIT_POINTER(array->ptrs[i], NULL); in reuseport_array_free()
[all …]
core.c:1575 struct bpf_array *array = container_of(map, struct bpf_array, map); in ___bpf_prog_run() local
1579 if (unlikely(index >= array->map.max_entries)) in ___bpf_prog_run()
1586 prog = READ_ONCE(array->ptrs[index]); in ___bpf_prog_run()
1782 bool bpf_prog_array_compatible(struct bpf_array *array, in bpf_prog_array_compatible() argument
1790 spin_lock(&array->aux->owner.lock); in bpf_prog_array_compatible()
1792 if (!array->aux->owner.type) { in bpf_prog_array_compatible()
1796 array->aux->owner.type = fp->type; in bpf_prog_array_compatible()
1797 array->aux->owner.jited = fp->jited; in bpf_prog_array_compatible()
1800 ret = array->aux->owner.type == fp->type && in bpf_prog_array_compatible()
1801 array->aux->owner.jited == fp->jited; in bpf_prog_array_compatible()
[all …]
btf.c:774 const struct btf_array *array; in btf_show_name() local
846 array = btf_type_array(t); in btf_show_name()
849 id = array->type; in btf_show_name()
1641 const struct btf_array *array = NULL; in __btf_resolve_size() local
1670 array = btf_type_array(type); in __btf_resolve_size()
1671 if (nelems && array->nelems > U32_MAX / nelems) in __btf_resolve_size()
1673 nelems *= array->nelems; in __btf_resolve_size()
1674 type = btf_type_by_id(btf, array->type); in __btf_resolve_size()
1695 *elem_id = array ? array->type : 0; in __btf_resolve_size()
2574 const struct btf_array *array = btf_type_array(t); in btf_array_check_meta() local
[all …]
syscall.c:551 const struct bpf_array *array; in bpf_map_show_fdinfo() local
555 array = container_of(map, struct bpf_array, map); in bpf_map_show_fdinfo()
556 spin_lock(&array->aux->owner.lock); in bpf_map_show_fdinfo()
557 type = array->aux->owner.type; in bpf_map_show_fdinfo()
558 jited = array->aux->owner.jited; in bpf_map_show_fdinfo()
559 spin_unlock(&array->aux->owner.lock); in bpf_map_show_fdinfo()
cgroup.c:229 struct bpf_prog_array **array) in compute_effective_progs() argument
267 *array = progs; in compute_effective_progs()
/kernel/trace/
tracing_map.h:173 #define TRACING_MAP_ARRAY_ELT(array, idx) \ argument
174 (array->pages[idx >> array->entry_shift] + \
175 ((idx & array->entry_mask) << array->entry_size_shift))
177 #define TRACING_MAP_ENTRY(array, idx) \ argument
178 ((struct tracing_map_entry *)TRACING_MAP_ARRAY_ELT(array, idx))
180 #define TRACING_MAP_ELT(array, idx) \ argument
181 ((struct tracing_map_elt **)TRACING_MAP_ARRAY_ELT(array, idx))
trace_probe_tmpl.h:105 goto array; in process_fetch_insn_bottom()
109 goto array; in process_fetch_insn_bottom()
143 array: in process_fetch_insn_bottom()
ring_buffer_benchmark.c:139 inc = event->array[0] + 4; in read_page()
151 if (!event->array[0]) { in read_page()
155 inc = event->array[0] + 4; in read_page()
trace_events_synth.c:586 const char *prefix = NULL, *field_type = argv[0], *field_name, *array; in parse_synth_field() local
613 array = strchr(field_name, '['); in parse_synth_field()
614 if (array) in parse_synth_field()
615 len -= strlen(array); in parse_synth_field()
633 if (array) in parse_synth_field()
634 len += strlen(array); in parse_synth_field()
647 if (array) { in parse_synth_field()
648 seq_buf_puts(&s, array); in parse_synth_field()
bpf_trace.c:807 struct bpf_array *array = container_of(map, struct bpf_array, map); in get_map_perf_counter() local
816 if (unlikely(index >= array->map.max_entries)) in get_map_perf_counter()
819 ee = READ_ONCE(array->ptrs[index]); in get_map_perf_counter()
880 struct bpf_array *array = container_of(map, struct bpf_array, map); in __bpf_perf_event_output() local
888 if (unlikely(index >= array->map.max_entries)) in __bpf_perf_event_output()
891 ee = READ_ONCE(array->ptrs[index]); in __bpf_perf_event_output()
1026 struct bpf_array *array = container_of(map, struct bpf_array, map); in BPF_CALL_2() local
1029 if (unlikely(idx >= array->map.max_entries)) in BPF_CALL_2()
1032 cgrp = READ_ONCE(array->ptrs[idx]); in BPF_CALL_2()
ring_buffer.c:128 #define RB_EVNT_HDR_SIZE (offsetof(struct ring_buffer_event, array))
177 length = event->array[0]; in rb_event_data_length()
194 return event->array[0] + RB_EVNT_HDR_SIZE; in rb_event_length()
249 if (length > RB_MAX_SMALL_DATA + sizeof(event->array[0])) in ring_buffer_event_length()
250 length -= sizeof(event->array[0]); in ring_buffer_event_length()
264 return (void *)&event->array[0]; in rb_event_data()
266 return (void *)&event->array[1]; in rb_event_data()
303 ts = event->array[0]; in ring_buffer_event_time_stamp()
2576 event->array[0] = (BUF_PAGE_SIZE - tail) - RB_EVNT_HDR_SIZE; in rb_reset_tail()
2712 event->array[0] = delta >> TS_SHIFT; in rb_add_time_stamp()
[all …]
trace.c:998 ring_buffer_write(buffer, event->array[0], &event->array[1]); in __buffer_unlock_commit()
2773 if ((len < (PAGE_SIZE - sizeof(*entry) - sizeof(entry->array[0]))) && val == 1) { in trace_event_buffer_lock_reserve()
2775 entry->array[0] = len; in trace_event_buffer_lock_reserve()
/kernel/sched/
rt.c:87 struct rt_prio_array *array; in init_rt_rq() local
90 array = &rt_rq->active; in init_rt_rq()
92 INIT_LIST_HEAD(array->queue + i); in init_rt_rq()
93 __clear_bit(i, array->bitmap); in init_rt_rq()
96 __set_bit(MAX_RT_PRIO, array->bitmap); in init_rt_rq()
1164 struct rt_prio_array *array = &rt_rq->active; in dec_rt_prio() local
1167 sched_find_first_bit(array->bitmap); in dec_rt_prio()
1282 static void __delist_rt_entity(struct sched_rt_entity *rt_se, struct rt_prio_array *array) in __delist_rt_entity() argument
1286 if (list_empty(array->queue + rt_se_prio(rt_se))) in __delist_rt_entity()
1287 __clear_bit(rt_se_prio(rt_se), array->bitmap); in __delist_rt_entity()
[all …]
/kernel/cgroup/
cgroup-v1.c:331 pid_t *array; in pidlist_array_load() local
347 array = kvmalloc_array(length, sizeof(pid_t), GFP_KERNEL); in pidlist_array_load()
348 if (!array) in pidlist_array_load()
361 array[n++] = pid; in pidlist_array_load()
366 sort(array, length, sizeof(pid_t), cmppid, NULL); in pidlist_array_load()
367 length = pidlist_uniq(array, length); in pidlist_array_load()
371 kvfree(array); in pidlist_array_load()
377 l->list = array; in pidlist_array_load()
/kernel/
relay.c:82 static void relay_free_page_array(struct page **array) in relay_free_page_array() argument
84 kvfree(array); in relay_free_page_array()