
Searched refs:max_entries (Results 1 – 25 of 142) sorted by relevance

/tools/testing/selftests/bpf/map_tests/
htab_map_batch_ops.c
13 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
27 for (i = 0; i < max_entries; i++) { in map_batch_update()
36 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
40 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
50 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
51 for (i = 0; i < max_entries; i++) { in map_batch_verify()
70 for (i = 0; i < max_entries; i++) { in map_batch_verify()
81 const __u32 max_entries = 10; in __test_map_lookup_and_delete_batch() local
82 value pcpu_values[max_entries]; in __test_map_lookup_and_delete_batch()
98 xattr.max_entries = max_entries; in __test_map_lookup_and_delete_batch()
[all …]
lpm_trie_map_batch_ops.c
21 static void map_batch_update(int map_fd, __u32 max_entries, in map_batch_update() argument
32 for (i = 0; i < max_entries; i++) { in map_batch_update()
39 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
43 static void map_batch_verify(int *visited, __u32 max_entries, in map_batch_verify() argument
50 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
51 for (i = 0; i < max_entries; i++) { in map_batch_verify()
59 for (i = 0; i < max_entries; i++) { in map_batch_verify()
77 const __u32 max_entries = 10; in test_lpm_trie_map_batch_ops() local
85 xattr.max_entries = max_entries; in test_lpm_trie_map_batch_ops()
90 keys = malloc(max_entries * sizeof(struct test_lpm_key)); in test_lpm_trie_map_batch_ops()
[all …]
array_map_batch_ops.c
14 static void map_batch_update(int map_fd, __u32 max_entries, int *keys, in map_batch_update() argument
24 for (i = 0; i < max_entries; i++) { in map_batch_update()
35 err = bpf_map_update_batch(map_fd, keys, values, &max_entries, &opts); in map_batch_update()
39 static void map_batch_verify(int *visited, __u32 max_entries, int *keys, in map_batch_verify() argument
45 memset(visited, 0, max_entries * sizeof(*visited)); in map_batch_verify()
46 for (i = 0; i < max_entries; i++) { in map_batch_verify()
63 for (i = 0; i < max_entries; i++) { in map_batch_verify()
80 const __u32 max_entries = 10; in __test_map_lookup_and_update_batch() local
89 xattr.max_entries = max_entries; in __test_map_lookup_and_update_batch()
98 keys = calloc(max_entries, sizeof(*keys)); in __test_map_lookup_and_update_batch()
[all …]
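
The map_tests hits above all follow the same shape: a map is created with max_entries slots, keys[]/values[] arrays of that length are filled, and max_entries is passed to bpf_map_update_batch() as the in/out element count. Below is a minimal standalone sketch of that pattern, not taken from the files above, assuming a recent libbpf that provides bpf_map_create() and bpf_map_update_batch():

/* Sketch of the batch-update pattern exercised by the map_tests above.
 * Requires root (or CAP_BPF) to actually create the map. */
#include <stdio.h>
#include <bpf/bpf.h>

int main(void)
{
	const __u32 max_entries = 10;
	__u32 i, count = max_entries;
	int keys[10], values[10], map_fd, err;
	DECLARE_LIBBPF_OPTS(bpf_map_batch_opts, opts, .elem_flags = 0, .flags = 0);

	map_fd = bpf_map_create(BPF_MAP_TYPE_HASH, "batch_demo",
				sizeof(int), sizeof(int), max_entries, NULL);
	if (map_fd < 0)
		return 1;

	for (i = 0; i < max_entries; i++) {
		keys[i] = i;
		values[i] = i * 10;
	}

	/* count is in/out: on return it holds how many elements were updated */
	err = bpf_map_update_batch(map_fd, keys, values, &count, &opts);
	printf("batch update: err=%d count=%u\n", err, count);
	return err ? 1 : 0;
}
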
/tools/testing/selftests/bpf/progs/
test_btf_map_in_map.c
8 __uint(max_entries, 1);
16 __uint(max_entries, 2);
23 __uint(max_entries, 3);
31 __uint(max_entries, 1);
47 __uint(max_entries, 3);
56 __uint(max_entries, 5);
63 __uint(max_entries, 3);
69 __uint(max_entries, 1);
83 __uint(max_entries, 5);
99 __uint(max_entries, 1);
[all …]
map_ptr_kern.c
34 __u32 max_entries; member
39 __u32 value_size, __u32 max_entries) in check_bpf_map_fields() argument
44 VERIFY(map->max_entries == max_entries); in check_bpf_map_fields()
56 VERIFY(indirect->max_entries == direct->max_entries); in check_bpf_map_ptr()
63 __u32 key_size, __u32 value_size, __u32 max_entries) in check() argument
67 max_entries)); in check()
101 __uint(max_entries, MAX_ENTRIES);
137 __uint(max_entries, MAX_ENTRIES);
152 for (i = 0; i < array->map.max_entries && i < LOOP_BOUND; ++i) { in check_array()
169 __uint(max_entries, MAX_ENTRIES);
[all …]
lsm.c
14 __uint(max_entries, 1);
21 __uint(max_entries, 1);
28 __uint(max_entries, 1);
35 __uint(max_entries, 1);
42 __uint(max_entries, 1);
49 __uint(max_entries, 1);
56 __uint(max_entries, 1);
63 __uint(max_entries, 1);
73 __uint(max_entries, 1);
sockmap_verdict_prog.c
9 __uint(max_entries, 20);
16 __uint(max_entries, 20);
23 __uint(max_entries, 20);
30 __uint(max_entries, 20);
test_pinning.c
10 __uint(max_entries, 1);
18 __uint(max_entries, 1);
25 __uint(max_entries, 1);
test_stacktrace_map.c
13 __uint(max_entries, 1);
20 __uint(max_entries, 16384);
29 __uint(max_entries, 16384);
36 __uint(max_entries, 16384);
test_stacktrace_build_id.c
13 __uint(max_entries, 1);
20 __uint(max_entries, 16384);
29 __uint(max_entries, 128);
37 __uint(max_entries, 128);
test_sockmap_update.c
8 __uint(max_entries, 1);
15 __uint(max_entries, 1);
22 __uint(max_entries, 1);
bpf_iter_sockmap.c
13 __uint(max_entries, 64);
20 __uint(max_entries, 64);
27 __uint(max_entries, 64);
linked_maps1.c
15 __uint(max_entries, 16);
22 __uint(max_entries, 8);
37 __uint(max_entries, 16);
linked_maps2.c
13 __uint(max_entries, 16);
23 __uint(max_entries, 8);
31 __uint(max_entries, 16);
test_map_in_map_invalid.c
10 __uint(max_entries, 4);
15 __uint(max_entries, 0); /* This will make map creation to fail */
sample_map_ret0.c
9 .max_entries = 2,
16 .max_entries = 2,
recursion.c
12 __uint(max_entries, 1);
19 __uint(max_entries, 1);
test_pe_preserve_elems.c
9 __uint(max_entries, 1);
16 __uint(max_entries, 1);
test_select_reuseport_kern.c
26 __uint(max_entries, 1);
33 __uint(max_entries, NR_RESULTS);
40 __uint(max_entries, 1);
47 __uint(max_entries, 1);
54 __uint(max_entries, 1);
xdp_redirect_multi_kern.c
19 __uint(max_entries, 1024);
26 __uint(max_entries, 128);
34 __uint(max_entries, 128);
bpf_iter_bpf_hash_map.c
16 __uint(max_entries, 3);
23 __uint(max_entries, 3);
30 __uint(max_entries, 3);
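
Every hit under progs/ uses the BTF-defined map declaration style, where __uint(max_entries, N) fixes the map's capacity when the program is loaded. A small illustrative BPF program in that style follows; the map name, section, and sizes are made up for the example and are not from any file listed above:

/* Illustrative BTF-defined map: max_entries caps the array at 16 slots. */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, 16);
	__type(key, __u32);
	__type(value, __u64);
} counters SEC(".maps");

SEC("xdp")
int count_packets(struct xdp_md *ctx)
{
	__u32 key = 0;
	__u64 *val = bpf_map_lookup_elem(&counters, &key);

	if (val)
		__sync_fetch_and_add(val, 1);
	return XDP_PASS;
}

char _license[] SEC("license") = "GPL";
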
/tools/testing/selftests/bpf/prog_tests/
for_each.c
12 int i, err, hashmap_fd, max_entries, percpu_map_fd; in test_hash_map() local
23 max_entries = bpf_map__max_entries(skel->maps.hashmap); in test_hash_map()
24 for (i = 0; i < max_entries; i++) { in test_hash_map()
53 ASSERT_EQ(skel->bss->hashmap_elems, max_entries, "hashmap_elems"); in test_hash_map()
72 __u32 key, num_cpus, max_entries, retval; in test_array_map() local
84 max_entries = bpf_map__max_entries(skel->maps.arraymap); in test_array_map()
85 for (i = 0; i < max_entries; i++) { in test_array_map()
89 if (i != max_entries - 1) in test_array_map()
btf.c
75 __u32 max_entries; member
143 .max_entries = 4,
198 .max_entries = 4,
223 .max_entries = 4,
264 .max_entries = 4,
309 .max_entries = 1,
331 .max_entries = 1,
353 .max_entries = 1,
375 .max_entries = 1,
400 .max_entries = 1,
[all …]
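
The prog_tests hits read the declared capacity back from a loaded object with the libbpf getter bpf_map__max_entries() and size their loops with it. A hedged sketch of that usage; the object file "prog_demo.bpf.o" and the map name "hashmap" are placeholders, not files from the results:

/* Sketch: query max_entries of a map declared inside a BPF object file. */
#include <stdio.h>
#include <bpf/libbpf.h>

int main(void)
{
	struct bpf_object *obj;
	struct bpf_map *map;

	obj = bpf_object__open_file("prog_demo.bpf.o", NULL);
	if (libbpf_get_error(obj))
		return 1;

	map = bpf_object__find_map_by_name(obj, "hashmap");
	if (!map) {
		bpf_object__close(obj);
		return 1;
	}

	printf("hashmap max_entries = %u\n", bpf_map__max_entries(map));
	bpf_object__close(obj);
	return 0;
}
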
/tools/lib/perf/
cpumap.c
109 int max_entries = 0; in perf_cpu_map__read() local
125 if (new_max >= max_entries) { in perf_cpu_map__read()
126 max_entries = new_max + MAX_NR_CPUS / 2; in perf_cpu_map__read()
127 tmp = realloc(tmp_cpus, max_entries * sizeof(int)); in perf_cpu_map__read()
136 if (nr_cpus == max_entries) { in perf_cpu_map__read()
137 max_entries += MAX_NR_CPUS; in perf_cpu_map__read()
138 tmp = realloc(tmp_cpus, max_entries * sizeof(int)); in perf_cpu_map__read()
183 int max_entries = 0; in perf_cpu_map__new() local
226 if (nr_cpus == max_entries) { in perf_cpu_map__new()
227 max_entries += MAX_NR_CPUS; in perf_cpu_map__new()
[all …]
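
In cpumap.c, max_entries is not a BPF map size at all; it is the allocated capacity of a growable int array, bumped by a chunk and realloc()ed whenever nr_cpus catches up to it. A standalone sketch of that grow-by-realloc pattern, with CHUNK standing in for perf's MAX_NR_CPUS constant:

/* Read integers from stdin into an array that grows CHUNK entries at a time. */
#include <stdio.h>
#include <stdlib.h>

#define CHUNK 64

int main(void)
{
	int *cpus = NULL, *tmp;
	int nr_cpus = 0, max_entries = 0, cpu;

	while (scanf("%d", &cpu) == 1) {
		if (nr_cpus == max_entries) {
			max_entries += CHUNK;
			tmp = realloc(cpus, max_entries * sizeof(int));
			if (!tmp) {
				free(cpus);
				return 1;
			}
			cpus = tmp;
		}
		cpus[nr_cpus++] = cpu;
	}

	printf("read %d cpus (capacity %d)\n", nr_cpus, max_entries);
	free(cpus);
	return 0;
}
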
/tools/include/linux/
stacktrace.h
8 unsigned int nr_entries, max_entries; member
20 backtrace((void **)(trace)->entries, (trace)->max_entries))
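
In the userspace stacktrace.h stub, max_entries is the capacity of a fixed frame buffer and the capture is mapped onto glibc backtrace(). An illustrative standalone version of that pairing; the struct below is a simplified stand-in, not the kernel's struct stack_trace:

/* Capture the current call chain into a fixed-size buffer with backtrace(). */
#include <execinfo.h>
#include <stdio.h>

struct demo_trace {
	unsigned int nr_entries, max_entries;
	void *entries[32];
};

static void capture(struct demo_trace *trace)
{
	trace->max_entries = 32;
	trace->nr_entries = backtrace(trace->entries, trace->max_entries);
}

int main(void)
{
	struct demo_trace trace;

	capture(&trace);
	printf("captured %u of at most %u frames\n",
	       trace.nr_entries, trace.max_entries);
	return 0;
}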
