
Searched for refs:map (results 1 – 25 of 273), sorted by relevance


/tools/perf/util/
map.h:18 struct map { struct
32 u64 (*map_ip)(struct map *, u64); argument
34 u64 (*unmap_ip)(struct map *, u64); argument
43 struct kmap *__map__kmap(struct map *map); argument
44 struct kmap *map__kmap(struct map *map);
45 struct maps *map__kmaps(struct map *map);
47 static inline u64 map__map_ip(struct map *map, u64 ip) in map__map_ip() argument
49 return ip - map->start + map->pgoff; in map__map_ip()
52 static inline u64 map__unmap_ip(struct map *map, u64 ip) in map__unmap_ip() argument
54 return ip + map->start - map->pgoff; in map__unmap_ip()
[all …]
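The map__map_ip()/map__unmap_ip() helpers above convert between a runtime instruction pointer and a DSO-relative address using only the mapping start and the file offset. A minimal standalone sketch of that arithmetic; the struct and names are toy stand-ins, only the two formulas come from the snippet:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t u64;

    /* Toy stand-in holding just the fields the formulas need. */
    struct toy_map { u64 start, end, pgoff; };

    /* runtime address -> DSO-relative address, as in map__map_ip() */
    static u64 toy_map_ip(const struct toy_map *m, u64 ip)    { return ip - m->start + m->pgoff; }
    /* DSO-relative address -> runtime address, as in map__unmap_ip() */
    static u64 toy_unmap_ip(const struct toy_map *m, u64 rip) { return rip + m->start - m->pgoff; }

    int main(void)
    {
        struct toy_map m = { .start = 0x400000, .end = 0x500000, .pgoff = 0x1000 };
        u64 ip = 0x401234;
        u64 rel = toy_map_ip(&m, ip);

        printf("runtime 0x%" PRIx64 " -> dso-relative 0x%" PRIx64 " -> back 0x%" PRIx64 "\n",
               ip, rel, toy_unmap_ip(&m, rel));
        return 0;
    }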
map.c:28 static void __maps__insert(struct maps *maps, struct map *map);
116 void map__init(struct map *map, u64 start, u64 end, u64 pgoff, struct dso *dso) in map__init() argument
118 map->start = start; in map__init()
119 map->end = end; in map__init()
120 map->pgoff = pgoff; in map__init()
121 map->reloc = 0; in map__init()
122 map->dso = dso__get(dso); in map__init()
123 map->map_ip = map__map_ip; in map__init()
124 map->unmap_ip = map__unmap_ip; in map__init()
125 RB_CLEAR_NODE(&map->rb_node); in map__init()
[all …]
hashmap.c:38 void hashmap__init(struct hashmap *map, hashmap_hash_fn hash_fn, in hashmap__init() argument
41 map->hash_fn = hash_fn; in hashmap__init()
42 map->equal_fn = equal_fn; in hashmap__init()
43 map->ctx = ctx; in hashmap__init()
45 map->buckets = NULL; in hashmap__init()
46 map->cap = 0; in hashmap__init()
47 map->cap_bits = 0; in hashmap__init()
48 map->sz = 0; in hashmap__init()
55 struct hashmap *map = malloc(sizeof(struct hashmap)); in hashmap__new() local
57 if (!map) in hashmap__new()
[all …]
mmap.c:39 size_t mmap__mmap_len(struct mmap *map) in mmap__mmap_len() argument
41 return perf_mmap__mmap_len(&map->core); in mmap__mmap_len()
71 static int perf_mmap__aio_enabled(struct mmap *map) in perf_mmap__aio_enabled() argument
73 return map->aio.nr_cblocks > 0; in perf_mmap__aio_enabled()
77 static int perf_mmap__aio_alloc(struct mmap *map, int idx) in perf_mmap__aio_alloc() argument
79 map->aio.data[idx] = mmap(NULL, mmap__mmap_len(map), PROT_READ|PROT_WRITE, in perf_mmap__aio_alloc()
81 if (map->aio.data[idx] == MAP_FAILED) { in perf_mmap__aio_alloc()
82 map->aio.data[idx] = NULL; in perf_mmap__aio_alloc()
89 static void perf_mmap__aio_free(struct mmap *map, int idx) in perf_mmap__aio_free() argument
91 if (map->aio.data[idx]) { in perf_mmap__aio_free()
[all …]
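perf_mmap__aio_alloc() above is the usual anonymous-mmap allocation pattern: the error value returned by mmap() is MAP_FAILED rather than NULL, so the slot is reset when the call fails. A self-contained sketch of the same pattern outside perf (the flag combination is an assumption, since the snippet's flag list is truncated):

    #include <stdio.h>
    #include <sys/mman.h>
    #include <unistd.h>

    int main(void)
    {
        long len = sysconf(_SC_PAGESIZE);
        /* Anonymous, private mapping; failure is signalled by MAP_FAILED, not NULL. */
        void *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

        if (buf == MAP_FAILED) {
            perror("mmap");
            return 1;
        }
        ((char *)buf)[0] = 42;   /* usable like ordinary memory once mapped */
        munmap(buf, len);
        return 0;
    }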
maps.h:14 struct map;
18 struct map *maps__find(struct maps *maps, u64 addr);
19 struct map *maps__first(struct maps *maps);
20 struct map *map__next(struct map *map);
22 #define maps__for_each_entry(maps, map) \ argument
23 for (map = maps__first(maps); map; map = map__next(map))
25 #define maps__for_each_entry_safe(maps, map, next) \ argument
26 for (map = maps__first(maps), next = map__next(map); map; map = next, next = map__next(map))
32 struct map *last_search_by_name;
33 struct map **maps_by_name;
[all …]
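maps__for_each_entry() and maps__for_each_entry_safe() above are the usual iterator-macro pair: the safe variant fetches the next element before the body runs, so the current element may be removed or freed inside the loop. A toy standalone analogue over a singly linked list (names invented; only the loop shape mirrors the macros):

    #include <stdio.h>
    #include <stdlib.h>

    struct node { int val; struct node *next; };

    /* Plain iterator, same shape as maps__for_each_entry(). */
    #define list_for_each(head, n) \
            for (n = (head); n; n = n->next)

    /* Safe iterator: remember the successor so the body may free the current node. */
    #define list_for_each_safe(head, n, tmp) \
            for (n = (head), tmp = n ? n->next : NULL; n; \
                 n = tmp, tmp = n ? n->next : NULL)

    int main(void)
    {
        struct node *head = NULL, *n, *tmp;

        for (int i = 0; i < 3; i++) {
            n = malloc(sizeof(*n));
            if (!n)
                break;
            n->val = i;
            n->next = head;
            head = n;
        }
        list_for_each(head, n)
            printf("%d\n", n->val);
        list_for_each_safe(head, n, tmp)
            free(n);             /* would be use-after-free with the plain iterator */
        return 0;
    }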
hashmap.h:73 void hashmap__init(struct hashmap *map, hashmap_hash_fn hash_fn,
78 void hashmap__clear(struct hashmap *map);
79 void hashmap__free(struct hashmap *map);
81 size_t hashmap__size(const struct hashmap *map);
82 size_t hashmap__capacity(const struct hashmap *map);
112 int hashmap__insert(struct hashmap *map, const void *key, void *value,
116 static inline int hashmap__add(struct hashmap *map, in hashmap__add() argument
119 return hashmap__insert(map, key, value, HASHMAP_ADD, NULL, NULL); in hashmap__add()
122 static inline int hashmap__set(struct hashmap *map, in hashmap__set() argument
126 return hashmap__insert(map, key, value, HASHMAP_SET, in hashmap__set()
[all …]
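Together with the hashmap.c result above, these declarations cover the whole small-hashmap API that both tools/lib/bpf and perf carry. A hedged usage sketch, assuming the code is built inside the tools tree (for hashmap.h and linux/err.h), that keys and values are passed as pointer-sized handles, and that the callback typedefs take (key, ctx) and (key1, key2, ctx):

    #include <stdbool.h>
    #include <stdio.h>
    #include <linux/err.h>   /* IS_ERR(), as used by the selftest further down */
    #include "hashmap.h"

    static size_t my_hash(const void *key, void *ctx)
    {
        return (size_t)key;          /* keys are small integers stuffed into the pointer */
    }

    static bool my_equal(const void *a, const void *b, void *ctx)
    {
        return a == b;
    }

    int main(void)
    {
        struct hashmap *map = hashmap__new(my_hash, my_equal, NULL);
        void *val;

        if (IS_ERR(map))             /* errors come back as ERR_PTR, not NULL */
            return 1;
        hashmap__add(map, (void *)1, (void *)100);
        hashmap__set(map, (void *)1, (void *)200, NULL, NULL);
        if (hashmap__find(map, (void *)1, &val))
            printf("key 1 -> %ld, size %zu\n", (long)val, hashmap__size(map));
        hashmap__free(map);
        return 0;
    }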
cpumap.c:23 struct perf_cpu_map *map; in cpu_map__from_entries() local
25 map = perf_cpu_map__empty_new(cpus->nr); in cpu_map__from_entries()
26 if (map) { in cpu_map__from_entries()
36 map->map[i] = -1; in cpu_map__from_entries()
38 map->map[i] = (int) cpus->cpu[i]; in cpu_map__from_entries()
42 return map; in cpu_map__from_entries()
47 struct perf_cpu_map *map; in cpu_map__from_mask() local
52 map = perf_cpu_map__empty_new(nr); in cpu_map__from_mask()
53 if (map) { in cpu_map__from_mask()
57 map->map[i++] = cpu; in cpu_map__from_mask()
[all …]
/tools/lib/perf/
mmap.c:16 void perf_mmap__init(struct perf_mmap *map, struct perf_mmap *prev, in perf_mmap__init() argument
19 map->fd = -1; in perf_mmap__init()
20 map->overwrite = overwrite; in perf_mmap__init()
21 map->unmap_cb = unmap_cb; in perf_mmap__init()
22 refcount_set(&map->refcnt, 0); in perf_mmap__init()
24 prev->next = map; in perf_mmap__init()
27 size_t perf_mmap__mmap_len(struct perf_mmap *map) in perf_mmap__mmap_len() argument
29 return map->mask + 1 + page_size; in perf_mmap__mmap_len()
32 int perf_mmap__mmap(struct perf_mmap *map, struct perf_mmap_param *mp, in perf_mmap__mmap() argument
35 map->prev = 0; in perf_mmap__mmap()
[all …]
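perf_mmap__mmap_len() above encodes the ring-buffer layout: the stored mask is the data-area length minus one, and one extra page in front holds the control header, so the full mapping is mask + 1 + page_size. A small sketch of that arithmetic (the power-of-two number of data pages is an assumption about how the mask gets chosen):

    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        long page_size = sysconf(_SC_PAGESIZE);
        unsigned long data_pages = 8;                      /* assumed power of two */
        unsigned long mask = data_pages * page_size - 1;   /* e.g. 0x7fff with 4 KiB pages */

        /* one header page + 2^n data pages, as in perf_mmap__mmap_len() */
        printf("mmap length = %lu bytes\n", mask + 1 + page_size);
        return 0;
    }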
threadmap.c:10 static void perf_thread_map__reset(struct perf_thread_map *map, int start, int nr) in perf_thread_map__reset() argument
12 size_t size = (nr - start) * sizeof(map->map[0]); in perf_thread_map__reset()
14 memset(&map->map[start], 0, size); in perf_thread_map__reset()
15 map->err_thread = -1; in perf_thread_map__reset()
18 struct perf_thread_map *perf_thread_map__realloc(struct perf_thread_map *map, int nr) in perf_thread_map__realloc() argument
20 size_t size = sizeof(*map) + sizeof(map->map[0]) * nr; in perf_thread_map__realloc()
21 int start = map ? map->nr : 0; in perf_thread_map__realloc()
23 map = realloc(map, size); in perf_thread_map__realloc()
27 if (map) in perf_thread_map__realloc()
28 perf_thread_map__reset(map, start, nr); in perf_thread_map__realloc()
[all …]
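perf_thread_map__realloc() grows a struct ending in a flexible array: the new allocation is the header plus nr elements, and only the freshly added tail gets zeroed. A standalone toy version of the same pattern (type and function names are invented; the sizing and memset mirror the snippet):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct int_array {
        int nr;
        int data[];                  /* flexible array member */
    };

    static struct int_array *int_array_realloc(struct int_array *a, int nr)
    {
        int start = a ? a->nr : 0;   /* old length; 0 when growing from scratch */
        struct int_array *tmp = realloc(a, sizeof(*a) + sizeof(a->data[0]) * nr);

        if (!tmp)
            return NULL;             /* caller still owns the old block */
        memset(&tmp->data[start], 0, (nr - start) * sizeof(tmp->data[0]));
        tmp->nr = nr;
        return tmp;
    }

    int main(void)
    {
        struct int_array *a = int_array_realloc(NULL, 4);

        if (a)
            a = int_array_realloc(a, 8);
        if (a)
            printf("nr=%d last=%d\n", a->nr, a->data[7]);
        free(a);
        return 0;
    }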
cpumap.c:19 cpus->map[0] = -1; in perf_cpu_map__dummy_new()
26 static void cpu_map__delete(struct perf_cpu_map *map) in cpu_map__delete() argument
28 if (map) { in cpu_map__delete()
29 WARN_ONCE(refcount_read(&map->refcnt) != 0, in cpu_map__delete()
31 free(map); in cpu_map__delete()
35 struct perf_cpu_map *perf_cpu_map__get(struct perf_cpu_map *map) in perf_cpu_map__get() argument
37 if (map) in perf_cpu_map__get()
38 refcount_inc(&map->refcnt); in perf_cpu_map__get()
39 return map; in perf_cpu_map__get()
42 void perf_cpu_map__put(struct perf_cpu_map *map) in perf_cpu_map__put() argument
[all …]
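perf_cpu_map__get()/__put() above are the standard get/put reference-counting pair: get bumps the count and hands the pointer back so calls can be chained, put drops the count and frees at zero. A minimal single-threaded sketch of the idiom (a real implementation uses atomic refcount_t, as the snippet does):

    #include <stdlib.h>

    struct object {
        int refcnt;                  /* plain int keeps the sketch single-threaded */
    };

    static struct object *object_new(void)
    {
        struct object *o = calloc(1, sizeof(*o));

        if (o)
            o->refcnt = 1;           /* the creator holds the first reference */
        return o;
    }

    static struct object *object_get(struct object *o)
    {
        if (o)
            o->refcnt++;
        return o;                    /* returning o lets callers chain the get */
    }

    static void object_put(struct object *o)
    {
        if (o && --o->refcnt == 0)
            free(o);
    }

    int main(void)
    {
        struct object *a = object_new();
        struct object *b = object_get(a);    /* a second owner */

        object_put(a);
        object_put(b);                       /* last put frees the object */
        return 0;
    }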
/tools/lib/bpf/
hashmap.c:38 void hashmap__init(struct hashmap *map, hashmap_hash_fn hash_fn, in hashmap__init() argument
41 map->hash_fn = hash_fn; in hashmap__init()
42 map->equal_fn = equal_fn; in hashmap__init()
43 map->ctx = ctx; in hashmap__init()
45 map->buckets = NULL; in hashmap__init()
46 map->cap = 0; in hashmap__init()
47 map->cap_bits = 0; in hashmap__init()
48 map->sz = 0; in hashmap__init()
55 struct hashmap *map = malloc(sizeof(struct hashmap)); in hashmap__new() local
57 if (!map) in hashmap__new()
[all …]
hashmap.h:73 void hashmap__init(struct hashmap *map, hashmap_hash_fn hash_fn,
78 void hashmap__clear(struct hashmap *map);
79 void hashmap__free(struct hashmap *map);
81 size_t hashmap__size(const struct hashmap *map);
82 size_t hashmap__capacity(const struct hashmap *map);
112 int hashmap__insert(struct hashmap *map, const void *key, void *value,
116 static inline int hashmap__add(struct hashmap *map, in hashmap__add() argument
119 return hashmap__insert(map, key, value, HASHMAP_ADD, NULL, NULL); in hashmap__add()
122 static inline int hashmap__set(struct hashmap *map, in hashmap__set() argument
126 return hashmap__insert(map, key, value, HASHMAP_SET, in hashmap__set()
[all …]
/tools/testing/selftests/bpf/prog_tests/
hashmap.c:48 struct hashmap *map; in test_hashmap_generic() local
50 map = hashmap__new(hash_fn, equal_fn, NULL); in test_hashmap_generic()
51 if (CHECK(IS_ERR(map), "hashmap__new", in test_hashmap_generic()
52 "failed to create map: %ld\n", PTR_ERR(map))) in test_hashmap_generic()
59 err = hashmap__update(map, k, v, &oldk, &oldv); in test_hashmap_generic()
65 err = hashmap__add(map, k, v); in test_hashmap_generic()
67 err = hashmap__set(map, k, v, &oldk, &oldv); in test_hashmap_generic()
77 if (CHECK(!hashmap__find(map, k, &oldv), "elem_find", in test_hashmap_generic()
85 if (CHECK(hashmap__size(map) != ELEM_CNT, "hashmap__size", in test_hashmap_generic()
86 "invalid map size: %zu\n", hashmap__size(map))) in test_hashmap_generic()
[all …]
/tools/perf/tests/
mem2node.c:15 const char *map; member
17 { .node = 0, .map = "0" },
18 { .node = 1, .map = "1-2" },
19 { .node = 3, .map = "5-7,9" },
26 struct perf_cpu_map *map = perf_cpu_map__new(str); in get_bitmap() local
32 if (map && bm) { in get_bitmap()
33 for (i = 0; i < map->nr; i++) { in get_bitmap()
34 set_bit(map->map[i], bm); in get_bitmap()
38 if (map) in get_bitmap()
39 perf_cpu_map__put(map); in get_bitmap()
[all …]
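The test table pairs node IDs with CPU-list strings such as "1-2" and "5-7,9", and get_bitmap() relies on perf_cpu_map__new() to expand those strings before setting bits. A standalone sketch of that expansion, using a plain unsigned long as the bitmap, so CPU numbers must stay below 64:

    #include <stdio.h>
    #include <stdlib.h>

    static unsigned long cpulist_to_mask(const char *s)
    {
        unsigned long mask = 0;

        while (*s) {
            char *end;
            long lo = strtol(s, &end, 10), hi = lo;

            if (end == s)            /* not a number: stop instead of spinning */
                break;
            if (*end == '-')         /* "a-b" range */
                hi = strtol(end + 1, &end, 10);
            for (long c = lo; c >= 0 && c <= hi && c < 64; c++)
                mask |= 1UL << c;
            s = (*end == ',') ? end + 1 : end;
        }
        return mask;
    }

    int main(void)
    {
        printf("%#lx\n", cpulist_to_mask("5-7,9"));   /* prints 0x2e0 */
        return 0;
    }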
thread-map.c:24 struct perf_thread_map *map; in test__thread_map() local
30 map = thread_map__new_by_pid(getpid()); in test__thread_map()
31 TEST_ASSERT_VAL("failed to alloc map", map); in test__thread_map()
33 thread_map__read_comms(map); in test__thread_map()
35 TEST_ASSERT_VAL("wrong nr", map->nr == 1); in test__thread_map()
37 perf_thread_map__pid(map, 0) == getpid()); in test__thread_map()
39 perf_thread_map__comm(map, 0) && in test__thread_map()
40 !strcmp(perf_thread_map__comm(map, 0), NAME)); in test__thread_map()
42 refcount_read(&map->refcnt) == 1); in test__thread_map()
43 perf_thread_map__put(map); in test__thread_map()
[all …]
maps.c:18 struct map *map; in check_maps() local
21 maps__for_each_entry(maps, map) { in check_maps()
23 TEST_ASSERT_VAL("less maps expected", (map && i < size) || (!map && i == size)); in check_maps()
25 TEST_ASSERT_VAL("wrong map start", map->start == merged[i].start); in check_maps()
26 TEST_ASSERT_VAL("wrong map end", map->end == merged[i].end); in check_maps()
27 TEST_ASSERT_VAL("wrong map name", !strcmp(map->dso->name, merged[i].name)); in check_maps()
28 TEST_ASSERT_VAL("wrong map refcnt", refcount_read(&map->refcnt) == 1); in check_maps()
64 struct map *map_kcore1, *map_kcore2, *map_kcore3; in test__maps__merge_in()
70 struct map *map; in test__maps__merge_in() local
72 map = dso__new_map(bpf_progs[i].name); in test__maps__merge_in()
[all …]
cpumap.c:22 struct perf_cpu_map *map; in process_event_mask() local
37 map = cpu_map__new_data(data); in process_event_mask()
38 TEST_ASSERT_VAL("wrong nr", map->nr == 20); in process_event_mask()
41 TEST_ASSERT_VAL("wrong cpu", map->map[i] == i); in process_event_mask()
44 perf_cpu_map__put(map); in process_event_mask()
56 struct perf_cpu_map *map; in process_event_cpus() local
68 map = cpu_map__new_data(data); in process_event_cpus()
69 TEST_ASSERT_VAL("wrong nr", map->nr == 2); in process_event_cpus()
70 TEST_ASSERT_VAL("wrong cpu", map->map[0] == 1); in process_event_cpus()
71 TEST_ASSERT_VAL("wrong cpu", map->map[1] == 256); in process_event_cpus()
[all …]
mmap-thread-lookup.c:28 void *map; member
36 void *map; in thread_init() local
38 map = mmap(NULL, page_size, in thread_init()
42 if (map == MAP_FAILED) { in thread_init()
47 td->map = map; in thread_init()
50 pr_debug("tid = %d, map = %p\n", td->tid, map); in thread_init()
75 munmap(td->map, page_size); in thread_fn()
122 munmap(td0->map, page_size); in threads_destroy()
143 struct perf_thread_map *map; in synth_process() local
146 map = thread_map__new_by_pid(getpid()); in synth_process()
[all …]
vmlinux-kallsyms.c:22 struct map *kallsyms_map, *vmlinux_map, *map; in test__vmlinux_matches_kallsyms() local
185 maps__for_each_entry(maps, map) { in test__vmlinux_matches_kallsyms()
186 struct map * in test__vmlinux_matches_kallsyms()
193 pair = maps__find_by_name(&kallsyms.kmaps, (map->dso->kernel ? in test__vmlinux_matches_kallsyms()
194 map->dso->short_name : in test__vmlinux_matches_kallsyms()
195 map->dso->name)); in test__vmlinux_matches_kallsyms()
203 map__fprintf(map, stderr); in test__vmlinux_matches_kallsyms()
209 maps__for_each_entry(maps, map) { in test__vmlinux_matches_kallsyms()
210 struct map *pair; in test__vmlinux_matches_kallsyms()
212 mem_start = vmlinux_map->unmap_ip(vmlinux_map, map->start); in test__vmlinux_matches_kallsyms()
[all …]
/tools/testing/selftests/vm/
mlock2-tests.c:181 static int unlock_lock_check(char *map) in unlock_lock_check() argument
183 if (is_vmflag_set((unsigned long)map, LOCKED)) { in unlock_lock_check()
193 char *map; in test_mlock_lock() local
197 map = mmap(NULL, 2 * page_size, PROT_READ | PROT_WRITE, in test_mlock_lock()
199 if (map == MAP_FAILED) { in test_mlock_lock()
204 if (mlock2_(map, 2 * page_size, 0)) { in test_mlock_lock()
213 if (!lock_check((unsigned long)map)) in test_mlock_lock()
217 if (munlock(map, 2 * page_size)) { in test_mlock_lock()
222 ret = unlock_lock_check(map); in test_mlock_lock()
225 munmap(map, 2 * page_size); in test_mlock_lock()
[all …]
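test_mlock_lock() maps two anonymous pages, locks them with the test's local mlock2_() wrapper, checks the vma flags in /proc, then unlocks. A self-contained version of the same allocate/lock/unlock sequence using the plain mlock() libc wrapper and skipping the /proc checks:

    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <unistd.h>

    int main(void)
    {
        long page_size = sysconf(_SC_PAGESIZE);
        size_t len = 2 * page_size;
        char *map = mmap(NULL, len, PROT_READ | PROT_WRITE,
                         MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);

        if (map == MAP_FAILED) {
            perror("mmap");
            return 1;
        }
        memset(map, 0, len);                 /* fault the pages in */
        if (mlock(map, len))                 /* pin them in RAM (may need RLIMIT_MEMLOCK) */
            perror("mlock");
        else if (munlock(map, len))          /* drop the pin again */
            perror("munlock");
        munmap(map, len);
        return 0;
    }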
/tools/testing/selftests/bpf/progs/
map_ptr_kern.c:42 static inline int check_bpf_map_fields(struct bpf_map *map, __u32 key_size, in check_bpf_map_fields() argument
45 VERIFY(map->map_type == g_map_type); in check_bpf_map_fields()
46 VERIFY(map->key_size == key_size); in check_bpf_map_fields()
47 VERIFY(map->value_size == value_size); in check_bpf_map_fields()
48 VERIFY(map->max_entries == max_entries); in check_bpf_map_fields()
49 VERIFY(map->id > 0); in check_bpf_map_fields()
50 VERIFY(map->memory.pages > 0); in check_bpf_map_fields()
98 struct bpf_map map; member
115 struct bpf_map *map = (struct bpf_map *)&m_hash; in check_hash() local
118 VERIFY(check_default_noinline(&hash->map, map)); in check_hash()
[all …]
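check_bpf_map_fields() above verifies that the struct bpf_map the program sees matches its declaration: key_size, value_size, max_entries and a nonzero id. A hedged sketch of the kind of BTF-described map definition those fields come from, assuming libbpf's bpf_helpers.h macros and a clang -target bpf build; the map and section names are hypothetical, not the ones in map_ptr_kern.c:

    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    struct {
        __uint(type, BPF_MAP_TYPE_HASH);
        __uint(max_entries, 128);            /* map->max_entries */
        __type(key, __u32);                  /* map->key_size   == 4 */
        __type(value, __u64);                /* map->value_size == 8 */
    } my_hash_map SEC(".maps");

    SEC("tp/syscalls/sys_enter_getpid")
    int count_getpid(void *ctx)
    {
        __u32 key = 0;
        __u64 one = 1, *val = bpf_map_lookup_elem(&my_hash_map, &key);

        if (val)
            __sync_fetch_and_add(val, 1);
        else
            bpf_map_update_elem(&my_hash_map, &key, &one, BPF_ANY);
        return 0;
    }

    char _license[] SEC("license") = "GPL";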
bpf_iter_bpf_map.c:14 struct bpf_map *map = ctx->map; in dump_bpf_map() local
16 if (map == (void *)0) { in dump_bpf_map()
24 BPF_SEQ_PRINTF(seq, "%8u %8ld %8ld %10lu\n", map->id, map->refcnt.counter, in dump_bpf_map()
25 map->usercnt.counter, in dump_bpf_map()
26 map->memory.user->locked_vm.counter); in dump_bpf_map()
/tools/perf/pmu-events/
json.c:48 char *map = NULL; in mapfile() local
63 map = mmap(NULL, in mapfile()
66 if (map == MAP_FAILED) in mapfile()
67 map = NULL; in mapfile()
70 return map; in mapfile()
73 static void unmapfile(char *map, size_t size) in unmapfile() argument
76 munmap(map, roundup(size, ps)); in unmapfile()
83 jsmntok_t *parse_json(const char *fn, char **map, size_t *size, int *len) in parse_json() argument
90 *map = mapfile(fn, size); in parse_json()
91 if (!*map) in parse_json()
[all …]
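mapfile()/unmapfile() above wrap a read-only file mapping for the JSON parser: stat the file for its size, mmap it with PROT_READ, and munmap (rounded up to the page size) when done. A standalone sketch of that open/fstat/mmap sequence:

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/mman.h>
    #include <sys/stat.h>
    #include <unistd.h>

    int main(int argc, char **argv)
    {
        const char *fn = argc > 1 ? argv[1] : "/etc/hostname";
        struct stat st;
        int fd = open(fn, O_RDONLY);

        if (fd < 0 || fstat(fd, &st) < 0 || st.st_size == 0)
            return 1;

        char *map = mmap(NULL, st.st_size, PROT_READ, MAP_PRIVATE, fd, 0);

        close(fd);                           /* the mapping stays valid after close */
        if (map == MAP_FAILED)
            return 1;
        fwrite(map, 1, st.st_size, stdout);  /* the file contents, e.g. JSON text */
        munmap(map, st.st_size);
        return 0;
    }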
/tools/bpf/bpftool/Documentation/
bpftool-map.rst:2 bpftool-map
13 **bpftool** [*OPTIONS*] **map** *COMMAND*
24 | **bpftool** **map** { **show** | **list** } [*MAP*]
25 | **bpftool** **map create** *FILE* **type** *TYPE* **key** *KEY_SIZE* **value** *VALUE_SIZE* \
28 | **bpftool** **map dump** *MAP*
29 | **bpftool** **map update** *MAP* [**key** *DATA*] [**value** *VALUE*] [*UPDATE_FLAGS*]
30 | **bpftool** **map lookup** *MAP* [**key** *DATA*]
31 | **bpftool** **map getnext** *MAP* [**key** *DATA*]
32 | **bpftool** **map delete** *MAP* **key** *DATA*
33 | **bpftool** **map pin** *MAP* *FILE*
[all …]
/tools/perf/arch/powerpc/util/
sym-handling.c:79 struct probe_trace_event *tev, struct map *map, in arch__fix_tev_from_maps() argument
94 if (pev->point.offset || !map || !sym) in arch__fix_tev_from_maps()
107 if (map->dso->symtab_type == DSO_BINARY_TYPE__KALLSYMS) in arch__fix_tev_from_maps()
122 struct map *map; in arch__post_process_probe_trace_events() local
127 map = get_target_map(pev->target, pev->nsi, pev->uprobes); in arch__post_process_probe_trace_events()
128 if (!map || map__load(map) < 0) in arch__post_process_probe_trace_events()
133 map__for_each_symbol(map, sym, tmp) { in arch__post_process_probe_trace_events()
134 if (map->unmap_ip(map, sym->start) == tev->point.address) { in arch__post_process_probe_trace_events()
135 arch__fix_tev_from_maps(pev, tev, map, sym); in arch__post_process_probe_trace_events()
