/*
 * This radix tree implementation is tailored to the singular purpose of
 * associating metadata with chunks that are currently owned by jemalloc.
 *
 *******************************************************************************
 */
#ifdef JEMALLOC_H_TYPES

typedef struct rtree_node_elm_s rtree_node_elm_t;
typedef struct rtree_level_s rtree_level_t;
typedef struct rtree_s rtree_t;

/*
 * RTREE_BITS_PER_LEVEL must be a power of two that is no larger than the
 * machine address width.
 */
#define	LG_RTREE_BITS_PER_LEVEL	4
#define	RTREE_BITS_PER_LEVEL	(ZU(1) << LG_RTREE_BITS_PER_LEVEL)
#define	RTREE_HEIGHT_MAX						\
    ((ZU(1) << (LG_SIZEOF_PTR+3)) / RTREE_BITS_PER_LEVEL)
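/*
 * For example, with LG_RTREE_BITS_PER_LEVEL == 4, each level distinguishes
 * RTREE_BITS_PER_LEVEL == 16 key bits, so on a 64-bit system
 * (LG_SIZEOF_PTR == 3) RTREE_HEIGHT_MAX is 64 / 16 == 4 levels.
 */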

/* Used for two-stage lock-free node initialization. */
#define	RTREE_NODE_INITIALIZING	((rtree_node_elm_t *)0x1)

/*
 * The node allocation callback function's argument is the number of contiguous
 * rtree_node_elm_t structures to allocate, and the resulting memory must be
 * zeroed.
 */
typedef rtree_node_elm_t *(rtree_node_alloc_t)(size_t);
typedef void (rtree_node_dalloc_t)(rtree_node_elm_t *);
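/*
 * A minimal sketch of a conforming callback pair, assuming plain libc
 * allocation is acceptable (real consumers may instead carve nodes out of
 * internal allocator state); calloc() satisfies the zeroing requirement:
 *
 *	static rtree_node_elm_t *
 *	example_node_alloc(size_t nelms)
 *	{
 *		return ((rtree_node_elm_t *)calloc(nelms,
 *		    sizeof(rtree_node_elm_t)));
 *	}
 *
 *	static void
 *	example_node_dalloc(rtree_node_elm_t *node)
 *	{
 *		free(node);
 *	}
 */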

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

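/*
 * Each element is one slot of a node: interior nodes store child pointers,
 * leaf nodes store extent_node_t values, and pun provides a void * view of
 * the same slot for use with the atomic_*_p() accessors.
 */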
struct rtree_node_elm_s {
	union {
		void			*pun;
		rtree_node_elm_t	*child;
		extent_node_t		*val;
	};
};

struct rtree_level_s {
	/*
	 * A non-NULL subtree points to a subtree rooted along the hypothetical
	 * path to the leaf node corresponding to key 0.  Depending on what keys
	 * have been used to store to the tree, an arbitrary combination of
	 * subtree pointers may remain NULL.
	 *
	 * Suppose keys comprise 48 bits, and LG_RTREE_BITS_PER_LEVEL is 4.
	 * This results in a 3-level tree, and the leftmost leaf can be directly
	 * accessed via levels[2].subtree, the subtree prefixed by 0x0000
	 * (excluding 0x00000000) can be accessed via levels[1].subtree, and the
	 * remainder of the tree can be accessed via levels[0].subtree.
	 *
	 *   levels[0] : [<unused> | 0x0001******** | 0x0002******** | ...]
	 *
	 *   levels[1] : [<unused> | 0x00000001**** | 0x00000002**** | ... ]
	 *
	 *   levels[2] : [val(0x000000000000) | val(0x000000000001) | ...]
	 *
	 * This has practical implications on x64, which currently uses only the
	 * lower 47 bits of virtual address space in userland, thus leaving
	 * levels[0].subtree unused and avoiding a level of tree traversal.
	 */
	union {
		void			*subtree_pun;
		rtree_node_elm_t	*subtree;
	};
	/* Number of key bits distinguished by this level. */
	unsigned		bits;
	/*
	 * Cumulative number of key bits distinguished by traversing to
	 * corresponding tree level.
	 */
	unsigned		cumbits;
};

struct rtree_s {
	rtree_node_alloc_t	*alloc;
	rtree_node_dalloc_t	*dalloc;
	unsigned		height;
	/*
	 * Precomputed table used to convert from the number of leading 0 key
	 * bits to which subtree level to start at.
	 */
	unsigned		start_level[RTREE_HEIGHT_MAX];
	rtree_level_t		levels[RTREE_HEIGHT_MAX];
};

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

bool	rtree_new(rtree_t *rtree, unsigned bits, rtree_node_alloc_t *alloc,
    rtree_node_dalloc_t *dalloc);
void	rtree_delete(rtree_t *rtree);
rtree_node_elm_t	*rtree_subtree_read_hard(rtree_t *rtree,
    unsigned level);
rtree_node_elm_t	*rtree_child_read_hard(rtree_t *rtree,
    rtree_node_elm_t *elm, unsigned level);

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
unsigned	rtree_start_level(rtree_t *rtree, uintptr_t key);
uintptr_t	rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level);

bool	rtree_node_valid(rtree_node_elm_t *node);
rtree_node_elm_t	*rtree_child_tryread(rtree_node_elm_t *elm);
rtree_node_elm_t	*rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm,
    unsigned level);
extent_node_t	*rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm,
    bool dependent);
void	rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm,
    const extent_node_t *val);
rtree_node_elm_t	*rtree_subtree_tryread(rtree_t *rtree, unsigned level);
rtree_node_elm_t	*rtree_subtree_read(rtree_t *rtree, unsigned level);

extent_node_t	*rtree_get(rtree_t *rtree, uintptr_t key, bool dependent);
bool	rtree_set(rtree_t *rtree, uintptr_t key, const extent_node_t *val);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
JEMALLOC_INLINE unsigned
rtree_start_level(rtree_t *rtree, uintptr_t key)
{
	unsigned start_level;

	if (unlikely(key == 0))
		return (rtree->height - 1);

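	/*
	 * The table is indexed by which RTREE_BITS_PER_LEVEL-bit group of the
	 * key holds its highest set bit; e.g. with LG_RTREE_BITS_PER_LEVEL of
	 * 4, a key whose highest set bit is bit 20 uses start_level[20 >> 4],
	 * i.e. start_level[1].
	 */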
	start_level = rtree->start_level[lg_floor(key) >>
	    LG_RTREE_BITS_PER_LEVEL];
	assert(start_level < rtree->height);
	return (start_level);
}

JEMALLOC_INLINE uintptr_t
rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level)
{

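	/*
	 * Shift the key so that this level's group of bits is lowest, then
	 * mask it; e.g. on a 64-bit system, with cumbits == 32 and bits == 16
	 * for this level, the subkey is (key >> 32) & 0xffff.
	 */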
	return ((key >> ((ZU(1) << (LG_SIZEOF_PTR+3)) -
	    rtree->levels[level].cumbits)) & ((ZU(1) <<
	    rtree->levels[level].bits) - 1));
}

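/*
 * A node pointer is "valid" only once it points at real memory; NULL and the
 * RTREE_NODE_INITIALIZING sentinel (0x1) both fail this check.
 */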
JEMALLOC_INLINE bool
rtree_node_valid(rtree_node_elm_t *node)
{

	return ((uintptr_t)node > (uintptr_t)RTREE_NODE_INITIALIZING);
}

JEMALLOC_INLINE rtree_node_elm_t *
rtree_child_tryread(rtree_node_elm_t *elm)
{
	rtree_node_elm_t *child;

	/* Double-checked read (first read may be stale). */
	child = elm->child;
	if (!rtree_node_valid(child))
		child = atomic_read_p(&elm->pun);
	return (child);
}

JEMALLOC_INLINE rtree_node_elm_t *
rtree_child_read(rtree_t *rtree, rtree_node_elm_t *elm, unsigned level)
{
	rtree_node_elm_t *child;

	child = rtree_child_tryread(elm);
	if (unlikely(!rtree_node_valid(child)))
		child = rtree_child_read_hard(rtree, elm, level);
	return (child);
}

JEMALLOC_INLINE extent_node_t *
rtree_val_read(rtree_t *rtree, rtree_node_elm_t *elm, bool dependent)
{

	if (dependent) {
		/*
		 * Reading a val on behalf of a pointer to a valid allocation is
		 * guaranteed to be a clean read even without synchronization,
		 * because the rtree update became visible in memory before the
		 * pointer came into existence.
		 */
		return (elm->val);
	} else {
		/*
		 * An arbitrary read, e.g. on behalf of ivsalloc(), may not be
		 * dependent on a previous rtree write, which means a stale read
		 * could result if synchronization were omitted here.
		 */
		return (atomic_read_p(&elm->pun));
	}
}

JEMALLOC_INLINE void
rtree_val_write(rtree_t *rtree, rtree_node_elm_t *elm, const extent_node_t *val)
{

	atomic_write_p(&elm->pun, val);
}

JEMALLOC_INLINE rtree_node_elm_t *
rtree_subtree_tryread(rtree_t *rtree, unsigned level)
{
	rtree_node_elm_t *subtree;

	/* Double-checked read (first read may be stale). */
	subtree = rtree->levels[level].subtree;
	if (!rtree_node_valid(subtree))
		subtree = atomic_read_p(&rtree->levels[level].subtree_pun);
	return (subtree);
}

JEMALLOC_INLINE rtree_node_elm_t *
rtree_subtree_read(rtree_t *rtree, unsigned level)
{
	rtree_node_elm_t *subtree;

	subtree = rtree_subtree_tryread(rtree, level);
	if (unlikely(!rtree_node_valid(subtree)))
		subtree = rtree_subtree_read_hard(rtree, level);
	return (subtree);
}

JEMALLOC_INLINE extent_node_t *
rtree_get(rtree_t *rtree, uintptr_t key, bool dependent)
{
	uintptr_t subkey;
	unsigned i, start_level;
	rtree_node_elm_t *node, *child;

	start_level = rtree_start_level(rtree, key);

	for (i = start_level, node = rtree_subtree_tryread(rtree, start_level);
	    /**/; i++, node = child) {
		if (!dependent && unlikely(!rtree_node_valid(node)))
			return (NULL);
		subkey = rtree_subkey(rtree, key, i);
		if (i == rtree->height - 1) {
			/*
			 * node is a leaf, so it contains values rather than
			 * child pointers.
			 */
			return (rtree_val_read(rtree, &node[subkey],
			    dependent));
		}
		assert(i < rtree->height - 1);
		child = rtree_child_tryread(&node[subkey]);
	}
	not_reached();
}

JEMALLOC_INLINE bool
rtree_set(rtree_t *rtree, uintptr_t key, const extent_node_t *val)
{
	uintptr_t subkey;
	unsigned i, start_level;
	rtree_node_elm_t *node, *child;

	start_level = rtree_start_level(rtree, key);

	node = rtree_subtree_read(rtree, start_level);
	if (node == NULL)
		return (true);
	for (i = start_level; /**/; i++, node = child) {
		subkey = rtree_subkey(rtree, key, i);
		if (i == rtree->height - 1) {
			/*
			 * node is a leaf, so it contains values rather than
			 * child pointers.
			 */
			rtree_val_write(rtree, &node[subkey], val);
			return (false);
		}
		assert(i + 1 < rtree->height);
		child = rtree_child_read(rtree, &node[subkey], i);
		if (child == NULL)
			return (true);
	}
	not_reached();
}
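/*
 * A hypothetical usage sketch (not part of this header), registering and then
 * looking up the metadata node for a chunk address.  example_node_alloc and
 * example_node_dalloc are the callbacks sketched near the typedefs above, the
 * 48-bit key width matches the rtree_level_s example, and rtree_new() is
 * assumed to follow the same true-on-failure convention visible in
 * rtree_set():
 *
 *	rtree_t rtree;
 *	if (rtree_new(&rtree, 48, example_node_alloc, example_node_dalloc))
 *		... handle allocation failure ...
 *	if (rtree_set(&rtree, (uintptr_t)chunk, node))
 *		... handle node allocation failure along the path ...
 *	assert(rtree_get(&rtree, (uintptr_t)chunk, true) == node);
 */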
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/