1 | #ifndef JEMALLOC_INTERNAL_BITMAP_H |
2 | #define JEMALLOC_INTERNAL_BITMAP_H |
3 | |
4 | #include "jemalloc/internal/bit_util.h" |
5 | #include "jemalloc/internal/sc.h" |
6 | |
/* A bitmap is stored as an array of groups; each group is one machine word. */
typedef unsigned long bitmap_t;
#define LG_SIZEOF_BITMAP LG_SIZEOF_LONG

/* Maximum bitmap bit count is 2^LG_BITMAP_MAXBITS. */
#if SC_LG_SLAB_MAXREGS > LG_CEIL(SC_NSIZES)
/* Maximum bitmap bit count is determined by maximum regions per slab. */
# define LG_BITMAP_MAXBITS SC_LG_SLAB_MAXREGS
#else
/* Maximum bitmap bit count is determined by number of extent size classes. */
# define LG_BITMAP_MAXBITS LG_CEIL(SC_NSIZES)
#endif
#define BITMAP_MAXBITS (ZU(1) << LG_BITMAP_MAXBITS)

/* Number of bits per group. */
#define LG_BITMAP_GROUP_NBITS (LG_SIZEOF_BITMAP + 3)
#define BITMAP_GROUP_NBITS (1U << LG_BITMAP_GROUP_NBITS)
/* Mask that extracts a bit's position within its group. */
#define BITMAP_GROUP_NBITS_MASK (BITMAP_GROUP_NBITS-1)

/*
 * Do some analysis on how big the bitmap is before we use a tree. For a brute
 * force linear search, if we would have to call ffs_lu() more than 2^3 times,
 * use a tree instead.
 */
#if LG_BITMAP_MAXBITS - LG_BITMAP_GROUP_NBITS > 3
# define BITMAP_USE_TREE
#endif
33 | |
/* Number of groups required to store a given number of bits. */
/* I.e. the ceiling of nbits / BITMAP_GROUP_NBITS. */
#define BITMAP_BITS2GROUPS(nbits) \
	(((nbits) + BITMAP_GROUP_NBITS_MASK) >> LG_BITMAP_GROUP_NBITS)

/*
 * Number of groups required at a particular level for a given number of bits.
 * Each level-k group bit summarizes one level-(k-1) group, so level k needs
 * BITMAP_BITS2GROUPS applied (k+1) times.
 */
#define BITMAP_GROUPS_L0(nbits) \
	BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_L1(nbits) \
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(nbits))
#define BITMAP_GROUPS_L2(nbits) \
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))
#define BITMAP_GROUPS_L3(nbits) \
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS( \
	    BITMAP_BITS2GROUPS((nbits)))))
#define BITMAP_GROUPS_L4(nbits) \
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS( \
	    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))))

/*
 * Assuming the number of levels, number of groups required for a given number
 * of bits (sum of the per-level group counts).
 */
#define BITMAP_GROUPS_1_LEVEL(nbits) \
	BITMAP_GROUPS_L0(nbits)
#define BITMAP_GROUPS_2_LEVEL(nbits) \
	(BITMAP_GROUPS_1_LEVEL(nbits) + BITMAP_GROUPS_L1(nbits))
#define BITMAP_GROUPS_3_LEVEL(nbits) \
	(BITMAP_GROUPS_2_LEVEL(nbits) + BITMAP_GROUPS_L2(nbits))
#define BITMAP_GROUPS_4_LEVEL(nbits) \
	(BITMAP_GROUPS_3_LEVEL(nbits) + BITMAP_GROUPS_L3(nbits))
#define BITMAP_GROUPS_5_LEVEL(nbits) \
	(BITMAP_GROUPS_4_LEVEL(nbits) + BITMAP_GROUPS_L4(nbits))
68 | |
69 | /* |
70 | * Maximum number of groups required to support LG_BITMAP_MAXBITS. |
71 | */ |
#ifdef BITMAP_USE_TREE

/* Select the shallowest tree whose root level fits within one group. */
#if LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS
# define BITMAP_GROUPS(nbits) BITMAP_GROUPS_1_LEVEL(nbits)
# define BITMAP_GROUPS_MAX BITMAP_GROUPS_1_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 2
# define BITMAP_GROUPS(nbits) BITMAP_GROUPS_2_LEVEL(nbits)
# define BITMAP_GROUPS_MAX BITMAP_GROUPS_2_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 3
# define BITMAP_GROUPS(nbits) BITMAP_GROUPS_3_LEVEL(nbits)
# define BITMAP_GROUPS_MAX BITMAP_GROUPS_3_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 4
# define BITMAP_GROUPS(nbits) BITMAP_GROUPS_4_LEVEL(nbits)
# define BITMAP_GROUPS_MAX BITMAP_GROUPS_4_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 5
# define BITMAP_GROUPS(nbits) BITMAP_GROUPS_5_LEVEL(nbits)
# define BITMAP_GROUPS_MAX BITMAP_GROUPS_5_LEVEL(BITMAP_MAXBITS)
#else
# error "Unsupported bitmap size"
#endif

/*
 * Maximum number of levels possible. This could be statically computed based
 * on LG_BITMAP_MAXBITS:
 *
 * #define BITMAP_MAX_LEVELS \
 *     (LG_BITMAP_MAXBITS / LG_SIZEOF_BITMAP) \
 *     + !!(LG_BITMAP_MAXBITS % LG_SIZEOF_BITMAP)
 *
 * However, that would not allow the generic BITMAP_INFO_INITIALIZER() macro, so
 * instead hardcode BITMAP_MAX_LEVELS to the largest number supported by the
 * various cascading macros. The only additional cost this incurs is some
 * unused trailing entries in bitmap_info_t structures; the bitmaps themselves
 * are not impacted.
 */
#define BITMAP_MAX_LEVELS 5

/*
 * Static initializer for a tree-mode bitmap_info_t.  Each levels[] entry is
 * the cumulative group offset of that level within the flat group array.
 */
#define BITMAP_INFO_INITIALIZER(nbits) { \
	/* nbits. */ \
	nbits, \
	/* nlevels. */ \
	(BITMAP_GROUPS_L0(nbits) > BITMAP_GROUPS_L1(nbits)) + \
	    (BITMAP_GROUPS_L1(nbits) > BITMAP_GROUPS_L2(nbits)) + \
	    (BITMAP_GROUPS_L2(nbits) > BITMAP_GROUPS_L3(nbits)) + \
	    (BITMAP_GROUPS_L3(nbits) > BITMAP_GROUPS_L4(nbits)) + 1, \
	/* levels. */ \
	{ \
	    {0}, \
	    {BITMAP_GROUPS_L0(nbits)}, \
	    {BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)}, \
	    {BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits) + \
	        BITMAP_GROUPS_L0(nbits)}, \
	    {BITMAP_GROUPS_L3(nbits) + BITMAP_GROUPS_L2(nbits) + \
	        BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)}, \
	    {BITMAP_GROUPS_L4(nbits) + BITMAP_GROUPS_L3(nbits) + \
	        BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits) \
	        + BITMAP_GROUPS_L0(nbits)} \
	} \
}

#else /* BITMAP_USE_TREE */

#define BITMAP_GROUPS(nbits) BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_MAX BITMAP_BITS2GROUPS(BITMAP_MAXBITS)

/* Static initializer for a flat (non-tree) bitmap_info_t. */
#define BITMAP_INFO_INITIALIZER(nbits) { \
	/* nbits. */ \
	nbits, \
	/* ngroups. */ \
	BITMAP_BITS2GROUPS(nbits) \
}

#endif /* BITMAP_USE_TREE */
145 | |
typedef struct bitmap_level_s {
	/* Offset of this level's groups within the array of groups. */
	size_t group_offset;
} bitmap_level_t;

/* Describes the geometry of a bitmap; shared by all bitmaps of one shape. */
typedef struct bitmap_info_s {
	/* Logical number of bits in bitmap (stored at bottom level). */
	size_t nbits;

#ifdef BITMAP_USE_TREE
	/* Number of levels necessary for nbits. */
	unsigned nlevels;

	/*
	 * Only the first (nlevels+1) elements are used, and levels are ordered
	 * bottom to top (e.g. the bottom level is stored in levels[0]).
	 * levels[nlevels].group_offset appears to be the total group count;
	 * bitmap_full() relies on it to locate the root group.
	 */
	bitmap_level_t levels[BITMAP_MAX_LEVELS+1];
#else /* BITMAP_USE_TREE */
	/* Number of groups necessary for nbits. */
	size_t ngroups;
#endif /* BITMAP_USE_TREE */
} bitmap_info_t;
169 | |
/* Compute the geometry (levels/groups) for an nbits-bit bitmap. */
void bitmap_info_init(bitmap_info_t *binfo, size_t nbits);
/* Initialize a bitmap's storage; fill selects the initial logical state. */
void bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill);
/* Number of bytes of bitmap_t storage required for binfo's geometry. */
size_t bitmap_size(const bitmap_info_t *binfo);
173 | |
174 | static inline bool |
175 | bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo) { |
176 | #ifdef BITMAP_USE_TREE |
177 | size_t rgoff = binfo->levels[binfo->nlevels].group_offset - 1; |
178 | bitmap_t rg = bitmap[rgoff]; |
179 | /* The bitmap is full iff the root group is 0. */ |
180 | return (rg == 0); |
181 | #else |
182 | size_t i; |
183 | |
184 | for (i = 0; i < binfo->ngroups; i++) { |
185 | if (bitmap[i] != 0) { |
186 | return false; |
187 | } |
188 | } |
189 | return true; |
190 | #endif |
191 | } |
192 | |
193 | static inline bool |
194 | bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) { |
195 | size_t goff; |
196 | bitmap_t g; |
197 | |
198 | assert(bit < binfo->nbits); |
199 | goff = bit >> LG_BITMAP_GROUP_NBITS; |
200 | g = bitmap[goff]; |
201 | return !(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))); |
202 | } |
203 | |
/*
 * Logically set (mark allocated) the given bit, which must currently be
 * unset.  Storage is inverted, so this clears the stored bit.
 */
static inline void
bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;

	assert(bit < binfo->nbits);
	assert(!bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	/* The stored bit must be 1 (logically unset) before the flip. */
	assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (g == 0) {
		/* This group became all-zero; clear its summary bit above. */
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			/* Reinterpret: position of the child group at level i. */
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			/* Stop once an ancestor group remains nonzero. */
			if (g != 0) {
				break;
			}
		}
	}
#endif
}
238 | |
239 | /* ffu: find first unset >= bit. */ |
240 | static inline size_t |
241 | bitmap_ffu(const bitmap_t *bitmap, const bitmap_info_t *binfo, size_t min_bit) { |
242 | assert(min_bit < binfo->nbits); |
243 | |
244 | #ifdef BITMAP_USE_TREE |
245 | size_t bit = 0; |
246 | for (unsigned level = binfo->nlevels; level--;) { |
247 | size_t lg_bits_per_group = (LG_BITMAP_GROUP_NBITS * (level + |
248 | 1)); |
249 | bitmap_t group = bitmap[binfo->levels[level].group_offset + (bit |
250 | >> lg_bits_per_group)]; |
251 | unsigned group_nmask = (unsigned)(((min_bit > bit) ? (min_bit - |
252 | bit) : 0) >> (lg_bits_per_group - LG_BITMAP_GROUP_NBITS)); |
253 | assert(group_nmask <= BITMAP_GROUP_NBITS); |
254 | bitmap_t group_mask = ~((1LU << group_nmask) - 1); |
255 | bitmap_t group_masked = group & group_mask; |
256 | if (group_masked == 0LU) { |
257 | if (group == 0LU) { |
258 | return binfo->nbits; |
259 | } |
260 | /* |
261 | * min_bit was preceded by one or more unset bits in |
262 | * this group, but there are no other unset bits in this |
263 | * group. Try again starting at the first bit of the |
264 | * next sibling. This will recurse at most once per |
265 | * non-root level. |
266 | */ |
267 | size_t sib_base = bit + (ZU(1) << lg_bits_per_group); |
268 | assert(sib_base > min_bit); |
269 | assert(sib_base > bit); |
270 | if (sib_base >= binfo->nbits) { |
271 | return binfo->nbits; |
272 | } |
273 | return bitmap_ffu(bitmap, binfo, sib_base); |
274 | } |
275 | bit += ((size_t)ffs_lu(group_masked)) << |
276 | (lg_bits_per_group - LG_BITMAP_GROUP_NBITS); |
277 | } |
278 | assert(bit >= min_bit); |
279 | assert(bit < binfo->nbits); |
280 | return bit; |
281 | #else |
282 | size_t i = min_bit >> LG_BITMAP_GROUP_NBITS; |
283 | bitmap_t g = bitmap[i] & ~((1LU << (min_bit & BITMAP_GROUP_NBITS_MASK)) |
284 | - 1); |
285 | size_t bit; |
286 | do { |
287 | if (g != 0) { |
288 | bit = ffs_lu(g); |
289 | return (i << LG_BITMAP_GROUP_NBITS) + bit; |
290 | } |
291 | i++; |
292 | g = bitmap[i]; |
293 | } while (i < binfo->ngroups); |
294 | return binfo->nbits; |
295 | #endif |
296 | } |
297 | |
298 | /* sfu: set first unset. */ |
299 | static inline size_t |
300 | bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo) { |
301 | size_t bit; |
302 | bitmap_t g; |
303 | unsigned i; |
304 | |
305 | assert(!bitmap_full(bitmap, binfo)); |
306 | |
307 | #ifdef BITMAP_USE_TREE |
308 | i = binfo->nlevels - 1; |
309 | g = bitmap[binfo->levels[i].group_offset]; |
310 | bit = ffs_lu(g); |
311 | while (i > 0) { |
312 | i--; |
313 | g = bitmap[binfo->levels[i].group_offset + bit]; |
314 | bit = (bit << LG_BITMAP_GROUP_NBITS) + ffs_lu(g); |
315 | } |
316 | #else |
317 | i = 0; |
318 | g = bitmap[0]; |
319 | while (g == 0) { |
320 | i++; |
321 | g = bitmap[i]; |
322 | } |
323 | bit = (i << LG_BITMAP_GROUP_NBITS) + ffs_lu(g); |
324 | #endif |
325 | bitmap_set(bitmap, binfo, bit); |
326 | return bit; |
327 | } |
328 | |
/*
 * Logically unset (free) the given bit, which must currently be set.
 * Storage is inverted, so this sets the stored bit.
 */
static inline void
bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;
	UNUSED bool propagate;

	assert(bit < binfo->nbits);
	assert(bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	/* A previously all-zero group gains a free bit; summaries must change. */
	propagate = (g == 0);
	/* The stored bit must be 0 (logically set) before the flip. */
	assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))) == 0);
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(!bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (propagate) {
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			/* Reinterpret: position of the child group at level i. */
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			propagate = (g == 0);
			assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)))
			    == 0);
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			/* Stop once an ancestor group was already nonzero. */
			if (!propagate) {
				break;
			}
		}
	}
#endif /* BITMAP_USE_TREE */
}
367 | |
368 | #endif /* JEMALLOC_INTERNAL_BITMAP_H */ |
369 | |