1#ifndef JEMALLOC_INTERNAL_EXTENT_INLINES_H
2#define JEMALLOC_INTERNAL_EXTENT_INLINES_H
3
4#include "jemalloc/internal/mutex.h"
5#include "jemalloc/internal/mutex_pool.h"
6#include "jemalloc/internal/pages.h"
7#include "jemalloc/internal/prng.h"
8#include "jemalloc/internal/ql.h"
9#include "jemalloc/internal/sc.h"
10#include "jemalloc/internal/sz.h"
11
/* Lock the given extent via the shared extent mutex pool (keyed by address). */
static inline void
extent_lock(tsdn_t *tsdn, extent_t *extent) {
	assert(extent != NULL);
	mutex_pool_lock(tsdn, &extent_mutex_pool, (uintptr_t)extent);
}
17
/* Unlock the given extent's slot in the shared extent mutex pool. */
static inline void
extent_unlock(tsdn_t *tsdn, extent_t *extent) {
	assert(extent != NULL);
	mutex_pool_unlock(tsdn, &extent_mutex_pool, (uintptr_t)extent);
}
23
/*
 * Lock two extents at once; mutex_pool_lock2 handles ordering/dedup so the
 * pair acquisition is deadlock-free.
 */
static inline void
extent_lock2(tsdn_t *tsdn, extent_t *extent1, extent_t *extent2) {
	assert(extent1 != NULL && extent2 != NULL);
	mutex_pool_lock2(tsdn, &extent_mutex_pool, (uintptr_t)extent1,
	    (uintptr_t)extent2);
}
30
/* Release the pair of pool locks taken by extent_lock2(). */
static inline void
extent_unlock2(tsdn_t *tsdn, extent_t *extent1, extent_t *extent2) {
	assert(extent1 != NULL && extent2 != NULL);
	mutex_pool_unlock2(tsdn, &extent_mutex_pool, (uintptr_t)extent1,
	    (uintptr_t)extent2);
}
37
38static inline unsigned
39extent_arena_ind_get(const extent_t *extent) {
40 unsigned arena_ind = (unsigned)((extent->e_bits &
41 EXTENT_BITS_ARENA_MASK) >> EXTENT_BITS_ARENA_SHIFT);
42 assert(arena_ind < MALLOCX_ARENA_LIMIT);
43
44 return arena_ind;
45}
46
47static inline arena_t *
48extent_arena_get(const extent_t *extent) {
49 unsigned arena_ind = extent_arena_ind_get(extent);
50
51 return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_ACQUIRE);
52}
53
54static inline szind_t
55extent_szind_get_maybe_invalid(const extent_t *extent) {
56 szind_t szind = (szind_t)((extent->e_bits & EXTENT_BITS_SZIND_MASK) >>
57 EXTENT_BITS_SZIND_SHIFT);
58 assert(szind <= SC_NSIZES);
59 return szind;
60}
61
62static inline szind_t
63extent_szind_get(const extent_t *extent) {
64 szind_t szind = extent_szind_get_maybe_invalid(extent);
65 assert(szind < SC_NSIZES); /* Never call when "invalid". */
66 return szind;
67}
68
/* Usable size implied by the extent's (valid) size-class index. */
static inline size_t
extent_usize_get(const extent_t *extent) {
	return sz_index2size(extent_szind_get(extent));
}
73
74static inline unsigned
75extent_binshard_get(const extent_t *extent) {
76 unsigned binshard = (unsigned)((extent->e_bits &
77 EXTENT_BITS_BINSHARD_MASK) >> EXTENT_BITS_BINSHARD_SHIFT);
78 assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
79 return binshard;
80}
81
/* Serial number (allocation ordering stamp) packed into e_bits. */
static inline size_t
extent_sn_get(const extent_t *extent) {
	return (size_t)((extent->e_bits & EXTENT_BITS_SN_MASK) >>
	    EXTENT_BITS_SN_SHIFT);
}
87
/* Lifecycle state (e.g. active/dirty/muzzy/retained) packed into e_bits. */
static inline extent_state_t
extent_state_get(const extent_t *extent) {
	return (extent_state_t)((extent->e_bits & EXTENT_BITS_STATE_MASK) >>
	    EXTENT_BITS_STATE_SHIFT);
}
93
/* Whether the extent's pages are flagged as zeroed. */
static inline bool
extent_zeroed_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_ZEROED_MASK) >>
	    EXTENT_BITS_ZEROED_SHIFT);
}
99
/* Whether the extent's pages are flagged as committed. */
static inline bool
extent_committed_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_COMMITTED_MASK) >>
	    EXTENT_BITS_COMMITTED_SHIFT);
}
105
/* Whether the extent's pages are flagged as dumpable (core-dump inclusion). */
static inline bool
extent_dumpable_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_DUMPABLE_MASK) >>
	    EXTENT_BITS_DUMPABLE_SHIFT);
}
111
/* Whether the extent is used as a small-size-class slab. */
static inline bool
extent_slab_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_SLAB_MASK) >>
	    EXTENT_BITS_SLAB_SHIFT);
}
117
118static inline unsigned
119extent_nfree_get(const extent_t *extent) {
120 assert(extent_slab_get(extent));
121 return (unsigned)((extent->e_bits & EXTENT_BITS_NFREE_MASK) >>
122 EXTENT_BITS_NFREE_SHIFT);
123}
124
125static inline void *
126extent_base_get(const extent_t *extent) {
127 assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
128 !extent_slab_get(extent));
129 return PAGE_ADDR2BASE(extent->e_addr);
130}
131
132static inline void *
133extent_addr_get(const extent_t *extent) {
134 assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
135 !extent_slab_get(extent));
136 return extent->e_addr;
137}
138
/* Extent size; stored in the high bits of the fused size/esn word. */
static inline size_t
extent_size_get(const extent_t *extent) {
	return (extent->e_size_esn & EXTENT_SIZE_MASK);
}
143
/* Extent serial number portion of the fused size/esn word. */
static inline size_t
extent_esn_get(const extent_t *extent) {
	return (extent->e_size_esn & EXTENT_ESN_MASK);
}
148
/* Base-allocator size (used for base extents, which bypass e_size_esn). */
static inline size_t
extent_bsize_get(const extent_t *extent) {
	return extent->e_bsize;
}
153
154static inline void *
155extent_before_get(const extent_t *extent) {
156 return (void *)((uintptr_t)extent_base_get(extent) - PAGE);
157}
158
159static inline void *
160extent_last_get(const extent_t *extent) {
161 return (void *)((uintptr_t)extent_base_get(extent) +
162 extent_size_get(extent) - PAGE);
163}
164
165static inline void *
166extent_past_get(const extent_t *extent) {
167 return (void *)((uintptr_t)extent_base_get(extent) +
168 extent_size_get(extent));
169}
170
/* Mutable per-slab metadata; only valid when the slab flag is set. */
static inline arena_slab_data_t *
extent_slab_data_get(extent_t *extent) {
	assert(extent_slab_get(extent));
	return &extent->e_slab_data;
}
176
/* Const variant of extent_slab_data_get(). */
static inline const arena_slab_data_t *
extent_slab_data_get_const(const extent_t *extent) {
	assert(extent_slab_get(extent));
	return &extent->e_slab_data;
}
182
/* Profiling tctx pointer; acquire load pairs with the release store below. */
static inline prof_tctx_t *
extent_prof_tctx_get(const extent_t *extent) {
	return (prof_tctx_t *)atomic_load_p(&extent->e_prof_tctx,
	    ATOMIC_ACQUIRE);
}
188
/* Allocation timestamp recorded for profiling (returned by value). */
static inline nstime_t
extent_prof_alloc_time_get(const extent_t *extent) {
	return extent->e_alloc_time;
}
193
194static inline void
195extent_arena_set(extent_t *extent, arena_t *arena) {
196 unsigned arena_ind = (arena != NULL) ? arena_ind_get(arena) : ((1U <<
197 MALLOCX_ARENA_BITS) - 1);
198 extent->e_bits = (extent->e_bits & ~EXTENT_BITS_ARENA_MASK) |
199 ((uint64_t)arena_ind << EXTENT_BITS_ARENA_SHIFT);
200}
201
202static inline void
203extent_binshard_set(extent_t *extent, unsigned binshard) {
204 /* The assertion assumes szind is set already. */
205 assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
206 extent->e_bits = (extent->e_bits & ~EXTENT_BITS_BINSHARD_MASK) |
207 ((uint64_t)binshard << EXTENT_BITS_BINSHARD_SHIFT);
208}
209
/* Set the extent's base address. */
static inline void
extent_addr_set(extent_t *extent, void *addr) {
	extent->e_addr = addr;
}
214
215static inline void
216extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
217 assert(extent_base_get(extent) == extent_addr_get(extent));
218
219 if (alignment < PAGE) {
220 unsigned lg_range = LG_PAGE -
221 lg_floor(CACHELINE_CEILING(alignment));
222 size_t r;
223 if (!tsdn_null(tsdn)) {
224 tsd_t *tsd = tsdn_tsd(tsdn);
225 r = (size_t)prng_lg_range_u64(
226 tsd_offset_statep_get(tsd), lg_range);
227 } else {
228 r = prng_lg_range_zu(
229 &extent_arena_get(extent)->offset_state,
230 lg_range, true);
231 }
232 uintptr_t random_offset = ((uintptr_t)r) << (LG_PAGE -
233 lg_range);
234 extent->e_addr = (void *)((uintptr_t)extent->e_addr +
235 random_offset);
236 assert(ALIGNMENT_ADDR2BASE(extent->e_addr, alignment) ==
237 extent->e_addr);
238 }
239}
240
/* Store size into the fused size/esn word; size must be page-granular. */
static inline void
extent_size_set(extent_t *extent, size_t size) {
	assert((size & ~EXTENT_SIZE_MASK) == 0);
	extent->e_size_esn = size | (extent->e_size_esn & ~EXTENT_SIZE_MASK);
}
246
/* Store the extent serial number into the fused size/esn word. */
static inline void
extent_esn_set(extent_t *extent, size_t esn) {
	extent->e_size_esn = (extent->e_size_esn & ~EXTENT_ESN_MASK) | (esn &
	    EXTENT_ESN_MASK);
}
252
/* Set the base-allocator size (base extents only use e_bsize). */
static inline void
extent_bsize_set(extent_t *extent, size_t bsize) {
	extent->e_bsize = bsize;
}
257
/* Set the size-class index; SC_NSIZES is allowed and means "invalid". */
static inline void
extent_szind_set(extent_t *extent, szind_t szind) {
	assert(szind <= SC_NSIZES); /* SC_NSIZES means "invalid". */
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SZIND_MASK) |
	    ((uint64_t)szind << EXTENT_BITS_SZIND_SHIFT);
}
264
/* Set the free-region count of a slab extent. */
static inline void
extent_nfree_set(extent_t *extent, unsigned nfree) {
	assert(extent_slab_get(extent));
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_NFREE_MASK) |
	    ((uint64_t)nfree << EXTENT_BITS_NFREE_SHIFT);
}
271
272static inline void
273extent_nfree_binshard_set(extent_t *extent, unsigned nfree, unsigned binshard) {
274 /* The assertion assumes szind is set already. */
275 assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
276 extent->e_bits = (extent->e_bits &
277 (~EXTENT_BITS_NFREE_MASK & ~EXTENT_BITS_BINSHARD_MASK)) |
278 ((uint64_t)binshard << EXTENT_BITS_BINSHARD_SHIFT) |
279 ((uint64_t)nfree << EXTENT_BITS_NFREE_SHIFT);
280}
281
/* Increment nfree in place; the field add cannot carry into neighbors. */
static inline void
extent_nfree_inc(extent_t *extent) {
	assert(extent_slab_get(extent));
	extent->e_bits += ((uint64_t)1U << EXTENT_BITS_NFREE_SHIFT);
}
287
/* Decrement nfree in place; caller must ensure nfree > 0. */
static inline void
extent_nfree_dec(extent_t *extent) {
	assert(extent_slab_get(extent));
	extent->e_bits -= ((uint64_t)1U << EXTENT_BITS_NFREE_SHIFT);
}
293
/* Subtract n from nfree in place; caller must ensure nfree >= n. */
static inline void
extent_nfree_sub(extent_t *extent, uint64_t n) {
	assert(extent_slab_get(extent));
	extent->e_bits -= (n << EXTENT_BITS_NFREE_SHIFT);
}
299
/* Set the extent's serial number. */
static inline void
extent_sn_set(extent_t *extent, size_t sn) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SN_MASK) |
	    ((uint64_t)sn << EXTENT_BITS_SN_SHIFT);
}
305
/* Set the extent's lifecycle state. */
static inline void
extent_state_set(extent_t *extent, extent_state_t state) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_STATE_MASK) |
	    ((uint64_t)state << EXTENT_BITS_STATE_SHIFT);
}
311
/* Set the zeroed flag. */
static inline void
extent_zeroed_set(extent_t *extent, bool zeroed) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_ZEROED_MASK) |
	    ((uint64_t)zeroed << EXTENT_BITS_ZEROED_SHIFT);
}
317
/* Set the committed flag. */
static inline void
extent_committed_set(extent_t *extent, bool committed) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_COMMITTED_MASK) |
	    ((uint64_t)committed << EXTENT_BITS_COMMITTED_SHIFT);
}
323
/* Set the dumpable flag. */
static inline void
extent_dumpable_set(extent_t *extent, bool dumpable) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_DUMPABLE_MASK) |
	    ((uint64_t)dumpable << EXTENT_BITS_DUMPABLE_SHIFT);
}
329
/* Set the slab flag. */
static inline void
extent_slab_set(extent_t *extent, bool slab) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SLAB_MASK) |
	    ((uint64_t)slab << EXTENT_BITS_SLAB_SHIFT);
}
335
/* Publish the profiling tctx; release store pairs with the acquire load. */
static inline void
extent_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx) {
	atomic_store_p(&extent->e_prof_tctx, tctx, ATOMIC_RELEASE);
}
340
/* Record the allocation timestamp used by profiling. */
static inline void
extent_prof_alloc_time_set(extent_t *extent, nstime_t t) {
	nstime_copy(&extent->e_alloc_time, &t);
}
345
346static inline bool
347extent_is_head_get(extent_t *extent) {
348 if (maps_coalesce) {
349 not_reached();
350 }
351
352 return (bool)((extent->e_bits & EXTENT_BITS_IS_HEAD_MASK) >>
353 EXTENT_BITS_IS_HEAD_SHIFT);
354}
355
/* Set the head flag; only meaningful when map coalescing is disabled. */
static inline void
extent_is_head_set(extent_t *extent, bool is_head) {
	if (maps_coalesce) {
		not_reached();
	}

	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_IS_HEAD_MASK) |
	    ((uint64_t)is_head << EXTENT_BITS_IS_HEAD_SHIFT);
}
365
366static inline void
367extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
368 bool slab, szind_t szind, size_t sn, extent_state_t state, bool zeroed,
369 bool committed, bool dumpable, extent_head_state_t is_head) {
370 assert(addr == PAGE_ADDR2BASE(addr) || !slab);
371
372 extent_arena_set(extent, arena);
373 extent_addr_set(extent, addr);
374 extent_size_set(extent, size);
375 extent_slab_set(extent, slab);
376 extent_szind_set(extent, szind);
377 extent_sn_set(extent, sn);
378 extent_state_set(extent, state);
379 extent_zeroed_set(extent, zeroed);
380 extent_committed_set(extent, committed);
381 extent_dumpable_set(extent, dumpable);
382 ql_elm_new(extent, ql_link);
383 if (!maps_coalesce) {
384 extent_is_head_set(extent, (is_head == EXTENT_IS_HEAD) ? true :
385 false);
386 }
387 if (config_prof) {
388 extent_prof_tctx_set(extent, NULL);
389 }
390}
391
/*
 * Initialize a base-allocator extent: no arena owner, szind marked invalid,
 * active/zeroed/committed/dumpable, and size stored in e_bsize.
 */
static inline void
extent_binit(extent_t *extent, void *addr, size_t bsize, size_t sn) {
	extent_arena_set(extent, NULL);
	extent_addr_set(extent, addr);
	extent_bsize_set(extent, bsize);
	extent_slab_set(extent, false);
	extent_szind_set(extent, SC_NSIZES);
	extent_sn_set(extent, sn);
	extent_state_set(extent, extent_state_active);
	extent_zeroed_set(extent, true);
	extent_committed_set(extent, true);
	extent_dumpable_set(extent, true);
}
405
/* Initialize an empty extent list. */
static inline void
extent_list_init(extent_list_t *list) {
	ql_new(list);
}
410
/* First extent in the list, or NULL if empty. */
static inline extent_t *
extent_list_first(const extent_list_t *list) {
	return ql_first(list);
}
415
/* Last extent in the list, or NULL if empty. */
static inline extent_t *
extent_list_last(const extent_list_t *list) {
	return ql_last(list, ql_link);
}
420
/* Append an extent to the tail of the list. */
static inline void
extent_list_append(extent_list_t *list, extent_t *extent) {
	ql_tail_insert(list, extent, ql_link);
}
425
/* Prepend an extent at the head of the list. */
static inline void
extent_list_prepend(extent_list_t *list, extent_t *extent) {
	ql_head_insert(list, extent, ql_link);
}
430
/*
 * Replace to_remove with to_insert in place: link to_insert immediately
 * after to_remove, then unlink to_remove, preserving list position.
 */
static inline void
extent_list_replace(extent_list_t *list, extent_t *to_remove,
    extent_t *to_insert) {
	ql_after_insert(to_remove, to_insert, ql_link);
	ql_remove(list, to_remove, ql_link);
}
437
/* Unlink an extent from the list. */
static inline void
extent_list_remove(extent_list_t *list, extent_t *extent) {
	ql_remove(list, extent, ql_link);
}
442
443static inline int
444extent_sn_comp(const extent_t *a, const extent_t *b) {
445 size_t a_sn = extent_sn_get(a);
446 size_t b_sn = extent_sn_get(b);
447
448 return (a_sn > b_sn) - (a_sn < b_sn);
449}
450
451static inline int
452extent_esn_comp(const extent_t *a, const extent_t *b) {
453 size_t a_esn = extent_esn_get(a);
454 size_t b_esn = extent_esn_get(b);
455
456 return (a_esn > b_esn) - (a_esn < b_esn);
457}
458
459static inline int
460extent_ad_comp(const extent_t *a, const extent_t *b) {
461 uintptr_t a_addr = (uintptr_t)extent_addr_get(a);
462 uintptr_t b_addr = (uintptr_t)extent_addr_get(b);
463
464 return (a_addr > b_addr) - (a_addr < b_addr);
465}
466
467static inline int
468extent_ead_comp(const extent_t *a, const extent_t *b) {
469 uintptr_t a_eaddr = (uintptr_t)a;
470 uintptr_t b_eaddr = (uintptr_t)b;
471
472 return (a_eaddr > b_eaddr) - (a_eaddr < b_eaddr);
473}
474
475static inline int
476extent_snad_comp(const extent_t *a, const extent_t *b) {
477 int ret;
478
479 ret = extent_sn_comp(a, b);
480 if (ret != 0) {
481 return ret;
482 }
483
484 ret = extent_ad_comp(a, b);
485 return ret;
486}
487
488static inline int
489extent_esnead_comp(const extent_t *a, const extent_t *b) {
490 int ret;
491
492 ret = extent_esn_comp(a, b);
493 if (ret != 0) {
494 return ret;
495 }
496
497 ret = extent_ead_comp(a, b);
498 return ret;
499}
500
501#endif /* JEMALLOC_INTERNAL_EXTENT_INLINES_H */
502