#include "jemalloc/internal/jemalloc_preamble.h"
#include "jemalloc/internal/jemalloc_internal_includes.h"

#include "jemalloc/internal/assert.h"

/******************************************************************************/

#ifdef BITMAP_USE_TREE

void
bitmap_info_init(bitmap_info_t *binfo, size_t nbits) {
	unsigned i;
	size_t group_count;

	assert(nbits > 0);
	assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));

	/*
	 * Compute the number of groups necessary to store nbits bits, and
	 * progressively work upward through the levels until reaching a level
	 * that requires only one group.
	 */
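	/*
	 * For example (a sketch, assuming 64-bit groups, i.e.
	 * BITMAP_GROUP_NBITS == 64): nbits == 1000 needs 16 level-0 groups,
	 * which in turn need 1 level-1 group, so the loop below produces
	 * levels[1].group_offset == 16, levels[2].group_offset == 17, and
	 * nlevels == 2.
	 */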
	binfo->levels[0].group_offset = 0;
	group_count = BITMAP_BITS2GROUPS(nbits);
	for (i = 1; group_count > 1; i++) {
		assert(i < BITMAP_MAX_LEVELS);
		binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
		    + group_count;
		group_count = BITMAP_BITS2GROUPS(group_count);
	}
	binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
	    + group_count;
	assert(binfo->levels[i].group_offset <= BITMAP_GROUPS_MAX);
	binfo->nlevels = i;
	binfo->nbits = nbits;
}

static size_t
bitmap_info_ngroups(const bitmap_info_t *binfo) {
	return binfo->levels[binfo->nlevels].group_offset;
}

void
bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill) {
	size_t extra;
	unsigned i;

	/*
	 * Bits are actually inverted with regard to the external bitmap
	 * interface.
	 */

	if (fill) {
		/* The "filled" bitmap starts out with all 0 bits. */
		memset(bitmap, 0, bitmap_size(binfo));
		return;
	}

	/*
	 * The "empty" bitmap starts out with all 1 bits, except for trailing
	 * unused bits (if any).  Note that each group uses bit 0 to correspond
	 * to the first logical bit in the group, so extra bits are the most
	 * significant bits of the last group.
	 */
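	/*
	 * Concretely (a sketch, again assuming 64-bit groups): for
	 * nbits == 1000, 1000 & 63 == 40, so extra == 24 and the last
	 * level-0 group is shifted right to leave only its low 40 bits set.
	 */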
	memset(bitmap, 0xffU, bitmap_size(binfo));
	extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
	    & BITMAP_GROUP_NBITS_MASK;
	if (extra != 0) {
		bitmap[binfo->levels[1].group_offset - 1] >>= extra;
	}
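	/*
	 * Trim the unused high bits in the last group of each higher level as
	 * well; each level-i bit summarizes one level-(i-1) group.
	 */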
	for (i = 1; i < binfo->nlevels; i++) {
		size_t group_count = binfo->levels[i].group_offset -
		    binfo->levels[i-1].group_offset;
		extra = (BITMAP_GROUP_NBITS - (group_count &
		    BITMAP_GROUP_NBITS_MASK)) & BITMAP_GROUP_NBITS_MASK;
		if (extra != 0) {
			bitmap[binfo->levels[i+1].group_offset - 1] >>= extra;
		}
	}
}

#else /* BITMAP_USE_TREE */

void
bitmap_info_init(bitmap_info_t *binfo, size_t nbits) {
	assert(nbits > 0);
	assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));

	binfo->ngroups = BITMAP_BITS2GROUPS(nbits);
	binfo->nbits = nbits;
}

static size_t
bitmap_info_ngroups(const bitmap_info_t *binfo) {
	return binfo->ngroups;
}

void
bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill) {
	size_t extra;
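	/*
	 * Bits are inverted with regard to the external bitmap interface,
	 * just as in the tree-based implementation above: a "filled" bitmap
	 * is all 0 bits, and an "empty" bitmap is all 1 bits except for
	 * trailing unused bits in the last group.
	 */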
	if (fill) {
		memset(bitmap, 0, bitmap_size(binfo));
		return;
	}

	memset(bitmap, 0xffU, bitmap_size(binfo));
	extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
	    & BITMAP_GROUP_NBITS_MASK;
	if (extra != 0) {
		bitmap[binfo->ngroups - 1] >>= extra;
	}
}

#endif /* BITMAP_USE_TREE */

size_t
bitmap_size(const bitmap_info_t *binfo) {
	return (bitmap_info_ngroups(binfo) << LG_SIZEOF_BITMAP);
}
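/*
 * Usage sketch (illustrative only; bitmap_set(), bitmap_sfu(), and the other
 * accessors are inline functions declared in jemalloc/internal/bitmap.h, not
 * in this file):
 *
 *	bitmap_info_t binfo;
 *	bitmap_info_init(&binfo, nbits);
 *	bitmap_t *bitmap = ... allocate bitmap_size(&binfo) bytes ...;
 *	bitmap_init(bitmap, &binfo, false);       // Logically all-unset.
 *	size_t bit = bitmap_sfu(bitmap, &binfo);  // Set first unset bit (0).
 *	bitmap_set(bitmap, &binfo, 3);            // Mark bit 3 as set.
 */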