1#ifndef JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H
2#define JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H
3
4JEMALLOC_ALWAYS_INLINE bool
5background_thread_enabled(void) {
6 return atomic_load_b(&background_thread_enabled_state, ATOMIC_RELAXED);
7}
8
JEMALLOC_ALWAYS_INLINE void
background_thread_enabled_set(tsdn_t *tsdn, bool state) {
	/*
	 * Flip the global "background threads enabled" flag.  Callers must
	 * hold background_thread_lock (asserted below); the lock serializes
	 * all writers, so a relaxed store suffices.
	 */
	malloc_mutex_assert_owner(tsdn, &background_thread_lock);
	atomic_store_b(&background_thread_enabled_state, state, ATOMIC_RELAXED);
}
14
15JEMALLOC_ALWAYS_INLINE background_thread_info_t *
16arena_background_thread_info_get(arena_t *arena) {
17 unsigned arena_ind = arena_ind_get(arena);
18 return &background_thread_info[arena_ind % max_background_threads];
19}
20
21JEMALLOC_ALWAYS_INLINE background_thread_info_t *
22background_thread_info_get(size_t ind) {
23 return &background_thread_info[ind % max_background_threads];
24}
25
JEMALLOC_ALWAYS_INLINE uint64_t
background_thread_wakeup_time_get(background_thread_info_t *info) {
	/*
	 * Return the thread's next wakeup time in nanoseconds.  The
	 * indefinite_sleep flag (acquire load) must agree with whether
	 * next_wakeup holds the BACKGROUND_THREAD_INDEFINITE_SLEEP sentinel;
	 * the two are written together in background_thread_wakeup_time_set().
	 */
	uint64_t next_wakeup = nstime_ns(&info->next_wakeup);
	assert(atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE) ==
	    (next_wakeup == BACKGROUND_THREAD_INDEFINITE_SLEEP));
	return next_wakeup;
}
33
JEMALLOC_ALWAYS_INLINE void
background_thread_wakeup_time_set(tsdn_t *tsdn, background_thread_info_t *info,
    uint64_t wakeup_time) {
	/*
	 * Record when this background thread will next wake up.  Callers must
	 * hold info->mtx (asserted below).  indefinite_sleep mirrors whether
	 * wakeup_time is the BACKGROUND_THREAD_INDEFINITE_SLEEP sentinel; the
	 * release store pairs with the acquire loads in
	 * background_thread_indefinite_sleep() / _wakeup_time_get().
	 */
	malloc_mutex_assert_owner(tsdn, &info->mtx);
	atomic_store_b(&info->indefinite_sleep,
	    wakeup_time == BACKGROUND_THREAD_INDEFINITE_SLEEP, ATOMIC_RELEASE);
	nstime_init(&info->next_wakeup, wakeup_time);
}
42
43JEMALLOC_ALWAYS_INLINE bool
44background_thread_indefinite_sleep(background_thread_info_t *info) {
45 return atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE);
46}
47
48#endif /* JEMALLOC_INTERNAL_BACKGROUND_THREAD_INLINES_H */
49