/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cassert>
#include <cerrno>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <exception>
#include <limits>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <utility>

#include <folly/ConstexprMath.h>
#include <folly/Likely.h>
#include <folly/Traits.h>
#include <folly/functional/Invoke.h>
#include <folly/lang/Align.h>
#include <folly/lang/Exception.h>
#include <folly/portability/Config.h>
#include <folly/portability/Malloc.h>

namespace folly {

/// allocateBytes and deallocateBytes work like a checkedMalloc/free pair,
/// but take advantage of sized deletion when available
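///
/// Example (a minimal sketch; the caller must remember the size and pass it
/// unchanged to deallocateBytes):
///
///   void* buf = folly::allocateBytes(128);
///   // ... use buf ...
///   folly::deallocateBytes(buf, 128);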
inline void* allocateBytes(size_t n) {
  return ::operator new(n);
}

inline void deallocateBytes(void* p, size_t n) {
#if __cpp_sized_deallocation
  return ::operator delete(p, n);
#else
  (void)n;
  return ::operator delete(p);
#endif
}

#if _POSIX_C_SOURCE >= 200112L || _XOPEN_SOURCE >= 600 ||   \
    (defined(__ANDROID__) && (__ANDROID_API__ > 16)) ||     \
    (defined(__APPLE__) &&                                  \
     (__MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_6 ||      \
      __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_3_0)) || \
    defined(__FreeBSD__) || defined(__wasm32__)

inline void* aligned_malloc(size_t size, size_t align) {
  // use posix_memalign, but mimic the behaviour of memalign
  void* ptr = nullptr;
  int rc = posix_memalign(&ptr, align, size);
  return rc == 0 ? (errno = 0, ptr) : (errno = rc, nullptr);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#elif defined(_WIN32)

inline void* aligned_malloc(size_t size, size_t align) {
  return _aligned_malloc(size, align);
}

inline void aligned_free(void* aligned_ptr) {
  _aligned_free(aligned_ptr);
}

#else

inline void* aligned_malloc(size_t size, size_t align) {
  return memalign(align, size);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#endif

namespace detail {
template <typename Alloc, size_t kAlign, bool kAllocate>
void rawOverAlignedImpl(Alloc const& alloc, size_t n, void*& raw) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr bool kCanBypass = std::is_same<Alloc, std::allocator<T>>::value;

  // BaseType is a type that gives us as much alignment as we need if
  // we can get it naturally, otherwise it is aligned as max_align_t.
  // kBaseAlign is both the alignment and size of this type.
  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));
  using BaseType = std::aligned_storage_t<kBaseAlign, kBaseAlign>;
  using BaseAllocTraits =
      typename AllocTraits::template rebind_traits<BaseType>;
  using BaseAlloc = typename BaseAllocTraits::allocator_type;
  static_assert(
      sizeof(BaseType) == kBaseAlign && alignof(BaseType) == kBaseAlign, "");

#if __cpp_sized_deallocation
  if (kCanBypass && kAlign == kBaseAlign) {
    // until std::allocator uses sized deallocation, it is worth the
    // effort to bypass it when we are able
    if (kAllocate) {
      raw = ::operator new(n * sizeof(T));
    } else {
      ::operator delete(raw, n * sizeof(T));
    }
    return;
  }
#endif

  if (kCanBypass && kAlign > kBaseAlign) {
    // allocating as BaseType isn't sufficient to get alignment, but
    // since we can bypass Alloc we can use something like posix_memalign
    if (kAllocate) {
      raw = aligned_malloc(n * sizeof(T), kAlign);
    } else {
      aligned_free(raw);
    }
    return;
  }

  // we're not allowed to bypass Alloc, or we don't want to
  BaseAlloc a(alloc);

  // allocation size is counted in sizeof(BaseType)
  size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / sizeof(BaseType);
  if (kAlign <= kBaseAlign) {
    // rebinding Alloc to BaseType is sufficient to get us the alignment
    // we want, happy path
    if (kAllocate) {
      raw = static_cast<void*>(
          std::addressof(*BaseAllocTraits::allocate(a, quanta)));
    } else {
      BaseAllocTraits::deallocate(
          a,
          std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
              *static_cast<BaseType*>(raw)),
          quanta);
    }
    return;
  }

  // Overaligned and custom allocator, our only option is to
  // overallocate and store a delta to the actual allocation just
  // before the returned ptr.
  //
  // If we give ourselves kAlign extra bytes, then since
  // sizeof(BaseType) divides kAlign we can meet alignment while
  // getting a prefix of one BaseType.  If we happen to get a
  // kAlign-aligned block, then we can return a pointer to underlying
  // + kAlign, otherwise there will be at least kBaseAlign bytes in
  // the unused prefix of the first kAlign-aligned block.
  if (kAllocate) {
    char* base = reinterpret_cast<char*>(std::addressof(
        *BaseAllocTraits::allocate(a, quanta + kAlign / sizeof(BaseType))));
    size_t byteDelta =
        kAlign - (reinterpret_cast<uintptr_t>(base) & (kAlign - 1));
    raw = static_cast<void*>(base + byteDelta);
    static_cast<size_t*>(raw)[-1] = byteDelta;
  } else {
    size_t byteDelta = static_cast<size_t*>(raw)[-1];
    char* base = static_cast<char*>(raw) - byteDelta;
    BaseAllocTraits::deallocate(
        a,
        std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
            *reinterpret_cast<BaseType*>(base)),
        quanta + kAlign / sizeof(BaseType));
  }
}
} // namespace detail

// Works like std::allocator_traits<Alloc>::allocate, but handles
// over-aligned types. Feel free to manually specify any power of two as
// the Align template arg. Must be matched with deallocateOverAligned.
// allocationBytesForOverAligned will give you the number of bytes that
// this function actually requests.
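//
// Example (a minimal sketch): 64-byte-aligned storage for 16 ints from the
// default allocator.
//
//   std::allocator<int> alloc;
//   int* p = folly::allocateOverAligned<std::allocator<int>, 64>(alloc, 16);
//   // ... use p ...
//   folly::deallocateOverAligned<std::allocator<int>, 64>(alloc, p, 16);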
template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
typename std::allocator_traits<Alloc>::pointer allocateOverAligned(
    Alloc const& alloc,
    size_t n) {
  void* raw = nullptr;
  detail::rawOverAlignedImpl<Alloc, kAlign, true>(alloc, n, raw);
  return std::pointer_traits<typename std::allocator_traits<Alloc>::pointer>::
      pointer_to(
          *static_cast<typename std::allocator_traits<Alloc>::value_type*>(
              raw));
}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
void deallocateOverAligned(
    Alloc const& alloc,
    typename std::allocator_traits<Alloc>::pointer ptr,
    size_t n) {
  void* raw = static_cast<void*>(std::addressof(*ptr));
  detail::rawOverAlignedImpl<Alloc, kAlign, false>(alloc, n, raw);
}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
size_t allocationBytesForOverAligned(size_t n) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));

  if (kAlign > kBaseAlign && std::is_same<Alloc, std::allocator<T>>::value) {
    return n * sizeof(T);
  } else {
    size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / kBaseAlign;
    if (kAlign > kBaseAlign) {
      quanta += kAlign / kBaseAlign;
    }
    return quanta * kBaseAlign;
  }
}

/**
 * For exception safety and consistency with make_shared. Erase me when
 * we have std::make_unique().
 *
 * @author Louis Brandy (ldbrandy@fb.com)
 * @author Xu Ning (xning@fb.com)
 */

#if __cplusplus >= 201402L || __cpp_lib_make_unique >= 201304L || \
    (__ANDROID__ && __cplusplus >= 201300L) || _MSC_VER >= 1900

/* using override */ using std::make_unique;

#else

template <typename T, typename... Args>
typename std::enable_if<!std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(Args&&... args) {
  return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
}

// Allows 'make_unique<T[]>(10)'. (N3690 s20.9.1.4 p3-4)
template <typename T>
typename std::enable_if<std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(const size_t n) {
  return std::unique_ptr<T>(new typename std::remove_extent<T>::type[n]());
}

// Disallows 'make_unique<T[10]>()'. (N3690 s20.9.1.4 p5)
template <typename T, typename... Args>
typename std::enable_if<std::extent<T>::value != 0, std::unique_ptr<T>>::type
make_unique(Args&&...) = delete;

#endif

/**
 * static_function_deleter
 *
 * So you can write this:
 *
 *   using RSA_deleter = folly::static_function_deleter<RSA, &RSA_free>;
 *   auto rsa = std::unique_ptr<RSA, RSA_deleter>(RSA_new());
 *   RSA_generate_key_ex(rsa.get(), bits, exponent, nullptr);
 *   rsa = nullptr;  // calls RSA_free(rsa.get())
 *
 * This would be sweet as well for BIO, but unfortunately BIO_free has
 * signature int(BIO*) while we require signature void(BIO*). So you would
 * need to make a wrapper for it:
 *
 *   inline void BIO_free_fb(BIO* bio) { CHECK_EQ(1, BIO_free(bio)); }
 *   using BIO_deleter = folly::static_function_deleter<BIO, &BIO_free_fb>;
 *   auto buf = std::unique_ptr<BIO, BIO_deleter>(BIO_new(BIO_s_mem()));
 *   buf = nullptr;  // calls BIO_free(buf.get())
 */

template <typename T, void (*f)(T*)>
struct static_function_deleter {
  void operator()(T* t) const {
    f(t);
  }
};

/**
 * to_shared_ptr
 *
 * Convert unique_ptr to shared_ptr without specifying the template type
 * parameter and letting the compiler deduce it.
 *
 * So you can write this:
 *
 *   auto sptr = to_shared_ptr(getSomethingUnique<T>());
 *
 * Instead of this:
 *
 *   auto sptr = shared_ptr<T>(getSomethingUnique<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T, typename D>
std::shared_ptr<T> to_shared_ptr(std::unique_ptr<T, D>&& ptr) {
  return std::shared_ptr<T>(std::move(ptr));
}

/**
 * to_weak_ptr
 *
 * Make a weak_ptr and return it from a shared_ptr without specifying the
 * template type parameter and letting the compiler deduce it.
 *
 * So you can write this:
 *
 *   auto wptr = to_weak_ptr(getSomethingShared<T>());
 *
 * Instead of this:
 *
 *   auto wptr = weak_ptr<T>(getSomethingShared<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T>
std::weak_ptr<T> to_weak_ptr(const std::shared_ptr<T>& ptr) {
  return std::weak_ptr<T>(ptr);
}

namespace detail {
template <typename T>
struct lift_void_to_char {
  using type = T;
};
template <>
struct lift_void_to_char<void> {
  using type = char;
};
} // namespace detail

/**
 * SysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps std::malloc and
 * std::free.
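 *
 * Example (a minimal sketch):
 *
 *   std::vector<int, folly::SysAllocator<int>> vec;
 *   vec.push_back(42);  // storage comes from std::malloc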
 */
template <typename T>
class SysAllocator {
 private:
  using Self = SysAllocator<T>;

 public:
  using value_type = T;

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const p = std::malloc(sizeof(lifted) * count);
    if (!p) {
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    std::free(p);
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};

class DefaultAlign {
 private:
  using Self = DefaultAlign;
  std::size_t align_;

 public:
  explicit DefaultAlign(std::size_t align) noexcept : align_(align) {
    assert(!(align_ < sizeof(void*)) && bool("bad align: too small"));
    assert(!(align_ & (align_ - 1)) && bool("bad align: not power-of-two"));
  }
  std::size_t operator()(std::size_t align) const noexcept {
    return align_ < align ? align : align_;
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align_ == b.align_;
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align_ != b.align_;
  }
};

template <std::size_t Align>
class FixedAlign {
 private:
  static_assert(!(Align < sizeof(void*)), "bad align: too small");
  static_assert(!(Align & (Align - 1)), "bad align: not power-of-two");
  using Self = FixedAlign<Align>;

 public:
  constexpr std::size_t operator()(std::size_t align) const noexcept {
    return Align < align ? align : Align;
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};

/**
 * AlignedSysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps aligned_malloc
 * and aligned_free.
 *
 * Accepts a policy parameter for providing the alignment, which must:
 *   * be invocable as std::size_t(std::size_t) noexcept, taking the type
 *     alignment and returning the allocation alignment
 *   * be noexcept-copy-constructible
 *   * have noexcept operator==
 *   * have noexcept operator!=
 *   * not be final
 *
 * DefaultAlign and FixedAlign<std::size_t>, provided above, are valid
 * policies.
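 *
 * Example (a minimal sketch):
 *
 *   folly::AlignedSysAllocator<float, folly::FixedAlign<64>> alloc;
 *   std::vector<float, decltype(alloc)> vec(alloc);  // 64-byte-aligned data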
 */
template <typename T, typename Align = DefaultAlign>
class AlignedSysAllocator : private Align {
 private:
  using Self = AlignedSysAllocator<T, Align>;

  template <typename, typename>
  friend class AlignedSysAllocator;

  constexpr Align const& align() const {
    return *this;
  }

 public:
  static_assert(std::is_nothrow_copy_constructible<Align>::value, "");
  static_assert(
      is_nothrow_invocable_r<std::size_t, Align, std::size_t>::value,
      "");

  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  using Align::Align;

  // TODO: remove this ctor, which is no longer required as of gcc7
  template <
      typename S = Align,
      std::enable_if_t<std::is_default_constructible<S>::value, int> = 0>
  constexpr AlignedSysAllocator() noexcept(noexcept(Align())) : Align() {}

  template <typename U>
  constexpr explicit AlignedSysAllocator(
      AlignedSysAllocator<U, Align> const& other) noexcept
      : Align(other.align()) {}

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const a = align()(alignof(lifted));
    auto const p = aligned_malloc(sizeof(lifted) * count, a);
    if (!p) {
      if (FOLLY_UNLIKELY(errno != ENOMEM)) {
        std::terminate();
      }
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    aligned_free(p);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align() == b.align();
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align() != b.align();
  }
};

/**
 * CxxAllocatorAdaptor
 *
 * A type conforming to C++ concept Allocator, delegating operations to an
 * unowned Inner which has this required interface:
 *
 *   void* allocate(std::size_t)
 *   void deallocate(void*, std::size_t)
 *
 * Note that Inner is *not* a C++ Allocator.
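 *
 * Example (a minimal sketch; Arena is a hypothetical type with the
 * interface above):
 *
 *   Arena arena;
 *   using Alloc = folly::CxxAllocatorAdaptor<int, Arena>;
 *   std::vector<int, Alloc> vec(Alloc(arena));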
 */
template <typename T, class Inner>
class CxxAllocatorAdaptor {
 private:
  using Self = CxxAllocatorAdaptor<T, Inner>;

  template <typename U, typename UAlloc>
  friend class CxxAllocatorAdaptor;

  std::reference_wrapper<Inner> ref_;

 public:
  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  explicit CxxAllocatorAdaptor(Inner& ref) : ref_(ref) {}

  template <typename U>
  explicit CxxAllocatorAdaptor(CxxAllocatorAdaptor<U, Inner> const& other)
      : ref_(other.ref_) {}

  T* allocate(std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    return static_cast<T*>(ref_.get().allocate(sizeof(lifted) * n));
  }
  void deallocate(T* p, std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    ref_.get().deallocate(p, sizeof(lifted) * n);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) == std::addressof(b.ref_.get());
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) != std::addressof(b.ref_.get());
  }
};

/*
 * allocator_delete
 *
 * A deleter which automatically works with a given allocator.
 *
 * Derives from the allocator to take advantage of the empty base
 * optimization when possible.
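 *
 * Example (a minimal sketch):
 *
 *   using Alloc = folly::SysAllocator<int>;
 *   using Traits = std::allocator_traits<Alloc>;
 *   Alloc alloc;
 *   int* p = Traits::allocate(alloc, 1);
 *   Traits::construct(alloc, p, 42);
 *   std::unique_ptr<int, folly::allocator_delete<Alloc>> up(
 *       p, folly::allocator_delete<Alloc>(alloc));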
 */
template <typename Alloc>
class allocator_delete : private std::remove_reference<Alloc>::type {
 private:
  using allocator_type = typename std::remove_reference<Alloc>::type;
  using allocator_traits = std::allocator_traits<allocator_type>;
  using value_type = typename allocator_traits::value_type;
  using pointer = typename allocator_traits::pointer;

 public:
  allocator_delete() = default;
  allocator_delete(allocator_delete const&) = default;
  allocator_delete(allocator_delete&&) = default;
  allocator_delete& operator=(allocator_delete const&) = default;
  allocator_delete& operator=(allocator_delete&&) = default;

  explicit allocator_delete(const allocator_type& alloc)
      : allocator_type(alloc) {}

  explicit allocator_delete(allocator_type&& alloc)
      : allocator_type(std::move(alloc)) {}

  template <typename U>
  allocator_delete(const allocator_delete<U>& other)
      : allocator_type(other.get_allocator()) {}

  allocator_type const& get_allocator() const {
    return *this;
  }

  void operator()(pointer p) const {
    auto alloc = get_allocator();
    allocator_traits::destroy(alloc, p);
    allocator_traits::deallocate(alloc, p, 1);
  }
};

/**
 * allocate_unique, like std::allocate_shared but for std::unique_ptr
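 *
 * Example (a minimal sketch):
 *
 *   folly::SysAllocator<int> alloc;
 *   auto p = folly::allocate_unique<int>(alloc, 42);
 *   // p is a std::unique_ptr<int, folly::allocator_delete<...>>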
 */
template <typename T, typename Alloc, typename... Args>
std::unique_ptr<T, allocator_delete<Alloc>> allocate_unique(
    Alloc const& alloc,
    Args&&... args) {
  using traits = std::allocator_traits<Alloc>;
  struct DeferCondDeallocate {
    bool& cond;
    Alloc& copy;
    T* p;
    ~DeferCondDeallocate() {
      if (FOLLY_UNLIKELY(!cond)) {
        traits::deallocate(copy, p, 1);
      }
    }
  };
  auto copy = alloc;
  auto const p = traits::allocate(copy, 1);
  {
    bool constructed = false;
    DeferCondDeallocate handler{constructed, copy, p};
    traits::construct(copy, p, static_cast<Args&&>(args)...);
    constructed = true;
  }
  return {p, allocator_delete<Alloc>(std::move(copy))};
}

struct SysBufferDeleter {
  void operator()(void* ptr) {
    std::free(ptr);
  }
};
using SysBufferUniquePtr = std::unique_ptr<void, SysBufferDeleter>;

inline SysBufferUniquePtr allocate_sys_buffer(std::size_t size) {
  auto p = std::malloc(size);
  if (!p) {
    throw_exception<std::bad_alloc>();
  }
  return {p, {}};
}

/**
 * AllocatorHasTrivialDeallocate
 *
 * Unambiguously inherits std::integral_constant<bool, V> for some bool V.
 *
 * Describes whether a C++ Allocator has trivial, i.e. no-op, deallocate().
 *
 * Also may be used to describe types which may be used with
 * CxxAllocatorAdaptor.
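 *
 * Example (a minimal sketch; BumpArena is a hypothetical type whose
 * deallocate is a no-op):
 *
 *   namespace folly {
 *   template <>
 *   struct AllocatorHasTrivialDeallocate<BumpArena> : std::true_type {};
 *   } // namespace folly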
 */
template <typename Alloc>
struct AllocatorHasTrivialDeallocate : std::false_type {};

template <typename T, class Alloc>
struct AllocatorHasTrivialDeallocate<CxxAllocatorAdaptor<T, Alloc>>
    : AllocatorHasTrivialDeallocate<Alloc> {};

namespace detail {
// note that construct and destroy here are methods, not short names for
// the constructor and destructor
FOLLY_CREATE_MEMBER_INVOKER(AllocatorConstruct_, construct);
FOLLY_CREATE_MEMBER_INVOKER(AllocatorDestroy_, destroy);

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_
    : folly::is_invocable<AllocatorConstruct_, Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_<
    void_t<typename Alloc::folly_has_default_object_construct>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_construct> {};

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_
    : folly::is_invocable<AllocatorDestroy_, Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_<
    void_t<typename Alloc::folly_has_default_object_destroy>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_destroy> {};
} // namespace detail

/**
 * AllocatorHasDefaultObjectConstruct
 *
 * AllocatorHasDefaultObjectConstruct<A, T, Args...> unambiguously
 * inherits std::integral_constant<bool, V>, where V will be true iff
 * the effect of std::allocator_traits<A>::construct(a, p, args...) is
 * the same as new (static_cast<void*>(p)) T(args...). If true then
 * any optimizations applicable to object construction (relying on
 * std::is_trivially_copyable<T>, for example) can be applied to objects
 * in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_construct. It is helpful to do this if you
 * define a custom allocator type that defines a construct method, but
 * that method doesn't do anything except call placement new.
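 *
 * Example (a minimal sketch; LoggingAlloc is a hypothetical allocator whose
 * construct/destroy do nothing beyond placement new and calling ~T(), so it
 * opts in to the default-construct and default-destroy optimizations):
 *
 *   struct LoggingAlloc {
 *     using value_type = int;
 *     using folly_has_default_object_construct = std::true_type;
 *     using folly_has_default_object_destroy = std::true_type;
 *     // allocate, deallocate, and forwarding construct/destroy omitted
 *   };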
 */
template <typename Alloc, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct
    : Negation<
          detail::AllocatorCustomizesConstruct_<void, Alloc, T*, Args...>> {};

template <typename Value, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct<std::allocator<Value>, T, Args...>
    : std::true_type {};

/**
 * AllocatorHasDefaultObjectDestroy
 *
 * AllocatorHasDefaultObjectDestroy<A, T> unambiguously inherits
 * std::integral_constant<bool, V>, where V will be true iff the effect
 * of std::allocator_traits<A>::destroy(a, p) is the same as p->~T().
 * If true then optimizations applicable to object destruction (relying
 * on std::is_trivially_destructible<T>, for example) can be applied to
 * objects in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_destroy. It is helpful to do this if you
 * define a custom allocator type that defines a destroy method, but that
 * method doesn't do anything except call the object's destructor. See the
 * example above for folly_has_default_object_construct.
 */
template <typename Alloc, typename T>
struct AllocatorHasDefaultObjectDestroy
    : Negation<detail::AllocatorCustomizesDestroy_<void, Alloc, T*>> {};

template <typename Value, typename T>
struct AllocatorHasDefaultObjectDestroy<std::allocator<Value>, T>
    : std::true_type {};

} // namespace folly