// folly/folly/Memory.h

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cassert>
#include <cerrno>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <exception>
#include <limits>
#include <memory>
#include <new>
#include <stdexcept>
#include <type_traits>
#include <utility>

#include <folly/ConstexprMath.h>
#include <folly/Likely.h>
#include <folly/Portability.h>
#include <folly/Traits.h>
#include <folly/Utility.h>
#include <folly/functional/Invoke.h>
#include <folly/lang/Align.h>
#include <folly/lang/Exception.h>
#include <folly/lang/Thunk.h>
#include <folly/memory/Malloc.h>
#include <folly/portability/Config.h>
#include <folly/portability/Malloc.h>

namespace folly {

#if (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE >= 200112L) || \
    (defined(_XOPEN_SOURCE) && _XOPEN_SOURCE >= 600) ||         \
    (defined(__ANDROID__) && (__ANDROID_API__ > 16)) ||         \
    (defined(__APPLE__)) || defined(__FreeBSD__) || defined(__wasm32__)

// Allocate size bytes with the given power-of-two alignment (which must be a
// multiple of sizeof(void*) on this path).  Returns nullptr on failure.
inline void* aligned_malloc(size_t size, size_t align) {
  // use posix_memalign, but mimic the behaviour of memalign: report the
  // failure reason through errno rather than the return code
  void* ptr = nullptr;
  int rc = posix_memalign(&ptr, align, size);
  return rc == 0 ? (errno = 0, ptr) : (errno = rc, nullptr);
}

// Free memory obtained from aligned_malloc.
inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#elif defined(_WIN32)

inline void* aligned_malloc(size_t size, size_t align) {
  return _aligned_malloc(size, align);
}

inline void aligned_free(void* aligned_ptr) {
  // _aligned_malloc'd memory must go back through _aligned_free
  _aligned_free(aligned_ptr);
}

#else

inline void* aligned_malloc(size_t size, size_t align) {
  return memalign(align, size);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#endif

namespace detail {
template <typename Alloc, size_t kAlign, bool kAllocate>
void rawOverAlignedImpl(Alloc const& alloc, size_t n, void*& raw) {}
} // namespace detail

// Works like std::allocator_traits<Alloc>::allocate, but handles
// over-aligned types.  Feel free to manually specify any power of two as
// the Align template arg.  Must be matched with deallocateOverAligned.
// allocationBytesForOverAligned will give you the number of bytes that
// this function actually requests.
template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
typename std::allocator_traits<Alloc>::pointer allocateOverAligned(
    Alloc const& alloc, size_t n) {}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
void deallocateOverAligned(
    Alloc const& alloc,
    typename std::allocator_traits<Alloc>::pointer ptr,
    size_t n) {}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
size_t allocationBytesForOverAligned(size_t n) {}

/**
 * static_function_deleter
 *
 * So you can write this:
 *
 *      using RSA_deleter = folly::static_function_deleter<RSA, &RSA_free>;
 *      auto rsa = std::unique_ptr<RSA, RSA_deleter>(RSA_new());
 *      RSA_generate_key_ex(rsa.get(), bits, exponent, nullptr);
 *      rsa = nullptr;  // calls RSA_free(rsa.get())
 *
 * This would be sweet as well for BIO, but unfortunately BIO_free has signature
 * int(BIO*) while we require signature void(BIO*). So you would need to make a
 * wrapper for it:
 *
 *      inline void BIO_free_fb(BIO* bio) { CHECK_EQ(1, BIO_free(bio)); }
 *      using BIO_deleter = folly::static_function_deleter<BIO, &BIO_free_fb>;
 *      auto buf = std::unique_ptr<BIO, BIO_deleter>(BIO_new(BIO_s_mem()));
 *      buf = nullptr;  // calls BIO_free(buf.get())
 */

template <typename T, void (*f)(T*)>
struct static_function_deleter {
  /// Invokes the statically-bound function f on the owned pointer.
  void operator()(T* t) const { f(t); }
};

/**
 *  to_shared_ptr
 *
 *  Convert unique_ptr to shared_ptr without specifying the template type
 *  parameter and letting the compiler deduce it.
 *
 *  So you can write this:
 *
 *      auto sptr = to_shared_ptr(getSomethingUnique<T>());
 *
 *  Instead of this:
 *
 *      auto sptr = shared_ptr<T>(getSomethingUnique<T>());
 *
 *  Useful when `T` is long, such as:
 *
 *      using T = foobar::FooBarAsyncClient;
 */
template <typename T, typename D>
std::shared_ptr<T> to_shared_ptr(std::unique_ptr<T, D>&& ptr) {
  // shared_ptr's unique_ptr constructor preserves the custom deleter D
  return std::shared_ptr<T>(std::move(ptr));
}

/**
 *  to_shared_ptr_aliasing
 */
template <typename T, typename U>
std::shared_ptr<U> to_shared_ptr_aliasing(std::shared_ptr<T> const& r, U* ptr) {
  // aliasing constructor: shares ownership with r but get() returns ptr
  return std::shared_ptr<U>(r, ptr);
}

/**
 *  to_weak_ptr
 *
 *  Make a weak_ptr and return it from a shared_ptr without specifying the
 *  template type parameter and letting the compiler deduce it.
 *
 *  So you can write this:
 *
 *      auto wptr = to_weak_ptr(getSomethingShared<T>());
 *
 *  Instead of this:
 *
 *      auto wptr = weak_ptr<T>(getSomethingShared<T>());
 *
 *  Useful when `T` is long, such as:
 *
 *      using T = foobar::FooBarAsyncClient;
 */
template <typename T>
std::weak_ptr<T> to_weak_ptr(const std::shared_ptr<T>& ptr) {
  return std::weak_ptr<T>(ptr);
}

#if defined(__GLIBCXX__)
namespace detail {
// Declared here but never defined out-of-line: the definition is injected
// as a friend of GenerateWeakPtrInternalsAccessor below.
void weak_ptr_set_stored_ptr(std::weak_ptr<void>& w, void* ptr);

// Uses a pointer-to-member template parameter to overwrite the stored
// (aliased) pointer inside a libstdc++ weak_ptr without touching its
// control block.  NOTE(review): relies on the libstdc++ ABI detail that
// std::weak_ptr<T> derives from std::__weak_ptr<T> -- confirm on upgrade.
template <typename Tag, void* std::__weak_ptr<void>::*WeakPtr_Ptr_Field>
struct GenerateWeakPtrInternalsAccessor {
  friend void weak_ptr_set_stored_ptr(std::weak_ptr<void>& w, void* ptr) {
    w.*WeakPtr_Ptr_Field = ptr;
  }
};

// Each template instantiation of GenerateWeakPtrInternalsAccessor must
// be a new type, to avoid ODR problems.  We do this by tagging it with
// a type from an anon namespace.
namespace {
struct MemoryAnonTag {};
} // namespace

// Explicit instantiation: naming the private member _M_ptr is permitted
// here because access checking does not apply to the template arguments
// of an explicit instantiation.
template struct GenerateWeakPtrInternalsAccessor<
    MemoryAnonTag,
    &std::__weak_ptr<void>::_M_ptr>;
} // namespace detail
#endif

/**
 *  to_weak_ptr_aliasing
 *
 *  Like to_weak_ptr, but arranges that lock().get() on the returned
 *  pointer points to ptr rather than r.get().
 *
 *  Equivalent to:
 *
 *      to_weak_ptr(std::shared_ptr<U>(r, ptr))
 *
 *  For libstdc++, ABI-specific tricks are used to optimize the
 *  implementation.
 */
template <typename T, typename U>
std::weak_ptr<U> to_weak_ptr_aliasing(const std::shared_ptr<T>& r, U* ptr) {}

/**
 *  copy_to_unique_ptr
 *
 *  Move or copy the argument to the heap and return it owned by a unique_ptr.
 *
 *  Like std::make_unique, but deduces the type of the owned object.
 */
template <typename T>
std::unique_ptr<remove_cvref_t<T>> copy_to_unique_ptr(T&& t) {
  // moves from rvalue arguments, copies from lvalues
  return std::make_unique<remove_cvref_t<T>>(std::forward<T>(t));
}

/**
 *  copy_to_shared_ptr
 *
 *  Move or copy the argument to the heap and return it owned by a shared_ptr.
 *
 *  Like make_shared, but deduces the type of the owned object.
 */
template <typename T>
std::shared_ptr<remove_cvref_t<T>> copy_to_shared_ptr(T&& t) {
  // moves from rvalue arguments, copies from lvalues
  return std::make_shared<remove_cvref_t<T>>(std::forward<T>(t));
}

/**
 *  copy_through_unique_ptr
 *
 *  If the argument is nonnull, allocates a copy of its pointee.
 */
template <typename T>
std::unique_ptr<T> copy_through_unique_ptr(const std::unique_ptr<T>& t) {
  // null propagates as null; non-null pointees are copy-constructed
  return t ? std::make_unique<T>(*t) : nullptr;
}

//  erased_unique_ptr
//
//  A type-erased smart-ptr with unique ownership to a heap-allocated object.
using erased_unique_ptr = std::unique_ptr<void, void (*)(void*)>;

namespace detail {
// for erased_unique_ptr with types that specialize default_delete
template <typename T>
void erased_unique_ptr_delete(void* ptr) {
  std::default_delete<T>()(static_cast<T*>(ptr));
}
} // namespace detail

//  to_erased_unique_ptr
//
//  Converts an owning pointer to an object to an erased_unique_ptr.
template <typename T>
erased_unique_ptr to_erased_unique_ptr(T* const ptr) noexcept {
  return {ptr, detail::erased_unique_ptr_delete<T>};
}

//  to_erased_unique_ptr
//
//  Converts an owning std::unique_ptr to an erased_unique_ptr.
template <typename T>
erased_unique_ptr to_erased_unique_ptr(std::unique_ptr<T> ptr) noexcept {
  return to_erased_unique_ptr(ptr.release());
}

//  make_erased_unique
//
//  Allocate an object of the T on the heap, constructed with a..., and return
//  an owning erased_unique_ptr to it.
template <typename T, typename... A>
erased_unique_ptr make_erased_unique(A&&... a) {
  return to_erased_unique_ptr(std::make_unique<T>(std::forward<A>(a)...));
}

//  copy_to_erased_unique_ptr
//
//  Copy an object to the heap and return an owning erased_unique_ptr to it.
template <typename T>
erased_unique_ptr copy_to_erased_unique_ptr(T&& obj) {
  return to_erased_unique_ptr(copy_to_unique_ptr(static_cast<T&&>(obj)));
}

//  empty_erased_unique_ptr
//
//  Return an empty erased_unique_ptr.
inline erased_unique_ptr empty_erased_unique_ptr() {
  return {nullptr, nullptr};
}

/**
 * SysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps std::malloc and
 * std::free.
 */
template <typename T>
class SysAllocator {
 private:
  using Self = SysAllocator<T>;

 public:
  using value_type = T;

  constexpr SysAllocator() = default;

  /// Converting constructor, as required of C++ Allocators.
  template <typename U, std::enable_if_t<!std::is_same<U, T>::value, int> = 0>
  constexpr SysAllocator(SysAllocator<U> const&) noexcept {}

  /// Allocates uninitialized storage for count objects via std::malloc;
  /// throws std::bad_alloc on exhaustion, matching std::allocator.
  T* allocate(size_t count) {
    auto const p = std::malloc(sizeof(T) * count);
    if (!p) {
      throw std::bad_alloc();
    }
    return static_cast<T*>(p);
  }

  void deallocate(T* p, size_t /* count */) { std::free(p); }

  /// All SysAllocators are stateless and interchangeable.
  friend bool operator==(Self const&, Self const&) noexcept { return true; }
  friend bool operator!=(Self const&, Self const&) noexcept { return false; }
};

/// Alignment policy carrying a runtime alignment.  The provided alignment
/// must be a power of two no smaller than sizeof(void*); invocation
/// returns the larger of that value and the type's natural alignment.
class DefaultAlign {
 private:
  using Self = DefaultAlign;
  std::size_t align_;

 public:
  explicit DefaultAlign(std::size_t align) noexcept : align_(align) {
    assert(!(align_ < sizeof(void*)) && bool("bad align: too small"));
    assert(!(align_ & (align_ - 1)) && bool("bad align: not power-of-two"));
  }
  std::size_t operator()(std::size_t align) const noexcept {
    return align_ < align ? align : align_;
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align_ == b.align_;
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align_ != b.align_;
  }
};

/// Alignment policy with a compile-time alignment; stateless.
template <std::size_t Align>
class FixedAlign {
 private:
  static_assert(!(Align < sizeof(void*)), "bad align: too small");
  static_assert(!(Align & (Align - 1)), "bad align: not power-of-two");
  using Self = FixedAlign<Align>;

 public:
  constexpr std::size_t operator()(std::size_t align) const noexcept {
    return Align < align ? align : Align;
  }

  friend bool operator==(Self const&, Self const&) noexcept { return true; }
  friend bool operator!=(Self const&, Self const&) noexcept { return false; }
};

/**
 * AlignedSysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps aligned_malloc
 * and aligned_free.
 *
 * Accepts a policy parameter for providing the alignment, which must:
 *   * be invocable as std::size_t(std::size_t) noexcept
 *     * taking the type alignment and returning the allocation alignment
 *   * be noexcept-copy-constructible
 *   * have noexcept operator==
 *   * have noexcept operator!=
 *   * not be final
 *
 * DefaultAlign and FixedAlign<std::size_t>, provided above, are valid
 * policies.
 */
template <typename T, typename Align = DefaultAlign>
class AlignedSysAllocator : private Align {
 private:
  using Self = AlignedSysAllocator<T, Align>;

  template <typename, typename>
  friend class AlignedSysAllocator;

  // private inheritance gives the empty-base optimization for stateless
  // policies; this accessor recovers the policy object
  constexpr Align const& align() const { return *this; }

 public:
  static_assert(std::is_nothrow_copy_constructible<Align>::value, "");

  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  using Align::Align;

  template <
      typename S = Align,
      std::enable_if_t<std::is_default_constructible<S>::value, int> = 0>
  constexpr AlignedSysAllocator() noexcept(noexcept(Align())) : Align() {}

  /// Converting constructor, as required of C++ Allocators.
  template <typename U>
  constexpr AlignedSysAllocator(
      AlignedSysAllocator<U, Align> const& other) noexcept
      : Align(other.align()) {}

  /// Allocates storage for count objects, aligned per the policy applied
  /// to alignof(T); throws std::bad_alloc on exhaustion.
  T* allocate(std::size_t count) {
    auto const a = align()(alignof(T));
    auto const p = aligned_malloc(sizeof(T) * count, a);
    if (!p) {
      if (errno != ENOMEM) {
        // any failure other than exhaustion indicates an unusable alignment
        std::terminate();
      }
      throw std::bad_alloc();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, std::size_t /* count */) { aligned_free(p); }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align() == b.align();
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align() != b.align();
  }
};

/**
 * CxxAllocatorAdaptor
 *
 * A type conforming to C++ concept Allocator, delegating operations to an
 * unowned Inner which has this required interface:
 *
 *   void* allocate(std::size_t)
 *   void deallocate(void*, std::size_t)
 *
 * Note that Inner is *not* a C++ Allocator.
 */
template <typename T, class Inner, bool FallbackToStdAlloc = false>
class CxxAllocatorAdaptor : private std::allocator<T> {
 private:
  using Self = CxxAllocatorAdaptor<T, Inner, FallbackToStdAlloc>;

  template <typename U, typename UInner, bool UFallback>
  friend class CxxAllocatorAdaptor;

  // the unowned delegate; may be null only when FallbackToStdAlloc
  Inner* inner_ = nullptr;

 public:
  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  /// Default-constructible only when fallback to std::allocator is allowed.
  template <bool X = FallbackToStdAlloc, std::enable_if_t<X, int> = 0>
  constexpr explicit CxxAllocatorAdaptor() {}

  constexpr explicit CxxAllocatorAdaptor(Inner& ref) noexcept
      : inner_(&ref) {}

  constexpr CxxAllocatorAdaptor(CxxAllocatorAdaptor const&) = default;

  /// Converting constructor, as required of C++ Allocators.
  template <typename U, std::enable_if_t<!std::is_same<U, T>::value, int> = 0>
  constexpr CxxAllocatorAdaptor(
      CxxAllocatorAdaptor<U, Inner, FallbackToStdAlloc> const& other) noexcept
      : inner_(other.inner_) {}

  CxxAllocatorAdaptor& operator=(CxxAllocatorAdaptor const&) = default;

  template <typename U, std::enable_if_t<!std::is_same<U, T>::value, int> = 0>
  CxxAllocatorAdaptor& operator=(
      CxxAllocatorAdaptor<U, Inner, FallbackToStdAlloc> const& other) noexcept {
    inner_ = other.inner_;
    return *this;
  }

  /// Delegates to Inner::allocate (byte-counted); falls back to
  /// std::allocator when permitted and no Inner is bound.
  T* allocate(std::size_t n) {
    if (FallbackToStdAlloc && inner_ == nullptr) {
      return std::allocator<T>::allocate(n);
    }
    return static_cast<T*>(inner_->allocate(sizeof(T) * n));
  }

  void deallocate(T* p, std::size_t n) {
    if (inner_ != nullptr) {
      inner_->deallocate(p, sizeof(T) * n);
    } else {
      assert(FallbackToStdAlloc);
      std::allocator<T>::deallocate(p, n);
    }
  }

  /// Equality means delegation to the same Inner instance.
  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.inner_ == b.inner_;
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.inner_ != b.inner_;
  }
};

/*
 * allocator_delete
 *
 * A deleter which automatically works with a given allocator.
 *
 * Derives from the allocator to take advantage of the empty base
 * optimization when possible.
 */
template <typename Alloc>
class allocator_delete : private std::remove_reference<Alloc>::type {
 private:
  using allocator_type = typename std::remove_reference<Alloc>::type;
  using allocator_traits = std::allocator_traits<allocator_type>;
  using value_type = typename allocator_traits::value_type;
  using pointer = typename allocator_traits::pointer;

 public:
  allocator_delete() = default;
  allocator_delete(allocator_delete const&) = default;
  allocator_delete(allocator_delete&&) = default;
  allocator_delete& operator=(allocator_delete const&) = default;
  allocator_delete& operator=(allocator_delete&&) = default;

  explicit allocator_delete(const allocator_type& alloc)
      : allocator_type(alloc) {}

  explicit allocator_delete(allocator_type&& alloc)
      : allocator_type(std::move(alloc)) {}

  /// Converting constructor from a deleter over a compatible allocator.
  template <typename U>
  allocator_delete(const allocator_delete<U>& other)
      : allocator_type(other.get_allocator()) {}

  /// The allocator is the (empty-base-optimized) private base.
  allocator_type const& get_allocator() const { return *this; }

  /// Destroys *p and releases its storage through the allocator.
  void operator()(pointer p) const {
    auto alloc = get_allocator();
    allocator_traits::destroy(alloc, p);
    allocator_traits::deallocate(alloc, p, 1);
  }
};

/**
 * allocate_unique, like std::allocate_shared but for std::unique_ptr
 */
template <typename T, typename Alloc, typename... Args>
std::unique_ptr<
    T,
    allocator_delete<
        typename std::allocator_traits<Alloc>::template rebind_alloc<T>>>
allocate_unique(Alloc const& alloc, Args&&... args) {
  using TAlloc =
      typename std::allocator_traits<Alloc>::template rebind_alloc<T>;
  using traits = std::allocator_traits<TAlloc>;

  TAlloc copy(alloc);
  auto const p = traits::allocate(copy, 1);
  // if construction throws, give the storage back before propagating
  try {
    traits::construct(copy, p, std::forward<Args>(args)...);
  } catch (...) {
    traits::deallocate(copy, p, 1);
    throw;
  }
  return {p, allocator_delete<TAlloc>(std::move(copy))};
}

struct SysBufferDeleter {
  void operator()(void* ptr) { std::free(ptr); }
};

//  SysBufferUniquePtr
//
//  A unique_ptr to a raw buffer obtained from std::malloc.
using SysBufferUniquePtr = std::unique_ptr<void, SysBufferDeleter>;

/// Allocates a raw buffer of size bytes owned by a SysBufferUniquePtr;
/// throws std::bad_alloc on exhaustion.
inline SysBufferUniquePtr allocate_sys_buffer(std::size_t size) {
  auto buf = std::malloc(size);
  if (!buf) {
    throw std::bad_alloc();
  }
  return {buf, SysBufferDeleter()};
}

/**
 * AllocatorHasTrivialDeallocate
 *
 * Unambiguously inherits std::integral_constant<bool, V> for some bool V.
 *
 * Describes whether a C++ Aallocator has trivial, i.e. no-op, deallocate().
 *
 * Also may be used to describe types which may be used with
 * CxxAllocatorAdaptor.
 */
// Primary template: conservatively reports a non-trivial deallocate().
// Specialize to inherit std::true_type for allocator types whose
// deallocate() is a no-op.
template <typename Alloc>
struct AllocatorHasTrivialDeallocate : std::false_type {};

AllocatorHasTrivialDeallocate<CxxAllocatorAdaptor<T, Alloc>>;

namespace detail {
// note that construct and destroy here are methods, not short names for
// the constructor and destructor
FOLLY_CREATE_MEMBER_INVOKER();
FOLLY_CREATE_MEMBER_INVOKER();

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_
    : folly::is_invocable<AllocatorConstruct_, Alloc, Args...> {};

AllocatorCustomizesConstruct_<void_t<typename Alloc::folly_has_default_object_construct>, Alloc, Args...>;

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_
    : folly::is_invocable<AllocatorDestroy_, Alloc, Args...> {};

AllocatorCustomizesDestroy_<void_t<typename Alloc::folly_has_default_object_destroy>, Alloc, Args...>;
} // namespace detail

/**
 * AllocatorHasDefaultObjectConstruct
 *
 * AllocatorHasDefaultObjectConstruct<A, T, Args...> unambiguously
 * inherits std::integral_constant<bool, V>, where V will be true iff
 * the effect of std::allocator_traits<A>::construct(a, p, args...) is
 * the same as new (static_cast<void*>(p)) T(args...).  If true then
 * any optimizations applicable to object construction (relying on
 * std::is_trivially_copyable<T>, for example) can be applied to objects
 * in an allocator-aware container using an allocation of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_construct.  It is helpful to do this if you
 * define a custom allocator type that defines a construct method, but
 * that method doesn't do anything except call placement new.
 */
// True iff Alloc does NOT customize construct(p, args...) (or explicitly
// opts in via the folly_has_default_object_construct alias), i.e. its
// construct is equivalent to placement new -- see the comment above.
template <typename Alloc, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct
    : Negation<
          detail::AllocatorCustomizesConstruct_<void, Alloc, T*, Args...>> {};

AllocatorHasDefaultObjectConstruct<std::allocator<Value>, T, Args...>;

/**
 * AllocatorHasDefaultObjectDestroy
 *
 * AllocatorHasDefaultObjectDestroy<A, T> unambiguously inherits
 * std::integral_constant<bool, V>, where V will be true iff the effect
 * of std::allocator_traits<A>::destroy(a, p) is the same as p->~T().
 * If true then optimizations applicable to object destruction (relying
 * on std::is_trivially_destructible<T>, for example) can be applied to
 * objects in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_destroy.  It is helpful to do this if you
 * define a custom allocator type that defines a destroy method, but that
 * method doesn't do anything except call the object's destructor.
 */
// True iff Alloc does NOT customize destroy(p) (or explicitly opts in via
// the folly_has_default_object_destroy alias), i.e. its destroy is
// equivalent to p->~T() -- see the comment above.
template <typename Alloc, typename T>
struct AllocatorHasDefaultObjectDestroy
    : Negation<detail::AllocatorCustomizesDestroy_<void, Alloc, T*>> {};

AllocatorHasDefaultObjectDestroy<std::allocator<Value>, T>;

} // namespace folly