#if !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <limits>
#include <memory>
#include <random>
#include <set>
#include <tuple>
#include <vector>
#include "partition_alloc/address_space_randomization.h"
#include "partition_alloc/build_config.h"
#include "partition_alloc/buildflags.h"
#include "partition_alloc/dangling_raw_ptr_checks.h"
#include "partition_alloc/freeslot_bitmap.h"
#include "partition_alloc/in_slot_metadata.h"
#include "partition_alloc/lightweight_quarantine.h"
#include "partition_alloc/memory_reclaimer.h"
#include "partition_alloc/page_allocator_constants.h"
#include "partition_alloc/partition_address_space.h"
#include "partition_alloc/partition_alloc_base/bits.h"
#include "partition_alloc/partition_alloc_base/compiler_specific.h"
#include "partition_alloc/partition_alloc_base/cpu.h"
#include "partition_alloc/partition_alloc_base/logging.h"
#include "partition_alloc/partition_alloc_base/numerics/checked_math.h"
#include "partition_alloc/partition_alloc_base/rand_util.h"
#include "partition_alloc/partition_alloc_base/system/sys_info.h"
#include "partition_alloc/partition_alloc_base/test/gtest_util.h"
#include "partition_alloc/partition_alloc_base/thread_annotations.h"
#include "partition_alloc/partition_alloc_base/threading/platform_thread_for_testing.h"
#include "partition_alloc/partition_alloc_config.h"
#include "partition_alloc/partition_alloc_constants.h"
#include "partition_alloc/partition_alloc_for_testing.h"
#include "partition_alloc/partition_alloc_forward.h"
#include "partition_alloc/partition_bucket.h"
#include "partition_alloc/partition_cookie.h"
#include "partition_alloc/partition_freelist_entry.h"
#include "partition_alloc/partition_page.h"
#include "partition_alloc/partition_root.h"
#include "partition_alloc/partition_stats.h"
#include "partition_alloc/reservation_offset_table.h"
#include "partition_alloc/tagging.h"
#include "partition_alloc/thread_isolation/thread_isolation.h"
#include "partition_alloc/use_death_tests.h"
#include "testing/gtest/include/gtest/gtest.h"
#if defined(__ARM_FEATURE_MEMORY_TAGGING)
#include <arm_acle.h>
#endif
#if PA_BUILDFLAG(IS_POSIX)
#if PA_BUILDFLAG(IS_LINUX)
#include <linux/mman.h>
#endif
#include <sys/mman.h>
#include <sys/resource.h>
#include <sys/time.h>
#endif
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) && PA_BUILDFLAG(IS_MAC)
#include <OpenCL/opencl.h>
#endif
#if PA_BUILDFLAG(IS_MAC)
#include "partition_alloc/partition_alloc_base/mac/mac_util.h"
#endif
#if PA_BUILDFLAG(ENABLE_PKEYS)
#include <sys/syscall.h>
#endif
#if PA_BUILDFLAG(IS_FUCHSIA)
#include <zircon/syscalls.h>
#elif PA_BUILDFLAG(IS_WIN)
#include <windows.h>
#elif PA_BUILDFLAG(IS_APPLE)
#include <mach/host_info.h>
#include <mach/mach.h>
#elif PA_BUILDFLAG(IS_POSIX)
#include <unistd.h>
#endif
#define PA_EXPECT_PTR_EQ(ptr1, ptr2) …
#define PA_EXPECT_PTR_NE(ptr1, ptr2) …
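// The macro bodies are elided above. Judging by the MTE tests below (where
// PA_EXPECT_PTR_EQ passes while EXPECT_NE on the same two pointers also
// passes), they presumably compare addresses with any MTE tag bits stripped,
// along the lines of this sketch (assumed, not the verbatim definition):
//
//   #define PA_EXPECT_PTR_EQ(ptr1, ptr2) \
//     EXPECT_EQ(UntagPtr(ptr1), UntagPtr(ptr2))
//   #define PA_EXPECT_PTR_NE(ptr1, ptr2) \
//     EXPECT_NE(UntagPtr(ptr1), UntagPtr(ptr2))
//
// (UntagPtr comes from partition_alloc/tagging.h, included above.)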
namespace {
uint64_t AmountOfPhysicalMemory() { … }
bool IsLargeMemoryDevice() { … }
bool SetAddressSpaceLimit() { … }
bool ClearAddressSpaceLimit() { … }
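// SetAddressSpaceLimit()/ClearAddressSpaceLimit() presumably cap and restore
// the process address space (e.g. via setrlimit(RLIMIT_AS) on POSIX, given
// the <sys/resource.h> include above) so the OOM tests can exhaust address
// space deterministically; the return value plausibly signals whether the
// limit could be applied on this platform.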
const size_t kTestSizes[] = …;
constexpr size_t kTestSizesCount = …;
template <
partition_alloc::AllocFlags alloc_flags,
partition_alloc::FreeFlags free_flags = partition_alloc::FreeFlags::kNone>
void AllocateRandomly(partition_alloc::PartitionRoot* root, size_t count) { … }
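// AllocateRandomly() presumably performs `count` allocations on `root` with
// `alloc_flags`, drawing each size at random from kTestSizes, and then frees
// them with `free_flags`; the tests below appear to use it to churn the heap.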
void HandleOOM(size_t unused_size) { … }
int g_dangling_raw_ptr_detected_count = …;
int g_dangling_raw_ptr_released_count = …;
class CountDanglingRawPtr { … };
}  // namespace
namespace partition_alloc::internal {
using BucketDistribution = …;
using SlotSpan = …;
const size_t kTestAllocSize = …;
constexpr size_t kPointerOffset = …;
#if !PA_BUILDFLAG(DCHECKS_ARE_ON)
constexpr size_t kExtraAllocSizeWithoutMetadata = 0ull;
#else
constexpr size_t kExtraAllocSizeWithoutMetadata = …;
#endif
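// With DCHECKs on, each slot carries extra metadata (e.g. the partition
// cookie), so a request of N bytes occupies more than N bytes of slot space.
// Tests below therefore request `size - ExtraAllocSize(allocator)` when they
// want an allocation that fills a slot of `size` bytes exactly.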
const char* type_name = …;
void SetDistributionForPartitionRoot(PartitionRoot* root,
BucketDistribution distribution) { … }
struct PartitionAllocTestParam { … };
const std::vector<PartitionAllocTestParam> GetPartitionAllocTestParams() { … }
class PartitionAllocTest
: public testing::TestWithParam<PartitionAllocTestParam> { … };
#if PA_USE_DEATH_TESTS()
class PartitionAllocDeathTest : public PartitionAllocTest { … };
INSTANTIATE_TEST_SUITE_P(…);
#endif
namespace {
void FreeFullSlotSpan(PartitionRoot* root, SlotSpanMetadata* slot_span) { … }
#if PA_BUILDFLAG(IS_LINUX) || PA_BUILDFLAG(IS_CHROMEOS)
bool CheckPageInCore(void* ptr, bool in_core) { … }
#define CHECK_PAGE_IN_CORE(ptr, in_core) …
#else
#define CHECK_PAGE_IN_CORE …
#endif
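// CheckPageInCore() presumably queries page residency via mincore() (hence
// the Linux/ChromeOS-only guard and the <sys/mman.h> include above).
// CHECK_PAGE_IN_CORE lets the purge/discard tests assert whether a page is
// still resident, and compiles to a no-op elsewhere.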
class MockPartitionStatsDumper : public PartitionStatsDumper { … };
#if PA_BUILDFLAG(IS_APPLE)
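// Apple-only helpers: decide whether `address` belongs to this root by
// walking its super-page / direct-map extent lists, apparently because pool
// membership can't be derived from the address alone on these platforms.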
bool IsNormalBucketsAllocatedByRoot(uintptr_t address, PartitionRoot* root) {
partition_alloc::internal::PartitionSuperPageExtentEntry<
partition_alloc::internal::MetadataKind::kReadOnly>* extent =
root->first_extent;
while (extent != nullptr) {
uintptr_t super_page =
partition_alloc::internal::SuperPagesBeginFromExtent(extent);
uintptr_t super_page_end =
partition_alloc::internal::SuperPagesEndFromExtent(extent);
if (super_page <= address && address < super_page_end) {
return true;
}
extent = extent->next;
}
return false;
}
bool IsDirectMapAllocatedByRoot(uintptr_t address, PartitionRoot* root) {
::partition_alloc::internal::ScopedGuard locker{
partition_alloc::internal::PartitionRootLock(root)};
partition_alloc::internal::PartitionDirectMapExtent<
partition_alloc::internal::MetadataKind::kReadOnly>* extent =
root->direct_map_list;
while (extent != nullptr) {
uintptr_t super_page =
reinterpret_cast<uintptr_t>(extent) & kSuperPageBaseMask;
uintptr_t super_page_end = super_page + extent->reservation_size;
if (super_page <= address && address < super_page_end) {
return true;
}
extent = extent->next_extent;
}
return false;
}
#endif
bool IsManagedByNormalBucketsForTesting(uintptr_t address,
[[maybe_unused]] PartitionRoot* root) { … }
bool IsManagedByDirectMapForTesting(uintptr_t address,
[[maybe_unused]] PartitionRoot* root) { … }
bool IsManagedByNormalBucketsOrDirectMapForTesting(
uintptr_t address,
[[maybe_unused]] PartitionRoot* root) { … }
}  // namespace
INSTANTIATE_TEST_SUITE_P(…);
TEST_P(PartitionAllocTest, Basic) { … }
TEST_P(PartitionAllocTest, MultiAlloc) { … }
TEST_P(PartitionAllocTest, MultiSlotSpans) { … }
TEST_P(PartitionAllocTest, SlotSpanTransitions) { … }
TEST_P(PartitionAllocTest, ExtraAllocSize) { … }
TEST_P(PartitionAllocTest, PreferSlotSpansWithProvisionedEntries) { … }
TEST_P(PartitionAllocTest, FreeSlotSpanListSlotSpanTransitions) { … }
TEST_P(PartitionAllocTest, MultiPageAllocs) { … }
TEST_P(PartitionAllocTest, Alloc) { … }
TEST_P(PartitionAllocTest, AllocSizes) { … }
TEST_P(PartitionAllocTest, AllocGetSizeAndStart) { … }
#if PA_BUILDFLAG(HAS_MEMORY_TAGGING)
TEST_P(PartitionAllocTest, MTEProtectsFreedPtr) {
  base::CPU cpu;
  if (!cpu.has_mte()) {
    // This test can't pass without MTE hardware support.
    GTEST_SKIP();
  }
  // An arbitrary small allocation, sized so the request fills its slot.
  size_t alloc_size = 64 - ExtraAllocSize(allocator);
  uint64_t* ptr1 =
      static_cast<uint64_t*>(allocator.root()->Alloc(alloc_size, type_name));
  EXPECT_TRUE(ptr1);
  allocator.root()->Free(ptr1);
  // Reallocating immediately reuses the same slot, but with a fresh MTE tag:
  // equal once tags are stripped, unequal as raw pointers.
  uint64_t* ptr2 =
      static_cast<uint64_t*>(allocator.root()->Alloc(alloc_size, type_name));
  PA_EXPECT_PTR_EQ(ptr1, ptr2);
  EXPECT_NE(ptr1, ptr2);
  allocator.root()->Free(ptr2);
  // Another free/alloc cycle: the new tag differs from both earlier ones.
  uint64_t* ptr3 =
      static_cast<uint64_t*>(allocator.root()->Alloc(alloc_size, type_name));
  PA_EXPECT_PTR_EQ(ptr2, ptr3);
  EXPECT_NE(ptr1, ptr3);
  EXPECT_NE(ptr2, ptr3);
  allocator.root()->Free(ptr3);
}
#endif
#if PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
TEST_P(PartitionAllocTest, IsPtrWithinSameAlloc) { … }
TEST_P(PartitionAllocTest, GetSlotStartMultiplePages) { … }
#endif
TEST_P(PartitionAllocTest, Realloc) { … }
TEST_P(PartitionAllocTest, ReallocDirectMapAligned) { … }
TEST_P(PartitionAllocTest, ReallocDirectMapAlignedRelocate) { … }
TEST_P(PartitionAllocTest, PartialPageFreelists) { … }
TEST_P(PartitionAllocTest, SlotSpanRefilling) { … }
TEST_P(PartitionAllocTest, PartialPages) { … }
TEST_P(PartitionAllocTest, MappingCollision) { … }
TEST_P(PartitionAllocTest, FreeCache) { … }
TEST_P(PartitionAllocTest, LostFreeSlotSpansBug) { … }
#if PA_USE_DEATH_TESTS()
#if !PA_BUILDFLAG(IS_WIN) && \
(!PA_BUILDFLAG(PA_ARCH_CPU_64_BITS) || \
(PA_BUILDFLAG(IS_POSIX) && \
!(PA_BUILDFLAG(IS_APPLE) || PA_BUILDFLAG(IS_ANDROID)))) || \
PA_BUILDFLAG(IS_FUCHSIA)
#define MAYBE_RepeatedAllocReturnNullDirect …
#define MAYBE_RepeatedReallocReturnNullDirect …
#else
#define MAYBE_RepeatedAllocReturnNullDirect …
#define MAYBE_RepeatedReallocReturnNullDirect …
#endif
TEST_P(PartitionAllocDeathTest, MAYBE_RepeatedAllocReturnNullDirect) { … }
TEST_P(PartitionAllocDeathTest, MAYBE_RepeatedReallocReturnNullDirect) { … }
TEST_P(PartitionAllocDeathTest, DISABLED_RepeatedAllocReturnNull) { … }
TEST_P(PartitionAllocDeathTest, DISABLED_RepeatedReallocReturnNull) { … }
#if PA_BUILDFLAG(HAS_MEMORY_TAGGING)
TEST_P(PartitionAllocDeathTest, MTEProtectsFreedPtr) {
  base::CPU cpu;
  if (!cpu.has_mte()) {
    // This test can't pass without MTE hardware support.
    GTEST_SKIP();
  }
  constexpr uint64_t kCookie = 0x1234567890ABCDEF;
  constexpr uint64_t kQuarantined = 0xEFEFEFEFEFEFEFEF;
  size_t alloc_size = 64 - ExtraAllocSize(allocator);
  uint64_t* ptr =
      static_cast<uint64_t*>(allocator.root()->Alloc(alloc_size, type_name));
  EXPECT_TRUE(ptr);
  // Writing through the pointer while it's live is fine...
  *ptr = kCookie;
  allocator.root()->Free(ptr);
  // ...but once freed, the slot is retagged, so a write through the stale
  // pointer must fault.
  EXPECT_EXIT(
      {
        *ptr = kQuarantined;
      },
      testing::KilledBySignal(SIGSEGV), "");
}
TEST_P(PartitionAllocDeathTest, SuspendTagCheckingScope) {
  base::CPU cpu;
  if (!cpu.has_mte()) {
    // This test can't pass without MTE hardware support.
    GTEST_SKIP();
  }
  constexpr uint64_t kQuarantined = 0xEFEFEFEFEFEFEFEF;
  size_t alloc_size = 64 - ExtraAllocSize(allocator);
  uint64_t* ptr =
      static_cast<uint64_t*>(allocator.root()->Alloc(alloc_size, type_name));
  EXPECT_TRUE(ptr);
  allocator.root()->Free(ptr);
  {
    // While tag checking is suspended, writing through the freed pointer must
    // not crash.
    partition_alloc::SuspendTagCheckingScope scope;
    *ptr = kQuarantined;
  }
  // Once the scope ends, tag checks are back on and the same write faults.
  EXPECT_EXIT(
      {
        *ptr = kQuarantined;
      },
      testing::KilledBySignal(SIGSEGV), "");
}
#endif
TEST_P(PartitionAllocDeathTest, LargeAllocs) { … }
#if !PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
(PA_BUILDFLAG(HAS_64_BIT_POINTERS) && \
PA_BUILDFLAG(PA_ARCH_CPU_LITTLE_ENDIAN))
TEST_P(PartitionAllocDeathTest, ImmediateDoubleFree) { … }
TEST_P(PartitionAllocDeathTest, ImmediateDoubleFree2ndSlot) { … }
TEST_P(PartitionAllocDeathTest, NumAllocatedSlotsDoubleFree) { … }
#endif
TEST_P(PartitionAllocDeathTest, DirectMapGuardPages) { … }
#if !PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
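// The death tests in this block rely on freelist corruption being caught via
// the freelist shadow entry. They are compiled out with BRP enabled,
// presumably because the in-slot metadata changes the freed-slot layout the
// tests poke at.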
TEST_P(PartitionAllocDeathTest, UseAfterFreeDetection) {
  base::CPU cpu;
  void* data = allocator.root()->Alloc(100);
  allocator.root()->Free(data);
  // The write itself doesn't crash, but it clobbers the freelist entry stored
  // in the freed slot, so the next allocation must detect the corruption.
  memset(data, 0x42, 100);
  EXPECT_DEATH(allocator.root()->Alloc(100), "");
}
TEST_P(PartitionAllocDeathTest, FreelistCorruption) {
  base::CPU cpu;
  const size_t alloc_size = 2 * sizeof(void*);
  // Build a plausible-looking fake freelist entry.
  void** fake_freelist_entry =
      static_cast<void**>(allocator.root()->Alloc(alloc_size));
  fake_freelist_entry[0] = nullptr;
  fake_freelist_entry[1] = nullptr;
  void** uaf_data = static_cast<void**>(allocator.root()->Alloc(alloc_size));
  allocator.root()->Free(uaf_data);
  // Use-after-free: point the freed slot's freelist entry at the fake one.
  // The next allocation must detect the tampering and crash.
  void* previous_uaf_data = uaf_data[0];
  uaf_data[0] = fake_freelist_entry;
  EXPECT_DEATH(allocator.root()->Alloc(alloc_size), "");
  // Restore the original entry so teardown doesn't trip over the corruption.
  uaf_data[0] = previous_uaf_data;
  allocator.root()->Free(fake_freelist_entry);
}
#if !PA_BUILDFLAG(DCHECKS_ARE_ON)
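// With DCHECKs on, the slot cookie already catches off-by-one writes, so the
// detection exercised below only matters in non-DCHECK builds.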
TEST_P(PartitionAllocDeathTest, OffByOneDetection) {
  base::CPU cpu;
  const size_t alloc_size = 2 * sizeof(void*);
  char* array = static_cast<char*>(allocator.root()->Alloc(alloc_size));
  if (cpu.has_mte()) {
    // With MTE, the out-of-bounds write faults immediately.
    EXPECT_DEATH(array[alloc_size] = 'A', "");
  } else {
    char previous_value = array[alloc_size];
    // volatile keeps the compiler from eliding the out-of-bounds write.
    *const_cast<volatile char*>(&array[alloc_size]) = 'A';
    // Without MTE, the damage is only noticed when the neighboring slot's
    // freelist entry is consumed by the next allocation.
    EXPECT_DEATH(allocator.root()->Alloc(alloc_size), "");
    // Restore integrity so teardown doesn't crash.
    array[alloc_size] = previous_value;
  }
}
TEST_P(PartitionAllocDeathTest, OffByOneDetectionWithRealisticData) {
  base::CPU cpu;
  const size_t alloc_size = 2 * sizeof(void*);
  void** array = static_cast<void**>(allocator.root()->Alloc(alloc_size));
  char valid;
  if (cpu.has_mte()) {
    // With MTE, the out-of-bounds write faults immediately.
    EXPECT_DEATH(array[2] = &valid, "");
  } else {
    void* previous_value = array[2];
    // An off-by-one write of a valid-looking pointer must still be caught.
    *const_cast<void* volatile*>(&array[2]) = &valid;
    EXPECT_DEATH(allocator.root()->Alloc(alloc_size), "");
    // Restore integrity so teardown doesn't crash.
    array[2] = previous_value;
  }
}
#endif  // !PA_BUILDFLAG(DCHECKS_ARE_ON)
#endif  // !PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && PA_CONFIG(HAS_FREELIST_SHADOW_ENTRY)
#endif  // PA_USE_DEATH_TESTS()
TEST_P(PartitionAllocTest, DumpMemoryStats) { … }
TEST_P(PartitionAllocTest, Purge) { … }
TEST_P(PartitionAllocTest, PreferActiveOverEmpty) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableSecondPage) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableFirstPage) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableNonPageSizedAlloc) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableNonPageSizedAllocOnSlotBoundary) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableManyPages) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableWithFreeListStraightening) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableDoubleTruncateFreeList) { … }
TEST_P(PartitionAllocTest, PurgeDiscardableSmallSlotsWithTruncate) { … }
TEST_P(PartitionAllocTest, ActiveListMaintenance) { … }
TEST_P(PartitionAllocTest, ReallocMovesCookie) { … }
TEST_P(PartitionAllocTest, SmallReallocDoesNotMoveTrailingCookie) { … }
TEST_P(PartitionAllocTest, ZeroFill) { … }
TEST_P(PartitionAllocTest, SchedulerLoopQuarantine) { … }
TEST_P(PartitionAllocTest, SchedulerLoopQuarantineDisabled) { … }
TEST_P(PartitionAllocTest, ZapOnFree) { … }
TEST_P(PartitionAllocTest, Bug_897585) { … }
TEST_P(PartitionAllocTest, OverrideHooks) { … }
TEST_P(PartitionAllocTest, Alignment) { … }
TEST_P(PartitionAllocTest, FundamentalAlignment) { … }
void VerifyAlignment(PartitionRoot* root, size_t size, size_t alignment) { … }
TEST_P(PartitionAllocTest, AlignedAllocations) { … }
TEST_P(PartitionAllocTest, OptimizedGetSlotNumber) { … }
TEST_P(PartitionAllocTest, GetUsableSizeNull) { … }
TEST_P(PartitionAllocTest, GetUsableSize) { … }
#if PA_CONFIG(MAYBE_ENABLE_MAC11_MALLOC_SIZE_HACK)
TEST_P(PartitionAllocTest, GetUsableSizeWithMac11MallocSizeHack) {
if (internal::base::mac::MacOSMajorVersion() != 11) {
GTEST_SKIP() << "Skipping because the test is for Mac11.";
}
allocator.root()->EnableMac11MallocSizeHackForTesting();
size_t size = internal::kMac11MallocSizeHackRequestedSize;
void* ptr = allocator.root()->Alloc(size);
size_t usable_size = PartitionRoot::GetUsableSize(ptr);
size_t usable_size_with_hack =
PartitionRoot::GetUsableSizeWithMac11MallocSizeHack(ptr);
EXPECT_EQ(usable_size,
allocator.root()->settings.mac11_malloc_size_hack_usable_size_);
EXPECT_EQ(usable_size_with_hack, size);
allocator.root()->Free(ptr);
}
#endif
TEST_P(PartitionAllocTest, Bookkeeping) { … }
#if PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
TEST_P(PartitionAllocTest, RefCountBasic) { … }
void PartitionAllocTest::RunRefCountReallocSubtest(size_t orig_size,
size_t new_size) { … }
TEST_P(PartitionAllocTest, RefCountRealloc) { … }
int g_unretained_dangling_raw_ptr_detected_count = …;
class UnretainedDanglingRawPtrTest : public PartitionAllocTest { … };
INSTANTIATE_TEST_SUITE_P(…);
TEST_P(UnretainedDanglingRawPtrTest, UnretainedDanglingPtrNoReport) { … }
TEST_P(UnretainedDanglingRawPtrTest, UnretainedDanglingPtrShouldReport) { … }
#if !PA_BUILDFLAG(HAS_64_BIT_POINTERS)
TEST_P(PartitionAllocTest, BackupRefPtrGuardRegion) {
  if (!UseBRPPool()) {
    return;
  }
  size_t alignment = internal::PageAllocationGranularity();
  // Build the address that a pointer into zapped (quarantined) memory would
  // carry: every byte set to kQuarantinedByte.
  uintptr_t requested_address;
  memset(&requested_address, internal::kQuarantinedByte,
         sizeof(requested_address));
  requested_address = RoundDownToPageAllocationGranularity(requested_address);
  // That address must never be handed out, so that dereferencing such a
  // pointer reliably faults.
  uintptr_t allocated_address =
      AllocPages(requested_address, alignment, alignment,
                 PageAccessibilityConfiguration(
                     PageAccessibilityConfiguration::kReadWrite),
                 PageTag::kPartitionAlloc);
  EXPECT_NE(allocated_address, requested_address);
  if (allocated_address) {
    FreePages(allocated_address, alignment);
  }
}
#endif  // !PA_BUILDFLAG(HAS_64_BIT_POINTERS)
#endif  // PA_BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
#if PA_BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
TEST_P(PartitionAllocTest, DanglingPtr) { … }
TEST_P(PartitionAllocTest, DanglingDanglingPtr) { … }
TEST_P(PartitionAllocTest, DanglingMixedReleaseRawPtrFirst) { … }
TEST_P(PartitionAllocTest, DanglingMixedReleaseDanglingPtrFirst) { … }
TEST_P(PartitionAllocTest, DanglingPtrUsedToAcquireNewRawPtr) { … }
TEST_P(PartitionAllocTest, DanglingPtrUsedToAcquireNewRawPtrVariant) { … }
TEST_P(PartitionAllocTest, RawPtrReleasedBeforeFree) { … }
TEST_P(PartitionAllocTest,
DanglingPtrReleaseBeforeSchedulerLoopQuarantineExit) { … }
TEST_P(PartitionAllocTest, DanglingPtrReleaseAfterSchedulerLoopQuarantineExit) { … }
#if PA_USE_DEATH_TESTS()
#if !defined(OFFICIAL_BUILD) || !defined(NDEBUG)
TEST_P(PartitionAllocDeathTest, ReleaseUnderflowRawPtr) { … }
TEST_P(PartitionAllocDeathTest, ReleaseUnderflowDanglingPtr) { … }
#endif
#endif
#endif  // PA_BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
TEST_P(PartitionAllocTest, ReservationOffset) { … }
TEST_P(PartitionAllocTest, GetReservationStart) { … }
#if PA_BUILDFLAG(IS_FUCHSIA)
TEST_P(PartitionAllocTest, DISABLED_CheckReservationType) { … }
#else
TEST_P(PartitionAllocTest, CheckReservationType) { … }
#endif
TEST_P(PartitionAllocTest, CrossPartitionRootRealloc) { … }
TEST_P(PartitionAllocTest, FastPathOrReturnNull) { … }
#if PA_USE_DEATH_TESTS()
#if !defined(OFFICIAL_BUILD) || !defined(NDEBUG)
TEST_P(PartitionAllocDeathTest, CheckTriggered) { … }
#endif
#endif
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) && PA_USE_DEATH_TESTS() && \
!PA_BUILDFLAG(IS_CASTOS)
namespace {
PA_NOINLINE void FreeForTest(void* data) { … }
class ThreadDelegateForPreforkHandler
: public base::PlatformThreadForTesting::Delegate { … };
}  // namespace
TEST_P(PartitionAllocTest, DISABLED_PreforkHandler) { … }
#endif
TEST_P(PartitionAllocTest, GetIndex) { … }
TEST_P(PartitionAllocTest, MallocFunctionAnnotations) { … }
TEST_P(PartitionAllocTest, ConfigurablePool) { … }
TEST_P(PartitionAllocTest, EmptySlotSpanSizeIsCapped) { … }
TEST_P(PartitionAllocTest, IncreaseEmptySlotSpanRingSize) { … }
#if PA_BUILDFLAG(IS_CAST_ANDROID) && PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
extern "C" {
void* __real_malloc(size_t);
}
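// When PartitionAlloc is wired in as malloc on Cast Android, malloc/free are
// presumably interposed via the linker's --wrap, so __real_malloc reaches the
// original allocator. The test below checks that a pointer obtained from the
// real malloc can still be handed to the (wrapped) free() without blowing up.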
TEST_P(PartitionAllocTest, HandleMixedAllocations) {
void* ptr = __real_malloc(12);
free(ptr);
}
#endif
TEST_P(PartitionAllocTest, SortFreelist) { … }
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) && PA_BUILDFLAG(IS_LINUX) && \
PA_BUILDFLAG(PA_ARCH_CPU_64_BITS)
TEST_P(PartitionAllocTest, CrashOnUnknownPointer) { … }
#endif
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) && PA_BUILDFLAG(IS_MAC)
class ScopedOpenCLNoOpKernel {
public:
ScopedOpenCLNoOpKernel()
: context_(nullptr),
program_(nullptr),
kernel_(nullptr),
success_(false) {}
ScopedOpenCLNoOpKernel(const ScopedOpenCLNoOpKernel&) = delete;
ScopedOpenCLNoOpKernel& operator=(const ScopedOpenCLNoOpKernel&) = delete;
~ScopedOpenCLNoOpKernel() {
if (kernel_) {
cl_int rv = clReleaseKernel(kernel_);
EXPECT_EQ(rv, CL_SUCCESS) << "clReleaseKernel";
}
if (program_) {
cl_int rv = clReleaseProgram(program_);
EXPECT_EQ(rv, CL_SUCCESS) << "clReleaseProgram";
}
if (context_) {
cl_int rv = clReleaseContext(context_);
EXPECT_EQ(rv, CL_SUCCESS) << "clReleaseContext";
}
}
void SetUp() {
cl_platform_id platform_id;
cl_int rv = clGetPlatformIDs(1, &platform_id, nullptr);
ASSERT_EQ(rv, CL_SUCCESS) << "clGetPlatformIDs";
cl_device_id device_id;
rv =
clGetDeviceIDs(platform_id, CL_DEVICE_TYPE_CPU, 1, &device_id, nullptr);
#if PA_BUILDFLAG(PA_ARCH_CPU_ARM64)
    // No CL_DEVICE_TYPE_CPU device is available on arm64, so bail out without
    // marking success.
    if (rv == CL_INVALID_VALUE) {
      return;
    }
#endif
ASSERT_EQ(rv, CL_SUCCESS) << "clGetDeviceIDs";
context_ = clCreateContext(nullptr, 1, &device_id, nullptr, nullptr, &rv);
ASSERT_EQ(rv, CL_SUCCESS) << "clCreateContext";
const char* sources[] = {
"__kernel void NoOp(void) {barrier(CLK_LOCAL_MEM_FENCE);}",
};
const size_t source_lengths[] = {
strlen(sources[0]),
};
static_assert(std::size(sources) == std::size(source_lengths),
"arrays must be parallel");
program_ = clCreateProgramWithSource(context_, std::size(sources), sources,
source_lengths, &rv);
ASSERT_EQ(rv, CL_SUCCESS) << "clCreateProgramWithSource";
rv = clBuildProgram(program_, 1, &device_id, "-cl-opt-disable", nullptr,
nullptr);
ASSERT_EQ(rv, CL_SUCCESS) << "clBuildProgram";
kernel_ = clCreateKernel(program_, "NoOp", &rv);
ASSERT_EQ(rv, CL_SUCCESS) << "clCreateKernel";
success_ = true;
}
bool success() const { return success_; }
private:
cl_context context_;
cl_program program_;
cl_kernel kernel_;
bool success_;
};
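// The test below exercises a system framework (OpenCL) that allocates through
// the default malloc zone, as a smoke test that PartitionAlloc-as-malloc
// coexists with system-library allocations. On arm64 no CL_DEVICE_TYPE_CPU
// device exists, hence the CL_INVALID_VALUE early-out in SetUp() and the
// relaxed assertion below.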
TEST_P(PartitionAllocTest, OpenCL) {
ScopedOpenCLNoOpKernel kernel;
kernel.SetUp();
#if !PA_BUILDFLAG(PA_ARCH_CPU_ARM64)
ASSERT_TRUE(kernel.success());
#endif
}
#endif  // PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) && PA_BUILDFLAG(IS_MAC)
TEST_P(PartitionAllocTest, SmallSlotSpanWaste) { … }
TEST_P(PartitionAllocTest, SortActiveSlotSpans) { … }
#if PA_BUILDFLAG(USE_FREESLOT_BITMAP)
TEST_P(PartitionAllocTest, FreeSlotBitmapMarkedAsUsedAfterAlloc) {
void* ptr = allocator.root()->Alloc(kTestAllocSize, type_name);
uintptr_t slot_start = allocator.root()->ObjectToSlotStart(ptr);
EXPECT_TRUE(FreeSlotBitmapSlotIsUsed(slot_start));
allocator.root()->Free(ptr);
}
TEST_P(PartitionAllocTest, FreeSlotBitmapMarkedAsFreeAfterFree) {
void* ptr = allocator.root()->Alloc(kTestAllocSize, type_name);
uintptr_t slot_start = allocator.root()->ObjectToSlotStart(ptr);
EXPECT_TRUE(FreeSlotBitmapSlotIsUsed(slot_start));
allocator.root()->Free(ptr);
EXPECT_FALSE(FreeSlotBitmapSlotIsUsed(slot_start));
}
TEST_P(PartitionAllocTest, FreeSlotBitmapResetAfterDecommit) {
  void* ptr1 = allocator.root()->Alloc(
      SystemPageSize() - ExtraAllocSize(allocator), type_name);
  uintptr_t slot_start = allocator.root()->ObjectToSlotStart(ptr1);
  allocator.root()->Free(ptr1);
  EXPECT_FALSE(FreeSlotBitmapSlotIsUsed(slot_start));
  // Decommitting the now-empty slot span rewrites its bitmap, so the slot
  // reads as "used" again.
  allocator.root()->PurgeMemory(PurgeFlags::kDecommitEmptySlotSpans);
  EXPECT_TRUE(FreeSlotBitmapSlotIsUsed(slot_start));
}
TEST_P(PartitionAllocTest, FreeSlotBitmapResetAfterPurge) {
  // ptr1 keeps the slot span non-empty, so purging discards only the unused
  // page backing ptr2.
  void* ptr1 = allocator.root()->Alloc(
      SystemPageSize() - ExtraAllocSize(allocator), type_name);
  char* ptr2 = static_cast<char*>(allocator.root()->Alloc(
      SystemPageSize() - ExtraAllocSize(allocator), type_name));
  uintptr_t slot_start = allocator.root()->ObjectToSlotStart(ptr2);
  allocator.root()->Free(ptr2);
  CHECK_PAGE_IN_CORE(ptr2 - kPointerOffset, true);
  EXPECT_FALSE(FreeSlotBitmapSlotIsUsed(slot_start));
  // Discarding the unused system page also rewrites the bitmap, so the slot
  // reads as "used" again.
  allocator.root()->PurgeMemory(PurgeFlags::kDiscardUnusedSystemPages);
  CHECK_PAGE_IN_CORE(ptr2 - kPointerOffset, false);
  EXPECT_TRUE(FreeSlotBitmapSlotIsUsed(slot_start));
  allocator.root()->Free(ptr1);
}
#endif  // PA_BUILDFLAG(USE_FREESLOT_BITMAP)
#if PA_BUILDFLAG(USE_LARGE_EMPTY_SLOT_SPAN_RING)
TEST_P(PartitionAllocTest, GlobalEmptySlotSpanRingIndexResets) {
  // Grow the empty slot span ring, then park the ring index at its last slot.
  allocator.root()->AdjustForForeground();
  allocator.root()->SetGlobalEmptySlotSpanRingIndexForTesting(
      internal::kMaxFreeableSpans - 1);
  // Shrink the ring again; the index must be reset rather than left pointing
  // past the smaller ring's end.
  allocator.root()->AdjustForBackground();
  void* ptr = allocator.root()->Alloc(kTestAllocSize, type_name);
  allocator.root()->Free(ptr);
  ClearEmptySlotSpanCache();
  EXPECT_EQ(
      0u, PA_TS_UNCHECKED_READ(allocator.root()->empty_slot_spans_dirty_bytes));
}
#endif
TEST_P(PartitionAllocTest, FastReclaim) { … }
TEST_P(PartitionAllocTest, FastReclaimEventuallyLooksAtAllBuckets) { … }
}  // namespace partition_alloc::internal
#endif  // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)