// chromium/v8/src/heap/memory-chunk.cc

// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/memory-chunk.h"

#include "src/common/code-memory-access-inl.h"
#include "src/heap/base-space.h"
#include "src/heap/large-page-metadata.h"
#include "src/heap/page-metadata.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/trusted-range.h"

namespace v8 {
namespace internal {

// Out-of-line definitions for the static constexpr flag masks declared in
// memory-chunk.h. Since C++17, constexpr static data members are implicitly
// inline, making these definitions redundant (but harmless); they exist for
// compatibility with pre-C++17 ODR-use rules.
// static
constexpr MemoryChunk::MainThreadFlags MemoryChunk::kAllFlagsMask;
// static
constexpr MemoryChunk::MainThreadFlags
    MemoryChunk::kPointersToHereAreInterestingMask;
// static
constexpr MemoryChunk::MainThreadFlags
    MemoryChunk::kPointersFromHereAreInterestingMask;
// static
constexpr MemoryChunk::MainThreadFlags MemoryChunk::kEvacuationCandidateMask;
// static
constexpr MemoryChunk::MainThreadFlags MemoryChunk::kIsInYoungGenerationMask;
// static
constexpr MemoryChunk::MainThreadFlags MemoryChunk::kIsLargePageMask;
// static
constexpr MemoryChunk::MainThreadFlags
    MemoryChunk::kSkipEvacuationSlotsRecordingMask;

// Constructs a chunk header from its |flags| and associated |metadata|.
// NOTE(review): the member-initializer list has been elided in this
// extraction — a bare ':' precedes the empty body, which does not compile.
// Restore the original initializers (flags/metadata members) from upstream.
MemoryChunk::MemoryChunk(MainThreadFlags flags, MemoryChunkMetadata* metadata)
    :{}

#ifdef V8_ENABLE_SANDBOX

// Sandbox-only global table of metadata pointers; SynchronizedLoad() below
// indexes it with metadata_index_, presumably computed by
// MetadataTableIndex() — confirm against the header.
// NOTE(review): the initializer has been elided in this extraction ("=;" is
// not valid C++) — restore the original brace initializer from upstream.
MemoryChunkMetadata* MemoryChunk::metadata_pointer_table_[] =;

// static
// Presumably resets |metadata|'s slot in metadata_pointer_table_ — the body
// has been elided in this extraction; see upstream for the implementation.
void MemoryChunk::ClearMetadataPointer(MemoryChunkMetadata* metadata) {}

// static
// Presumably maps |chunk_address| to its index in metadata_pointer_table_ —
// the body has been elided in this extraction; see upstream.
uint32_t MemoryChunk::MetadataTableIndex(Address chunk_address) {}

#endif

// Presumably publishes the freshly-initialized chunk state with a release
// fence (the fence SynchronizedLoad() pairs with under TSAN) — the body has
// been elided in this extraction; see upstream.
void MemoryChunk::InitializationMemoryFence() {}

#ifdef THREAD_SANITIZER

// Acquire-loads the chunk's metadata so TSAN observes the happens-before
// edge established at page initialization (TSAN does not process the raw
// memory fence emitted there — see InReadOnlySpace below).
void MemoryChunk::SynchronizedLoad() const {
#ifndef V8_ENABLE_SANDBOX
  // Non-sandbox build: the metadata pointer lives directly in the chunk;
  // acquire-load it to synchronize with the initializing store.
  MemoryChunkMetadata* metadata = reinterpret_cast<MemoryChunkMetadata*>(
      base::Acquire_Load(reinterpret_cast<base::AtomicWord*>(
          &(const_cast<MemoryChunk*>(this)->metadata_))));
#else
  // Sandbox build: the chunk stores only an index into the global metadata
  // pointer table, so both the index and the table slot need acquire loads.
  // The static_asserts guarantee the reinterpret_casts below read objects of
  // the exact size the atomic primitives expect.
  static_assert(sizeof(base::AtomicWord) == sizeof(metadata_pointer_table_[0]));
  static_assert(sizeof(base::Atomic32) == sizeof(metadata_index_));
  uint32_t metadata_index =
      base::Acquire_Load(reinterpret_cast<base::Atomic32*>(
          &(const_cast<MemoryChunk*>(this)->metadata_index_)));
  MemoryChunkMetadata* metadata = reinterpret_cast<MemoryChunkMetadata*>(
      base::Acquire_Load(reinterpret_cast<base::AtomicWord*>(
          &metadata_pointer_table_[metadata_index])));
#endif
  metadata->SynchronizedHeapLoad();
}

// TSAN build only: checks the READ_ONLY_HEAP flag after explicitly
// acquiring the chunk's metadata, because TSAN does not process the memory
// fence emitted after page initialization.
bool MemoryChunk::InReadOnlySpace() const {
  // Establish the happens-before edge TSAN would otherwise miss.
  SynchronizedLoad();
  const bool in_read_only_space = IsFlagSet(READ_ONLY_HEAP);
  return in_read_only_space;
}

#endif  // THREAD_SANITIZER

#ifdef DEBUG

// Presumably reports whether this chunk belongs to a trusted space — the
// body has been elided in this extraction; see upstream.
bool MemoryChunk::IsTrusted() const {}

// Presumably returns |addr|'s offset from the chunk base, asserting it is in
// range — the body has been elided in this extraction; see upstream.
size_t MemoryChunk::Offset(Address addr) const {}

// Like Offset(), but presumably tolerates addresses outside the chunk — the
// body has been elided in this extraction; see upstream.
size_t MemoryChunk::OffsetMaybeOutOfRange(Address addr) const {}

#endif  // DEBUG

// Presumably the slow path for setting |flag| (e.g. handling write-protected
// pages) — the body has been elided in this extraction; see upstream.
void MemoryChunk::SetFlagSlow(Flag flag) {}

// Presumably the slow path for clearing |flag|, mirroring SetFlagSlow — the
// body has been elided in this extraction; see upstream.
void MemoryChunk::ClearFlagSlow(Flag flag) {}

// static
// Presumably computes the flag set for an old-generation page given the
// current |marking_mode| and |space| — the body has been elided in this
// extraction; see upstream.
MemoryChunk::MainThreadFlags MemoryChunk::OldGenerationPageFlags(
    MarkingMode marking_mode, AllocationSpace space) {}

// static
// Presumably computes the flag set for a young-generation page given the
// current |marking_mode| — the body has been elided in this extraction; see
// upstream.
MemoryChunk::MainThreadFlags MemoryChunk::YoungGenerationPageFlags(
    MarkingMode marking_mode) {}

// Presumably applies OldGenerationPageFlags() to this chunk — the body has
// been elided in this extraction; see upstream.
void MemoryChunk::SetOldGenerationPageFlags(MarkingMode marking_mode,
                                            AllocationSpace space) {}

// Presumably applies YoungGenerationPageFlags() to this chunk — the body has
// been elided in this extraction; see upstream.
void MemoryChunk::SetYoungGenerationPageFlags(MarkingMode marking_mode) {}

#ifdef V8_ENABLE_SANDBOX
// Presumably a sandbox-hardened read-only-space check that does not trust
// in-chunk data — the body has been elided in this extraction; see upstream.
bool MemoryChunk::SandboxSafeInReadOnlySpace() const {}
#endif

}  // namespace internal
}  // namespace v8