2 changes: 1 addition & 1 deletion common.gypi
@@ -38,7 +38,7 @@

# Reset this number to 0 on major V8 upgrades.
# Increment by one for each non-official patch applied to deps/v8.
'v8_embedder_string': '-node.9',
'v8_embedder_string': '-node.10',

##### V8 defaults for Node.js #####

8 changes: 8 additions & 0 deletions deps/v8/include/v8-statistics.h
@@ -154,6 +154,13 @@ class V8_EXPORT HeapStatistics {
size_t number_of_native_contexts() { return number_of_native_contexts_; }
size_t number_of_detached_contexts() { return number_of_detached_contexts_; }

/**
* Returns the total number of bytes allocated since the Isolate was created.
* This includes all heap objects allocated in any space (new, old, code,
* etc.).
*/
uint64_t total_allocated_bytes() { return total_allocated_bytes_; }

/**
* Returns a 0/1 boolean, which signifies whether V8 overwrites heap
* garbage with a bit pattern.
@@ -175,6 +182,7 @@ class V8_EXPORT HeapStatistics {
size_t number_of_detached_contexts_;
size_t total_global_handles_size_;
size_t used_global_handles_size_;
uint64_t total_allocated_bytes_;

friend class V8;
friend class Isolate;
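For context, a minimal embedder-side sketch of reading the new counter. Only `total_allocated_bytes()` comes from this change; the helper name and report format are hypothetical.

#include <cstdio>
#include "v8-isolate.h"
#include "v8-statistics.h"

// Hypothetical helper: reports the lifetime allocation counter of an isolate.
// The counter is monotonic, so it does not drop after garbage collection.
void ReportLifetimeAllocations(v8::Isolate* isolate) {
  v8::HeapStatistics stats;
  isolate->GetHeapStatistics(&stats);
  std::printf("total allocated: %llu bytes\n",
              static_cast<unsigned long long>(stats.total_allocated_bytes()));
}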
4 changes: 3 additions & 1 deletion deps/v8/src/api/api.cc
@@ -6557,7 +6557,8 @@ HeapStatistics::HeapStatistics()
peak_malloced_memory_(0),
does_zap_garbage_(false),
number_of_native_contexts_(0),
number_of_detached_contexts_(0) {}
number_of_detached_contexts_(0),
total_allocated_bytes_(0) {}

HeapSpaceStatistics::HeapSpaceStatistics()
: space_name_(nullptr),
@@ -10411,6 +10412,7 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
heap_statistics->number_of_native_contexts_ = heap->NumberOfNativeContexts();
heap_statistics->number_of_detached_contexts_ =
heap->NumberOfDetachedContexts();
heap_statistics->total_allocated_bytes_ = heap->GetTotalAllocatedBytes();
heap_statistics->does_zap_garbage_ = i::heap::ShouldZapGarbage();

#if V8_ENABLE_WEBASSEMBLY
31 changes: 24 additions & 7 deletions deps/v8/src/heap/heap-allocator.cc
@@ -85,25 +85,42 @@ AllocationResult HeapAllocator::AllocateRawLargeInternal(
int size_in_bytes, AllocationType allocation, AllocationOrigin origin,
AllocationAlignment alignment, AllocationHint hint) {
DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation));
AllocationResult allocation_result;
switch (allocation) {
case AllocationType::kYoung:
return new_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
allocation_result =
new_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
break;
case AllocationType::kOld:
return lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
allocation_result =
lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
break;
case AllocationType::kCode:
return code_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
allocation_result =
code_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
break;
case AllocationType::kSharedOld:
return shared_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
allocation_result =
shared_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
break;
case AllocationType::kTrusted:
return trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
allocation_result =
trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes, hint);
break;
case AllocationType::kSharedTrusted:
return shared_trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes,
hint);
allocation_result = shared_trusted_lo_space()->AllocateRaw(
local_heap_, size_in_bytes, hint);
break;
case AllocationType::kMap:
case AllocationType::kReadOnly:
case AllocationType::kSharedMap:
UNREACHABLE();
}
if (!allocation_result.IsFailure()) {
int allocated_size = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes);
heap_->AddTotalAllocatedBytes(allocated_size);
}
return allocation_result;
}

namespace {
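The accounting above first rounds the requested size up with `ALIGN_TO_ALLOCATION_ALIGNMENT`, so the counter reflects the aligned size reserved rather than the raw request (on some builds the macro is a no-op; the concrete alignment is build-dependent). A standalone sketch of that power-of-two round-up, with an assumed 8-byte alignment for illustration:

#include <cstddef>

// Round size up to a power-of-two alignment, the same shape as V8's
// ALIGN_TO_ALLOCATION_ALIGNMENT macro (alignment value assumed, not V8's).
constexpr size_t RoundUpToAlignment(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}

static_assert(RoundUpToAlignment(13, 8) == 16, "13 bytes account as 16");
static_assert(RoundUpToAlignment(16, 8) == 16, "aligned sizes are unchanged");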
4 changes: 4 additions & 0 deletions deps/v8/src/heap/heap.cc
@@ -7839,6 +7839,10 @@ int Heap::NextStackTraceId() {
return last_id;
}

uint64_t Heap::GetTotalAllocatedBytes() {
return total_allocated_bytes_.load(std::memory_order_relaxed);
}

EmbedderStackStateScope::EmbedderStackStateScope(
Heap* heap, EmbedderStackStateOrigin origin, StackState stack_state)
: heap_(heap),
7 changes: 7 additions & 0 deletions deps/v8/src/heap/heap.h
@@ -1703,6 +1703,11 @@ class Heap final {
bool ShouldUseBackgroundThreads() const;
bool ShouldUseIncrementalMarking() const;

void AddTotalAllocatedBytes(size_t size) {
total_allocated_bytes_.fetch_add(size, std::memory_order_relaxed);
}
uint64_t GetTotalAllocatedBytes();

HeapAllocator* allocator() { return heap_allocator_; }
const HeapAllocator* allocator() const { return heap_allocator_; }

@@ -2498,6 +2503,8 @@ class Heap final {
// no value was provided this will be 0.
uint64_t physical_memory_;

std::atomic<uint64_t> total_allocated_bytes_ = 0;

#if defined(V8_USE_PERFETTO)
perfetto::NamedTrack tracing_track_;
#endif
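A standalone sketch of the counter pattern used by `AddTotalAllocatedBytes`/`GetTotalAllocatedBytes`: `memory_order_relaxed` is sufficient because the statistic is a monotonic tally with no synchronization duty, so concurrent allocating threads only need the increment itself to be atomic.

#include <atomic>
#include <cstdint>
#include <thread>

std::atomic<uint64_t> total_bytes{0};

// Each allocating thread adds its bytes; relaxed ordering is enough because
// no other memory access is published through this counter.
void Account(uint64_t size) {
  total_bytes.fetch_add(size, std::memory_order_relaxed);
}

int main() {
  std::thread a([] { Account(24); });
  std::thread b([] { Account(40); });
  a.join();
  b.join();
  // After both threads join, a relaxed load observes the full sum.
  return total_bytes.load(std::memory_order_relaxed) == 64 ? 0 : 1;
}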
6 changes: 6 additions & 0 deletions deps/v8/src/heap/main-allocator.cc
@@ -297,6 +297,12 @@ void MainAllocator::ResetLab(Address start, Address end, Address extended_end) {
MemoryChunkMetadata::UpdateHighWaterMark(top());
}

// This slightly overestimates the total allocated bytes, since the LAB has
// not been fully used yet. However, the unused remainder is small compared
// to the LAB itself, so the error seems tolerable.
if (local_heap_) {
local_heap_->heap()->AddTotalAllocatedBytes(end - start);
}
allocation_info().Reset(start, end);
extended_limit_ = extended_end;

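To make the overestimate above concrete, here is a worked sketch with assumed numbers (real LAB sizes vary): `ResetLab` accounts the whole span `end - start` when the LAB is handed out, so the counter can run ahead of actual object bytes by at most the unused tail of one LAB per allocator.

#include <cstddef>

// Hypothetical numbers for illustration only.
constexpr size_t kLabBytes = 32 * 1024;   // span accounted when the LAB is set up
constexpr size_t kUsedBytes = 30 * 1024;  // bytes later carved into objects

// The overestimate is the unused tail, bounded by a single LAB.
static_assert(kLabBytes - kUsedBytes == 2 * 1024,
              "counter runs at most one unused LAB tail ahead");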
146 changes: 146 additions & 0 deletions deps/v8/test/cctest/test-api.cc
@@ -17700,6 +17700,152 @@ TEST(GetHeapSpaceStatistics) {
CHECK_EQ(total_physical_size, heap_statistics.total_physical_size());
}

UNINITIALIZED_TEST(GetHeapTotalAllocatedBytes) {
// This test is incompatible with concurrent allocation, which may occur
// while collecting the statistics and break the final `CHECK_EQ`s.
if (i::v8_flags.stress_concurrent_allocation) return;

v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);

const uint32_t number_of_elements = 1;
const uint32_t allocation_size = i::FixedArray::SizeFor(number_of_elements);
const uint32_t trusted_allocation_size =
i::TrustedFixedArray::SizeFor(number_of_elements);
const uint32_t lo_number_of_elements = 256 * 1024;
const uint32_t lo_allocation_size =
i::FixedArray::SizeFor(lo_number_of_elements);
const uint32_t trusted_lo_allocation_size =
i::TrustedFixedArray::SizeFor(lo_number_of_elements);
const uint32_t expected_allocation_size =
allocation_size * 2 + lo_allocation_size * 2 + trusted_allocation_size +
trusted_lo_allocation_size;

{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
LocalContext env(isolate);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);

v8::HeapStatistics heap_stats_before;
isolate->GetHeapStatistics(&heap_stats_before);
uint64_t initial_allocated = heap_stats_before.total_allocated_bytes();

i::MaybeHandle<i::FixedArray> young_alloc =
i_isolate->factory()->TryNewFixedArray(number_of_elements,
i::AllocationType::kYoung);
USE(young_alloc);
i::MaybeHandle<i::FixedArray> old_alloc =
i_isolate->factory()->TryNewFixedArray(number_of_elements,
i::AllocationType::kOld);
USE(old_alloc);
i::Handle<i::TrustedFixedArray> trusted_alloc =
i_isolate->factory()->NewTrustedFixedArray(number_of_elements,
i::AllocationType::kTrusted);
USE(trusted_alloc);
i::MaybeHandle<i::FixedArray> old_lo_alloc =
i_isolate->factory()->TryNewFixedArray(lo_number_of_elements,
i::AllocationType::kOld);
USE(old_lo_alloc);

{
v8::HandleScope inner_handle_scope(isolate);
auto young_lo_alloc = i_isolate->factory()->TryNewFixedArray(
lo_number_of_elements, i::AllocationType::kYoung);
USE(young_lo_alloc);
}

auto trusted_lo_alloc = i_isolate->factory()->NewTrustedFixedArray(
lo_number_of_elements, i::AllocationType::kTrusted);
USE(trusted_lo_alloc);

v8::HeapStatistics heap_stats_after;
isolate->GetHeapStatistics(&heap_stats_after);
uint64_t final_allocated = heap_stats_after.total_allocated_bytes();

CHECK_GT(final_allocated, initial_allocated);
uint64_t allocated_diff = final_allocated - initial_allocated;
CHECK_GE(allocated_diff, expected_allocation_size);

// This both tests that counting happens when a LAB is freed and validates
// that there is no double counting for evacuated/promoted objects.
v8::internal::heap::InvokeAtomicMajorGC(i_isolate->heap());

v8::HeapStatistics heap_stats_after_gc;
isolate->GetHeapStatistics(&heap_stats_after_gc);
uint64_t total_allocation_after_gc =
heap_stats_after_gc.total_allocated_bytes();

CHECK_EQ(total_allocation_after_gc, final_allocated);
}

isolate->Dispose();
}
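
For a sense of the magnitudes checked above, `FixedArray::SizeFor(n)` is a header (map plus length) followed by `n` tagged slots. A worked sketch, assuming an 8-byte tagged size, i.e. no pointer compression (the real constants depend on the build configuration):

#include <cstddef>

// Assumed layout constants for a 64-bit build without pointer compression.
constexpr size_t kTaggedSize = 8;
constexpr size_t kHeaderSize = 2 * kTaggedSize;  // map word + length word

constexpr size_t SizeFor(size_t n) { return kHeaderSize + n * kTaggedSize; }

static_assert(SizeFor(1) == 24, "small array: header plus one slot");
static_assert(SizeFor(256 * 1024) == 2097168, "large array: roughly 2 MiB");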

#if V8_CAN_CREATE_SHARED_HEAP_BOOL

UNINITIALIZED_TEST(GetHeapTotalAllocatedBytesSharedSpaces) {
// This test is incompatible with concurrent allocation, which may occur
// while collecting the statistics and break the final checks.
if (i::v8_flags.stress_concurrent_allocation) return;
if (COMPRESS_POINTERS_IN_MULTIPLE_CAGES_BOOL) return;

i::v8_flags.shared_heap = true;
i::FlagList::EnforceFlagImplications();

v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);

{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
LocalContext env(isolate);

v8::HeapStatistics heap_stats_before;
isolate->GetHeapStatistics(&heap_stats_before);
uint64_t initial_allocated = heap_stats_before.total_allocated_bytes();

i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);

const uint32_t number_of_elements = 1;
const uint32_t allocation_size = i::FixedArray::SizeFor(number_of_elements);
const uint32_t trusted_allocation_size =
i::TrustedFixedArray::SizeFor(number_of_elements);
const uint32_t lo_number_of_elements = 256 * 1024;
const uint32_t lo_allocation_size =
i::FixedArray::SizeFor(lo_number_of_elements);
const uint32_t expected_allocation_size =
allocation_size + trusted_allocation_size + lo_allocation_size;

i::MaybeHandle<i::FixedArray> shared_alloc =
i_isolate->factory()->TryNewFixedArray(number_of_elements,
i::AllocationType::kSharedOld);
USE(shared_alloc);
i::Handle<i::TrustedFixedArray> shared_trusted_alloc =
i_isolate->factory()->NewTrustedFixedArray(
number_of_elements, i::AllocationType::kSharedTrusted);
USE(shared_trusted_alloc);
i::MaybeHandle<i::FixedArray> shared_lo_alloc =
i_isolate->factory()->TryNewFixedArray(lo_number_of_elements,
i::AllocationType::kSharedOld);
USE(shared_lo_alloc);

v8::HeapStatistics heap_stats_after;
isolate->GetHeapStatistics(&heap_stats_after);
uint64_t final_allocated = heap_stats_after.total_allocated_bytes();

CHECK_GT(final_allocated, initial_allocated);
uint64_t allocated_diff = final_allocated - initial_allocated;
CHECK_GE(allocated_diff, expected_allocation_size);
}

isolate->Dispose();
}

#endif // V8_CAN_CREATE_SHARED_HEAP_BOOL

TEST(NumberOfNativeContexts) {
static const size_t kNumTestContexts = 10;
i::Isolate* isolate = CcTest::i_isolate();