diff --git a/common.gypi b/common.gypi
index ca1f853172063a..10b4ecfc52e2fb 100644
--- a/common.gypi
+++ b/common.gypi
@@ -39,7 +39,7 @@

     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.12',
+    'v8_embedder_string': '-node.13',

     ##### V8 defaults for Node.js #####
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index ba801345f160e2..acb9c089d6d2b8 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -4828,6 +4828,10 @@ enum class ArrayBufferCreationMode { kInternalized, kExternalized };
  * V8. Clients should always use standard C++ memory ownership types (i.e.
  * std::unique_ptr and std::shared_ptr) to manage lifetimes of backing stores
  * properly, since V8 internal objects may alias backing stores.
+ *
+ * This object does not keep the underlying |ArrayBuffer::Allocator| alive by
+ * default. Use Isolate::CreateParams::array_buffer_allocator_shared when
+ * creating the Isolate to make it hold a reference to the allocator itself.
  */
 class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
  public:
@@ -7837,6 +7841,7 @@ class V8_EXPORT Isolate {
           create_histogram_callback(nullptr),
           add_histogram_sample_callback(nullptr),
           array_buffer_allocator(nullptr),
+          array_buffer_allocator_shared(),
           external_references(nullptr),
           allow_atomics_wait(true),
           only_terminate_in_safe_scope(false) {}
@@ -7876,8 +7881,14 @@ class V8_EXPORT Isolate {
     /**
      * The ArrayBuffer::Allocator to use for allocating and freeing the backing
      * store of ArrayBuffers.
+     *
+     * If the shared_ptr version is used, the Isolate instance and every
+     * |BackingStore| allocated using this allocator hold a std::shared_ptr
+     * to the allocator, in order to facilitate lifetime
+     * management for the allocator instance.
      */
     ArrayBuffer::Allocator* array_buffer_allocator;
+    std::shared_ptr<ArrayBuffer::Allocator> array_buffer_allocator_shared;

     /**
      * Specifies an optional nullptr-terminated array of raw addresses in the
diff --git a/deps/v8/src/api/api.cc b/deps/v8/src/api/api.cc
index 870c643491c9e5..cb503015c11400 100644
--- a/deps/v8/src/api/api.cc
+++ b/deps/v8/src/api/api.cc
@@ -8149,8 +8149,15 @@ Isolate* Isolate::Allocate() {
 void Isolate::Initialize(Isolate* isolate,
                          const v8::Isolate::CreateParams& params) {
   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
-  CHECK_NOT_NULL(params.array_buffer_allocator);
-  i_isolate->set_array_buffer_allocator(params.array_buffer_allocator);
+  if (auto allocator = params.array_buffer_allocator_shared) {
+    CHECK(params.array_buffer_allocator == nullptr ||
+          params.array_buffer_allocator == allocator.get());
+    i_isolate->set_array_buffer_allocator(allocator.get());
+    i_isolate->set_array_buffer_allocator_shared(std::move(allocator));
+  } else {
+    CHECK_NOT_NULL(params.array_buffer_allocator);
+    i_isolate->set_array_buffer_allocator(params.array_buffer_allocator);
+  }
   if (params.snapshot_blob != nullptr) {
     i_isolate->set_snapshot_blob(params.snapshot_blob);
   } else {
diff --git a/deps/v8/src/execution/isolate.h b/deps/v8/src/execution/isolate.h
index 20aea6066c981e..ef16dfa5140c05 100644
--- a/deps/v8/src/execution/isolate.h
+++ b/deps/v8/src/execution/isolate.h
@@ -1347,6 +1347,15 @@ class Isolate final : private HiddenFactory {
     return array_buffer_allocator_;
   }

+  void set_array_buffer_allocator_shared(
+      std::shared_ptr<v8::ArrayBuffer::Allocator> allocator) {
+    array_buffer_allocator_shared_ = std::move(allocator);
+  }
+  std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared()
+      const {
+    return array_buffer_allocator_shared_;
+  }
+
   FutexWaitListNode* futex_wait_list_node() { return &futex_wait_list_node_; }

   CancelableTaskManager* cancelable_task_manager() {
@@ -1758,6 +1767,7 @@ class Isolate final : private HiddenFactory {
   uint32_t embedded_blob_size_ = 0;

   v8::ArrayBuffer::Allocator* array_buffer_allocator_ = nullptr;
+  std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared_;

   FutexWaitListNode futex_wait_list_node_;

diff --git a/deps/v8/src/objects/backing-store.cc b/deps/v8/src/objects/backing-store.cc
index ff18a231465862..7f6d2251a78a70 100644
--- a/deps/v8/src/objects/backing-store.cc
+++ b/deps/v8/src/objects/backing-store.cc
@@ -114,6 +114,11 @@ void BackingStore::Clear() {
   buffer_start_ = nullptr;
   byte_length_ = 0;
   has_guard_regions_ = false;
+  if (holds_shared_ptr_to_allocator_) {
+    type_specific_data_.v8_api_array_buffer_allocator_shared
+        .std::shared_ptr<v8::ArrayBuffer::Allocator>::~shared_ptr();
+    holds_shared_ptr_to_allocator_ = false;
+  }
   type_specific_data_.v8_api_array_buffer_allocator = nullptr;
 }

@@ -154,14 +159,14 @@ BackingStore::~BackingStore() {
     DCHECK(free_on_destruct_);
     TRACE_BS("BS:custome deleter bs=%p mem=%p (length=%zu, capacity=%zu)\n",
              this, buffer_start_, byte_length(), byte_capacity_);
-    type_specific_data_.deleter(buffer_start_, byte_length_, deleter_data_);
+    type_specific_data_.deleter.callback(buffer_start_, byte_length_,
+                                         type_specific_data_.deleter.data);
     Clear();
     return;
   }
   if (free_on_destruct_) {
     // JSArrayBuffer backing store. Deallocate through the embedder's allocator.
-    auto allocator = reinterpret_cast<v8::ArrayBuffer::Allocator*>(
-        get_v8_api_array_buffer_allocator());
+    auto allocator = get_v8_api_array_buffer_allocator();
     TRACE_BS("BS:free bs=%p mem=%p (length=%zu, capacity=%zu)\n", this,
              buffer_start_, byte_length(), byte_capacity_);
     allocator->Free(buffer_start_, byte_length_);
@@ -224,10 +229,22 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(

   TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), byte_length);
-  result->type_specific_data_.v8_api_array_buffer_allocator = allocator;
+  result->SetAllocatorFromIsolate(isolate);
   return std::unique_ptr<BackingStore>(result);
 }

+void BackingStore::SetAllocatorFromIsolate(Isolate* isolate) {
+  if (auto allocator_shared = isolate->array_buffer_allocator_shared()) {
+    holds_shared_ptr_to_allocator_ = true;
+    new (&type_specific_data_.v8_api_array_buffer_allocator_shared)
+        std::shared_ptr<v8::ArrayBuffer::Allocator>(
+            std::move(allocator_shared));
+  } else {
+    type_specific_data_.v8_api_array_buffer_allocator =
+        isolate->array_buffer_allocator();
+  }
+}
+
 // Allocate a backing store for a Wasm memory. Always use the page allocator
 // and add guard regions.
 std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
@@ -470,8 +487,7 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
       free_on_destruct,  // free_on_destruct
       false,             // has_guard_regions
       false);            // custom_deleter
-  result->type_specific_data_.v8_api_array_buffer_allocator =
-      isolate->array_buffer_allocator();
+  result->SetAllocatorFromIsolate(isolate);
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
   return std::unique_ptr<BackingStore>(result);
@@ -489,8 +505,7 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
       true,   // free_on_destruct
       false,  // has_guard_regions
       true);  // custom_deleter
-  result->type_specific_data_.deleter = deleter;
-  result->deleter_data_ = deleter_data;
+  result->type_specific_data_.deleter = {deleter, deleter_data};
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
   return std::unique_ptr<BackingStore>(result);
@@ -510,10 +525,12 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
   return std::unique_ptr<BackingStore>(result);
 }

-void* BackingStore::get_v8_api_array_buffer_allocator() {
+v8::ArrayBuffer::Allocator* BackingStore::get_v8_api_array_buffer_allocator() {
   CHECK(!is_wasm_memory_);
   auto array_buffer_allocator =
-      type_specific_data_.v8_api_array_buffer_allocator;
+      holds_shared_ptr_to_allocator_
+          ? type_specific_data_.v8_api_array_buffer_allocator_shared.get()
+          : type_specific_data_.v8_api_array_buffer_allocator;
   CHECK_NOT_NULL(array_buffer_allocator);
   return array_buffer_allocator;
 }
diff --git a/deps/v8/src/objects/backing-store.h b/deps/v8/src/objects/backing-store.h
index c212bebf485664..c9bbcf4ba0b2dc 100644
--- a/deps/v8/src/objects/backing-store.h
+++ b/deps/v8/src/objects/backing-store.h
@@ -128,24 +128,36 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
         byte_capacity_(byte_capacity),
         is_shared_(shared == SharedFlag::kShared),
         is_wasm_memory_(is_wasm_memory),
+        holds_shared_ptr_to_allocator_(false),
         free_on_destruct_(free_on_destruct),
         has_guard_regions_(has_guard_regions),
         globally_registered_(false),
-        custom_deleter_(custom_deleter) {
-    type_specific_data_.v8_api_array_buffer_allocator = nullptr;
-    deleter_data_ = nullptr;
-  }
+        custom_deleter_(custom_deleter) {}
+  void SetAllocatorFromIsolate(Isolate* isolate);

   void* buffer_start_ = nullptr;
   std::atomic<size_t> byte_length_{0};
   size_t byte_capacity_ = 0;
-  union {
+
+  struct DeleterInfo {
+    v8::BackingStoreDeleterCallback callback;
+    void* data;
+  };
+
+  union TypeSpecificData {
+    TypeSpecificData() : v8_api_array_buffer_allocator(nullptr) {}
+    ~TypeSpecificData() {}
+
     // If this backing store was allocated through the ArrayBufferAllocator API,
     // this is a direct pointer to the API object for freeing the backing
     // store.
-    // Note: we use {void*} here because we cannot forward-declare an inner
-    // class from the API.
-    void* v8_api_array_buffer_allocator;
+    v8::ArrayBuffer::Allocator* v8_api_array_buffer_allocator;
+
+    // Holds a shared_ptr to the ArrayBuffer::Allocator instance, if requested
+    // so by the embedder through setting
+    // Isolate::CreateParams::array_buffer_allocator_shared.
+    std::shared_ptr<v8::ArrayBuffer::Allocator>
+        v8_api_array_buffer_allocator_shared;

     // For shared Wasm memories, this is a list of all the attached memory
     // objects, which is needed to grow shared backing stores.
@@ -153,20 +165,19 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {

     // Custom deleter for the backing stores that wrap memory blocks that are
     // allocated with a custom allocator.
-    v8::BackingStoreDeleterCallback deleter;
+    DeleterInfo deleter;
   } type_specific_data_;

-  void* deleter_data_;
-
   bool is_shared_ : 1;
   bool is_wasm_memory_ : 1;
+  bool holds_shared_ptr_to_allocator_ : 1;
   bool free_on_destruct_ : 1;
   bool has_guard_regions_ : 1;
   bool globally_registered_ : 1;
   bool custom_deleter_ : 1;

   // Accessors for type-specific data.
-  void* get_v8_api_array_buffer_allocator();
+  v8::ArrayBuffer::Allocator* get_v8_api_array_buffer_allocator();
   SharedWasmMemoryData* get_shared_wasm_memory_data();

   void Clear();  // Internally clears fields after deallocation.
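
Aside (not part of the patch): the backing-store.h change above gives the type_specific_data_ union a
non-trivial member (a std::shared_ptr), which is why Clear() and SetAllocatorFromIsolate() construct and
destroy that member by hand and why the holds_shared_ptr_to_allocator_ flag exists. A minimal standalone
sketch of the same tagged-union pattern, with hypothetical names:

  // Sketch only: a shared_ptr member of a union must be created with placement
  // new and destroyed with an explicit destructor call; a flag records whether
  // it is the active member.
  #include <memory>
  #include <new>

  struct Holder {
    union Data {
      Data() : raw(nullptr) {}  // start with the trivially constructible member
      ~Data() {}                // destruction is handled manually by Holder
      void* raw;
      std::shared_ptr<int> shared;
    } data;
    bool holds_shared = false;

    void SetShared(std::shared_ptr<int> p) {
      new (&data.shared) std::shared_ptr<int>(std::move(p));  // placement new
      holds_shared = true;
    }

    ~Holder() {
      if (holds_shared) data.shared.~shared_ptr();  // explicit destructor call
    }
  };
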
diff --git a/deps/v8/test/cctest/test-api-array-buffer.cc b/deps/v8/test/cctest/test-api-array-buffer.cc
index 488dbde272fca0..9afdf047f07b73 100644
--- a/deps/v8/test/cctest/test-api-array-buffer.cc
+++ b/deps/v8/test/cctest/test-api-array-buffer.cc
@@ -608,3 +608,95 @@ TEST(SharedArrayBuffer_NewBackingStore_CustomDeleter) {
   }
   CHECK(backing_store_custom_called);
 }
+
+class DummyAllocator final : public v8::ArrayBuffer::Allocator {
+ public:
+  DummyAllocator() : allocator_(NewDefaultAllocator()) {}
+
+  ~DummyAllocator() override { CHECK_EQ(allocation_count(), 0); }
+
+  void* Allocate(size_t length) override {
+    allocation_count_++;
+    return allocator_->Allocate(length);
+  }
+  void* AllocateUninitialized(size_t length) override {
+    allocation_count_++;
+    return allocator_->AllocateUninitialized(length);
+  }
+  void Free(void* data, size_t length) override {
+    allocation_count_--;
+    allocator_->Free(data, length);
+  }
+
+  uint64_t allocation_count() const { return allocation_count_; }
+
+ private:
+  std::unique_ptr<v8::ArrayBuffer::Allocator> allocator_;
+  uint64_t allocation_count_ = 0;
+};
+
+TEST(BackingStore_HoldAllocatorAlive_UntilIsolateShutdown) {
+  std::shared_ptr<DummyAllocator> allocator =
+      std::make_shared<DummyAllocator>();
+  std::weak_ptr<DummyAllocator> allocator_weak(allocator);
+
+  v8::Isolate::CreateParams create_params;
+  create_params.array_buffer_allocator_shared = allocator;
+  v8::Isolate* isolate = v8::Isolate::New(create_params);
+  isolate->Enter();
+
+  allocator.reset();
+  create_params.array_buffer_allocator_shared.reset();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 0);
+
+  {
+    // Create an ArrayBuffer and do not garbage collect it. This should make
+    // the allocator be released automatically once the Isolate is disposed.
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::Scope context_scope(Context::New(isolate));
+    v8::ArrayBuffer::New(isolate, 8);
+
+    // This should be inside the HandleScope, so that we can be sure that
+    // the allocation is not garbage collected yet.
+    CHECK(!allocator_weak.expired());
+    CHECK_EQ(allocator_weak.lock()->allocation_count(), 1);
+  }
+
+  isolate->Exit();
+  isolate->Dispose();
+  CHECK(allocator_weak.expired());
+}
+
+TEST(BackingStore_HoldAllocatorAlive_AfterIsolateShutdown) {
+  std::shared_ptr<DummyAllocator> allocator =
+      std::make_shared<DummyAllocator>();
+  std::weak_ptr<DummyAllocator> allocator_weak(allocator);
+
+  v8::Isolate::CreateParams create_params;
+  create_params.array_buffer_allocator_shared = allocator;
+  v8::Isolate* isolate = v8::Isolate::New(create_params);
+  isolate->Enter();
+
+  allocator.reset();
+  create_params.array_buffer_allocator_shared.reset();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 0);
+
+  std::shared_ptr<v8::BackingStore> backing_store;
+  {
+    // Create an ArrayBuffer and do not garbage collect it. This should make
+    // the allocator be released automatically once the Isolate is disposed.
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::Scope context_scope(Context::New(isolate));
+    v8::Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 8);
+    backing_store = ab->GetBackingStore();
+  }
+
+  isolate->Exit();
+  isolate->Dispose();
+  CHECK(!allocator_weak.expired());
+  CHECK_EQ(allocator_weak.lock()->allocation_count(), 1);
+  backing_store.reset();
+  CHECK(allocator_weak.expired());
+}
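
For reference, a hedged sketch of how an embedder could opt into the new parameter added by this patch.
The wrapper function name and the include path are illustrative assumptions; NewDefaultAllocator() is the
existing v8::ArrayBuffer::Allocator factory.

  #include <memory>
  #include "v8.h"

  v8::Isolate* CreateIsolateWithSharedAllocator() {
    // The shared_ptr keeps the allocator alive for as long as the Isolate
    // (and any BackingStore allocated through it) still refers to it.
    std::shared_ptr<v8::ArrayBuffer::Allocator> allocator(
        v8::ArrayBuffer::Allocator::NewDefaultAllocator());
    v8::Isolate::CreateParams params;
    params.array_buffer_allocator_shared = allocator;
    return v8::Isolate::New(params);
  }

After creating the Isolate this way, the embedder may drop its own reference to the allocator: it is released
when the Isolate is disposed, or later if a surviving BackingStore still holds it, as the second cctest above
demonstrates.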