Diffstat (limited to 'deps/v8/src/handles')
-rw-r--r--  deps/v8/src/handles/global-handles.cc    | 99
-rw-r--r--  deps/v8/src/handles/global-handles.h     |  4
-rw-r--r--  deps/v8/src/handles/handles-inl.h        |  1
-rw-r--r--  deps/v8/src/handles/handles.cc           | 48
-rw-r--r--  deps/v8/src/handles/handles.h            | 51
-rw-r--r--  deps/v8/src/handles/local-handles-inl.h  | 11
6 files changed, 156 insertions(+), 58 deletions(-)
diff --git a/deps/v8/src/handles/global-handles.cc b/deps/v8/src/handles/global-handles.cc
index a5cc8672b6..fb4a2b4bea 100644
--- a/deps/v8/src/handles/global-handles.cc
+++ b/deps/v8/src/handles/global-handles.cc
@@ -12,7 +12,9 @@
#include "src/api/api-inl.h"
#include "src/base/compiler-specific.h"
#include "src/base/sanitizer/asan.h"
+#include "src/common/allow-deprecated.h"
#include "src/execution/vm-state-inl.h"
+#include "src/heap/base/stack.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
@@ -385,7 +387,7 @@ namespace {
void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
int field_count = jsobject.GetEmbedderFieldCount();
- Isolate* isolate = GetIsolateForHeapSandbox(jsobject);
+ Isolate* isolate = GetIsolateForSandbox(jsobject);
for (int i = 0; i < len; ++i) {
if (field_count == i) break;
void* pointer;
@@ -515,9 +517,11 @@ class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> {
case v8::WeakCallbackType::kInternalFields:
set_weakness_type(PHANTOM_WEAK_2_EMBEDDER_FIELDS);
break;
+ START_ALLOW_USE_DEPRECATED()
case v8::WeakCallbackType::kFinalizer:
set_weakness_type(FINALIZER_WEAK);
break;
+ END_ALLOW_USE_DEPRECATED()
}
set_parameter(parameter);
weak_callback_ = phantom_callback;
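
For context: START_ALLOW_USE_DEPRECATED()/END_ALLOW_USE_DEPRECATED() come from the newly included src/common/allow-deprecated.h and silence deprecation warnings around the deprecated v8::WeakCallbackType::kFinalizer case. A minimal sketch of how such a macro pair is commonly defined (GCC/Clang form shown; an illustration, not the header's exact contents):

#define START_ALLOW_USE_DEPRECATED()                                \
  _Pragma("GCC diagnostic push")                                    \
  _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#define END_ALLOW_USE_DEPRECATED() _Pragma("GCC diagnostic pop")
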
@@ -667,6 +671,8 @@ class GlobalHandles::TracedNode final
bool is_on_stack() const { return IsOnStack::decode(flags_); }
void set_is_on_stack(bool v) { flags_ = IsOnStack::update(flags_, v); }
+ void clear_object() { object_ = kNullAddress; }
+
void SetFinalizationCallback(void* parameter,
WeakCallbackInfo<void>::Callback callback) {
set_parameter(parameter);
@@ -697,7 +703,11 @@ class GlobalHandles::TracedNode final
void ResetPhantomHandle(HandleHolder handle_holder) {
DCHECK(IsInUse());
- if (handle_holder == HandleHolder::kLive) {
+ // Even if the handle holder should be alive, the back reference may have
+ // been cleared, which prevents the handle from being reclaimed at this
+ // point. This can happen for explicitly reset handles during incremental
+ // marking that then cannot be reclaimed during Scavenge.
+ if (handle_holder == HandleHolder::kLive && data_.parameter) {
Address** handle = reinterpret_cast<Address**>(data_.parameter);
*handle = nullptr;
}
@@ -753,7 +763,7 @@ class GlobalHandles::OnStackTracedNodeSpace final {
void SetStackStart(void* stack_start) {
CHECK(on_stack_nodes_.empty());
- stack_start_ = base::Stack::GetRealStackAddressForSlot(stack_start);
+ stack_.SetStackStart(base::Stack::GetRealStackAddressForSlot(stack_start));
}
V8_INLINE bool IsOnStack(uintptr_t slot) const;
@@ -789,28 +799,17 @@ class GlobalHandles::OnStackTracedNodeSpace final {
std::map<uintptr_t, NodeEntry> on_stack_nodes_;
#endif // !V8_USE_ADDRESS_SANITIZER
- uintptr_t stack_start_ = 0;
+ ::heap::base::Stack stack_;
GlobalHandles* global_handles_ = nullptr;
size_t acquire_count_ = 0;
};
bool GlobalHandles::OnStackTracedNodeSpace::IsOnStack(uintptr_t slot) const {
-#ifdef V8_USE_ADDRESS_SANITIZER
- if (__asan_addr_is_in_fake_stack(__asan_get_current_fake_stack(),
- reinterpret_cast<void*>(slot), nullptr,
- nullptr)) {
- return true;
- }
-#endif // V8_USE_ADDRESS_SANITIZER
-#if defined(__has_feature)
-#if __has_feature(safe_stack)
- if (reinterpret_cast<uintptr_t>(__builtin___get_unsafe_stack_top()) >= slot &&
- slot > reinterpret_cast<uintptr_t>(__builtin___get_unsafe_stack_ptr())) {
- return true;
- }
-#endif // __has_feature(safe_stack)
-#endif // defined(__has_feature)
- return stack_start_ >= slot && slot > base::Stack::GetCurrentStackPosition();
+ // By the time this function is called, the stack start may not be set (i.e.
+ // SetStackStart() was not called). In that case, assume the slot is not on
+ // stack.
+ if (!stack_.stack_start()) return false;
+ return stack_.IsOnStack(reinterpret_cast<void*>(slot));
}
void GlobalHandles::OnStackTracedNodeSpace::NotifyEmptyEmbedderStack() {
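
The ASAN fake-stack and SafeStack probing deleted above now lives behind ::heap::base::Stack::IsOnStack(). Stripped of those special cases, the core of the check is the plain range test from the removed last line; a self-contained sketch, assuming a downward-growing stack (names are illustrative):

#include <cstdint>

bool IsOnStackSketch(uintptr_t stack_start, void* slot_ptr) {
  // Approximate the current stack pointer with the address of a local
  // variable, mirroring GetCurrentStackPointerForLocalVariables().
  int local_marker;
  const uintptr_t sp = reinterpret_cast<uintptr_t>(&local_marker);
  const uintptr_t slot = reinterpret_cast<uintptr_t>(slot_ptr);
  // A live slot lies between the current stack pointer (exclusive) and
  // the recorded stack start (inclusive).
  return stack_start >= slot && slot > sp;
}
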
@@ -877,12 +876,27 @@ GlobalHandles::TracedNode* GlobalHandles::OnStackTracedNodeSpace::Acquire(
void GlobalHandles::OnStackTracedNodeSpace::CleanupBelowCurrentStackPosition() {
if (on_stack_nodes_.empty()) return;
- const auto it =
- on_stack_nodes_.upper_bound(base::Stack::GetCurrentStackPosition());
+ const uintptr_t stack_ptr = reinterpret_cast<uintptr_t>(
+ ::heap::base::Stack::GetCurrentStackPointerForLocalVariables());
+ const auto it = on_stack_nodes_.upper_bound(stack_ptr);
on_stack_nodes_.erase(on_stack_nodes_.begin(), it);
}
// static
+void GlobalHandles::EnableMarkingBarrier(Isolate* isolate) {
+ auto* global_handles = isolate->global_handles();
+ DCHECK(!global_handles->is_marking_);
+ global_handles->is_marking_ = true;
+}
+
+// static
+void GlobalHandles::DisableMarkingBarrier(Isolate* isolate) {
+ auto* global_handles = isolate->global_handles();
+ DCHECK(global_handles->is_marking_);
+ global_handles->is_marking_ = false;
+}
+
+// static
void GlobalHandles::TracedNode::Verify(GlobalHandles* global_handles,
const Address* const* slot) {
#ifdef DEBUG
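
Enable/DisableMarkingBarrier expose a toggle for the new is_marking_ flag that DestroyTraced() consults below. A hypothetical call-site sketch (assumption: the real callers live in the incremental-marking code under src/heap/; these function names are illustrative):

void OnIncrementalMarkingStartSketch(v8::internal::Isolate* isolate) {
  // From here on DestroyTraced() defers traced-node reclamation.
  v8::internal::GlobalHandles::EnableMarkingBarrier(isolate);
}

void OnMarkingFinishedSketch(v8::internal::Isolate* isolate) {
  // Eager reclamation in DestroyTraced() is allowed again.
  v8::internal::GlobalHandles::DisableMarkingBarrier(isolate);
}
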
@@ -1160,14 +1174,45 @@ void GlobalHandles::Destroy(Address* location) {
}
}
+// static
void GlobalHandles::DestroyTraced(Address* location) {
if (location != nullptr) {
TracedNode* node = TracedNode::FromLocation(location);
if (node->is_on_stack()) {
node->Release(nullptr);
- } else {
+ return;
+ }
+ DCHECK(!node->is_on_stack());
+
+ auto* global_handles = GlobalHandles::From(node);
+ // When marking is off the handle may be freed immediately. Note that this
+ // also includes the case of invoking the first pass callbacks during the
+ // atomic pause, which requires releasing a node fully.
+ if (!global_handles->is_marking_) {
NodeSpace<TracedNode>::Release(node);
+ return;
}
+
+ // Incremental marking is on. This also covers the scavenge case, which
+ // prohibits eagerly reclaiming nodes when marking is on during a scavenge.
+ //
+ // On-heap traced nodes are released in the atomic pause in
+ // `IterateWeakRootsForPhantomHandles()` when they are discovered as not
+ // marked.
+ //
+ // Eagerly clear out the object here to avoid needlessly marking it from
+ // this point on. Also clear out callback and backreference for the version
+ // with callbacks to avoid calling into possibly dead memory later.
+ //
+ // In the case this happens during incremental marking, the node may
+ // still be spuriously marked as live and is then only reclaimed on the
+ // next cycle.
+ node->clear_object();
+ node->set_parameter(nullptr);
+ node->SetFinalizationCallback(nullptr, nullptr);
+ // The destructor setting is left untouched to avoid casting a
+ // v8::TracedGlobal to a v8::TracedReference for the EmbedderRootsHandler,
+ // which would be UB.
}
}
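
For embedders, the observable change is what an explicit reset of a traced handle does while marking runs: the node is cleared but not released, so the backing node is reclaimed only in a later cycle. A sketch against the public API (assuming headers of this V8 era):

#include "v8.h"

void ResetSketch(v8::Isolate* isolate, v8::Local<v8::Object> object) {
  v8::TracedReference<v8::Object> ref(isolate, object);
  // Lands in GlobalHandles::DestroyTraced(): with marking off the node is
  // released immediately; with marking on it is merely cleared and only
  // reclaimed during a later cycle.
  ref.Reset();
}
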
@@ -1291,8 +1336,10 @@ void GlobalHandles::IdentifyWeakUnmodifiedObjects(
if (is_unmodified(node->location())) {
v8::Value* value = ToApi<v8::Value>(node->handle());
if (node->has_destructor()) {
+ START_ALLOW_USE_DEPRECATED()
node->set_root(handler->IsRoot(
*reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value)));
+ END_ALLOW_USE_DEPRECATED()
} else {
node->set_root(handler->IsRoot(
*reinterpret_cast<v8::TracedReference<v8::Value>*>(&value)));
@@ -1385,7 +1432,9 @@ void GlobalHandles::IterateYoungWeakObjectsForPhantomHandles(
v8::Value* value = ToApi<v8::Value>(node->handle());
handler->ResetRoot(
*reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
- DCHECK(!node->IsInUse());
+ // We cannot check whether a node is in use here as the reset behavior
+ // depends on whether incremental marking is running when reclaiming
+ // young objects.
}
++number_of_phantom_handle_resets_;
@@ -1676,8 +1725,10 @@ void GlobalHandles::IterateTracedNodes(
if (node->IsInUse()) {
v8::Value* value = ToApi<v8::Value>(node->handle());
if (node->has_destructor()) {
+ START_ALLOW_USE_DEPRECATED()
visitor->VisitTracedGlobalHandle(
*reinterpret_cast<v8::TracedGlobal<v8::Value>*>(&value));
+ END_ALLOW_USE_DEPRECATED()
} else {
visitor->VisitTracedReference(
*reinterpret_cast<v8::TracedReference<v8::Value>*>(&value));
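
IterateTracedNodes() dispatches to the embedder's handle visitor, keeping the deprecated v8::TracedGlobal branch behind the allow-deprecated macros. A minimal visitor sketch (assuming the EmbedderHeapTracer::TracedGlobalHandleVisitor API of this V8 era):

#include "v8.h"

class CountingVisitor final
    : public v8::EmbedderHeapTracer::TracedGlobalHandleVisitor {
 public:
  void VisitTracedReference(
      const v8::TracedReference<v8::Value>& handle) final {
    ++traced_references_;
  }
  // Deprecated handle flavor, still visited for backwards compatibility.
  void VisitTracedGlobalHandle(
      const v8::TracedGlobal<v8::Value>& handle) final {
    ++traced_globals_;
  }

 private:
  size_t traced_references_ = 0;
  size_t traced_globals_ = 0;
};
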
diff --git a/deps/v8/src/handles/global-handles.h b/deps/v8/src/handles/global-handles.h
index 86b276c2df..058af91069 100644
--- a/deps/v8/src/handles/global-handles.h
+++ b/deps/v8/src/handles/global-handles.h
@@ -45,6 +45,9 @@ enum WeaknessType {
// callbacks and finalizers attached to them.
class V8_EXPORT_PRIVATE GlobalHandles final {
public:
+ static void EnableMarkingBarrier(Isolate*);
+ static void DisableMarkingBarrier(Isolate*);
+
GlobalHandles(const GlobalHandles&) = delete;
GlobalHandles& operator=(const GlobalHandles&) = delete;
@@ -236,6 +239,7 @@ class V8_EXPORT_PRIVATE GlobalHandles final {
Node* node);
Isolate* const isolate_;
+ bool is_marking_ = false;
std::unique_ptr<NodeSpace<Node>> regular_nodes_;
// Contains all nodes holding young objects. Note: when the list
diff --git a/deps/v8/src/handles/handles-inl.h b/deps/v8/src/handles/handles-inl.h
index c0dab51de8..43c2ef807e 100644
--- a/deps/v8/src/handles/handles-inl.h
+++ b/deps/v8/src/handles/handles-inl.h
@@ -178,6 +178,7 @@ Address* HandleScope::CreateHandle(Isolate* isolate, Address value) {
Address* HandleScope::GetHandle(Isolate* isolate, Address value) {
DCHECK(AllowHandleAllocation::IsAllowed());
+ DCHECK(isolate->main_thread_local_heap()->IsRunning());
DCHECK_WITH_MSG(isolate->thread_id() == ThreadId::Current(),
"main-thread handle can only be created on the main thread.");
HandleScopeData* data = isolate->handle_scope_data();
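
The new DCHECK asserts that the main thread's LocalHeap is in the Running state, i.e. not parked at a safepoint, whenever a main-thread handle is created. A sketch of the invariant (assumption: ParkedScope from src/heap/parked-scope.h is what marks a thread as parked):

void InvariantSketch(v8::internal::LocalHeap* local_heap) {
  {
    // While parked, a GC may be in progress concurrently; creating a
    // handle here would trip the new DCHECK.
    v8::internal::ParkedScope parked(local_heap);
    // ... blocking work only, no handle allocation ...
  }
  // Back in the Running state: handle creation is safe again.
}
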
diff --git a/deps/v8/src/handles/handles.cc b/deps/v8/src/handles/handles.cc
index 68d50c7ab3..8fdf858c50 100644
--- a/deps/v8/src/handles/handles.cc
+++ b/deps/v8/src/handles/handles.cc
@@ -10,6 +10,7 @@
#include "src/execution/isolate.h"
#include "src/execution/thread-id.h"
#include "src/handles/maybe-handles.h"
+#include "src/maglev/maglev-concurrent-dispatcher.h"
#include "src/objects/objects-inl.h"
#include "src/roots/roots-inl.h"
#include "src/utils/address-map.h"
@@ -149,11 +150,9 @@ Address HandleScope::current_limit_address(Isolate* isolate) {
return reinterpret_cast<Address>(&isolate->handle_scope_data()->limit);
}
-CanonicalHandleScope::CanonicalHandleScope(Isolate* isolate,
- OptimizedCompilationInfo* info)
- : isolate_(isolate),
- info_(info),
- zone_(info ? info->zone() : new Zone(isolate->allocator(), ZONE_NAME)) {
+CanonicalHandleScope::CanonicalHandleScope(Isolate* isolate, Zone* zone)
+ : zone_(zone == nullptr ? new Zone(isolate->allocator(), ZONE_NAME) : zone),
+ isolate_(isolate) {
HandleScopeData* handle_scope_data = isolate_->handle_scope_data();
prev_canonical_scope_ = handle_scope_data->canonical_scope;
handle_scope_data->canonical_scope = this;
@@ -165,18 +164,12 @@ CanonicalHandleScope::CanonicalHandleScope(Isolate* isolate,
CanonicalHandleScope::~CanonicalHandleScope() {
delete root_index_map_;
- if (info_) {
- // If we passed a compilation info as parameter, we created the identity map
- // on its zone(). Then, we pass it to the compilation info which is
- // responsible for the disposal.
- info_->set_canonical_handles(DetachCanonicalHandles());
- } else {
- // If we don't have a compilation info, we created the zone manually. To
- // properly dispose of said zone, we need to first free the identity_map_.
- // Then we do so manually even though identity_map_ is a unique_ptr.
- identity_map_.reset();
- delete zone_;
- }
+ // Note: both the identity_map_ (zone-allocated) and the zone_ itself may
+ // have custom ownership semantics, controlled by subclasses. For example,
+ // in the case of external ownership, the subclass destructor may 'steal'
+ // both by resetting the identity map pointer and nulling the zone.
+ identity_map_.reset();
+ delete zone_;
isolate_->handle_scope_data()->canonical_scope = prev_canonical_scope_;
}
@@ -206,5 +199,26 @@ CanonicalHandleScope::DetachCanonicalHandles() {
return std::move(identity_map_);
}
+template <class CompilationInfoT>
+CanonicalHandleScopeForOptimization<CompilationInfoT>::
+ CanonicalHandleScopeForOptimization(Isolate* isolate,
+ CompilationInfoT* info)
+ : CanonicalHandleScope(isolate, info->zone()), info_(info) {}
+
+template <class CompilationInfoT>
+CanonicalHandleScopeForOptimization<
+ CompilationInfoT>::~CanonicalHandleScopeForOptimization() {
+ // We created the identity map on the compilation info's zone(). Pass
+ // ownership to the compilation info which is responsible for the disposal.
+ info_->set_canonical_handles(DetachCanonicalHandles());
+ zone_ = nullptr; // We don't own the zone, null it.
+}
+
+template class CanonicalHandleScopeForOptimization<OptimizedCompilationInfo>;
+#ifdef V8_ENABLE_MAGLEV
+template class CanonicalHandleScopeForOptimization<
+ maglev::ExportedMaglevCompilationInfo>;
+#endif // V8_ENABLE_MAGLEV
+
} // namespace internal
} // namespace v8
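
The explicit instantiations above keep the template's member definitions in the .cc file: only the listed specializations are compiled and exported, which is why the Maglev variant is guarded by V8_ENABLE_MAGLEV. A generic sketch of the pattern:

// widget.h
template <class T>
class Widget {
 public:
  void Frob();  // declared here, defined in widget.cc
};

// widget.cc
template <class T>
void Widget<T>::Frob() { /* ... */ }

// Emit (and export) exactly these specializations.
template class Widget<int>;
template class Widget<double>;
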
diff --git a/deps/v8/src/handles/handles.h b/deps/v8/src/handles/handles.h
index 3bde90f81f..8d4399477e 100644
--- a/deps/v8/src/handles/handles.h
+++ b/deps/v8/src/handles/handles.h
@@ -182,7 +182,7 @@ class Handle final : public HandleBase {
};
template <typename T>
-inline std::ostream& operator<<(std::ostream& os, Handle<T> handle);
+std::ostream& operator<<(std::ostream& os, Handle<T> handle);
// ----------------------------------------------------------------------------
// A stack-allocated class that governs a number of local handles.
@@ -278,6 +278,10 @@ class IdentityMap;
class RootIndexMap;
class OptimizedCompilationInfo;
+namespace maglev {
+class ExportedMaglevCompilationInfo;
+} // namespace maglev
+
using CanonicalHandlesMap = IdentityMap<Address*, ZoneAllocationPolicy>;
// A CanonicalHandleScope does not open a new HandleScope. It changes the
@@ -285,27 +289,23 @@ using CanonicalHandlesMap = IdentityMap<Address*, ZoneAllocationPolicy>;
// This does not apply to nested inner HandleScopes unless a nested
// CanonicalHandleScope is introduced. Handles are only canonicalized within
// the same CanonicalHandleScope, but not across nested ones.
-class V8_EXPORT_PRIVATE V8_NODISCARD CanonicalHandleScope final {
+class V8_EXPORT_PRIVATE V8_NODISCARD CanonicalHandleScope {
public:
- // If we passed a compilation info as parameter, we created the
- // CanonicalHandlesMap on said compilation info's zone(). If so, in the
- // CanonicalHandleScope destructor we hand off the canonical handle map to the
- // compilation info. The compilation info is responsible for the disposal. If
- // we don't have a compilation info, we create a zone in this constructor. To
- // properly dispose of said zone, we need to first free the identity_map_
+ // If no Zone is passed to this constructor, we create (and own) a new zone.
+ // To properly dispose of said zone, we need to first free the identity_map_
// which is done manually even though identity_map_ is a unique_ptr.
- explicit CanonicalHandleScope(Isolate* isolate,
- OptimizedCompilationInfo* info = nullptr);
+ explicit CanonicalHandleScope(Isolate* isolate, Zone* zone = nullptr);
~CanonicalHandleScope();
+ protected:
+ std::unique_ptr<CanonicalHandlesMap> DetachCanonicalHandles();
+
+ Zone* zone_; // *Not* const, may be mutated by subclasses.
+
private:
Address* Lookup(Address object);
- std::unique_ptr<CanonicalHandlesMap> DetachCanonicalHandles();
-
- Isolate* isolate_;
- OptimizedCompilationInfo* info_;
- Zone* zone_;
+ Isolate* const isolate_;
RootIndexMap* root_index_map_;
std::unique_ptr<CanonicalHandlesMap> identity_map_;
// Ordinary nested handle scopes within the current one are not canonical.
@@ -316,6 +316,27 @@ class V8_EXPORT_PRIVATE V8_NODISCARD CanonicalHandleScope final {
friend class HandleScope;
};
+template <class CompilationInfoT>
+class V8_EXPORT_PRIVATE V8_NODISCARD CanonicalHandleScopeForOptimization final
+ : public CanonicalHandleScope {
+ public:
+ // We create the CanonicalHandlesMap on the compilation info's zone(). In
+ // the destructor we hand off the canonical handle map to the compilation
+ // info, which is responsible for its disposal.
+ explicit CanonicalHandleScopeForOptimization(Isolate* isolate,
+ CompilationInfoT* info);
+ ~CanonicalHandleScopeForOptimization();
+
+ private:
+ CompilationInfoT* const info_;
+};
+
+using CanonicalHandleScopeForTurbofan =
+ CanonicalHandleScopeForOptimization<OptimizedCompilationInfo>;
+using CanonicalHandleScopeForMaglev =
+ CanonicalHandleScopeForOptimization<maglev::ExportedMaglevCompilationInfo>;
+
// Seal off the current HandleScope so that new handles can only be created
// if a new HandleScope is entered.
class V8_NODISCARD SealHandleScope final {
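
Call sites choose the alias matching their compiler pipeline. A hypothetical usage sketch (function name and body are illustrative):

void CompileOnMainThreadSketch(
    v8::internal::Isolate* isolate,
    v8::internal::OptimizedCompilationInfo* info) {
  // Canonicalizes handles into info->zone(); when the scope closes, the
  // handle map is handed off to `info` via set_canonical_handles().
  v8::internal::CanonicalHandleScopeForTurbofan canonical(isolate, info);
  // ... create handles; duplicates collapse to a single location ...
}
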
diff --git a/deps/v8/src/handles/local-handles-inl.h b/deps/v8/src/handles/local-handles-inl.h
index f9f63175cf..404a922ae0 100644
--- a/deps/v8/src/handles/local-handles-inl.h
+++ b/deps/v8/src/handles/local-handles-inl.h
@@ -16,6 +16,7 @@ namespace internal {
// static
V8_INLINE Address* LocalHandleScope::GetHandle(LocalHeap* local_heap,
Address value) {
+ DCHECK(local_heap->IsRunning());
if (local_heap->is_main_thread())
return LocalHandleScope::GetMainThreadHandle(local_heap, value);
@@ -57,10 +58,16 @@ LocalHandleScope::~LocalHandleScope() {
template <typename T>
Handle<T> LocalHandleScope::CloseAndEscape(Handle<T> handle_value) {
- HandleScopeData* current = &local_heap_->handles()->scope_;
+ HandleScopeData* current;
T value = *handle_value;
// Throw away all handles in the current scope.
- CloseScope(local_heap_, prev_next_, prev_limit_);
+ if (local_heap_->is_main_thread()) {
+ current = local_heap_->heap()->isolate()->handle_scope_data();
+ CloseMainThreadScope(local_heap_, prev_next_, prev_limit_);
+ } else {
+ current = &local_heap_->handles()->scope_;
+ CloseScope(local_heap_, prev_next_, prev_limit_);
+ }
// Allocate one handle in the parent scope.
DCHECK(current->level > current->sealed_level);
Handle<T> result(value, local_heap_);
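
A usage sketch of the escape pattern exercised above, which now works for both main-thread and background-thread local heaps (names are illustrative):

template <typename T>
v8::internal::Handle<T> ProduceEscapedSketch(
    v8::internal::LocalHeap* local_heap, v8::internal::Handle<T> input) {
  v8::internal::LocalHandleScope scope(local_heap);
  // ... allocate temporaries in this scope ...
  // Temporaries die with the scope; input's value escapes as one handle
  // re-allocated in the parent scope.
  return scope.CloseAndEscape(input);
}
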