author    Myles Borins <mylesborins@google.com>  2017-08-01 11:36:44 -0500
committer Myles Borins <mylesborins@google.com>  2017-08-01 15:23:15 -0500
commit    0a66b223e149a841669bfad5598e4254589730cb (patch)
tree      5ec050f7f78aafbf5b1e0e50d639fb843141e162 /deps/v8/src/objects.cc
parent    1782b3836ba58ef0da6b687f2bb970c0bd8199ad (diff)
deps: update V8 to 6.0.286.52
PR-URL: https://github.com/nodejs/node/pull/14004
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Diffstat (limited to 'deps/v8/src/objects.cc')
-rw-r--r--  deps/v8/src/objects.cc  997
1 file changed, 590 insertions(+), 407 deletions(-)
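Among the changes below, the most caller-visible API tweak is the new `method_name` parameter on `Object::ToObject`, which lets a builtin throw an error naming the method rather than the generic conversion error. A minimal sketch of a caller under stated assumptions follows; the builtin name, the enclosing-function return type passed to the macro, and the exact message wording are illustrative, not taken from this patch:

// Hypothetical caller (not part of this diff): pass a method name through to
// Object::ToObject so a null/undefined receiver produces a named TypeError.
Handle<JSReceiver> receiver;
ASSIGN_RETURN_ON_EXCEPTION(
    isolate, receiver,
    Object::ToObject(isolate, object, isolate->native_context(),
                     "Array.prototype.forEach"),  // assumed builtin name
    Object);
// With method_name != nullptr, ToObject throws via
// MessageTemplate::kCalledOnNullOrUndefined, e.g.
//   TypeError: Array.prototype.forEach called on null or undefined
// instead of the older generic MessageTemplate::kUndefinedOrNullToObject:
//   TypeError: Cannot convert undefined or null to object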
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index fe7f0ccdfa..e2748a5216 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -58,6 +58,7 @@
#include "src/objects/code-cache-inl.h"
#include "src/objects/compilation-cache-inl.h"
#include "src/objects/frame-array-inl.h"
+#include "src/objects/map.h"
#include "src/property-descriptor.h"
#include "src/prototype.h"
#include "src/regexp/jsregexp.h"
@@ -108,10 +109,10 @@ Handle<FieldType> Object::OptimalType(Isolate* isolate,
return FieldType::Any(isolate);
}
-
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
Handle<Object> object,
- Handle<Context> native_context) {
+ Handle<Context> native_context,
+ const char* method_name) {
if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
Handle<JSFunction> constructor;
if (object->IsSmi()) {
@@ -120,6 +121,14 @@ MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
int constructor_function_index =
Handle<HeapObject>::cast(object)->map()->GetConstructorFunctionIndex();
if (constructor_function_index == Map::kNoConstructorFunctionIndex) {
+ if (method_name != nullptr) {
+ THROW_NEW_ERROR(
+ isolate,
+ NewTypeError(
+ MessageTemplate::kCalledOnNullOrUndefined,
+ isolate->factory()->NewStringFromAsciiChecked(method_name)),
+ JSReceiver);
+ }
THROW_NEW_ERROR(isolate,
NewTypeError(MessageTemplate::kUndefinedOrNullToObject),
JSReceiver);
@@ -2054,11 +2063,10 @@ MUST_USE_RESULT Maybe<bool> FastAssign(
if (use_set) {
LookupIterator it(target, next_key, target);
- bool call_to_js = it.IsFound() && it.state() != LookupIterator::DATA;
Maybe<bool> result = Object::SetProperty(
&it, prop_value, STRICT, Object::CERTAINLY_NOT_STORE_FROM_KEYED);
if (result.IsNothing()) return result;
- if (stable && call_to_js) stable = from->map() == *map;
+ if (stable) stable = from->map() == *map;
} else {
if (excluded_properties != nullptr &&
HasExcludedProperty(excluded_properties, next_key)) {
@@ -2468,6 +2476,7 @@ Handle<String> String::SlowFlatten(Handle<ConsString> cons,
bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
+ DisallowHeapAllocation no_allocation;
// Externalizing twice leaks the external resource, so it's
// prohibited by the API.
DCHECK(!this->IsExternalString());
@@ -2490,7 +2499,9 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
bool is_one_byte = this->IsOneByteRepresentation();
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(this).IsIndirect();
-
+ if (has_pointers) {
+ heap->NotifyObjectLayoutChange(this, no_allocation);
+ }
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
// string occupies is too small for a regular external string.
@@ -2536,6 +2547,7 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
+ DisallowHeapAllocation no_allocation;
// Externalizing twice leaks the external resource, so it's
// prohibited by the API.
DCHECK(!this->IsExternalString());
@@ -2563,6 +2575,10 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
bool is_internalized = this->IsInternalizedString();
bool has_pointers = StringShape(this).IsIndirect();
+ if (has_pointers) {
+ heap->NotifyObjectLayoutChange(this, no_allocation);
+ }
+
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
// string occupies is too small for a regular external string.
@@ -3273,6 +3289,16 @@ MaybeHandle<Map> Map::CopyWithField(Handle<Map> map, Handle<Name> name,
if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
representation = Representation::Tagged();
type = FieldType::Any(isolate);
+ } else if (IsTransitionableFastElementsKind(map->elements_kind()) &&
+ IsInplaceGeneralizableField(constness, representation, *type)) {
+ // We don't support propagation of field generalization through elements
+ // kind transitions because they are inserted into the transition tree
+ // before field transitions. In order to avoid complexity of handling
+ // such a case we ensure that all maps with transitionable elements kinds
+ // do not have fields that can be generalized in-place (without creation
+ // of a new map).
+ DCHECK(representation.IsHeapObject());
+ type = FieldType::Any(isolate);
}
Handle<Object> wrapped_type(WrapFieldType(type));
@@ -3330,8 +3356,8 @@ const char* Representation::Mnemonic() const {
}
bool Map::TransitionRemovesTaggedField(Map* target) {
- int inobject = GetInObjectProperties();
- int target_inobject = target->GetInObjectProperties();
+ int inobject = NumberOfFields();
+ int target_inobject = target->NumberOfFields();
for (int i = target_inobject; i < inobject; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(this, i);
if (!IsUnboxedDoubleField(index)) return true;
@@ -3340,8 +3366,8 @@ bool Map::TransitionRemovesTaggedField(Map* target) {
}
bool Map::TransitionChangesTaggedFieldToUntaggedField(Map* target) {
- int inobject = GetInObjectProperties();
- int target_inobject = target->GetInObjectProperties();
+ int inobject = NumberOfFields();
+ int target_inobject = target->NumberOfFields();
int limit = Min(inobject, target_inobject);
for (int i = 0; i < limit; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(target, i);
@@ -3458,20 +3484,23 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
}
PropertyDetails details = new_map->GetLastDescriptorDetails();
+ int target_index = details.field_index() - new_map->GetInObjectProperties();
+ bool have_space = old_map->unused_property_fields() > 0 ||
+ (details.location() == kField && target_index >= 0 &&
+ object->properties()->length() > target_index);
// Either new_map adds an kDescriptor property, or a kField property for
// which there is still space, and which does not require a mutable double
// box (an out-of-object double).
if (details.location() == kDescriptor ||
- (old_map->unused_property_fields() > 0 &&
- ((FLAG_unbox_double_fields && object->properties()->length() == 0) ||
- !details.representation().IsDouble()))) {
+ (have_space && ((FLAG_unbox_double_fields && target_index < 0) ||
+ !details.representation().IsDouble()))) {
object->synchronized_set_map(*new_map);
return;
}
// If there is still space in the object, we need to allocate a mutable
// double box.
- if (old_map->unused_property_fields() > 0) {
+ if (have_space) {
FieldIndex index =
FieldIndex::ForDescriptor(*new_map, new_map->LastAdded());
DCHECK(details.representation().IsDouble());
@@ -3498,7 +3527,6 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
}
DCHECK_EQ(kField, details.location());
DCHECK_EQ(kData, details.kind());
- int target_index = details.field_index() - new_map->GetInObjectProperties();
DCHECK(target_index >= 0); // Must be a backing store index.
new_storage->set(target_index, *value);
@@ -3917,7 +3945,6 @@ Handle<Map> Map::CopyGeneralizeAllFields(Handle<Map> map,
return new_map;
}
-
void Map::DeprecateTransitionTree() {
if (is_deprecated()) return;
Object* transitions = raw_transitions();
@@ -4060,7 +4087,6 @@ Handle<FieldType> Map::GeneralizeFieldType(Representation rep1,
return FieldType::Any(isolate);
}
-
// static
void Map::GeneralizeField(Handle<Map> map, int modify_index,
PropertyConstness new_constness,
@@ -4094,8 +4120,8 @@ void Map::GeneralizeField(Handle<Map> map, int modify_index,
// Determine the field owner.
Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
- Handle<DescriptorArray> descriptors(
- field_owner->instance_descriptors(), isolate);
+ Handle<DescriptorArray> descriptors(field_owner->instance_descriptors(),
+ isolate);
DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
new_field_type =
@@ -4823,16 +4849,15 @@ int AccessorInfo::AppendUnique(Handle<Object> descriptors,
valid_descriptors);
}
-
-static bool ContainsMap(MapHandleList* maps, Map* map) {
+static bool ContainsMap(MapHandles const& maps, Map* map) {
DCHECK_NOT_NULL(map);
- for (int i = 0; i < maps->length(); ++i) {
- if (!maps->at(i).is_null() && *maps->at(i) == map) return true;
+ for (Handle<Map> current : maps) {
+ if (!current.is_null() && *current == map) return true;
}
return false;
}
-Map* Map::FindElementsKindTransitionedMap(MapHandleList* candidates) {
+Map* Map::FindElementsKindTransitionedMap(MapHandles const& candidates) {
DisallowHeapAllocation no_allocation;
DisallowDeoptimization no_deoptimization(GetIsolate());
@@ -4843,7 +4868,7 @@ Map* Map::FindElementsKindTransitionedMap(MapHandleList* candidates) {
if (IsTransitionableFastElementsKind(kind)) {
// Check the state of the root map.
Map* root_map = FindRootMap();
- if (!EquivalentToForTransition(root_map)) return nullptr;
+ if (!EquivalentToForElementsKindTransition(root_map)) return nullptr;
root_map = root_map->LookupElementsTransitionMap(kind);
DCHECK_NOT_NULL(root_map);
// Starting from the next existing elements kind transition try to
@@ -5290,15 +5315,16 @@ MaybeHandle<Smi> JSFunction::GetLength(Isolate* isolate,
Handle<JSFunction> function) {
int length = 0;
if (function->shared()->is_compiled()) {
- length = function->shared()->length();
+ length = function->shared()->GetLength();
} else {
// If the function isn't compiled yet, the length is not computed
// correctly yet. Compile it now and return the right length.
if (Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
- length = function->shared()->length();
+ length = function->shared()->GetLength();
}
if (isolate->has_pending_exception()) return MaybeHandle<Smi>();
}
+ DCHECK_GE(length, 0);
return handle(Smi::FromInt(length), isolate);
}
@@ -5690,7 +5716,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
NotifyMapChange(old_map, new_map, isolate);
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
if (FLAG_trace_maps) {
PrintF("[TraceMaps: SlowToFast from= %p to= %p reason= %s ]\n",
reinterpret_cast<void*>(*old_map), reinterpret_cast<void*>(*new_map),
@@ -5822,14 +5848,13 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements(
Handle<JSObject> object) {
DCHECK(!object->HasFixedTypedArrayElements());
Isolate* isolate = object->GetIsolate();
- bool is_arguments = object->HasSloppyArgumentsElements();
+ bool is_sloppy_arguments = object->HasSloppyArgumentsElements();
{
DisallowHeapAllocation no_gc;
FixedArrayBase* elements = object->elements();
- if (is_arguments) {
- FixedArray* parameter_map = FixedArray::cast(elements);
- elements = FixedArrayBase::cast(parameter_map->get(1));
+ if (is_sloppy_arguments) {
+ elements = SloppyArgumentsElements::cast(elements)->arguments();
}
if (elements->IsDictionary()) {
@@ -5846,7 +5871,7 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements(
object->GetElementsAccessor()->Normalize(object);
// Switch to using the dictionary as the backing storage for elements.
- ElementsKind target_kind = is_arguments
+ ElementsKind target_kind = is_sloppy_arguments
? SLOW_SLOPPY_ARGUMENTS_ELEMENTS
: object->HasFastStringWrapperElements()
? SLOW_STRING_WRAPPER_ELEMENTS
@@ -5855,8 +5880,9 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements(
// Set the new map first to satify the elements type assert in set_elements().
JSObject::MigrateToMap(object, new_map);
- if (is_arguments) {
- FixedArray::cast(object->elements())->set(1, *dictionary);
+ if (is_sloppy_arguments) {
+ SloppyArgumentsElements::cast(object->elements())
+ ->set_arguments(*dictionary);
} else {
object->set_elements(*dictionary);
}
@@ -8646,7 +8672,7 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, PropertyNormalizationMode mode,
cache->Set(fast_map, new_map);
isolate->counters()->maps_normalized()->Increment();
}
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
if (FLAG_trace_maps) {
PrintF("[TraceMaps: Normalize from= %p to= %p reason= %s ]\n",
reinterpret_cast<void*>(*fast_map),
@@ -8695,9 +8721,8 @@ Handle<Map> Map::TransitionToImmutableProto(Handle<Map> map) {
return new_map;
}
-Handle<Map> Map::CopyInitialMap(Handle<Map> map, int instance_size,
- int in_object_properties,
- int unused_property_fields) {
+namespace {
+void EnsureInitialMap(Handle<Map> map) {
#ifdef DEBUG
Isolate* isolate = map->GetIsolate();
// Strict function maps have Function as a constructor but the
@@ -8715,7 +8740,21 @@ Handle<Map> Map::CopyInitialMap(Handle<Map> map, int instance_size,
DCHECK(map->owns_descriptors());
DCHECK_EQ(map->NumberOfOwnDescriptors(),
map->instance_descriptors()->number_of_descriptors());
+}
+} // namespace
+// static
+Handle<Map> Map::CopyInitialMapNormalized(Handle<Map> map,
+ PropertyNormalizationMode mode) {
+ EnsureInitialMap(map);
+ return CopyNormalized(map, mode);
+}
+
+// static
+Handle<Map> Map::CopyInitialMap(Handle<Map> map, int instance_size,
+ int in_object_properties,
+ int unused_property_fields) {
+ EnsureInitialMap(map);
Handle<Map> result = RawCopy(map, instance_size);
// Please note instance_type and instance_size are set when allocated.
@@ -8792,8 +8831,7 @@ Handle<Map> Map::ShareDescriptor(Handle<Map> map,
return result;
}
-
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
// static
void Map::TraceTransition(const char* what, Map* from, Map* to, Name* name) {
@@ -8818,18 +8856,11 @@ void Map::TraceAllTransitions(Map* map) {
}
}
-#endif // TRACE_MAPS
-
+#endif // V8_TRACE_MAPS
void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
Handle<Name> name, SimpleTransitionFlag flag) {
- Isolate* isolate = parent->GetIsolate();
- // Do not track transitions during bootstrap except for element transitions.
- if (isolate->bootstrapper()->IsActive() &&
- !name.is_identical_to(isolate->factory()->elements_transition_symbol())) {
- return;
- }
- if (!parent->GetBackPointer()->IsUndefined(isolate)) {
+ if (!parent->GetBackPointer()->IsUndefined(parent->GetIsolate())) {
parent->set_owns_descriptors(false);
} else {
// |parent| is initial map and it must keep the ownership, there must be no
@@ -8840,12 +8871,12 @@ void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
}
if (parent->is_prototype_map()) {
DCHECK(child->is_prototype_map());
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
Map::TraceTransition("NoTransition", *parent, *child, *name);
#endif
} else {
TransitionArray::Insert(parent, name, child, flag);
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
Map::TraceTransition("Transition", *parent, *child, *name);
#endif
}
@@ -8877,7 +8908,7 @@ Handle<Map> Map::CopyReplaceDescriptors(
} else {
result->InitializeDescriptors(*descriptors, *layout_descriptor);
}
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
if (FLAG_trace_maps &&
// Mirror conditions above that did not call ConnectTransition().
(map->is_prototype_map() ||
@@ -9077,7 +9108,7 @@ Handle<Map> Map::CopyForTransition(Handle<Map> map, const char* reason) {
new_map->InitializeDescriptors(*new_descriptors, *new_layout_descriptor);
}
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
if (FLAG_trace_maps) {
PrintF("[TraceMaps: CopyForTransition from= %p to= %p reason= %s ]\n",
reinterpret_cast<void*>(*map), reinterpret_cast<void*>(*new_map),
@@ -9260,7 +9291,7 @@ Handle<Map> Map::TransitionToDataProperty(Handle<Map> map, Handle<Name> name,
if (!maybe_map.ToHandle(&result)) {
Isolate* isolate = name->GetIsolate();
const char* reason = "TooManyFastProperties";
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
std::unique_ptr<ScopedVector<char>> buffer;
if (FLAG_trace_maps) {
ScopedVector<char> name_buffer(100);
@@ -10156,11 +10187,13 @@ Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
factory->NewFixedArray(LengthFor(size), pretenure);
result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
- result->set(kEnumCacheIndex, Smi::kZero);
+ result->set(kEnumCacheBridgeIndex, Smi::kZero);
return Handle<DescriptorArray>::cast(result);
}
-void DescriptorArray::ClearEnumCache() { set(kEnumCacheIndex, Smi::kZero); }
+void DescriptorArray::ClearEnumCache() {
+ set(kEnumCacheBridgeIndex, Smi::kZero);
+}
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
@@ -10180,14 +10213,14 @@ void DescriptorArray::SetEnumCache(Handle<DescriptorArray> descriptors,
bridge_storage = *isolate->factory()->NewFixedArray(
DescriptorArray::kEnumCacheBridgeLength);
} else {
- bridge_storage = FixedArray::cast(descriptors->get(kEnumCacheIndex));
+ bridge_storage = FixedArray::cast(descriptors->get(kEnumCacheBridgeIndex));
}
bridge_storage->set(kEnumCacheBridgeCacheIndex, *new_cache);
bridge_storage->set(
kEnumCacheBridgeIndicesCacheIndex,
new_index_cache.is_null() ? Object::cast(Smi::kZero) : *new_index_cache);
if (needs_new_enum_cache) {
- descriptors->set(kEnumCacheIndex, bridge_storage);
+ descriptors->set(kEnumCacheBridgeIndex, bridge_storage);
}
}
@@ -10667,20 +10700,17 @@ char* Relocatable::RestoreState(Isolate* isolate, char* from) {
return from + ArchiveSpacePerThread();
}
-
-char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
+char* Relocatable::Iterate(RootVisitor* v, char* thread_storage) {
Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
Iterate(v, top);
return thread_storage + ArchiveSpacePerThread();
}
-
-void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
+void Relocatable::Iterate(Isolate* isolate, RootVisitor* v) {
Iterate(v, isolate->relocatable_top());
}
-
-void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
+void Relocatable::Iterate(RootVisitor* v, Relocatable* top) {
Relocatable* current = top;
while (current != NULL) {
current->IterateInstance(v);
@@ -11446,8 +11476,10 @@ int String::IndexOf(Isolate* isolate, Handle<String> receiver,
}
MaybeHandle<String> String::GetSubstitution(Isolate* isolate, Match* match,
- Handle<String> replacement) {
+ Handle<String> replacement,
+ int start_index) {
DCHECK_IMPLIES(match->HasNamedCaptures(), FLAG_harmony_regexp_named_captures);
+ DCHECK_GE(start_index, 0);
Factory* factory = isolate->factory();
@@ -11458,7 +11490,8 @@ MaybeHandle<String> String::GetSubstitution(Isolate* isolate, Match* match,
Handle<String> dollar_string =
factory->LookupSingleCharacterStringFromCode('$');
- int next_dollar_ix = String::IndexOf(isolate, replacement, dollar_string, 0);
+ int next_dollar_ix =
+ String::IndexOf(isolate, replacement, dollar_string, start_index);
if (next_dollar_ix < 0) {
return replacement;
}
@@ -12023,7 +12056,7 @@ bool Map::EquivalentToForTransition(Map* other) {
if (!CheckEquivalent(this, other)) return false;
if (instance_type() == JS_FUNCTION_TYPE) {
// JSFunctions require more checks to ensure that sloppy function is
- // not equvalent to strict function.
+ // not equivalent to strict function.
int nof = Min(NumberOfOwnDescriptors(), other->NumberOfOwnDescriptors());
return instance_descriptors()->IsEqualUpTo(other->instance_descriptors(),
nof);
@@ -12031,6 +12064,25 @@ bool Map::EquivalentToForTransition(Map* other) {
return true;
}
+bool Map::EquivalentToForElementsKindTransition(Map* other) {
+ if (!EquivalentToForTransition(other)) return false;
+#ifdef DEBUG
+ // Ensure that we don't try to generate elements kind transitions from maps
+ // with fields that may be generalized in-place. This must already be handled
+ // during addition of a new field.
+ DescriptorArray* descriptors = instance_descriptors();
+ int nof = NumberOfOwnDescriptors();
+ for (int i = 0; i < nof; i++) {
+ PropertyDetails details = descriptors->GetDetails(i);
+ if (details.location() == kField) {
+ DCHECK(!IsInplaceGeneralizableField(details.constness(),
+ details.representation(),
+ descriptors->GetFieldType(i)));
+ }
+ }
+#endif
+ return true;
+}
bool Map::EquivalentToForNormalization(Map* other,
PropertyNormalizationMode mode) {
@@ -12103,125 +12155,6 @@ void JSFunction::AttemptConcurrentOptimization() {
}
// static
-void SharedFunctionInfo::AddToOptimizedCodeMap(
- Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
- Handle<Code> code, BailoutId osr_ast_id) {
- Isolate* isolate = shared->GetIsolate();
- if (isolate->serializer_enabled()) return;
- DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
- DCHECK(native_context->IsNativeContext());
- STATIC_ASSERT(kEntryLength == 2);
- Handle<FixedArray> new_code_map;
- int entry;
-
- if (!osr_ast_id.IsNone()) {
- Context::AddToOptimizedCodeMap(native_context, shared, code, osr_ast_id);
- return;
- }
-
- DCHECK(osr_ast_id.IsNone());
- if (shared->OptimizedCodeMapIsCleared()) {
- new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
- entry = kEntriesStart;
- } else {
- Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
- entry = shared->SearchOptimizedCodeMapEntry(*native_context);
- if (entry >= kEntriesStart) {
- // Just set the code of the entry.
- Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
- old_code_map->set(entry + kCachedCodeOffset, *code_cell);
- return;
- }
-
- // Can we reuse an entry?
- DCHECK(entry < kEntriesStart);
- int length = old_code_map->length();
- for (int i = kEntriesStart; i < length; i += kEntryLength) {
- if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) {
- new_code_map = old_code_map;
- entry = i;
- break;
- }
- }
-
- if (entry < kEntriesStart) {
- // Copy old optimized code map and append one new entry.
- new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
- old_code_map, kEntryLength, TENURED);
- // TODO(mstarzinger): Temporary workaround. The allocation above might
- // have flushed the optimized code map and the copy we created is full of
- // holes. For now we just give up on adding the entry and pretend it got
- // flushed.
- if (shared->OptimizedCodeMapIsCleared()) return;
- entry = old_code_map->length();
- }
- }
-
- Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
- WeakCell* context_cell = native_context->self_weak_cell();
-
- new_code_map->set(entry + kContextOffset, context_cell);
- new_code_map->set(entry + kCachedCodeOffset, *code_cell);
-
-#ifdef DEBUG
- for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
- WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset));
- DCHECK(cell->cleared() || cell->value()->IsNativeContext());
- cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset));
- DCHECK(cell->cleared() ||
- (cell->value()->IsCode() &&
- Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
- }
-#endif
-
- FixedArray* old_code_map = shared->optimized_code_map();
- if (old_code_map != *new_code_map) {
- shared->set_optimized_code_map(*new_code_map);
- }
-}
-
-
-void SharedFunctionInfo::ClearOptimizedCodeMap() {
- FixedArray* empty_fixed_array = GetHeap()->empty_fixed_array();
- set_optimized_code_map(empty_fixed_array, SKIP_WRITE_BARRIER);
-}
-
-
-void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
- const char* reason) {
- DisallowHeapAllocation no_gc;
- Isolate* isolate = GetIsolate();
- bool found = false;
-
- if (!OptimizedCodeMapIsCleared()) {
- Heap* heap = isolate->heap();
- FixedArray* code_map = optimized_code_map();
- int length = code_map->length();
- for (int src = kEntriesStart; src < length; src += kEntryLength) {
- DCHECK(WeakCell::cast(code_map->get(src))->cleared() ||
- WeakCell::cast(code_map->get(src))->value()->IsNativeContext());
- found = WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
- optimized_code;
- if (found) {
- if (FLAG_trace_opt) {
- PrintF("[evicting entry from optimizing code map (%s) for ", reason);
- ShortPrint();
- PrintF("]\n");
- }
- // Just clear the code.
- code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
- SKIP_WRITE_BARRIER);
- }
- }
- }
-
- if (!found) {
- // We didn't find the code in here. It must be osr'd code.
- isolate->EvictOSROptimizedCode(optimized_code, reason);
- }
-}
-
-// static
void JSFunction::EnsureLiterals(Handle<JSFunction> function) {
Handle<SharedFunctionInfo> shared(function->shared());
Isolate* isolate = shared->GetIsolate();
@@ -12626,13 +12559,10 @@ Handle<Object> CacheInitialJSArrayMaps(
return initial_map;
}
+namespace {
-void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
- Handle<Object> value) {
- Isolate* isolate = function->GetIsolate();
-
- DCHECK(value->IsJSReceiver());
-
+void SetInstancePrototype(Isolate* isolate, Handle<JSFunction> function,
+ Handle<JSReceiver> value) {
// Now some logic for the maps of the objects that are created by using this
// function as a constructor.
if (function->has_initial_map()) {
@@ -12683,12 +12613,14 @@ void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
isolate->heap()->ClearInstanceofCache();
}
+} // anonymous namespace
void JSFunction::SetPrototype(Handle<JSFunction> function,
Handle<Object> value) {
DCHECK(function->IsConstructor() ||
IsGeneratorFunction(function->shared()->kind()));
- Handle<Object> construct_prototype = value;
+ Isolate* isolate = function->GetIsolate();
+ Handle<JSReceiver> construct_prototype;
// If the value is not a JSReceiver, store the value in the map's
// constructor field so it can be accessed. Also, set the prototype
@@ -12703,23 +12635,23 @@ void JSFunction::SetPrototype(Handle<JSFunction> function,
JSObject::MigrateToMap(function, new_map);
new_map->SetConstructor(*value);
new_map->set_non_instance_prototype(true);
- Isolate* isolate = new_map->GetIsolate();
FunctionKind kind = function->shared()->kind();
Handle<Context> native_context(function->context()->native_context());
- construct_prototype =
- handle(IsGeneratorFunction(kind)
- ? IsAsyncFunction(kind)
- ? native_context->initial_async_generator_prototype()
- : native_context->initial_generator_prototype()
- : native_context->initial_object_prototype(),
- isolate);
+ construct_prototype = Handle<JSReceiver>(
+ IsGeneratorFunction(kind)
+ ? IsAsyncFunction(kind)
+ ? native_context->initial_async_generator_prototype()
+ : native_context->initial_generator_prototype()
+ : native_context->initial_object_prototype(),
+ isolate);
} else {
+ construct_prototype = Handle<JSReceiver>::cast(value);
function->map()->set_non_instance_prototype(false);
}
- return SetInstancePrototype(function, construct_prototype);
+ SetInstancePrototype(isolate, function, construct_prototype);
}
@@ -12753,7 +12685,7 @@ void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
}
function->set_prototype_or_initial_map(*map);
map->SetConstructor(*function);
-#if TRACE_MAPS
+#if V8_TRACE_MAPS
if (FLAG_trace_maps) {
PrintF("[TraceMaps: InitialMap map= %p SFI= %d_%s ]\n",
reinterpret_cast<void*>(*map), function->shared()->unique_id(),
@@ -13399,11 +13331,6 @@ bool Script::HasPreparsedScopeData() const {
return preparsed_scope_data()->length() > 0;
}
-Handle<FixedUint32Array> Script::GetPreparsedScopeData() const {
- return Handle<FixedUint32Array>::cast(
- Handle<FixedTypedArrayBase>(preparsed_scope_data()));
-}
-
SharedFunctionInfo::ScriptIterator::ScriptIterator(Handle<Script> script)
: ScriptIterator(script->GetIsolate(),
handle(script->shared_function_infos())) {}
@@ -13745,7 +13672,6 @@ void SharedFunctionInfo::InitFromFunctionLiteral(
Handle<SharedFunctionInfo> shared_info, FunctionLiteral* lit) {
// When adding fields here, make sure DeclarationScope::AnalyzePartially is
// updated accordingly.
- shared_info->set_length(lit->function_length());
shared_info->set_internal_formal_parameter_count(lit->parameter_count());
shared_info->set_function_token_position(lit->function_token_position());
shared_info->set_start_position(lit->start_position());
@@ -13771,8 +13697,14 @@ void SharedFunctionInfo::InitFromFunctionLiteral(
// SetSharedFunctionFlagsFromLiteral (compiler.cc), when the function is
// really parsed and compiled.
if (lit->body() != nullptr) {
+ shared_info->set_length(lit->function_length());
shared_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
shared_info->SetExpectedNofPropertiesFromEstimate(lit);
+ } else {
+ // Set an invalid length for lazy functions. This way we can set the correct
+ // value after compiling, but avoid overwriting values set manually by the
+ // bootstrapper.
+ shared_info->set_length(SharedFunctionInfo::kInvalidLength);
}
}
@@ -13835,134 +13767,58 @@ void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
}
}
-int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context) {
- DisallowHeapAllocation no_gc;
- DCHECK(native_context->IsNativeContext());
- if (!OptimizedCodeMapIsCleared()) {
- FixedArray* optimized_code_map = this->optimized_code_map();
- int length = optimized_code_map->length();
- for (int i = kEntriesStart; i < length; i += kEntryLength) {
- if (WeakCell::cast(optimized_code_map->get(i + kContextOffset))
- ->value() == native_context) {
- return i;
- }
- }
- }
- return -1;
-}
-
-void SharedFunctionInfo::ClearCodeFromOptimizedCodeMap() {
- if (!OptimizedCodeMapIsCleared()) {
- FixedArray* optimized_code_map = this->optimized_code_map();
- int length = optimized_code_map->length();
- WeakCell* empty_weak_cell = GetHeap()->empty_weak_cell();
- for (int i = kEntriesStart; i < length; i += kEntryLength) {
- optimized_code_map->set(i + kCachedCodeOffset, empty_weak_cell,
- SKIP_WRITE_BARRIER);
- }
- }
-}
-
-Code* SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
- BailoutId osr_ast_id) {
- Code* result = nullptr;
- if (!osr_ast_id.IsNone()) {
- return native_context->SearchOptimizedCodeMap(this, osr_ast_id);
- }
-
- DCHECK(osr_ast_id.IsNone());
- int entry = SearchOptimizedCodeMapEntry(native_context);
- if (entry != kNotFound) {
- FixedArray* code_map = optimized_code_map();
- DCHECK_LE(entry + kEntryLength, code_map->length());
- WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
- result = cell->cleared() ? nullptr : Code::cast(cell->value());
- }
- return result;
-}
-
-
-#define DECLARE_TAG(ignore1, name, ignore2) name,
-const char* const VisitorSynchronization::kTags[
- VisitorSynchronization::kNumberOfSyncTags] = {
- VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
-};
-#undef DECLARE_TAG
-
-
-#define DECLARE_TAG(ignore1, ignore2, name) name,
-const char* const VisitorSynchronization::kTagNames[
- VisitorSynchronization::kNumberOfSyncTags] = {
- VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
-};
-#undef DECLARE_TAG
-
-
-void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
+void ObjectVisitor::VisitCodeTarget(Code* host, RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* old_pointer = Code::GetCodeFromTargetAddress(rinfo->target_address());
Object* new_pointer = old_pointer;
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
-
-void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
+void ObjectVisitor::VisitCodeAgeSequence(Code* host, RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
Object* old_pointer = rinfo->code_age_stub();
Object* new_pointer = old_pointer;
if (old_pointer != nullptr) {
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
}
-
-void ObjectVisitor::VisitCodeEntry(Address entry_address) {
+void ObjectVisitor::VisitCodeEntry(JSFunction* host, Address entry_address) {
Object* old_pointer = Code::GetObjectFromEntryAddress(entry_address);
Object* new_pointer = old_pointer;
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
-
-void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
+void ObjectVisitor::VisitCellPointer(Code* host, RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::CELL);
Object* old_pointer = rinfo->target_cell();
Object* new_pointer = old_pointer;
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
-
-void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
+void ObjectVisitor::VisitDebugTarget(Code* host, RelocInfo* rinfo) {
DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
rinfo->IsPatchedDebugBreakSlotSequence());
Object* old_pointer =
Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
Object* new_pointer = old_pointer;
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
-
-void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
+void ObjectVisitor::VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
Object* old_pointer = rinfo->target_object();
Object* new_pointer = old_pointer;
- VisitPointer(&new_pointer);
+ VisitPointer(host, &new_pointer);
DCHECK_EQ(old_pointer, new_pointer);
}
-void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
- Address old_reference = rinfo->target_external_reference();
- Address new_reference = old_reference;
- VisitExternalReference(&new_reference);
- DCHECK_EQ(old_reference, new_reference);
-}
-
-
void Code::InvalidateRelocation() {
InvalidateEmbeddedObjects();
set_relocation_info(GetHeap()->empty_byte_array());
@@ -14018,7 +13874,6 @@ void Code::CopyFrom(const CodeDesc& desc) {
static_cast<size_t>(desc.reloc_size));
// unbox handles and relocate
- intptr_t delta = instruction_start() - desc.buffer;
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::CELL) |
@@ -14038,8 +13893,8 @@ void Code::CopyFrom(const CodeDesc& desc) {
it.rinfo()->set_target_cell(*cell, UPDATE_WRITE_BARRIER,
SKIP_ICACHE_FLUSH);
} else if (RelocInfo::IsCodeTarget(mode)) {
- // rewrite code handles in inline cache targets to direct
- // pointers to the first instruction in the code object
+ // rewrite code handles to direct pointers to the first instruction in the
+ // code object
Handle<Object> p = it.rinfo()->target_object_handle(origin);
Code* code = Code::cast(*p);
it.rinfo()->set_target_address(GetIsolate(), code->instruction_start(),
@@ -14053,6 +13908,7 @@ void Code::CopyFrom(const CodeDesc& desc) {
Code* code = Code::cast(*p);
it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
} else {
+ intptr_t delta = instruction_start() - desc.buffer;
it.rinfo()->apply(delta);
}
}
@@ -14131,6 +13987,55 @@ void Code::ClearInlineCaches() {
}
}
+namespace {
+template <typename Code>
+void SetStackFrameCacheCommon(Handle<Code> code,
+ Handle<UnseededNumberDictionary> cache) {
+ Handle<Object> maybe_table(code->source_position_table(), code->GetIsolate());
+ if (maybe_table->IsSourcePositionTableWithFrameCache()) {
+ Handle<SourcePositionTableWithFrameCache>::cast(maybe_table)
+ ->set_stack_frame_cache(*cache);
+ return;
+ }
+ DCHECK(maybe_table->IsByteArray());
+ Handle<ByteArray> table(Handle<ByteArray>::cast(maybe_table));
+ Handle<SourcePositionTableWithFrameCache> table_with_cache =
+ code->GetIsolate()->factory()->NewSourcePositionTableWithFrameCache(
+ table, cache);
+ code->set_source_position_table(*table_with_cache);
+}
+} // namespace
+
+// static
+void AbstractCode::SetStackFrameCache(Handle<AbstractCode> abstract_code,
+ Handle<UnseededNumberDictionary> cache) {
+ if (abstract_code->IsCode()) {
+ SetStackFrameCacheCommon(handle(abstract_code->GetCode()), cache);
+ } else {
+ SetStackFrameCacheCommon(handle(abstract_code->GetBytecodeArray()), cache);
+ }
+}
+
+namespace {
+template <typename Code>
+void DropStackFrameCacheCommon(Code* code) {
+ i::Object* maybe_table = code->source_position_table();
+ if (maybe_table->IsByteArray()) return;
+ DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ code->set_source_position_table(
+ i::SourcePositionTableWithFrameCache::cast(maybe_table)
+ ->source_position_table());
+}
+} // namespace
+
+void AbstractCode::DropStackFrameCache() {
+ if (IsCode()) {
+ DropStackFrameCacheCommon(GetCode());
+ } else {
+ DropStackFrameCacheCommon(GetBytecodeArray());
+ }
+}
+
int AbstractCode::SourcePosition(int offset) {
int position = 0;
// Subtract one because the current PC is one instruction after the call site.
@@ -14787,7 +14692,7 @@ void Code::Disassemble(const char* name, std::ostream& os) { // NOLINT
}
os << "\n";
- SourcePositionTableIterator it(source_position_table());
+ SourcePositionTableIterator it(SourcePositionTable());
if (!it.done()) {
os << "Source positions:\n pc offset position\n";
for (; !it.done(); it.Advance()) {
@@ -14851,7 +14756,11 @@ void Code::Disassemble(const char* name, std::ostream& os) { // NOLINT
}
#ifdef OBJECT_PRINT
if (!type_feedback_info()->IsUndefined(GetIsolate())) {
- TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(os);
+ TypeFeedbackInfo* info = TypeFeedbackInfo::cast(type_feedback_info());
+ HeapObject::PrintHeader(os, "TypeFeedbackInfo");
+ os << "\n - ic_total_count: " << info->ic_total_count()
+ << ", ic_with_type_info_count: " << info->ic_with_type_info_count()
+ << ", ic_generic_count: " << info->ic_generic_count() << "\n";
os << "\n";
}
#endif
@@ -14889,7 +14798,7 @@ void BytecodeArray::Disassemble(std::ostream& os) {
os << "Frame size " << frame_size() << "\n";
const uint8_t* base_address = GetFirstBytecodeAddress();
- SourcePositionTableIterator source_positions(source_position_table());
+ SourcePositionTableIterator source_positions(SourcePositionTable());
interpreter::BytecodeArrayIterator iterator(handle(this));
while (!iterator.done()) {
@@ -14911,18 +14820,33 @@ void BytecodeArray::Disassemble(std::ostream& os) {
os << " (" << jump_target << " @ " << iterator.GetJumpTargetOffset()
<< ")";
}
+ if (interpreter::Bytecodes::IsSwitch(iterator.current_bytecode())) {
+ os << " {";
+ bool first_entry = true;
+ for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
+ if (first_entry) {
+ first_entry = false;
+ } else {
+ os << ",";
+ }
+ os << " " << entry.case_value << ": @" << entry.target_offset;
+ }
+ os << " }";
+ }
os << std::endl;
iterator.Advance();
}
+ os << "Constant pool (size = " << constant_pool()->length() << ")\n";
+#ifdef OBJECT_PRINT
if (constant_pool()->length() > 0) {
- os << "Constant pool (size = " << constant_pool()->length() << ")\n";
constant_pool()->Print();
}
+#endif
+ os << "Handler Table (size = " << handler_table()->Size() << ")\n";
#ifdef ENABLE_DISASSEMBLER
if (handler_table()->length() > 0) {
- os << "Handler Table (size = " << handler_table()->Size() << ")\n";
HandlerTable::cast(handler_table())->HandlerTableRangePrint(os);
}
#endif
@@ -15513,13 +15437,14 @@ static bool ShouldConvertToSlowElements(JSObject* object, uint32_t capacity,
object->GetHeap()->InNewSpace(object))) {
return false;
}
- // If the fast-case backing storage takes up roughly three times as
- // much space (in machine words) as a dictionary backing storage
- // would, the object should have slow elements.
+ // If the fast-case backing storage takes up much more memory than a
+ // dictionary backing storage would, the object should have slow elements.
int used_elements = object->GetFastElementsUsage();
- int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
- SeededNumberDictionary::kEntrySize;
- return 3 * static_cast<uint32_t>(dictionary_size) <= *new_capacity;
+ uint32_t size_threshold =
+ SeededNumberDictionary::kPreferFastElementsSizeFactor *
+ SeededNumberDictionary::ComputeCapacity(used_elements) *
+ SeededNumberDictionary::kEntrySize;
+ return size_threshold <= *new_capacity;
}
@@ -15574,6 +15499,8 @@ static bool ShouldConvertToFastElements(JSObject* object,
Object* length = JSArray::cast(object)->length();
if (!length->IsSmi()) return false;
*new_capacity = static_cast<uint32_t>(Smi::cast(length)->value());
+ } else if (object->IsJSSloppyArgumentsObject()) {
+ return false;
} else {
*new_capacity = dictionary->max_number_key() + 1;
}
@@ -15618,7 +15545,7 @@ Maybe<bool> JSObject::AddDataElement(Handle<JSObject> object, uint32_t index,
FixedArrayBase* elements = object->elements();
ElementsKind dictionary_kind = DICTIONARY_ELEMENTS;
if (IsSloppyArgumentsElementsKind(kind)) {
- elements = FixedArrayBase::cast(FixedArray::cast(elements)->get(1));
+ elements = SloppyArgumentsElements::cast(elements)->arguments();
dictionary_kind = SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
} else if (IsStringWrapperElementsKind(kind)) {
dictionary_kind = SLOW_STRING_WRAPPER_ELEMENTS;
@@ -15631,7 +15558,7 @@ Maybe<bool> JSObject::AddDataElement(Handle<JSObject> object, uint32_t index,
SeededNumberDictionary::cast(elements),
index, &new_capacity)
? BestFittingFastElementsKind(*object)
- : dictionary_kind; // Overwrite in case of arguments.
+ : dictionary_kind;
} else if (ShouldConvertToSlowElements(
*object, static_cast<uint32_t>(elements->length()), index,
&new_capacity)) {
@@ -16171,7 +16098,7 @@ JSRegExp::Flags RegExpFlagsFromString(Handle<String> flags, bool* success) {
JSRegExp::Flags value = JSRegExp::kNone;
int length = flags->length();
// A longer flags string cannot be valid.
- if (length > 5) return JSRegExp::Flags(0);
+ if (length > JSRegExp::FlagCount()) return JSRegExp::Flags(0);
for (int i = 0; i < length; i++) {
JSRegExp::Flag flag = JSRegExp::kNone;
switch (flags->Get(i)) {
@@ -16672,21 +16599,19 @@ Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
int n,
Key key,
PretenureFlag pretenure) {
+ if (table->HasSufficientCapacityToAdd(n)) return table;
+
Isolate* isolate = table->GetIsolate();
int capacity = table->Capacity();
- int nof = table->NumberOfElements() + n;
-
- if (table->HasSufficientCapacityToAdd(n)) return table;
+ int new_nof = table->NumberOfElements() + n;
const int kMinCapacityForPretenure = 256;
bool should_pretenure = pretenure == TENURED ||
((capacity > kMinCapacityForPretenure) &&
!isolate->heap()->InNewSpace(*table));
- Handle<Derived> new_table = HashTable::New(
- isolate,
- nof * 2,
- USE_DEFAULT_MINIMUM_CAPACITY,
- should_pretenure ? TENURED : NOT_TENURED);
+ Handle<Derived> new_table =
+ HashTable::New(isolate, new_nof, USE_DEFAULT_MINIMUM_CAPACITY,
+ should_pretenure ? TENURED : NOT_TENURED);
table->Rehash(new_table, key);
return new_table;
@@ -16777,17 +16702,16 @@ template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;
template class Dictionary<GlobalDictionary, GlobalDictionaryShape,
Handle<Name> >;
-template class Dictionary<SeededNumberDictionary,
- SeededNumberDictionaryShape,
- uint32_t>;
+template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
+ HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>;
+
+template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
+ Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>;
template class Dictionary<UnseededNumberDictionary,
UnseededNumberDictionaryShape,
uint32_t>;
-template void
-HashTable<GlobalDictionary, GlobalDictionaryShape, Handle<Name> >::Rehash(Handle<Name> key);
-
template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::New(
Isolate*, int at_least_space_for, PretenureFlag pretenure,
@@ -16860,10 +16784,6 @@ template Handle<NameDictionary>
HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
Shrink(Handle<NameDictionary>, Handle<Name>);
-template Handle<SeededNumberDictionary>
-HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
- Shrink(Handle<SeededNumberDictionary>, uint32_t);
-
template Handle<UnseededNumberDictionary>
HashTable<UnseededNumberDictionary, UnseededNumberDictionaryShape,
uint32_t>::Shrink(Handle<UnseededNumberDictionary>, uint32_t);
@@ -16903,9 +16823,6 @@ template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);
-template int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape,
- uint32_t>::FindEntry(uint32_t);
-
template int NameDictionaryBase<NameDictionary, NameDictionaryShape>::FindEntry(
Handle<Name>);
@@ -16953,6 +16870,16 @@ Dictionary<NameDictionary, NameDictionaryShape, Handle<Name>>::CollectKeysTo(
dictionary,
KeyAccumulator* keys);
+template int
+Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape,
+ uint32_t>::AddEntry(Handle<SeededNumberDictionary> dictionary,
+ uint32_t key, Handle<Object> value,
+ PropertyDetails details, uint32_t hash);
+
+template int
+Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape,
+ uint32_t>::NumberOfElementsFilterAttributes(PropertyFilter filter);
+
Handle<Object> JSObject::PrepareSlowElementsForSort(
Handle<JSObject> object, uint32_t limit) {
DCHECK(object->HasDictionaryElements());
@@ -17301,6 +17228,70 @@ size_t JSTypedArray::element_size() {
}
}
+// static
+MaybeHandle<JSTypedArray> JSTypedArray::Create(Isolate* isolate,
+ Handle<Object> default_ctor,
+ int argc, Handle<Object>* argv,
+ const char* method_name) {
+ // 1. Let newTypedArray be ? Construct(constructor, argumentList).
+ Handle<Object> new_obj;
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, new_obj,
+ Execution::New(isolate, default_ctor, argc, argv),
+ JSTypedArray);
+
+ // 2. Perform ? ValidateTypedArray(newTypedArray).
+ Handle<JSTypedArray> new_array;
+ ASSIGN_RETURN_ON_EXCEPTION(
+ isolate, new_array, JSTypedArray::Validate(isolate, new_obj, method_name),
+ JSTypedArray);
+
+ // 3. If argumentList is a List of a single Number, then
+ // If newTypedArray.[[ArrayLength]] < size, throw a TypeError exception.
+ DCHECK_IMPLIES(argc == 1, argv[0]->IsSmi());
+ if (argc == 1 && new_array->length_value() < argv[0]->Number()) {
+ const MessageTemplate::Template message =
+ MessageTemplate::kTypedArrayTooShort;
+ THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
+ }
+
+ // 4. Return newTypedArray.
+ return new_array;
+}
+
+// static
+MaybeHandle<JSTypedArray> JSTypedArray::SpeciesCreate(
+ Isolate* isolate, Handle<JSTypedArray> exemplar, int argc,
+ Handle<Object>* argv, const char* method_name) {
+ // 1. Assert: exemplar is an Object that has a [[TypedArrayName]] internal
+ // slot.
+ DCHECK(exemplar->IsJSTypedArray());
+
+ // 2. Let defaultConstructor be the intrinsic object listed in column one of
+ // Table 51 for exemplar.[[TypedArrayName]].
+ Handle<JSFunction> default_ctor = isolate->uint8_array_fun();
+ switch (exemplar->type()) {
+#define TYPED_ARRAY_CTOR(Type, type, TYPE, ctype, size) \
+ case kExternal##Type##Array: { \
+ default_ctor = isolate->type##_array_fun(); \
+ break; \
+ }
+
+ TYPED_ARRAYS(TYPED_ARRAY_CTOR)
+#undef TYPED_ARRAY_CTOR
+ default:
+ UNREACHABLE();
+ }
+
+ // 3. Let constructor be ? SpeciesConstructor(exemplar, defaultConstructor).
+ Handle<Object> ctor;
+ ASSIGN_RETURN_ON_EXCEPTION(
+ isolate, ctor,
+ Object::SpeciesConstructor(isolate, exemplar, default_ctor),
+ JSTypedArray);
+
+ // 4. Return ? TypedArrayCreate(constructor, argumentList).
+ return Create(isolate, ctor, argc, argv, method_name);
+}
void JSGlobalObject::InvalidatePropertyCell(Handle<JSGlobalObject> global,
Handle<Name> name) {
@@ -17467,10 +17458,9 @@ void StringTable::EnsureCapacityForDeserialization(Isolate* isolate,
namespace {
template <class StringClass>
-void MigrateExternalStringResource(Isolate* isolate, Handle<String> from,
- Handle<String> to) {
- Handle<StringClass> cast_from = Handle<StringClass>::cast(from);
- Handle<StringClass> cast_to = Handle<StringClass>::cast(to);
+void MigrateExternalStringResource(Isolate* isolate, String* from, String* to) {
+ StringClass* cast_from = StringClass::cast(from);
+ StringClass* cast_to = StringClass::cast(to);
const typename StringClass::Resource* to_resource = cast_to->resource();
if (to_resource == nullptr) {
// |to| is a just-created internalized copy of |from|. Migrate the resource.
@@ -17480,7 +17470,44 @@ void MigrateExternalStringResource(Isolate* isolate, Handle<String> from,
cast_from->set_resource(nullptr);
} else if (to_resource != cast_from->resource()) {
// |to| already existed and has its own resource. Finalize |from|.
- isolate->heap()->FinalizeExternalString(*from);
+ isolate->heap()->FinalizeExternalString(from);
+ }
+}
+
+void MakeStringThin(String* string, String* internalized, Isolate* isolate) {
+ if (string->IsExternalString()) {
+ if (internalized->IsExternalOneByteString()) {
+ MigrateExternalStringResource<ExternalOneByteString>(isolate, string,
+ internalized);
+ } else if (internalized->IsExternalTwoByteString()) {
+ MigrateExternalStringResource<ExternalTwoByteString>(isolate, string,
+ internalized);
+ } else {
+ // If the external string is duped into an existing non-external
+ // internalized string, free its resource (it's about to be rewritten
+ // into a ThinString below).
+ isolate->heap()->FinalizeExternalString(string);
+ }
+ }
+
+ if (!string->IsInternalizedString()) {
+ DisallowHeapAllocation no_gc;
+ isolate->heap()->NotifyObjectLayoutChange(string, no_gc);
+ bool one_byte = internalized->IsOneByteRepresentation();
+ Handle<Map> map = one_byte ? isolate->factory()->thin_one_byte_string_map()
+ : isolate->factory()->thin_string_map();
+ int old_size = string->Size();
+ DCHECK(old_size >= ThinString::kSize);
+ string->synchronized_set_map(*map);
+ ThinString* thin = ThinString::cast(string);
+ thin->set_actual(internalized);
+ Address thin_end = thin->address() + ThinString::kSize;
+ int size_delta = old_size - ThinString::kSize;
+ if (size_delta != 0) {
+ Heap* heap = isolate->heap();
+ heap->CreateFillerObjectAt(thin_end, size_delta, ClearRecordedSlots::kNo);
+ heap->AdjustLiveBytes(thin, -size_delta);
+ }
}
}
@@ -17501,44 +17528,7 @@ Handle<String> StringTable::LookupString(Isolate* isolate,
Handle<String> result = LookupKey(isolate, &key);
if (FLAG_thin_strings) {
- if (string->IsExternalString()) {
- if (result->IsExternalOneByteString()) {
- MigrateExternalStringResource<ExternalOneByteString>(isolate, string,
- result);
- } else if (result->IsExternalTwoByteString()) {
- MigrateExternalStringResource<ExternalTwoByteString>(isolate, string,
- result);
- } else {
- // If the external string is duped into an existing non-external
- // internalized string, free its resource (it's about to be rewritten
- // into a ThinString below).
- isolate->heap()->FinalizeExternalString(*string);
- }
- }
-
- // The LookupKey() call above tries to internalize the string in-place.
- // In cases where that wasn't possible (e.g. new-space strings), turn them
- // into ThinStrings referring to their internalized versions now.
- if (!string->IsInternalizedString()) {
- DisallowHeapAllocation no_gc;
- bool one_byte = result->IsOneByteRepresentation();
- Handle<Map> map = one_byte
- ? isolate->factory()->thin_one_byte_string_map()
- : isolate->factory()->thin_string_map();
- int old_size = string->Size();
- DCHECK(old_size >= ThinString::kSize);
- string->synchronized_set_map(*map);
- Handle<ThinString> thin = Handle<ThinString>::cast(string);
- thin->set_actual(*result);
- Address thin_end = thin->address() + ThinString::kSize;
- int size_delta = old_size - ThinString::kSize;
- if (size_delta != 0) {
- Heap* heap = isolate->heap();
- heap->CreateFillerObjectAt(thin_end, size_delta,
- ClearRecordedSlots::kNo);
- heap->AdjustLiveBytes(*thin, -size_delta);
- }
- }
+ MakeStringThin(*string, *result, isolate);
} else { // !FLAG_thin_strings
if (string->IsConsString()) {
Handle<ConsString> cons = Handle<ConsString>::cast(string);
@@ -17588,10 +17578,173 @@ Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
return Handle<String>::cast(string);
}
+namespace {
+
+class StringTableNoAllocateKey : public HashTableKey {
+ public:
+ StringTableNoAllocateKey(String* string, uint32_t seed)
+ : string_(string), length_(string->length()) {
+ StringShape shape(string);
+ one_byte_ = shape.HasOnlyOneByteChars();
+ DCHECK(!shape.IsInternalized());
+ DCHECK(!shape.IsThin());
+ if (shape.IsCons() && length_ <= String::kMaxHashCalcLength) {
+ special_flattening_ = true;
+ uint32_t hash_field = 0;
+ if (one_byte_) {
+ one_byte_content_ = new uint8_t[length_];
+ String::WriteToFlat(string, one_byte_content_, 0, length_);
+ hash_field = StringHasher::HashSequentialString(one_byte_content_,
+ length_, seed);
+ } else {
+ two_byte_content_ = new uint16_t[length_];
+ String::WriteToFlat(string, two_byte_content_, 0, length_);
+ hash_field = StringHasher::HashSequentialString(two_byte_content_,
+ length_, seed);
+ }
+ string->set_hash_field(hash_field);
+ } else {
+ special_flattening_ = false;
+ one_byte_content_ = nullptr;
+ }
+ hash_ = string->Hash();
+ }
+
+ ~StringTableNoAllocateKey() {
+ if (one_byte_) {
+ delete[] one_byte_content_;
+ } else {
+ delete[] two_byte_content_;
+ }
+ }
+
+ bool IsMatch(Object* otherstring) override {
+ String* other = String::cast(otherstring);
+ DCHECK(other->IsInternalizedString());
+ DCHECK(other->IsFlat());
+ if (hash_ != other->Hash()) return false;
+ int len = length_;
+ if (len != other->length()) return false;
+
+ if (!special_flattening_) {
+ if (string_->Get(0) != other->Get(0)) return false;
+ if (string_->IsFlat()) {
+ StringShape shape1(string_);
+ StringShape shape2(other);
+ if (shape1.encoding_tag() == kOneByteStringTag &&
+ shape2.encoding_tag() == kOneByteStringTag) {
+ String::FlatContent flat1 = string_->GetFlatContent();
+ String::FlatContent flat2 = other->GetFlatContent();
+ return CompareRawStringContents(flat1.ToOneByteVector().start(),
+ flat2.ToOneByteVector().start(), len);
+ }
+ if (shape1.encoding_tag() == kTwoByteStringTag &&
+ shape2.encoding_tag() == kTwoByteStringTag) {
+ String::FlatContent flat1 = string_->GetFlatContent();
+ String::FlatContent flat2 = other->GetFlatContent();
+ return CompareRawStringContents(flat1.ToUC16Vector().start(),
+ flat2.ToUC16Vector().start(), len);
+ }
+ }
+ StringComparator comparator;
+ return comparator.Equals(string_, other);
+ }
+
+ String::FlatContent flat_content = other->GetFlatContent();
+ if (one_byte_) {
+ if (flat_content.IsOneByte()) {
+ return CompareRawStringContents(
+ one_byte_content_, flat_content.ToOneByteVector().start(), len);
+ } else {
+ DCHECK(flat_content.IsTwoByte());
+ for (int i = 0; i < len; i++) {
+ if (flat_content.Get(i) != one_byte_content_[i]) return false;
+ }
+ return true;
+ }
+ } else {
+ if (flat_content.IsTwoByte()) {
+ return CompareRawStringContents(
+ two_byte_content_, flat_content.ToUC16Vector().start(), len);
+ } else {
+ DCHECK(flat_content.IsOneByte());
+ for (int i = 0; i < len; i++) {
+ if (flat_content.Get(i) != two_byte_content_[i]) return false;
+ }
+ return true;
+ }
+ }
+ }
+
+ uint32_t Hash() override { return hash_; }
+
+ uint32_t HashForObject(Object* key) override {
+ return String::cast(key)->Hash();
+ }
+
+ MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) override {
+ UNREACHABLE();
+ return Handle<String>();
+ }
+
+ private:
+ String* string_;
+ int length_;
+ bool one_byte_;
+ bool special_flattening_;
+ uint32_t hash_ = 0;
+ union {
+ uint8_t* one_byte_content_;
+ uint16_t* two_byte_content_;
+ };
+};
+
+} // namespace
+
+// static
+Object* StringTable::LookupStringIfExists_NoAllocate(String* string) {
+ DisallowHeapAllocation no_gc;
+ Heap* heap = string->GetHeap();
+ Isolate* isolate = heap->isolate();
+ StringTable* table = heap->string_table();
+
+ StringTableNoAllocateKey key(string, heap->HashSeed());
+
+ // String could be an array index.
+ DCHECK(string->HasHashCode());
+ uint32_t hash = string->hash_field();
+
+ // Valid array indices are >= 0, so they cannot be mixed up with any of
+ // the result sentinels, which are negative.
+ STATIC_ASSERT(
+ !String::ArrayIndexValueBits::is_valid(ResultSentinel::kUnsupported));
+ STATIC_ASSERT(
+ !String::ArrayIndexValueBits::is_valid(ResultSentinel::kNotFound));
+
+ if ((hash & Name::kContainsCachedArrayIndexMask) == 0) {
+ return Smi::FromInt(String::ArrayIndexValueBits::decode(hash));
+ }
+ if ((hash & Name::kIsNotArrayIndexMask) == 0) {
+ // It is an indexed, but it's not cached.
+ return Smi::FromInt(ResultSentinel::kUnsupported);
+ }
+
+ int entry = table->FindEntry(isolate, &key, key.Hash());
+ if (entry != kNotFound) {
+ String* internalized = String::cast(table->KeyAt(entry));
+ if (FLAG_thin_strings) {
+ MakeStringThin(string, internalized, isolate);
+ }
+ return internalized;
+ }
+ // A string that's not an array index, and not in the string table,
+ // cannot have been used as a property name before.
+ return Smi::FromInt(ResultSentinel::kNotFound);
+}
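+
+// Illustrative sketch only, not part of this change: a caller of
+// LookupStringIfExists_NoAllocate has to distinguish three kinds of results.
+// A hypothetical helper showing the decoding:
+namespace {
+
+void HandleLookupResultSketch(Object* result) {
+  if (result->IsSmi()) {
+    int value = Smi::cast(result)->value();
+    if (value >= 0) {
+      // A cached array index; use |value| directly as the element index.
+    } else {
+      // A negative sentinel: kNotFound means the string is not internalized
+      // yet, kUnsupported means it is an array index without a cached value.
+      // Either way, only a slower, allocating path can answer.
+    }
+    USE(value);
+  } else {
+    // An internalized string, usable directly as a property-name key.
+    String* internalized = String::cast(result);
+    USE(internalized);
+  }
+}
+
+}  // namespace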
String* StringTable::LookupKeyIfExists(Isolate* isolate, HashTableKey* key) {
Handle<StringTable> table = isolate->factory()->string_table();
- int entry = table->FindEntry(key);
+ int entry = table->FindEntry(isolate, key);
if (entry != kNotFound) return String::cast(table->KeyAt(entry));
return NULL;
}
@@ -19467,8 +19620,10 @@ Handle<String> JSMessageObject::GetSourceLine() const {
void JSArrayBuffer::Neuter() {
CHECK(is_neuterable());
CHECK(is_external());
- set_backing_store(NULL);
+ set_backing_store(nullptr);
set_byte_length(Smi::kZero);
+ set_allocation_base(nullptr);
+ set_allocation_length(0);
set_was_neutered(true);
// Invalidate the neutering protector.
Isolate* const isolate = GetIsolate();
@@ -19477,10 +19632,36 @@ void JSArrayBuffer::Neuter() {
}
}
+void JSArrayBuffer::FreeBackingStore() {
+ if (allocation_base() == nullptr) {
+ return;
+ }
+ using AllocationMode = ArrayBuffer::Allocator::AllocationMode;
+ const size_t length = allocation_length();
+ const AllocationMode mode = allocation_mode();
+ GetIsolate()->array_buffer_allocator()->Free(allocation_base(), length, mode);
+
+ // Zero out the backing store and allocation base to avoid dangling
+ // pointers.
+ set_backing_store(nullptr);
+ // TODO(eholk): set_byte_length(0) once we aren't using Smis for the
+ // byte_length. We can't do it now because the GC needs to call
+ // FreeBackingStore while it is collecting.
+ set_allocation_base(nullptr);
+ set_allocation_length(0);
+}
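+
+// Illustrative sketch only, not part of this change: the Free() call above
+// forwards the recorded AllocationMode to the embedder's allocator, so a
+// hypothetical embedder allocator (assuming <cstdlib> is available) would
+// dispatch on it roughly like this:
+namespace {
+
+class SketchAllocator : public v8::ArrayBuffer::Allocator {
+ public:
+  // Allocate() must return zero-initialized memory; calloc provides that.
+  void* Allocate(size_t length) override { return calloc(length, 1); }
+  void* AllocateUninitialized(size_t length) override { return malloc(length); }
+  void Free(void* data, size_t) override { free(data); }
+  void Free(void* data, size_t length, AllocationMode mode) override {
+    // kNormal is ordinary heap memory; kReservation would release a reserved
+    // address range instead (elided in this sketch).
+    Free(data, length);
+  }
+};
+
+}  // namespace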
void JSArrayBuffer::Setup(Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
bool is_external, void* data, size_t allocated_length,
SharedFlag shared) {
+ return Setup(array_buffer, isolate, is_external, data, allocated_length, data,
+ allocated_length, shared);
+}
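+
+// Note on the overloads: the short Setup above covers the common case in
+// which the backing store spans its whole allocation (allocation_base ==
+// data, allocation_length == byte_length). The longer overload below lets a
+// caller describe a buffer covering only part of its allocation, presumably
+// for setups such as guard regions (cf. AllocationMode::kReservation).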
+
+void JSArrayBuffer::Setup(Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
+ bool is_external, void* allocation_base,
+ size_t allocation_length, void* data,
+ size_t byte_length, SharedFlag shared) {
DCHECK(array_buffer->GetEmbedderFieldCount() ==
v8::ArrayBuffer::kEmbedderFieldCount);
for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) {
@@ -19491,16 +19672,19 @@ void JSArrayBuffer::Setup(Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
array_buffer->set_is_neuterable(shared == SharedFlag::kNotShared);
array_buffer->set_is_shared(shared == SharedFlag::kShared);
- Handle<Object> byte_length =
- isolate->factory()->NewNumberFromSize(allocated_length);
- CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
- array_buffer->set_byte_length(*byte_length);
+ Handle<Object> heap_byte_length =
+ isolate->factory()->NewNumberFromSize(byte_length);
+ CHECK(heap_byte_length->IsSmi() || heap_byte_length->IsHeapNumber());
+ array_buffer->set_byte_length(*heap_byte_length);
// Initialize the backing store last to avoid handling of |JSArrayBuffers| that
// are currently being constructed in the |ArrayBufferTracker|. The
// registration method below handles the case of registering a buffer that has
// already been promoted.
array_buffer->set_backing_store(data);
+ array_buffer->set_allocation_base(data);
+ array_buffer->set_allocation_length(allocation_length);
+
if (data && !is_external) {
isolate->heap()->RegisterNewArrayBuffer(*array_buffer);
}
@@ -19523,8 +19707,9 @@ bool JSArrayBuffer::SetupAllocatingData(Handle<JSArrayBuffer> array_buffer,
// Prevent creating array buffers when serializing.
DCHECK(!isolate->serializer_enabled());
if (allocated_length != 0) {
- isolate->counters()->array_buffer_big_allocations()->AddSample(
- ConvertToMb(allocated_length));
+ if (allocated_length >= MB)
+ isolate->counters()->array_buffer_big_allocations()->AddSample(
+ ConvertToMb(allocated_length));
if (initialize) {
data = isolate->array_buffer_allocator()->Allocate(allocated_length);
} else {
@@ -19540,8 +19725,9 @@ bool JSArrayBuffer::SetupAllocatingData(Handle<JSArrayBuffer> array_buffer,
data = NULL;
}
- JSArrayBuffer::Setup(array_buffer, isolate, false, data, allocated_length,
- shared);
+ const bool is_external = false;
+ JSArrayBuffer::Setup(array_buffer, isolate, is_external, data,
+ allocated_length, shared);
return true;
}
@@ -19572,6 +19758,8 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
// already been promoted.
buffer->set_backing_store(backing_store);
isolate->heap()->RegisterNewArrayBuffer(*buffer);
+ buffer->set_allocation_base(backing_store);
+ buffer->set_allocation_length(NumberToSize(buffer->byte_length()));
memcpy(buffer->backing_store(),
fixed_typed_array->DataPtr(),
fixed_typed_array->DataSize());
@@ -19937,33 +20125,34 @@ void Module::CreateExport(Handle<Module> module, int cell_index,
module->set_exports(*exports);
}
-Handle<Object> Module::LoadVariable(Handle<Module> module, int cell_index) {
- Isolate* isolate = module->GetIsolate();
- Handle<Object> object;
+Cell* Module::GetCell(int cell_index) {
+ DisallowHeapAllocation no_gc;
+ Object* cell;
switch (ModuleDescriptor::GetCellIndexKind(cell_index)) {
case ModuleDescriptor::kImport:
- object = handle(module->regular_imports()->get(ImportIndex(cell_index)),
- isolate);
+ cell = regular_imports()->get(ImportIndex(cell_index));
break;
case ModuleDescriptor::kExport:
- object = handle(module->regular_exports()->get(ExportIndex(cell_index)),
- isolate);
+ cell = regular_exports()->get(ExportIndex(cell_index));
break;
case ModuleDescriptor::kInvalid:
UNREACHABLE();
+ cell = nullptr;
break;
}
- return handle(Handle<Cell>::cast(object)->value(), isolate);
+ return Cell::cast(cell);
+}
+
+Handle<Object> Module::LoadVariable(Handle<Module> module, int cell_index) {
+ Isolate* isolate = module->GetIsolate();
+ return handle(module->GetCell(cell_index)->value(), isolate);
}
void Module::StoreVariable(Handle<Module> module, int cell_index,
Handle<Object> value) {
- Isolate* isolate = module->GetIsolate();
DCHECK_EQ(ModuleDescriptor::GetCellIndexKind(cell_index),
ModuleDescriptor::kExport);
- Handle<Object> object(module->regular_exports()->get(ExportIndex(cell_index)),
- isolate);
- Handle<Cell>::cast(object)->set_value(*value);
+ module->GetCell(cell_index)->set_value(*value);
}
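+
+// Illustrative sketch only, not part of this change: with GetCell in place,
+// LoadVariable and StoreVariable are symmetric thin wrappers over the same
+// cell. A hypothetical round-trip through a valid export |cell_index|:
+namespace {
+
+void ModuleCellRoundTripSketch(Handle<Module> module, int cell_index,
+                               Handle<Object> value) {
+  Module::StoreVariable(module, cell_index, value);
+  Handle<Object> loaded = Module::LoadVariable(module, cell_index);
+  DCHECK(loaded.is_identical_to(value));  // The same object comes back.
+  USE(loaded);
+}
+
+}  // namespace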
MaybeHandle<Cell> Module::ResolveImport(Handle<Module> module,
@@ -20120,15 +20309,10 @@ bool Module::PrepareInstantiate(Handle<Module> module,
for (int i = 0, length = module_requests->length(); i < length; ++i) {
Handle<String> specifier(String::cast(module_requests->get(i)), isolate);
v8::Local<v8::Module> api_requested_module;
- // TODO(adamk): Revisit these failure cases once d8 knows how to
- // persist a module_map across multiple top-level module loads, as
- // the current module is left in a "half-instantiated" state.
if (!callback(context, v8::Utils::ToLocal(specifier),
v8::Utils::ToLocal(module))
.ToLocal(&api_requested_module)) {
- // TODO(adamk): Give this a better error message. But this is a
- // misuse of the API anyway.
- isolate->ThrowIllegalOperation();
+ isolate->PromoteScheduledException();
return false;
}
Handle<Module> requested_module = Utils::OpenHandle(*api_requested_module);
@@ -20469,6 +20653,5 @@ ElementsKind JSArrayIterator::ElementsKindForInstanceType(InstanceType type) {
return kind;
}
}
-
} // namespace internal
} // namespace v8