Diffstat (limited to 'deps/v8/src/objects')
-rw-r--r--  deps/v8/src/objects/allocation-site.h | 2
-rw-r--r--  deps/v8/src/objects/api-callbacks.h | 16
-rw-r--r--  deps/v8/src/objects/arguments.h | 11
-rw-r--r--  deps/v8/src/objects/bigint.cc | 24
-rw-r--r--  deps/v8/src/objects/bigint.h | 15
-rw-r--r--  deps/v8/src/objects/builtin-function-id.h | 224
-rw-r--r--  deps/v8/src/objects/cell.h | 2
-rw-r--r--  deps/v8/src/objects/code-inl.h | 46
-rw-r--r--  deps/v8/src/objects/code.cc | 31
-rw-r--r--  deps/v8/src/objects/code.h | 71
-rw-r--r--  deps/v8/src/objects/debug-objects.cc | 12
-rw-r--r--  deps/v8/src/objects/debug-objects.h | 2
-rw-r--r--  deps/v8/src/objects/descriptor-array-inl.h | 2
-rw-r--r--  deps/v8/src/objects/descriptor-array.h | 19
-rw-r--r--  deps/v8/src/objects/dictionary.h | 28
-rw-r--r--  deps/v8/src/objects/embedder-data-array.h | 11
-rw-r--r--  deps/v8/src/objects/embedder-data-slot.h | 2
-rw-r--r--  deps/v8/src/objects/feedback-cell-inl.h | 8
-rw-r--r--  deps/v8/src/objects/feedback-cell.h | 25
-rw-r--r--  deps/v8/src/objects/fixed-array-inl.h | 10
-rw-r--r--  deps/v8/src/objects/fixed-array.h | 71
-rw-r--r--  deps/v8/src/objects/frame-array.h | 2
-rw-r--r--  deps/v8/src/objects/free-space.h | 12
-rw-r--r--  deps/v8/src/objects/hash-table-inl.h | 34
-rw-r--r--  deps/v8/src/objects/hash-table.h | 63
-rw-r--r--  deps/v8/src/objects/heap-object-inl.h | 3
-rw-r--r--  deps/v8/src/objects/heap-object.h | 10
-rw-r--r--  deps/v8/src/objects/instance-type.h | 59
-rw-r--r--  deps/v8/src/objects/intl-objects.cc | 62
-rw-r--r--  deps/v8/src/objects/intl-objects.h | 11
-rw-r--r--  deps/v8/src/objects/js-array-buffer-inl.h | 8
-rw-r--r--  deps/v8/src/objects/js-array-buffer.cc | 6
-rw-r--r--  deps/v8/src/objects/js-array-buffer.h | 13
-rw-r--r--  deps/v8/src/objects/js-array.h | 18
-rw-r--r--  deps/v8/src/objects/js-break-iterator.h | 2
-rw-r--r--  deps/v8/src/objects/js-collator.cc | 27
-rw-r--r--  deps/v8/src/objects/js-collator.h | 2
-rw-r--r--  deps/v8/src/objects/js-collection-inl.h | 1
-rw-r--r--  deps/v8/src/objects/js-collection-iterator.h | 74
-rw-r--r--  deps/v8/src/objects/js-collection.h | 23
-rw-r--r--  deps/v8/src/objects/js-date-time-format-inl.h | 2
-rw-r--r--  deps/v8/src/objects/js-date-time-format.cc | 156
-rw-r--r--  deps/v8/src/objects/js-date-time-format.h | 30
-rw-r--r--  deps/v8/src/objects/js-generator.h | 34
-rw-r--r--  deps/v8/src/objects/js-list-format.cc | 222
-rw-r--r--  deps/v8/src/objects/js-list-format.h | 3
-rw-r--r--  deps/v8/src/objects/js-locale.cc | 13
-rw-r--r--  deps/v8/src/objects/js-number-format.cc | 104
-rw-r--r--  deps/v8/src/objects/js-number-format.h | 4
-rw-r--r--  deps/v8/src/objects/js-objects-inl.h | 43
-rw-r--r--  deps/v8/src/objects/js-objects.cc | 166
-rw-r--r--  deps/v8/src/objects/js-objects.h | 225
-rw-r--r--  deps/v8/src/objects/js-plural-rules.h | 2
-rw-r--r--  deps/v8/src/objects/js-promise.h | 2
-rw-r--r--  deps/v8/src/objects/js-proxy.h | 7
-rw-r--r--  deps/v8/src/objects/js-regexp-string-iterator.h | 13
-rw-r--r--  deps/v8/src/objects/js-regexp.h | 33
-rw-r--r--  deps/v8/src/objects/js-relative-time-format.cc | 237
-rw-r--r--  deps/v8/src/objects/js-relative-time-format.h | 12
-rw-r--r--  deps/v8/src/objects/js-segmenter.h | 2
-rw-r--r--  deps/v8/src/objects/js-weak-refs.h | 6
-rw-r--r--  deps/v8/src/objects/literal-objects.cc | 16
-rw-r--r--  deps/v8/src/objects/managed.h | 23
-rw-r--r--  deps/v8/src/objects/map-inl.h | 38
-rw-r--r--  deps/v8/src/objects/map.cc | 82
-rw-r--r--  deps/v8/src/objects/map.h | 146
-rw-r--r--  deps/v8/src/objects/name.h | 21
-rw-r--r--  deps/v8/src/objects/object-macros.h | 27
-rw-r--r--  deps/v8/src/objects/oddball.h | 27
-rw-r--r--  deps/v8/src/objects/ordered-hash-table-inl.h | 1
-rw-r--r--  deps/v8/src/objects/ordered-hash-table.cc | 192
-rw-r--r--  deps/v8/src/objects/ordered-hash-table.h | 162
-rw-r--r--  deps/v8/src/objects/promise.h | 29
-rw-r--r--  deps/v8/src/objects/property-array.h | 14
-rw-r--r--  deps/v8/src/objects/property-cell-inl.h | 2
-rw-r--r--  deps/v8/src/objects/property-cell.h | 18
-rw-r--r--  deps/v8/src/objects/prototype-info-inl.h | 2
-rw-r--r--  deps/v8/src/objects/prototype-info.h | 27
-rw-r--r--  deps/v8/src/objects/regexp-match-info.h | 10
-rw-r--r--  deps/v8/src/objects/scope-info.cc | 21
-rw-r--r--  deps/v8/src/objects/scope-info.h | 13
-rw-r--r--  deps/v8/src/objects/script-inl.h | 2
-rw-r--r--  deps/v8/src/objects/script.h | 54
-rw-r--r--  deps/v8/src/objects/shared-function-info-inl.h | 68
-rw-r--r--  deps/v8/src/objects/shared-function-info.h | 134
-rw-r--r--  deps/v8/src/objects/slots-atomic-inl.h | 12
-rw-r--r--  deps/v8/src/objects/slots.h | 79
-rw-r--r--  deps/v8/src/objects/smi.h | 3
-rw-r--r--  deps/v8/src/objects/stack-frame-info-inl.h | 1
-rw-r--r--  deps/v8/src/objects/stack-frame-info.h | 14
-rw-r--r--  deps/v8/src/objects/string-inl.h | 14
-rw-r--r--  deps/v8/src/objects/string-table.h | 4
-rw-r--r--  deps/v8/src/objects/string.cc | 24
-rw-r--r--  deps/v8/src/objects/string.h | 112
-rw-r--r--  deps/v8/src/objects/struct.h | 12
-rw-r--r--  deps/v8/src/objects/template-objects.cc | 8
-rw-r--r--  deps/v8/src/objects/templates-inl.h | 2
-rw-r--r--  deps/v8/src/objects/templates.h | 62
98 files changed, 2013 insertions(+), 1841 deletions(-)
diff --git a/deps/v8/src/objects/allocation-site.h b/deps/v8/src/objects/allocation-site.h
index 7b22d34f33..b221bd02dd 100644
--- a/deps/v8/src/objects/allocation-site.h
+++ b/deps/v8/src/objects/allocation-site.h
@@ -82,7 +82,7 @@ class AllocationSite : public Struct {
inline void IncrementMementoCreateCount();
- PretenureFlag GetPretenureMode() const;
+ AllocationType GetAllocationType() const;
void ResetPretenureDecision();
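Note on the pattern above: this commit replaces the two-valued PretenureFlag (NOT_TENURED/TENURED) with the AllocationType enum throughout, renaming GetPretenureMode() to GetAllocationType() here and swapping parameter types in the files below. A hedged sketch of a typical call-site migration, using only the two enumerators that appear in this diff; the factory call is illustrative, not an exact V8 signature:
  // Before: pretenuring decision expressed with PretenureFlag.
  Handle<FixedArray> old_style = factory->NewFixedArray(length, TENURED);
  // After: the same decision expressed with AllocationType; defaults move
  // from `PretenureFlag pretenure = NOT_TENURED`
  // to   `AllocationType allocation = AllocationType::kYoung`.
  Handle<FixedArray> new_style =
      factory->NewFixedArray(length, AllocationType::kOld);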
diff --git a/deps/v8/src/objects/api-callbacks.h b/deps/v8/src/objects/api-callbacks.h
index 0bbb8ce35d..1d8b456a8e 100644
--- a/deps/v8/src/objects/api-callbacks.h
+++ b/deps/v8/src/objects/api-callbacks.h
@@ -76,19 +76,9 @@ class AccessorInfo : public Struct {
static int AppendUnique(Isolate* isolate, Handle<Object> descriptors,
Handle<FixedArray> array, int valid_descriptors);
-// Layout description.
-#define ACCESSOR_INFO_FIELDS(V) \
- V(kNameOffset, kTaggedSize) \
- V(kFlagsOffset, kTaggedSize) \
- V(kExpectedReceiverTypeOffset, kTaggedSize) \
- V(kSetterOffset, kTaggedSize) \
- V(kGetterOffset, kTaggedSize) \
- V(kJsGetterOffset, kTaggedSize) \
- V(kDataOffset, kTaggedSize) \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ACCESSOR_INFO_FIELDS)
-#undef ACCESSOR_INFO_FIELDS
+ // Layout description.
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_ACCESSOR_INFO_FIELDS)
private:
inline bool HasExpectedReceiverType();
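Several of the layout changes in this commit (as here for AccessorInfo) drop a hand-written field-list macro in favour of a TORQUE_GENERATED_*_FIELDS list; both are consumed by DEFINE_FIELD_OFFSET_CONSTANTS, which turns a (name, size) list into consecutive offset constants. A simplified, hand-expanded sketch of what such a list amounts to for a two-field object (names illustrative, not the real macro expansion):
  enum {
    kNameOffset = HeapObject::kHeaderSize,      // first tagged field
    kFlagsOffset = kNameOffset + kTaggedSize,   // second tagged field
    kSize = kFlagsOffset + kTaggedSize          // total instance size
  };
The Torque-generated variant yields the same constants, but derives the field list from the class's .tq definition instead of a C++ macro.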
diff --git a/deps/v8/src/objects/arguments.h b/deps/v8/src/objects/arguments.h
index 242b89f7ad..720820268c 100644
--- a/deps/v8/src/objects/arguments.h
+++ b/deps/v8/src/objects/arguments.h
@@ -48,14 +48,9 @@ class JSArgumentsObjectWithLength : public JSArgumentsObject {
// This initial map adds in-object properties for "length" and "callee".
class JSSloppyArgumentsObject : public JSArgumentsObjectWithLength {
public:
-// Layout description.
-#define JS_SLOPPY_ARGUMENTS_OBJECT_FIELDS(V) \
- V(kCalleeOffset, kTaggedSize) \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSArgumentsObjectWithLength::kSize,
- JS_SLOPPY_ARGUMENTS_OBJECT_FIELDS)
-#undef JS_SLOPPY_ARGUMENTS_OBJECT_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSArgumentsObjectWithLength::kSize,
+ TORQUE_GENERATED_JSSLOPPY_ARGUMENTS_OBJECT_FIELDS)
// Indices of in-object properties.
static const int kCalleeIndex = kLengthIndex + 1;
diff --git a/deps/v8/src/objects/bigint.cc b/deps/v8/src/objects/bigint.cc
index 7c886a7b1d..7b67aa3ffb 100644
--- a/deps/v8/src/objects/bigint.cc
+++ b/deps/v8/src/objects/bigint.cc
@@ -47,8 +47,9 @@ class MutableBigInt : public FreshlyAllocatedBigInt {
static Handle<BigInt> MakeImmutable(Handle<MutableBigInt> result);
// Allocation helpers.
- static MaybeHandle<MutableBigInt> New(Isolate* isolate, int length,
- PretenureFlag pretenure = NOT_TENURED);
+ static MaybeHandle<MutableBigInt> New(
+ Isolate* isolate, int length,
+ AllocationType allocation = AllocationType::kYoung);
static Handle<BigInt> NewFromInt(Isolate* isolate, int value);
static Handle<BigInt> NewFromDouble(Isolate* isolate, double value);
void InitializeDigits(int length, byte value = 0);
@@ -223,13 +224,13 @@ NEVER_READ_ONLY_SPACE_IMPL(MutableBigInt)
#include "src/objects/object-macros-undef.h"
MaybeHandle<MutableBigInt> MutableBigInt::New(Isolate* isolate, int length,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
if (length > BigInt::kMaxLength) {
THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kBigIntTooBig),
MutableBigInt);
}
Handle<MutableBigInt> result =
- Cast(isolate->factory()->NewBigInt(length, pretenure));
+ Cast(isolate->factory()->NewBigInt(length, allocation));
result->initialize_bitfield(false, length);
#if DEBUG
result->InitializeDigits(length, 0xBF);
@@ -688,7 +689,8 @@ MaybeHandle<MutableBigInt> MutableBigInt::BitwiseAnd(Isolate* isolate,
// Assume that x is the positive BigInt.
if (x->sign()) std::swap(x, y);
// x & (-y) == x & ~(y-1) == x &~ (y-1)
- return AbsoluteAndNot(isolate, x, AbsoluteSubOne(isolate, y));
+ Handle<MutableBigInt> y_1 = AbsoluteSubOne(isolate, y);
+ return AbsoluteAndNot(isolate, x, y_1);
}
}
@@ -1862,7 +1864,7 @@ static const size_t kBitsPerCharTableMultiplier = 1u << kBitsPerCharTableShift;
MaybeHandle<FreshlyAllocatedBigInt> BigInt::AllocateFor(
Isolate* isolate, int radix, int charcount, ShouldThrow should_throw,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
DCHECK(2 <= radix && radix <= 36);
DCHECK_GE(charcount, 0);
size_t bits_per_char = kMaxBitsPerChar[radix];
@@ -1877,7 +1879,7 @@ MaybeHandle<FreshlyAllocatedBigInt> BigInt::AllocateFor(
int length = static_cast<int>((bits_min + kDigitBits - 1) / kDigitBits);
if (length <= kMaxLength) {
Handle<MutableBigInt> result =
- MutableBigInt::New(isolate, length, pretenure).ToHandleChecked();
+ MutableBigInt::New(isolate, length, allocation).ToHandleChecked();
result->InitializeDigits(length);
return result;
}
@@ -1936,13 +1938,13 @@ void BigInt::SerializeDigits(uint8_t* storage) {
// version in value-serializer.cc!
MaybeHandle<BigInt> BigInt::FromSerializedDigits(
Isolate* isolate, uint32_t bitfield, Vector<const uint8_t> digits_storage,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
int bytelength = LengthBits::decode(bitfield);
DCHECK(digits_storage.length() == bytelength);
bool sign = SignBits::decode(bitfield);
int length = (bytelength + kDigitSize - 1) / kDigitSize; // Round up.
Handle<MutableBigInt> result =
- MutableBigInt::Cast(isolate->factory()->NewBigInt(length, pretenure));
+ MutableBigInt::Cast(isolate->factory()->NewBigInt(length, allocation));
result->initialize_bitfield(sign, length);
void* digits =
reinterpret_cast<void*>(result->ptr() + kDigitsOffset - kHeapObjectTag);
@@ -2484,11 +2486,11 @@ uint64_t BigInt::AsUint64(bool* lossless) {
#if V8_TARGET_ARCH_32_BIT
#define HAVE_TWODIGIT_T 1
-typedef uint64_t twodigit_t;
+using twodigit_t = uint64_t;
#elif defined(__SIZEOF_INT128__)
// Both Clang and GCC support this on x64.
#define HAVE_TWODIGIT_T 1
-typedef __uint128_t twodigit_t;
+using twodigit_t = __uint128_t;
#endif
// {carry} must point to an initialized digit_t and will either be incremented
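The typedef-to-using conversions sprinkled through this commit (digit_t, twodigit_t, Flags, BodyDescriptor, ...) are purely syntactic: C++11 alias declarations mean the same thing but read left-to-right and, unlike typedef, extend to alias templates. A minimal standalone illustration:
  #include <cstdint>
  #include <vector>
  typedef std::uintptr_t digit_a;   // old spelling
  using digit_b = std::uintptr_t;   // new spelling, identical meaning
  // Only the `using` form can be templated:
  template <typename T>
  using Buffer = std::vector<T>;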
diff --git a/deps/v8/src/objects/bigint.h b/deps/v8/src/objects/bigint.h
index 4ddb57a5b2..e59c7d6982 100644
--- a/deps/v8/src/objects/bigint.h
+++ b/deps/v8/src/objects/bigint.h
@@ -70,7 +70,7 @@ class BigIntBase : public HeapObject {
friend class ::v8::internal::BigInt; // MSVC wants full namespace.
friend class MutableBigInt;
- typedef uintptr_t digit_t;
+ using digit_t = uintptr_t;
static const int kDigitSize = sizeof(digit_t);
// kMaxLength definition assumes this:
STATIC_ASSERT(kDigitSize == kSystemPointerSize);
@@ -133,7 +133,7 @@ class FreshlyAllocatedBigInt : public BigIntBase {
};
// Arbitrary precision integers in JavaScript.
-class V8_EXPORT_PRIVATE BigInt : public BigIntBase {
+class BigInt : public BigIntBase {
public:
// Implementation of the Spec methods, see:
// https://tc39.github.io/proposal-bigint/#sec-numeric-types
@@ -189,7 +189,8 @@ class V8_EXPORT_PRIVATE BigInt : public BigIntBase {
Handle<String> y);
static ComparisonResult CompareToNumber(Handle<BigInt> x, Handle<Object> y);
// Exposed for tests, do not call directly. Use CompareToNumber() instead.
- static ComparisonResult CompareToDouble(Handle<BigInt> x, double y);
+ V8_EXPORT_PRIVATE static ComparisonResult CompareToDouble(Handle<BigInt> x,
+ double y);
static Handle<BigInt> AsIntN(Isolate* isolate, uint64_t n, Handle<BigInt> x);
static MaybeHandle<BigInt> AsUintN(Isolate* isolate, uint64_t n,
@@ -223,8 +224,8 @@ class V8_EXPORT_PRIVATE BigInt : public BigIntBase {
static Handle<Object> ToNumber(Isolate* isolate, Handle<BigInt> x);
// ECMAScript's NumberToBigInt
- static MaybeHandle<BigInt> FromNumber(Isolate* isolate,
- Handle<Object> number);
+ V8_EXPORT_PRIVATE static MaybeHandle<BigInt> FromNumber(
+ Isolate* isolate, Handle<Object> number);
// ECMAScript's ToBigInt (throws for Number input)
static MaybeHandle<BigInt> FromObject(Isolate* isolate, Handle<Object> obj);
@@ -240,7 +241,7 @@ class V8_EXPORT_PRIVATE BigInt : public BigIntBase {
static Handle<BigInt> Zero(Isolate* isolate);
static MaybeHandle<FreshlyAllocatedBigInt> AllocateFor(
Isolate* isolate, int radix, int charcount, ShouldThrow should_throw,
- PretenureFlag pretenure);
+ AllocationType allocation);
static void InplaceMultiplyAdd(Handle<FreshlyAllocatedBigInt> x,
uintptr_t factor, uintptr_t summand);
static Handle<BigInt> Finalize(Handle<FreshlyAllocatedBigInt> x, bool sign);
@@ -253,7 +254,7 @@ class V8_EXPORT_PRIVATE BigInt : public BigIntBase {
void SerializeDigits(uint8_t* storage);
V8_WARN_UNUSED_RESULT static MaybeHandle<BigInt> FromSerializedDigits(
Isolate* isolate, uint32_t bitfield, Vector<const uint8_t> digits_storage,
- PretenureFlag pretenure);
+ AllocationType allocation);
OBJECT_CONSTRUCTORS(BigInt, BigIntBase);
};
diff --git a/deps/v8/src/objects/builtin-function-id.h b/deps/v8/src/objects/builtin-function-id.h
deleted file mode 100644
index 5d1dd445ea..0000000000
--- a/deps/v8/src/objects/builtin-function-id.h
+++ /dev/null
@@ -1,224 +0,0 @@
-// Copyright 2018 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_OBJECTS_BUILTIN_FUNCTION_ID_H_
-#define V8_OBJECTS_BUILTIN_FUNCTION_ID_H_
-
-#include <stdint.h>
-
-namespace v8 {
-namespace internal {
-
-// List of builtin functions we want to identify to improve code
-// generation.
-//
-// Each entry has a name of a global object property holding an object
-// optionally followed by ".prototype", a name of a builtin function
-// on the object (the one the id is set for), and a label.
-//
-// Installation of ids for the selected builtin functions is handled
-// by the bootstrapper.
-#define FUNCTIONS_WITH_ID_LIST(V) \
- V(Array, isArray, ArrayIsArray) \
- V(Array.prototype, concat, ArrayConcat) \
- V(Array.prototype, every, ArrayEvery) \
- V(Array.prototype, fill, ArrayFill) \
- V(Array.prototype, filter, ArrayFilter) \
- V(Array.prototype, findIndex, ArrayFindIndex) \
- V(Array.prototype, forEach, ArrayForEach) \
- V(Array.prototype, includes, ArrayIncludes) \
- V(Array.prototype, indexOf, ArrayIndexOf) \
- V(Array.prototype, join, ArrayJoin) \
- V(Array.prototype, lastIndexOf, ArrayLastIndexOf) \
- V(Array.prototype, map, ArrayMap) \
- V(Array.prototype, pop, ArrayPop) \
- V(Array.prototype, push, ArrayPush) \
- V(Array.prototype, reverse, ArrayReverse) \
- V(Array.prototype, shift, ArrayShift) \
- V(Array.prototype, slice, ArraySlice) \
- V(Array.prototype, some, ArraySome) \
- V(Array.prototype, splice, ArraySplice) \
- V(Array.prototype, unshift, ArrayUnshift) \
- V(Date, now, DateNow) \
- V(Date.prototype, getDate, DateGetDate) \
- V(Date.prototype, getDay, DateGetDay) \
- V(Date.prototype, getFullYear, DateGetFullYear) \
- V(Date.prototype, getHours, DateGetHours) \
- V(Date.prototype, getMilliseconds, DateGetMilliseconds) \
- V(Date.prototype, getMinutes, DateGetMinutes) \
- V(Date.prototype, getMonth, DateGetMonth) \
- V(Date.prototype, getSeconds, DateGetSeconds) \
- V(Date.prototype, getTime, DateGetTime) \
- V(Function.prototype, apply, FunctionApply) \
- V(Function.prototype, bind, FunctionBind) \
- V(Function.prototype, call, FunctionCall) \
- V(Object, assign, ObjectAssign) \
- V(Object, create, ObjectCreate) \
- V(Object, is, ObjectIs) \
- V(Object.prototype, hasOwnProperty, ObjectHasOwnProperty) \
- V(Object.prototype, isPrototypeOf, ObjectIsPrototypeOf) \
- V(Object.prototype, toString, ObjectToString) \
- V(RegExp.prototype, compile, RegExpCompile) \
- V(RegExp.prototype, exec, RegExpExec) \
- V(RegExp.prototype, test, RegExpTest) \
- V(RegExp.prototype, toString, RegExpToString) \
- V(String.prototype, charCodeAt, StringCharCodeAt) \
- V(String.prototype, charAt, StringCharAt) \
- V(String.prototype, codePointAt, StringCodePointAt) \
- V(String.prototype, concat, StringConcat) \
- V(String.prototype, endsWith, StringEndsWith) \
- V(String.prototype, includes, StringIncludes) \
- V(String.prototype, indexOf, StringIndexOf) \
- V(String.prototype, lastIndexOf, StringLastIndexOf) \
- V(String.prototype, repeat, StringRepeat) \
- V(String.prototype, slice, StringSlice) \
- V(String.prototype, startsWith, StringStartsWith) \
- V(String.prototype, substr, StringSubstr) \
- V(String.prototype, substring, StringSubstring) \
- V(String.prototype, toLowerCase, StringToLowerCase) \
- V(String.prototype, toString, StringToString) \
- V(String.prototype, toUpperCase, StringToUpperCase) \
- V(String.prototype, trim, StringTrim) \
- V(String.prototype, trimLeft, StringTrimStart) \
- V(String.prototype, trimRight, StringTrimEnd) \
- V(String.prototype, valueOf, StringValueOf) \
- V(String, fromCharCode, StringFromCharCode) \
- V(String, fromCodePoint, StringFromCodePoint) \
- V(String, raw, StringRaw) \
- V(Math, random, MathRandom) \
- V(Math, floor, MathFloor) \
- V(Math, round, MathRound) \
- V(Math, ceil, MathCeil) \
- V(Math, abs, MathAbs) \
- V(Math, log, MathLog) \
- V(Math, log1p, MathLog1p) \
- V(Math, log2, MathLog2) \
- V(Math, log10, MathLog10) \
- V(Math, cbrt, MathCbrt) \
- V(Math, exp, MathExp) \
- V(Math, expm1, MathExpm1) \
- V(Math, sqrt, MathSqrt) \
- V(Math, pow, MathPow) \
- V(Math, max, MathMax) \
- V(Math, min, MathMin) \
- V(Math, cos, MathCos) \
- V(Math, cosh, MathCosh) \
- V(Math, sign, MathSign) \
- V(Math, sin, MathSin) \
- V(Math, sinh, MathSinh) \
- V(Math, tan, MathTan) \
- V(Math, tanh, MathTanh) \
- V(Math, acos, MathAcos) \
- V(Math, acosh, MathAcosh) \
- V(Math, asin, MathAsin) \
- V(Math, asinh, MathAsinh) \
- V(Math, atan, MathAtan) \
- V(Math, atan2, MathAtan2) \
- V(Math, atanh, MathAtanh) \
- V(Math, imul, MathImul) \
- V(Math, clz32, MathClz32) \
- V(Math, fround, MathFround) \
- V(Math, trunc, MathTrunc) \
- V(Number, isFinite, NumberIsFinite) \
- V(Number, isInteger, NumberIsInteger) \
- V(Number, isNaN, NumberIsNaN) \
- V(Number, isSafeInteger, NumberIsSafeInteger) \
- V(Number, parseFloat, NumberParseFloat) \
- V(Number, parseInt, NumberParseInt) \
- V(Number.prototype, toString, NumberToString) \
- V(Map.prototype, clear, MapClear) \
- V(Map.prototype, delete, MapDelete) \
- V(Map.prototype, entries, MapEntries) \
- V(Map.prototype, forEach, MapForEach) \
- V(Map.prototype, has, MapHas) \
- V(Map.prototype, keys, MapKeys) \
- V(Map.prototype, get, MapGet) \
- V(Map.prototype, set, MapSet) \
- V(Map.prototype, values, MapValues) \
- V(Set.prototype, add, SetAdd) \
- V(Set.prototype, clear, SetClear) \
- V(Set.prototype, delete, SetDelete) \
- V(Set.prototype, entries, SetEntries) \
- V(Set.prototype, forEach, SetForEach) \
- V(Set.prototype, has, SetHas) \
- V(Set.prototype, values, SetValues) \
- V(WeakMap.prototype, delete, WeakMapDelete) \
- V(WeakMap.prototype, has, WeakMapHas) \
- V(WeakMap.prototype, set, WeakMapSet) \
- V(WeakSet.prototype, add, WeakSetAdd) \
- V(WeakSet.prototype, delete, WeakSetDelete) \
- V(WeakSet.prototype, has, WeakSetHas)
-
-#define ATOMIC_FUNCTIONS_WITH_ID_LIST(V) \
- V(Atomics, load, AtomicsLoad) \
- V(Atomics, store, AtomicsStore) \
- V(Atomics, exchange, AtomicsExchange) \
- V(Atomics, compareExchange, AtomicsCompareExchange) \
- V(Atomics, add, AtomicsAdd) \
- V(Atomics, sub, AtomicsSub) \
- V(Atomics, and, AtomicsAnd) \
- V(Atomics, or, AtomicsOr) \
- V(Atomics, xor, AtomicsXor)
-
-enum class BuiltinFunctionId : uint8_t {
- kArrayConstructor,
-#define DECL_FUNCTION_ID(ignored1, ignore2, name) k##name,
- FUNCTIONS_WITH_ID_LIST(DECL_FUNCTION_ID)
- ATOMIC_FUNCTIONS_WITH_ID_LIST(DECL_FUNCTION_ID)
-#undef DECL_FUNCTION_ID
- // These are manually assigned to special getters during bootstrapping.
- kArrayBufferByteLength,
- kArrayBufferIsView,
- kArrayEntries,
- kArrayKeys,
- kArrayValues,
- kArrayIteratorNext,
- kBigIntConstructor,
- kMapSize,
- kSetSize,
- kMapIteratorNext,
- kSetIteratorNext,
- kDataViewBuffer,
- kDataViewByteLength,
- kDataViewByteOffset,
- kFunctionHasInstance,
- kGlobalDecodeURI,
- kGlobalDecodeURIComponent,
- kGlobalEncodeURI,
- kGlobalEncodeURIComponent,
- kGlobalEscape,
- kGlobalUnescape,
- kGlobalIsFinite,
- kGlobalIsNaN,
- kNumberConstructor,
- kPromiseAll,
- kPromisePrototypeCatch,
- kPromisePrototypeFinally,
- kPromisePrototypeThen,
- kPromiseRace,
- kPromiseReject,
- kPromiseResolve,
- kSymbolConstructor,
- kSymbolPrototypeToString,
- kSymbolPrototypeValueOf,
- kTypedArrayByteLength,
- kTypedArrayByteOffset,
- kTypedArrayEntries,
- kTypedArrayKeys,
- kTypedArrayLength,
- kTypedArrayToStringTag,
- kTypedArrayValues,
- kSharedArrayBufferByteLength,
- kStringConstructor,
- kStringIterator,
- kStringIteratorNext,
- kStringToLowerCaseIntl,
- kStringToUpperCaseIntl,
- kInvalidBuiltinFunctionId = static_cast<uint8_t>(-1),
-};
-
-} // namespace internal
-} // namespace v8
-
-#endif // V8_OBJECTS_BUILTIN_FUNCTION_ID_H_
diff --git a/deps/v8/src/objects/cell.h b/deps/v8/src/objects/cell.h
index cd76dee479..c15b31a61c 100644
--- a/deps/v8/src/objects/cell.h
+++ b/deps/v8/src/objects/cell.h
@@ -32,7 +32,7 @@ class Cell : public HeapObject {
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
TORQUE_GENERATED_CELL_FIELDS)
- typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kValueOffset, kSize, kSize>;
OBJECT_CONSTRUCTORS(Cell, HeapObject);
};
diff --git a/deps/v8/src/objects/code-inl.h b/deps/v8/src/objects/code-inl.h
index 905fdd5376..a0dc3b3ae1 100644
--- a/deps/v8/src/objects/code-inl.h
+++ b/deps/v8/src/objects/code-inl.h
@@ -233,8 +233,17 @@ void Code::clear_padding() {
CodeSize() - (data_end - address()));
}
+ByteArray Code::SourcePositionTableIfCollected() const {
+ ReadOnlyRoots roots = GetReadOnlyRoots();
+ Object maybe_table = source_position_table();
+ if (maybe_table->IsUndefined(roots) || maybe_table->IsException(roots))
+ return roots.empty_byte_array();
+ return SourcePositionTable();
+}
+
ByteArray Code::SourcePositionTable() const {
Object maybe_table = source_position_table();
+ DCHECK(!maybe_table->IsUndefined() && !maybe_table->IsException());
if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
return SourcePositionTableWithFrameCache::cast(maybe_table)
@@ -548,7 +557,6 @@ Address Code::constant_pool() const {
}
Address Code::code_comments() const {
- if (!has_code_comments()) return kNullAddress;
return InstructionStart() + code_comments_offset();
}
@@ -661,15 +669,6 @@ void BytecodeArray::set_incoming_new_target_or_generator_register(
}
}
-int BytecodeArray::interrupt_budget() const {
- return READ_INT_FIELD(*this, kInterruptBudgetOffset);
-}
-
-void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
- DCHECK_GE(interrupt_budget, 0);
- WRITE_INT_FIELD(*this, kInterruptBudgetOffset, interrupt_budget);
-}
-
int BytecodeArray::osr_loop_nesting_level() const {
return READ_INT8_FIELD(*this, kOSRNestingLevelOffset);
}
@@ -714,22 +713,37 @@ Address BytecodeArray::GetFirstBytecodeAddress() {
return ptr() - kHeapObjectTag + kHeaderSize;
}
-bool BytecodeArray::HasSourcePositionTable() {
+bool BytecodeArray::HasSourcePositionTable() const {
Object maybe_table = source_position_table();
- return !maybe_table->IsUndefined();
+ return !(maybe_table->IsUndefined() || DidSourcePositionGenerationFail());
}
-ByteArray BytecodeArray::SourcePositionTable() {
+bool BytecodeArray::DidSourcePositionGenerationFail() const {
+ return source_position_table()->IsException();
+}
+
+void BytecodeArray::SetSourcePositionsFailedToCollect() {
+ set_source_position_table(GetReadOnlyRoots().exception());
+}
+
+ByteArray BytecodeArray::SourcePositionTable() const {
Object maybe_table = source_position_table();
if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
ReadOnlyRoots roots = GetReadOnlyRoots();
- if (maybe_table->IsUndefined(roots)) return roots.empty_byte_array();
+ if (maybe_table->IsException(roots)) return roots.empty_byte_array();
+ DCHECK(!maybe_table->IsUndefined(roots));
DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
return SourcePositionTableWithFrameCache::cast(maybe_table)
->source_position_table();
}
+ByteArray BytecodeArray::SourcePositionTableIfCollected() const {
+ if (!HasSourcePositionTable()) return GetReadOnlyRoots().empty_byte_array();
+
+ return SourcePositionTable();
+}
+
void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
Object maybe_table = source_position_table();
if (maybe_table->IsUndefined() || maybe_table->IsByteArray()) return;
@@ -744,7 +758,9 @@ int BytecodeArray::SizeIncludingMetadata() {
int size = BytecodeArraySize();
size += constant_pool()->Size();
size += handler_table()->Size();
- size += SourcePositionTable()->Size();
+ if (HasSourcePositionTable()) {
+ size += SourcePositionTable()->Size();
+ }
return size;
}
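The split above gives two accessors with different contracts: SourcePositionTable() now DCHECKs that the slot is neither undefined nor the exception marker (i.e. collection was at least attempted), while SourcePositionTableIfCollected() degrades to the empty byte array for best-effort consumers such as the disassembler. A hedged sketch of the intended call-site split (the helper function is made up):
  // Best-effort consumer: tolerates "not collected" and "collection failed".
  void DumpPositions(Code code) {
    SourcePositionTableIterator it(code->SourcePositionTableIfCollected());
    for (; !it.done(); it.Advance()) {
      // ... use it.code_offset() and it.source_position() ...
    }
  }
  // Strict consumer: only valid once collection has been attempted.
  ByteArray table = code->SourcePositionTable();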
diff --git a/deps/v8/src/objects/code.cc b/deps/v8/src/objects/code.cc
index f874855fbe..ab929db8a7 100644
--- a/deps/v8/src/objects/code.cc
+++ b/deps/v8/src/objects/code.cc
@@ -162,12 +162,13 @@ template <typename Code>
void SetStackFrameCacheCommon(Isolate* isolate, Handle<Code> code,
Handle<SimpleNumberDictionary> cache) {
Handle<Object> maybe_table(code->source_position_table(), isolate);
+ if (maybe_table->IsException(isolate) || maybe_table->IsUndefined()) return;
if (maybe_table->IsSourcePositionTableWithFrameCache()) {
Handle<SourcePositionTableWithFrameCache>::cast(maybe_table)
->set_stack_frame_cache(*cache);
return;
}
- DCHECK(maybe_table->IsUndefined() || maybe_table->IsByteArray());
+ DCHECK(maybe_table->IsByteArray());
Handle<ByteArray> table(Handle<ByteArray>::cast(maybe_table));
Handle<SourcePositionTableWithFrameCache> table_with_cache =
isolate->factory()->NewSourcePositionTableWithFrameCache(table, cache);
@@ -211,10 +212,14 @@ void AbstractCode::DropStackFrameCache() {
}
int AbstractCode::SourcePosition(int offset) {
+ Object maybe_table = source_position_table();
+ if (maybe_table->IsException()) return kNoSourcePosition;
+
+ ByteArray source_position_table = ByteArray::cast(maybe_table);
int position = 0;
// Subtract one because the current PC is one instruction after the call site.
if (IsCode()) offset--;
- for (SourcePositionTableIterator iterator(source_position_table());
+ for (SourcePositionTableIterator iterator(source_position_table);
!iterator.done() && iterator.code_offset() <= offset;
iterator.Advance()) {
position = iterator.source_position().ScriptOffset();
@@ -376,9 +381,9 @@ Code Code::OptimizedCodeIterator::Next() {
Handle<DeoptimizationData> DeoptimizationData::New(Isolate* isolate,
int deopt_entry_count,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
return Handle<DeoptimizationData>::cast(isolate->factory()->NewFixedArray(
- LengthFor(deopt_entry_count), pretenure));
+ LengthFor(deopt_entry_count), allocation));
}
Handle<DeoptimizationData> DeoptimizationData::Empty(Isolate* isolate) {
@@ -708,7 +713,8 @@ void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
{
SourcePositionTableIterator it(
- SourcePositionTable(), SourcePositionTableIterator::kJavaScriptOnly);
+ SourcePositionTableIfCollected(),
+ SourcePositionTableIterator::kJavaScriptOnly);
if (!it.done()) {
os << "Source positions:\n pc offset position\n";
for (; !it.done(); it.Advance()) {
@@ -721,7 +727,7 @@ void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
}
{
- SourcePositionTableIterator it(SourcePositionTable(),
+ SourcePositionTableIterator it(SourcePositionTableIfCollected(),
SourcePositionTableIterator::kExternalOnly);
if (!it.done()) {
os << "External Source positions:\n pc offset fileid line\n";
@@ -791,7 +797,7 @@ void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
}
if (has_code_comments()) {
- PrintCodeCommentsSection(os, code_comments());
+ PrintCodeCommentsSection(os, code_comments(), code_comments_size());
}
}
#endif // ENABLE_DISASSEMBLER
@@ -804,7 +810,8 @@ void BytecodeArray::Disassemble(std::ostream& os) {
os << "Frame size " << frame_size() << "\n";
Address base_address = GetFirstBytecodeAddress();
- SourcePositionTableIterator source_positions(SourcePositionTable());
+ SourcePositionTableIterator source_positions(
+ SourcePositionTableIfCollected());
// Storage for backing the handle passed to the iterator. This handle won't be
// updated by the gc, but that's ok because we've disallowed GCs anyway.
@@ -965,8 +972,9 @@ Handle<DependentCode> DependentCode::New(Isolate* isolate,
DependencyGroup group,
const MaybeObjectHandle& object,
Handle<DependentCode> next) {
- Handle<DependentCode> result = Handle<DependentCode>::cast(
- isolate->factory()->NewWeakFixedArray(kCodesStartIndex + 1, TENURED));
+ Handle<DependentCode> result =
+ Handle<DependentCode>::cast(isolate->factory()->NewWeakFixedArray(
+ kCodesStartIndex + 1, AllocationType::kOld));
result->set_next_link(*next);
result->set_flags(GroupField::encode(group) | CountField::encode(1));
result->set_object_at(0, *object);
@@ -979,7 +987,8 @@ Handle<DependentCode> DependentCode::EnsureSpace(
int capacity = kCodesStartIndex + DependentCode::Grow(entries->count());
int grow_by = capacity - entries->length();
return Handle<DependentCode>::cast(
- isolate->factory()->CopyWeakFixedArrayAndGrow(entries, grow_by, TENURED));
+ isolate->factory()->CopyWeakFixedArrayAndGrow(entries, grow_by,
+ AllocationType::kOld));
}
bool DependentCode::Compact() {
diff --git a/deps/v8/src/objects/code.h b/deps/v8/src/objects/code.h
index a6a4c03813..647cfebe69 100644
--- a/deps/v8/src/objects/code.h
+++ b/deps/v8/src/objects/code.h
@@ -34,7 +34,7 @@ class Code : public HeapObject {
NEVER_READ_ONLY_SPACE
// Opaque data type for encapsulating code flags like kind, inline
// cache state, and arguments count.
- typedef uint32_t Flags;
+ using Flags = uint32_t;
#define CODE_KIND_LIST(V) \
V(OPTIMIZED_FUNCTION) \
@@ -59,8 +59,8 @@ class Code : public HeapObject {
#ifdef ENABLE_DISASSEMBLER
const char* GetName(Isolate* isolate) const;
- void Disassemble(const char* name, std::ostream& os,
- Address current_pc = kNullAddress);
+ V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
+ Address current_pc = kNullAddress);
#endif
// [instruction_size]: Size of the native instructions, including embedded
@@ -74,7 +74,7 @@ class Code : public HeapObject {
// the off-heap instruction stream rather than the on-heap trampoline located
// at instruction_start.
inline int InstructionSize() const;
- int OffHeapInstructionSize() const;
+ V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;
// [relocation_info]: Code relocation information
DECL_ACCESSORS(relocation_info, ByteArray)
@@ -89,6 +89,7 @@ class Code : public HeapObject {
// SourcePositionTableWithFrameCache.
DECL_ACCESSORS(source_position_table, Object)
inline ByteArray SourcePositionTable() const;
+ inline ByteArray SourcePositionTableIfCollected() const;
// [code_data_container]: A container indirection for all mutable fields.
DECL_ACCESSORS(code_data_container, CodeDataContainer)
@@ -166,8 +167,8 @@ class Code : public HeapObject {
inline int code_comments_offset() const;
inline void set_code_comments_offset(int offset);
inline Address code_comments() const;
- int code_comments_size() const;
- bool has_code_comments() const;
+ V8_EXPORT_PRIVATE int code_comments_size() const;
+ V8_EXPORT_PRIVATE bool has_code_comments() const;
// The size of the executable instruction area, without embedded metadata.
int ExecutableInstructionSize() const;
@@ -241,7 +242,7 @@ class Code : public HeapObject {
// this differs from instruction_start (which would point to the off-heap
// trampoline instead).
inline Address InstructionStart() const;
- Address OffHeapInstructionStart() const;
+ V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;
// Returns the address right after the last instruction.
inline Address raw_instruction_end() const;
@@ -250,7 +251,7 @@ class Code : public HeapObject {
// objects this differs from instruction_end (which would point to the
// off-heap trampoline instead).
inline Address InstructionEnd() const;
- Address OffHeapInstructionEnd() const;
+ V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;
// Returns the size of the instructions, padding, relocation and unwinding
// information.
@@ -649,9 +650,10 @@ class DependentCode : public WeakFixedArray {
};
// Register a code dependency of {cell} on {object}.
- static void InstallDependency(Isolate* isolate, const MaybeObjectHandle& code,
- Handle<HeapObject> object,
- DependencyGroup group);
+ V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
+ const MaybeObjectHandle& code,
+ Handle<HeapObject> object,
+ DependencyGroup group);
void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);
@@ -756,10 +758,6 @@ class BytecodeArray : public FixedArrayBase {
inline void set_incoming_new_target_or_generator_register(
interpreter::Register incoming_new_target_or_generator_register);
- // Accessors for profiling count.
- inline int interrupt_budget() const;
- inline void set_interrupt_budget(int interrupt_budget);
-
// Accessors for OSR loop nesting level.
inline int osr_loop_nesting_level() const;
inline void set_osr_loop_nesting_level(int depth);
@@ -774,14 +772,33 @@ class BytecodeArray : public FixedArrayBase {
// Accessors for handler table containing offsets of exception handlers.
DECL_ACCESSORS(handler_table, ByteArray)
- // Accessors for source position table containing mappings between byte code
- // offset and source position or SourcePositionTableWithFrameCache.
+ // Accessors for source position table. Can contain:
+ // * undefined (initial value)
+ // * empty_byte_array (for bytecode generated for functions that will never
+ // have source positions, e.g. native functions).
+ // * ByteArray (when source positions have been collected for the bytecode)
+ // * SourcePositionTableWithFrameCache (as above but with a frame cache)
+ // * exception (when an error occurred while explicitly collecting source
+ // positions for pre-existing bytecode).
DECL_ACCESSORS(source_position_table, Object)
- inline ByteArray SourcePositionTable();
- inline bool HasSourcePositionTable();
+ // This must only be called if source position collection has already been
+ // attempted. (If it failed because of an exception then it will return
+ // empty_byte_array).
+ inline ByteArray SourcePositionTable() const;
+ // If source positions have not been collected or an exception has been thrown
+ // this will return empty_byte_array.
+ inline ByteArray SourcePositionTableIfCollected() const;
+ inline bool HasSourcePositionTable() const;
+ inline bool DidSourcePositionGenerationFail() const;
inline void ClearFrameCacheFromSourcePositionTable();
+ // Indicates that an attempt was made to collect source positions, but that it
+ // failed most likely due to stack exhaustion. When in this state
+ // |SourcePositionTable| will return an empty byte array rather than crashing
+ // as it would if no attempt was ever made to collect source positions.
+ inline void SetSourcePositionsFailedToCollect();
+
DECL_CAST(BytecodeArray)
// Dispatched behavior.
@@ -796,13 +813,13 @@ class BytecodeArray : public FixedArrayBase {
DECL_PRINTER(BytecodeArray)
DECL_VERIFIER(BytecodeArray)
- void Disassemble(std::ostream& os);
+ V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);
void CopyBytecodesTo(BytecodeArray to);
// Bytecode aging
- bool IsOld() const;
- void MakeOlder();
+ V8_EXPORT_PRIVATE bool IsOld() const;
+ V8_EXPORT_PRIVATE void MakeOlder();
// Clear uninitialized padding space. This ensures that the snapshot content
// is deterministic.
@@ -820,7 +837,6 @@ class BytecodeArray : public FixedArrayBase {
V(kFrameSizeOffset, kIntSize) \
V(kParameterSizeOffset, kIntSize) \
V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
- V(kInterruptBudgetOffset, kIntSize) \
V(kOSRNestingLevelOffset, kCharSize) \
V(kBytecodeAgeOffset, kCharSize) \
/* Total size. */ \
@@ -830,6 +846,11 @@ class BytecodeArray : public FixedArrayBase {
BYTECODE_ARRAY_FIELDS)
#undef BYTECODE_ARRAY_FIELDS
+ // InterpreterEntryTrampoline expects these fields to be next to each other
+ // and writes a 16-bit value to reset them.
+ STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
+ kOSRNestingLevelOffset + kCharSize);
+
// Maximal memory consumption for a single BytecodeArray.
static const int kMaxSize = 512 * MB;
// Maximal length of a single BytecodeArray.
@@ -906,10 +927,10 @@ class DeoptimizationData : public FixedArray {
// Allocates a DeoptimizationData.
static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
- PretenureFlag pretenure);
+ AllocationType allocation);
// Return an empty DeoptimizationData.
- static Handle<DeoptimizationData> Empty(Isolate* isolate);
+ V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);
DECL_CAST(DeoptimizationData)
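The new STATIC_ASSERT above documents why kInterruptBudgetOffset could be removed from between the byte-sized fields: the interpreter entry trampoline resets the OSR nesting level and the bytecode age with a single 16-bit store, which only works while the two 8-bit fields stay adjacent. A self-contained sketch of that trick in plain C++ (not the actual trampoline, which is generated code):
  #include <cstddef>
  #include <cstdint>
  #include <cstring>
  struct BytecodeHeader {
    std::uint8_t osr_nesting_level;
    std::uint8_t bytecode_age;  // must directly follow osr_nesting_level
  };
  static_assert(offsetof(BytecodeHeader, bytecode_age) ==
                    offsetof(BytecodeHeader, osr_nesting_level) + 1,
                "fields must be adjacent for the 16-bit reset");
  void ResetBoth(BytecodeHeader* h) {
    std::uint16_t zero = 0;  // one store clears both byte fields
    std::memcpy(&h->osr_nesting_level, &zero, sizeof(zero));
  }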
diff --git a/deps/v8/src/objects/debug-objects.cc b/deps/v8/src/objects/debug-objects.cc
index 1ae360dbbe..760edbfbcf 100644
--- a/deps/v8/src/objects/debug-objects.cc
+++ b/deps/v8/src/objects/debug-objects.cc
@@ -32,6 +32,18 @@ void DebugInfo::ClearBreakInfo(Isolate* isolate) {
// Reset function's bytecode array field to point to the original bytecode
// array.
shared()->SetDebugBytecodeArray(OriginalBytecodeArray());
+
+ // If the function is currently running on the stack, we need to update the
+ // bytecode pointers on the stack so they point to the original
+ // BytecodeArray before releasing that BytecodeArray from this DebugInfo.
+ // Otherwise, it could be flushed and cause problems on resume. See v8:9067.
+ {
+ RedirectActiveFunctions redirect_visitor(
+ shared(), RedirectActiveFunctions::Mode::kUseOriginalBytecode);
+ redirect_visitor.VisitThread(isolate, isolate->thread_local_top());
+ isolate->thread_manager()->IterateArchivedThreads(&redirect_visitor);
+ }
+
set_original_bytecode_array(ReadOnlyRoots(isolate).undefined_value());
set_debug_bytecode_array(ReadOnlyRoots(isolate).undefined_value());
}
diff --git a/deps/v8/src/objects/debug-objects.h b/deps/v8/src/objects/debug-objects.h
index cffe280097..9839f405f6 100644
--- a/deps/v8/src/objects/debug-objects.h
+++ b/deps/v8/src/objects/debug-objects.h
@@ -33,7 +33,7 @@ class DebugInfo : public Struct {
kDebugExecutionMode = 1 << 5
};
- typedef base::Flags<Flag> Flags;
+ using Flags = base::Flags<Flag>;
// A bitfield that lists uses of the current instance.
DECL_INT_ACCESSORS(flags)
diff --git a/deps/v8/src/objects/descriptor-array-inl.h b/deps/v8/src/objects/descriptor-array-inl.h
index bd96819597..a59d4e5a75 100644
--- a/deps/v8/src/objects/descriptor-array-inl.h
+++ b/deps/v8/src/objects/descriptor-array-inl.h
@@ -25,7 +25,7 @@ namespace v8 {
namespace internal {
OBJECT_CONSTRUCTORS_IMPL(DescriptorArray, HeapObject)
-OBJECT_CONSTRUCTORS_IMPL(EnumCache, Tuple2)
+OBJECT_CONSTRUCTORS_IMPL(EnumCache, Struct)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(EnumCache)
diff --git a/deps/v8/src/objects/descriptor-array.h b/deps/v8/src/objects/descriptor-array.h
index 4a6a240ba8..89350514b7 100644
--- a/deps/v8/src/objects/descriptor-array.h
+++ b/deps/v8/src/objects/descriptor-array.h
@@ -22,18 +22,21 @@ class Handle;
class Isolate;
// An EnumCache is a pair used to hold keys and indices caches.
-class EnumCache : public Tuple2 {
+class EnumCache : public Struct {
public:
DECL_ACCESSORS(keys, FixedArray)
DECL_ACCESSORS(indices, FixedArray)
DECL_CAST(EnumCache)
+ DECL_PRINTER(EnumCache)
+ DECL_VERIFIER(EnumCache)
+
// Layout description.
- static const int kKeysOffset = kValue1Offset;
- static const int kIndicesOffset = kValue2Offset;
+ DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
+ TORQUE_GENERATED_ENUM_CACHE_FIELDS)
- OBJECT_CONSTRUCTORS(EnumCache, Tuple2);
+ OBJECT_CONSTRUCTORS(EnumCache, Struct);
};
// A DescriptorArray is a custom array that holds instance descriptors.
@@ -123,9 +126,9 @@ class DescriptorArray : public HeapObject {
// Allocates a DescriptorArray, but returns the singleton
// empty descriptor array object if number_of_descriptors is 0.
- static Handle<DescriptorArray> Allocate(
+ V8_EXPORT_PRIVATE static Handle<DescriptorArray> Allocate(
Isolate* isolate, int nof_descriptors, int slack,
- AllocationType type = AllocationType::kYoung);
+ AllocationType allocation = AllocationType::kYoung);
void Initialize(EnumCache enum_cache, HeapObject undefined_value,
int nof_descriptors, int slack);
@@ -171,7 +174,7 @@ class DescriptorArray : public HeapObject {
inline ObjectSlot GetKeySlot(int descriptor);
inline MaybeObjectSlot GetValueSlot(int descriptor);
- typedef FlexibleWeakBodyDescriptor<kPointersStartOffset> BodyDescriptor;
+ using BodyDescriptor = FlexibleWeakBodyDescriptor<kPointersStartOffset>;
// Layout of descriptor.
// Naming is consistent with Dictionary classes for easy templating.
@@ -190,7 +193,7 @@ class DescriptorArray : public HeapObject {
#ifdef DEBUG
// Is the descriptor array sorted and without duplicates?
- bool IsSortedNoDuplicates(int valid_descriptors = -1);
+ V8_EXPORT_PRIVATE bool IsSortedNoDuplicates(int valid_descriptors = -1);
// Are two DescriptorArrays equal?
bool IsEqualTo(DescriptorArray other);
diff --git a/deps/v8/src/objects/dictionary.h b/deps/v8/src/objects/dictionary.h
index 7670dff131..0bce08393f 100644
--- a/deps/v8/src/objects/dictionary.h
+++ b/deps/v8/src/objects/dictionary.h
@@ -24,11 +24,12 @@ class Handle;
class Isolate;
template <typename Derived, typename Shape>
-class Dictionary : public HashTable<Derived, Shape> {
- typedef HashTable<Derived, Shape> DerivedHashTable;
+class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
+ : public HashTable<Derived, Shape> {
+ using DerivedHashTable = HashTable<Derived, Shape>;
public:
- typedef typename Shape::Key Key;
+ using Key = typename Shape::Key;
// Returns the value at entry.
Object ValueAt(int entry) {
return this->get(DerivedHashTable::EntryToIndex(entry) + 1);
@@ -125,8 +126,9 @@ class NameDictionaryShape : public BaseDictionaryShape<Handle<Name>> {
};
template <typename Derived, typename Shape>
-class BaseNameDictionary : public Dictionary<Derived, Shape> {
- typedef typename Shape::Key Key;
+class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) BaseNameDictionary
+ : public Dictionary<Derived, Shape> {
+ using Key = typename Shape::Key;
public:
static const int kNextEnumerationIndexIndex =
@@ -159,7 +161,7 @@ class BaseNameDictionary : public Dictionary<Derived, Shape> {
// Creates a new dictionary.
V8_WARN_UNUSED_RESULT static Handle<Derived> New(
Isolate* isolate, int at_least_space_for,
- PretenureFlag pretenure = NOT_TENURED,
+ AllocationType allocation = AllocationType::kYoung,
MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY);
// Collect the keys into the given KeyAccumulator, in ascending chronological
@@ -192,7 +194,12 @@ class BaseNameDictionary : public Dictionary<Derived, Shape> {
OBJECT_CONSTRUCTORS(BaseNameDictionary, Dictionary<Derived, Shape>);
};
-class NameDictionary
+class NameDictionary;
+
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ BaseNameDictionary<NameDictionary, NameDictionaryShape>;
+
+class V8_EXPORT_PRIVATE NameDictionary
: public BaseNameDictionary<NameDictionary, NameDictionaryShape> {
public:
DECL_CAST(NameDictionary)
@@ -228,7 +235,12 @@ class GlobalDictionaryShape : public NameDictionaryShape {
static inline RootIndex GetMapRootIndex();
};
-class GlobalDictionary
+class GlobalDictionary;
+
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ BaseNameDictionary<GlobalDictionary, GlobalDictionaryShape>;
+
+class V8_EXPORT_PRIVATE GlobalDictionary
: public BaseNameDictionary<GlobalDictionary, GlobalDictionaryShape> {
public:
DECL_CAST(GlobalDictionary)
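The `extern template class` lines added here are C++11 explicit instantiation declarations: they tell every translation unit including dictionary.h not to instantiate BaseNameDictionary for these shapes itself, because a single exported instantiation (paired with EXPORT_TEMPLATE_DECLARE for component builds) is provided elsewhere. A minimal sketch of the underlying language mechanism, without the V8 export macros:
  // widget.h
  template <typename T>
  struct Widget {
    T Twice(T v) { return v + v; }
  };
  // Promise: Widget<int> is explicitly instantiated in exactly one .cc file.
  extern template struct Widget<int>;
  // widget.cc
  // The one explicit instantiation definition all other TUs link against.
  template struct Widget<int>;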
diff --git a/deps/v8/src/objects/embedder-data-array.h b/deps/v8/src/objects/embedder-data-array.h
index 751e4a94a5..f5ab2fa7ee 100644
--- a/deps/v8/src/objects/embedder-data-array.h
+++ b/deps/v8/src/objects/embedder-data-array.h
@@ -8,6 +8,7 @@
#include "src/globals.h"
#include "src/maybe-handles.h"
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -27,14 +28,10 @@ class EmbedderDataArray : public HeapObject {
DECL_CAST(EmbedderDataArray)
-// Layout description.
-#define EMBEDDER_DATA_ARRAY_FIELDS(V) \
- V(kLengthOffset, kTaggedSize) \
- V(kHeaderSize, 0)
-
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- EMBEDDER_DATA_ARRAY_FIELDS)
-#undef EMBEDDER_DATA_ARRAY_FIELDS
+ TORQUE_GENERATED_EMBEDDER_DATA_ARRAY_FIELDS)
+ // TODO(v8:8989): [torque] Support marker constants.
+ static const int kHeaderSize = kSize;
// Garbage collection support.
static constexpr int SizeFor(int length) {
diff --git a/deps/v8/src/objects/embedder-data-slot.h b/deps/v8/src/objects/embedder-data-slot.h
index 19bef3d170..6cebf28f2d 100644
--- a/deps/v8/src/objects/embedder-data-slot.h
+++ b/deps/v8/src/objects/embedder-data-slot.h
@@ -49,7 +49,7 @@ class EmbedderDataSlot
static constexpr int kRequiredPtrAlignment = kSmiTagSize;
// Opaque type used for storing raw embedder data.
- typedef Address RawData;
+ using RawData = Address;
V8_INLINE Object load_tagged() const;
V8_INLINE void store_smi(Smi value);
diff --git a/deps/v8/src/objects/feedback-cell-inl.h b/deps/v8/src/objects/feedback-cell-inl.h
index b3d7d196fc..c3902ca9aa 100644
--- a/deps/v8/src/objects/feedback-cell-inl.h
+++ b/deps/v8/src/objects/feedback-cell-inl.h
@@ -22,6 +22,14 @@ OBJECT_CONSTRUCTORS_IMPL(FeedbackCell, Struct)
CAST_ACCESSOR(FeedbackCell)
ACCESSORS(FeedbackCell, value, HeapObject, kValueOffset)
+INT32_ACCESSORS(FeedbackCell, interrupt_budget, kInterruptBudgetOffset)
+
+void FeedbackCell::clear_padding() {
+ if (FeedbackCell::kSize == FeedbackCell::kUnalignedSize) return;
+ DCHECK_GE(FeedbackCell::kSize, FeedbackCell::kUnalignedSize);
+ memset(reinterpret_cast<byte*>(address() + FeedbackCell::kUnalignedSize), 0,
+ FeedbackCell::kSize - FeedbackCell::kUnalignedSize);
+}
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/feedback-cell.h b/deps/v8/src/objects/feedback-cell.h
index b8864ef4a2..a708f4cb92 100644
--- a/deps/v8/src/objects/feedback-cell.h
+++ b/deps/v8/src/objects/feedback-cell.h
@@ -20,8 +20,16 @@ namespace internal {
// a native context.
class FeedbackCell : public Struct {
public:
+ static int GetInitialInterruptBudget() {
+ if (FLAG_lazy_feedback_allocation) {
+ return FLAG_budget_for_feedback_vector_allocation;
+ }
+ return FLAG_interrupt_budget;
+ }
+
// [value]: value of the cell.
DECL_ACCESSORS(value, HeapObject)
+ DECL_INT32_ACCESSORS(interrupt_budget)
DECL_CAST(FeedbackCell)
@@ -30,15 +38,22 @@ class FeedbackCell : public Struct {
DECL_VERIFIER(FeedbackCell)
// Layout description.
-#define FEEDBACK_CELL_FIELDS(V) \
- V(kValueOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
+#define FEEDBACK_CELL_FIELDS(V) \
+ V(kValueOffset, kTaggedSize) \
+ /* Non-pointer fields */ \
+ V(kInterruptBudgetOffset, kInt32Size) \
+ /* Total size. */ \
+ V(kUnalignedSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, FEEDBACK_CELL_FIELDS)
#undef FEEDBACK_CELL_FIELDS
- typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
+ static const int kSize = RoundUp<kObjectAlignment>(int{kUnalignedSize});
+
+ inline void clear_padding();
+
+ using BodyDescriptor =
+ FixedBodyDescriptor<kValueOffset, kInterruptBudgetOffset, kSize>;
OBJECT_CONSTRUCTORS(FeedbackCell, Struct);
};
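Because FeedbackCell now ends in a raw 32-bit interrupt_budget field, its natural size may not be a multiple of the object alignment; kSize rounds it up and clear_padding() zeroes the slack so snapshot contents stay deterministic. A hedged numeric sketch, assuming 8-byte tagged fields and 8-byte object alignment (actual values depend on the build configuration):
  constexpr int kTaggedSize = 8;       // assumption: 64-bit, no compression
  constexpr int kObjectAlignment = 8;  // assumption
  constexpr int kHeaderSize = kTaggedSize;                    // map word
  constexpr int kValueOffset = kHeaderSize;                   // 8
  constexpr int kInterruptBudgetOffset = kValueOffset + 8;    // 16
  constexpr int kUnalignedSize = kInterruptBudgetOffset + 4;  // 20
  constexpr int kSize =
      (kUnalignedSize + kObjectAlignment - 1) & ~(kObjectAlignment - 1);  // 24
  static_assert(kSize - kUnalignedSize == 4,
                "clear_padding() zeroes this 4-byte tail");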
diff --git a/deps/v8/src/objects/fixed-array-inl.h b/deps/v8/src/objects/fixed-array-inl.h
index 9aa65a8de6..d494f8d15b 100644
--- a/deps/v8/src/objects/fixed-array-inl.h
+++ b/deps/v8/src/objects/fixed-array-inl.h
@@ -208,6 +208,12 @@ void FixedArray::MoveElements(Heap* heap, int dst_index, int src_index, int len,
heap->MoveElements(*this, dst_index, src_index, len, mode);
}
+void FixedArray::CopyElements(Heap* heap, int dst_index, FixedArray src,
+ int src_index, int len, WriteBarrierMode mode) {
+ DisallowHeapAllocation no_gc;
+ heap->CopyElements(*this, src, dst_index, src_index, len, mode);
+}
+
// Perform a binary search in a fixed array.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name name, int valid_entries,
@@ -543,9 +549,9 @@ PodArray<T> PodArray<T>::cast(Object object) {
// static
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
return Handle<PodArray<T>>::cast(
- isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
+ isolate->factory()->NewByteArray(length * sizeof(T), allocation));
}
template <class T>
diff --git a/deps/v8/src/objects/fixed-array.h b/deps/v8/src/objects/fixed-array.h
index 2dc99de6da..e3ab45ba0e 100644
--- a/deps/v8/src/objects/fixed-array.h
+++ b/deps/v8/src/objects/fixed-array.h
@@ -15,8 +15,8 @@
namespace v8 {
namespace internal {
-typedef FlexibleWeakBodyDescriptor<HeapObject::kHeaderSize>
- WeakArrayBodyDescriptor;
+using WeakArrayBodyDescriptor =
+ FlexibleWeakBodyDescriptor<HeapObject::kHeaderSize>;
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) \
V(BYTECODE_ARRAY_CONSTANT_POOL_SUB_TYPE) \
@@ -85,7 +85,7 @@ class FixedArrayBase : public HeapObject {
static int GetMaxLengthForNewSpaceAllocation(ElementsKind kind);
- bool IsCowArray() const;
+ V8_EXPORT_PRIVATE bool IsCowArray() const;
// Maximal allowed size, in bytes, of a single FixedArrayBase.
// Prevents overflowing size computations, as well as extreme memory
@@ -124,10 +124,9 @@ class FixedArray : public FixedArrayBase {
Handle<T> GetValueChecked(Isolate* isolate, int index) const;
// Return a grown copy if the index is bigger than the array's length.
- static Handle<FixedArray> SetAndGrow(Isolate* isolate,
- Handle<FixedArray> array, int index,
- Handle<Object> value,
- PretenureFlag pretenure = NOT_TENURED);
+ V8_EXPORT_PRIVATE static Handle<FixedArray> SetAndGrow(
+ Isolate* isolate, Handle<FixedArray> array, int index,
+ Handle<Object> value, AllocationType allocation = AllocationType::kYoung);
// Setter that uses write barrier.
inline void set(int index, Object value);
@@ -157,10 +156,13 @@ class FixedArray : public FixedArrayBase {
inline void MoveElements(Heap* heap, int dst_index, int src_index, int len,
WriteBarrierMode mode);
+ inline void CopyElements(Heap* heap, int dst_index, FixedArray src,
+ int src_index, int len, WriteBarrierMode mode);
+
inline void FillWithHoles(int from, int to);
// Shrink the array and insert filler objects. {new_length} must be > 0.
- void Shrink(Isolate* isolate, int new_length);
+ V8_EXPORT_PRIVATE void Shrink(Isolate* isolate, int new_length);
// If {new_length} is 0, return the canonical empty FixedArray. Otherwise
// like above.
static Handle<FixedArray> ShrinkOrEmpty(Isolate* isolate,
@@ -168,7 +170,8 @@ class FixedArray : public FixedArrayBase {
int new_length);
// Copy a sub array from the receiver to dest.
- void CopyTo(int pos, FixedArray dest, int dest_pos, int len) const;
+ V8_EXPORT_PRIVATE void CopyTo(int pos, FixedArray dest, int dest_pos,
+ int len) const;
// Garbage collection support.
static constexpr int SizeFor(int length) {
@@ -196,7 +199,7 @@ class FixedArray : public FixedArrayBase {
DECL_PRINTER(FixedArray)
DECL_VERIFIER(FixedArray)
- typedef FlexibleBodyDescriptor<kHeaderSize> BodyDescriptor;
+ using BodyDescriptor = FlexibleBodyDescriptor<kHeaderSize>;
protected:
// Set operation on FixedArray without using write barriers. Can
@@ -296,17 +299,11 @@ class WeakFixedArray : public HeapObject {
DECL_PRINTER(WeakFixedArray)
DECL_VERIFIER(WeakFixedArray)
- typedef WeakArrayBodyDescriptor BodyDescriptor;
-
- // Layout description.
-#define WEAK_FIXED_ARRAY_FIELDS(V) \
- V(kLengthOffset, kTaggedSize) \
- /* Header size. */ \
- V(kHeaderSize, 0)
+ using BodyDescriptor = WeakArrayBodyDescriptor;
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- WEAK_FIXED_ARRAY_FIELDS)
-#undef WEAK_FIXED_ARRAY_FIELDS
+ TORQUE_GENERATED_WEAK_FIXED_ARRAY_FIELDS)
+ static constexpr int kHeaderSize = kSize;
static const int kMaxLength =
(FixedArray::kMaxSize - kHeaderSize) / kTaggedSize;
@@ -338,9 +335,9 @@ class WeakArrayList : public HeapObject {
DECL_VERIFIER(WeakArrayList)
DECL_PRINTER(WeakArrayList)
- static Handle<WeakArrayList> AddToEnd(Isolate* isolate,
- Handle<WeakArrayList> array,
- const MaybeObjectHandle& value);
+ V8_EXPORT_PRIVATE static Handle<WeakArrayList> AddToEnd(
+ Isolate* isolate, Handle<WeakArrayList> array,
+ const MaybeObjectHandle& value);
inline MaybeObject Get(int index) const;
@@ -357,7 +354,7 @@ class WeakArrayList : public HeapObject {
// Gives access to raw memory which stores the array's data.
inline MaybeObjectSlot data_start();
- bool IsFull();
+ V8_EXPORT_PRIVATE bool IsFull();
DECL_INT_ACCESSORS(capacity)
DECL_INT_ACCESSORS(length)
@@ -377,14 +374,14 @@ class WeakArrayList : public HeapObject {
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, WEAK_ARRAY_LIST_FIELDS)
#undef WEAK_ARRAY_LIST_FIELDS
- typedef WeakArrayBodyDescriptor BodyDescriptor;
+ using BodyDescriptor = WeakArrayBodyDescriptor;
static const int kMaxCapacity =
(FixedArray::kMaxSize - kHeaderSize) / kTaggedSize;
static Handle<WeakArrayList> EnsureSpace(
Isolate* isolate, Handle<WeakArrayList> array, int length,
- PretenureFlag pretenure = NOT_TENURED);
+ AllocationType allocation = AllocationType::kYoung);
// Returns the number of non-cleaned weak references in the array.
int CountLiveWeakReferences() const;
@@ -393,7 +390,7 @@ class WeakArrayList : public HeapObject {
// around in the array - this method can only be used in cases where the user
// doesn't care about the indices! Users should make sure there are no
// duplicates.
- bool RemoveOne(const MaybeObjectHandle& value);
+ V8_EXPORT_PRIVATE bool RemoveOne(const MaybeObjectHandle& value);
class Iterator;
@@ -429,10 +426,13 @@ class WeakArrayList::Iterator {
// underlying FixedArray starting at kFirstIndex.
class ArrayList : public FixedArray {
public:
- static Handle<ArrayList> Add(Isolate* isolate, Handle<ArrayList> array,
- Handle<Object> obj);
- static Handle<ArrayList> Add(Isolate* isolate, Handle<ArrayList> array,
- Handle<Object> obj1, Handle<Object> obj2);
+ V8_EXPORT_PRIVATE static Handle<ArrayList> Add(Isolate* isolate,
+ Handle<ArrayList> array,
+ Handle<Object> obj);
+ V8_EXPORT_PRIVATE static Handle<ArrayList> Add(Isolate* isolate,
+ Handle<ArrayList> array,
+ Handle<Object> obj1,
+ Handle<Object> obj2);
static Handle<ArrayList> New(Isolate* isolate, int size);
// Returns the number of elements in the list, not the allocated size, which
@@ -550,8 +550,9 @@ class ByteArray : public FixedArrayBase {
template <class T>
class PodArray : public ByteArray {
public:
- static Handle<PodArray<T>> New(Isolate* isolate, int length,
- PretenureFlag pretenure = NOT_TENURED);
+ static Handle<PodArray<T>> New(
+ Isolate* isolate, int length,
+ AllocationType allocation = AllocationType::kYoung);
void copy_out(int index, T* result) {
ByteArray::copy_out(index * sizeof(T), reinterpret_cast<byte*>(result),
sizeof(T));
@@ -649,7 +650,7 @@ class FixedTypedArrayBase : public FixedArrayBase {
template <class Traits>
class FixedTypedArray : public FixedTypedArrayBase {
public:
- typedef typename Traits::ElementType ElementType;
+ using ElementType = typename Traits::ElementType;
static const InstanceType kInstanceType = Traits::kInstanceType;
DECL_CAST(FixedTypedArray<Traits>)
@@ -684,7 +685,7 @@ class FixedTypedArray : public FixedTypedArrayBase {
STATIC_ASSERT(sizeof(elementType) <= FixedTypedArrayBase::kMaxElementSize); \
class Type##ArrayTraits { \
public: /* NOLINT */ \
- typedef elementType ElementType; \
+ using ElementType = elementType; \
static const InstanceType kInstanceType = FIXED_##TYPE##_ARRAY_TYPE; \
static const char* ArrayTypeName() { return "Fixed" #Type "Array"; } \
static inline Handle<Object> ToHandle(Isolate* isolate, \
@@ -692,7 +693,7 @@ class FixedTypedArray : public FixedTypedArrayBase {
static inline elementType defaultValue(); \
}; \
\
- typedef FixedTypedArray<Type##ArrayTraits> Fixed##Type##Array;
+ using Fixed##Type##Array = FixedTypedArray<Type##ArrayTraits>;
TYPED_ARRAYS(FIXED_TYPED_ARRAY_TRAITS)
diff --git a/deps/v8/src/objects/frame-array.h b/deps/v8/src/objects/frame-array.h
index 60d3b6e20a..438718e25f 100644
--- a/deps/v8/src/objects/frame-array.h
+++ b/deps/v8/src/objects/frame-array.h
@@ -20,7 +20,7 @@ class Handle;
#define FRAME_ARRAY_FIELD_LIST(V) \
V(WasmInstance, WasmInstanceObject) \
V(WasmFunctionIndex, Smi) \
- V(WasmCodeObject, Foreign) \
+ V(WasmCodeObject, Object) \
V(Receiver, Object) \
V(Function, JSFunction) \
V(Code, AbstractCode) \
diff --git a/deps/v8/src/objects/free-space.h b/deps/v8/src/objects/free-space.h
index bb69ba389e..f1f7bb56c5 100644
--- a/deps/v8/src/objects/free-space.h
+++ b/deps/v8/src/objects/free-space.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_FREE_SPACE_H_
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -40,15 +41,8 @@ class FreeSpace : public HeapObject {
DECL_PRINTER(FreeSpace)
DECL_VERIFIER(FreeSpace)
- // Layout description.
-#define FREE_SPACE_FIELDS(V) \
- V(kSizeOffset, kTaggedSize) \
- V(kNextOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, FREE_SPACE_FIELDS)
-#undef FREE_SPACE_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_FREE_SPACE_FIELDS)
OBJECT_CONSTRUCTORS(FreeSpace, HeapObject);
};
diff --git a/deps/v8/src/objects/hash-table-inl.h b/deps/v8/src/objects/hash-table-inl.h
index 18786d780b..d65d9de083 100644
--- a/deps/v8/src/objects/hash-table-inl.h
+++ b/deps/v8/src/objects/hash-table-inl.h
@@ -49,6 +49,27 @@ CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(EphemeronHashTable)
CAST_ACCESSOR(ObjectHashSet)
+void EphemeronHashTable::set_key(int index, Object value) {
+ DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
+ DCHECK(IsEphemeronHashTable());
+ DCHECK_GE(index, 0);
+ DCHECK_LT(index, this->length());
+ int offset = kHeaderSize + index * kTaggedSize;
+ RELAXED_WRITE_FIELD(*this, offset, value);
+ EPHEMERON_KEY_WRITE_BARRIER(*this, offset, value);
+}
+
+void EphemeronHashTable::set_key(int index, Object value,
+ WriteBarrierMode mode) {
+ DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
+ DCHECK(IsEphemeronHashTable());
+ DCHECK_GE(index, 0);
+ DCHECK_LT(index, this->length());
+ int offset = kHeaderSize + index * kTaggedSize;
+ RELAXED_WRITE_FIELD(*this, offset, value);
+ CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(*this, offset, value, mode);
+}
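
A minimal standalone sketch (not V8 code) of why ephemeron key writes get their own path: during marking, an entry's value is only kept alive while its key is alive, so the collector has to observe every key store through a dedicated barrier instead of the ordinary FixedArray write. All names and types below are illustrative.

#include <unordered_map>
#include <unordered_set>

struct Obj {};  // stand-in for a heap object

// Toy ephemeron table: keys are held weakly; a value is reachable only
// through its key.
using EphemeronTable = std::unordered_map<Obj*, Obj*>;

// One marking fixed point: mark a value iff its key is already marked.
void MarkEphemerons(const EphemeronTable& table,
                    std::unordered_set<Obj*>& live) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (const auto& entry : table) {
      if (live.count(entry.first) != 0 && live.count(entry.second) == 0) {
        live.insert(entry.second);  // value lives only because its key lives
        changed = true;
      }
    }
  }
}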
+
int HashTableBase::NumberOfElements() const {
return Smi::ToInt(get(kNumberOfElementsIndex));
}
@@ -143,6 +164,19 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, int entry,
return true;
}
+template <typename Derived, typename Shape>
+void HashTable<Derived, Shape>::set_key(int index, Object value) {
+ DCHECK(!IsEphemeronHashTable());
+ FixedArray::set(index, value);
+}
+
+template <typename Derived, typename Shape>
+void HashTable<Derived, Shape>::set_key(int index, Object value,
+ WriteBarrierMode mode) {
+ DCHECK(!IsEphemeronHashTable());
+ FixedArray::set(index, value, mode);
+}
+
template <typename KeyT>
bool BaseShape<KeyT>::IsKey(ReadOnlyRoots roots, Object key) {
return IsLive(roots, key);
diff --git a/deps/v8/src/objects/hash-table.h b/deps/v8/src/objects/hash-table.h
index 15ad1d8538..0c83d01b42 100644
--- a/deps/v8/src/objects/hash-table.h
+++ b/deps/v8/src/objects/hash-table.h
@@ -6,6 +6,8 @@
#define V8_OBJECTS_HASH_TABLE_H_
#include "src/base/compiler-specific.h"
+#include "src/base/export-template.h"
+#include "src/base/macros.h"
#include "src/globals.h"
#include "src/objects/fixed-array.h"
#include "src/objects/smi.h"
@@ -56,7 +58,7 @@ namespace internal {
template <typename KeyT>
class BaseShape {
public:
- typedef KeyT Key;
+ using Key = KeyT;
static inline RootIndex GetMapRootIndex();
static const bool kNeedsHoleCheck = true;
static Object Unwrap(Object key) { return key; }
@@ -130,15 +132,16 @@ class V8_EXPORT_PRIVATE HashTableBase : public NON_EXPORTED_BASE(FixedArray) {
};
template <typename Derived, typename Shape>
-class HashTable : public HashTableBase {
+class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
+ : public HashTableBase {
public:
- typedef Shape ShapeT;
- typedef typename Shape::Key Key;
+ using ShapeT = Shape;
+ using Key = typename Shape::Key;
// Returns a new HashTable object.
V8_WARN_UNUSED_RESULT static Handle<Derived> New(
Isolate* isolate, int at_least_space_for,
- PretenureFlag pretenure = NOT_TENURED,
+ AllocationType allocation = AllocationType::kYoung,
MinimumCapacity capacity_option = USE_DEFAULT_MINIMUM_CAPACITY);
// Garbage collection support.
@@ -183,10 +186,20 @@ class HashTable : public HashTableBase {
return (entry * kEntrySize) + kElementsStartIndex;
}
+  // Returns the entry for an index (of the key).
+ static constexpr inline int IndexToEntry(int index) {
+ return (index - kElementsStartIndex) / kEntrySize;
+ }
+
+ // Returns the index for a slot address in the object.
+ static constexpr inline int SlotToIndex(Address object, Address slot) {
+ return static_cast<int>((slot - object - kHeaderSize) / kTaggedSize);
+ }
+
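
The three helpers added above are plain arithmetic over the table layout. A small self-contained check, with placeholder constants rather than the real values of any particular V8 table, shows that the entry/index/slot conversions round-trip:

#include <cassert>
#include <cstdint>

constexpr int kElementsStartIndex = 3;  // header slots before the entries
constexpr int kEntrySize = 2;           // e.g. one key slot + one value slot
constexpr int kHeaderSize = 8;          // object header size in bytes
constexpr int kTaggedSize = 8;          // size of one tagged slot in bytes

constexpr int EntryToIndex(int entry) {
  return entry * kEntrySize + kElementsStartIndex;
}
constexpr int IndexToEntry(int index) {
  return (index - kElementsStartIndex) / kEntrySize;
}
constexpr int SlotToIndex(uintptr_t object, uintptr_t slot) {
  return static_cast<int>((slot - object - kHeaderSize) / kTaggedSize);
}

int main() {
  for (int entry = 0; entry < 4; ++entry) {
    const int index = EntryToIndex(entry);
    assert(IndexToEntry(index) == entry);  // entry -> index -> entry
    const uintptr_t object = 0x1000;
    const uintptr_t slot = object + kHeaderSize + index * kTaggedSize;
    assert(SlotToIndex(object, slot) == index);  // slot address -> index
  }
  return 0;
}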
// Ensure enough space for n additional elements.
V8_WARN_UNUSED_RESULT static Handle<Derived> EnsureCapacity(
Isolate* isolate, Handle<Derived> table, int n,
- PretenureFlag pretenure = NOT_TENURED);
+ AllocationType allocation = AllocationType::kYoung);
// Returns true if this table has sufficient capacity for adding n elements.
bool HasSufficientCapacityToAdd(int number_of_additional_elements);
@@ -195,7 +208,7 @@ class HashTable : public HashTableBase {
friend class ObjectHashTable;
V8_WARN_UNUSED_RESULT static Handle<Derived> NewInternal(
- Isolate* isolate, int capacity, PretenureFlag pretenure);
+ Isolate* isolate, int capacity, AllocationType allocation);
// Find the entry at which to insert element with the given key that
// has the given hash value.
@@ -205,6 +218,9 @@ class HashTable : public HashTableBase {
V8_WARN_UNUSED_RESULT static Handle<Derived> Shrink(
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
+ inline void set_key(int index, Object value);
+ inline void set_key(int index, Object value, WriteBarrierMode mode);
+
private:
// Ensure that kMaxRegularCapacity yields a non-large object dictionary.
STATIC_ASSERT(EntryToIndex(kMaxRegularCapacity) < kMaxRegularLength);
@@ -274,7 +290,8 @@ class ObjectHashTableShape : public BaseShape<Handle<Object>> {
};
template <typename Derived, typename Shape>
-class ObjectHashTableBase : public HashTable<Derived, Shape> {
+class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase
+ : public HashTable<Derived, Shape> {
public:
// Looks up the value associated with the given key. The hole value is
// returned in case the key is not present.
@@ -315,9 +332,16 @@ class ObjectHashTableBase : public HashTable<Derived, Shape> {
OBJECT_CONSTRUCTORS(ObjectHashTableBase, HashTable<Derived, Shape>);
};
+class ObjectHashTable;
+
+extern template class EXPORT_TEMPLATE_DECLARE(
+ V8_EXPORT_PRIVATE) HashTable<ObjectHashTable, ObjectHashTableShape>;
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ ObjectHashTableBase<ObjectHashTable, ObjectHashTableShape>;
+
// ObjectHashTable maps keys that are arbitrary objects to object values by
// using the identity hash of the key for hashing purposes.
-class ObjectHashTable
+class V8_EXPORT_PRIVATE ObjectHashTable
: public ObjectHashTableBase<ObjectHashTable, ObjectHashTableShape> {
public:
DECL_CAST(ObjectHashTable)
@@ -333,19 +357,31 @@ class EphemeronHashTableShape : public ObjectHashTableShape {
static inline RootIndex GetMapRootIndex();
};
+class EphemeronHashTable;
+
+extern template class EXPORT_TEMPLATE_DECLARE(
+ V8_EXPORT_PRIVATE) HashTable<EphemeronHashTable, EphemeronHashTableShape>;
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ ObjectHashTableBase<EphemeronHashTable, EphemeronHashTableShape>;
+
// EphemeronHashTable is similar to ObjectHashTable but gets special treatment
// by the GC. The GC treats its entries as ephemerons: both key and value are
// weak references, however if the key is strongly reachable its corresponding
// value is also kept alive.
-class EphemeronHashTable
+class V8_EXPORT_PRIVATE EphemeronHashTable
: public ObjectHashTableBase<EphemeronHashTable, EphemeronHashTableShape> {
public:
DECL_CAST(EphemeronHashTable)
DECL_PRINTER(EphemeronHashTable)
+ class BodyDescriptor;
protected:
friend class MarkCompactCollector;
friend class ScavengerCollector;
+ friend class HashTable<EphemeronHashTable, EphemeronHashTableShape>;
+ friend class ObjectHashTableBase<EphemeronHashTable, EphemeronHashTableShape>;
+ inline void set_key(int index, Object value);
+ inline void set_key(int index, Object value, WriteBarrierMode mode);
OBJECT_CONSTRUCTORS(
EphemeronHashTable,
@@ -358,7 +394,12 @@ class ObjectHashSetShape : public ObjectHashTableShape {
static const int kEntrySize = 1;
};
-class ObjectHashSet : public HashTable<ObjectHashSet, ObjectHashSetShape> {
+class ObjectHashSet;
+extern template class EXPORT_TEMPLATE_DECLARE(
+ V8_EXPORT_PRIVATE) HashTable<ObjectHashSet, ObjectHashSetShape>;
+
+class V8_EXPORT_PRIVATE ObjectHashSet
+ : public HashTable<ObjectHashSet, ObjectHashSetShape> {
public:
static Handle<ObjectHashSet> Add(Isolate* isolate, Handle<ObjectHashSet> set,
Handle<Object> key);
diff --git a/deps/v8/src/objects/heap-object-inl.h b/deps/v8/src/objects/heap-object-inl.h
index fbdcb0f6ec..be97f8bb79 100644
--- a/deps/v8/src/objects/heap-object-inl.h
+++ b/deps/v8/src/objects/heap-object-inl.h
@@ -27,15 +27,18 @@ HeapObject::HeapObject(Address ptr, AllowInlineSmiStorage allow_smi)
IsHeapObject());
}
+// static
HeapObject HeapObject::FromAddress(Address address) {
DCHECK_TAG_ALIGNED(address);
return HeapObject(address + kHeapObjectTag);
}
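
A hedged sketch of the tagging arithmetic behind FromAddress: the aligned address is offset by a small tag so tagged heap pointers can be told apart from Smis. The tag values below follow V8's usual scheme but are stated here as an assumption, not taken from this patch.

#include <cassert>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;      // assumed heap-object tag
constexpr uintptr_t kHeapObjectTagMask = 3;  // assumed two low tag bits

constexpr uintptr_t FromAddress(uintptr_t address) {
  return address + kHeapObjectTag;  // address must be tag-aligned
}
constexpr uintptr_t ToAddress(uintptr_t tagged) {
  return tagged - kHeapObjectTag;
}
constexpr bool LooksLikeHeapObject(uintptr_t value) {
  return (value & kHeapObjectTagMask) == kHeapObjectTag;
}

int main() {
  const uintptr_t address = 0x4000;  // tag-aligned
  const uintptr_t tagged = FromAddress(address);
  assert(LooksLikeHeapObject(tagged));
  assert(ToAddress(tagged) == address);
  return 0;
}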
+// static
Heap* NeverReadOnlySpaceObject::GetHeap(const HeapObject object) {
return GetHeapFromWritableObject(object);
}
+// static
Isolate* NeverReadOnlySpaceObject::GetIsolate(const HeapObject object) {
return Isolate::FromHeap(GetHeap(object));
}
diff --git a/deps/v8/src/objects/heap-object.h b/deps/v8/src/objects/heap-object.h
index 69a8463943..f42dc05b81 100644
--- a/deps/v8/src/objects/heap-object.h
+++ b/deps/v8/src/objects/heap-object.h
@@ -16,6 +16,8 @@
namespace v8 {
namespace internal {
+class Heap;
+
// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public Object {
@@ -114,7 +116,7 @@ class HeapObject : public Object {
// Returns true if the object contains a tagged value at given offset.
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
- bool IsValidSlot(Map map, int offset);
+ V8_EXPORT_PRIVATE bool IsValidSlot(Map map, int offset);
// Returns the heap object's size in bytes
inline int Size() const;
@@ -129,9 +131,7 @@ class HeapObject : public Object {
// Does not invoke write barrier, so should only be assigned to
// during marking GC.
inline ObjectSlot RawField(int byte_offset) const;
- static inline ObjectSlot RawField(const HeapObject obj, int offset);
inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
- static inline MaybeObjectSlot RawMaybeWeakField(HeapObject obj, int offset);
DECL_CAST(HeapObject)
@@ -149,7 +149,7 @@ class HeapObject : public Object {
void PrintHeader(std::ostream& os, const char* id); // NOLINT
#endif
DECL_PRINTER(HeapObject)
- DECL_VERIFIER(HeapObject)
+ EXPORT_DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
inline void VerifyObjectField(Isolate* isolate, int offset);
inline void VerifySmiField(int offset);
@@ -173,7 +173,7 @@ class HeapObject : public Object {
bool CanBeRehashed() const;
// Rehash the object based on the layout inferred from its map.
- void RehashBasedOnMap(ReadOnlyRoots roots);
+ void RehashBasedOnMap(ReadOnlyRoots root);
// Layout description.
#define HEAP_OBJECT_FIELDS(V) \
diff --git a/deps/v8/src/objects/instance-type.h b/deps/v8/src/objects/instance-type.h
index b121e9be2c..edbc428a5d 100644
--- a/deps/v8/src/objects/instance-type.h
+++ b/deps/v8/src/objects/instance-type.h
@@ -15,26 +15,14 @@ namespace v8 {
namespace internal {
// We use the full 16 bits of the instance_type field to encode heap object
-// instance types. All the high-order bits (bit 6-15) are cleared if the object
+// instance types. All the high-order bits (bits 6-15) are cleared if the object
// is a string, and contain set bits if it is not a string.
-const uint32_t kIsNotStringMask = 0xffc0;
+const uint32_t kIsNotStringMask = ~((1 << 6) - 1);
const uint32_t kStringTag = 0x0;
-// Bit 5 indicates that the object is an internalized string (if not set) or
-// not (if set). Bit 7 has to be clear as well.
-const uint32_t kIsNotInternalizedMask = 0x20;
-const uint32_t kNotInternalizedTag = 0x20;
-const uint32_t kInternalizedTag = 0x0;
-
-// If bit 7 is clear then bit 3 indicates whether the string consists of
-// two-byte characters or one-byte characters.
-const uint32_t kStringEncodingMask = 0x8;
-const uint32_t kTwoByteStringTag = 0x0;
-const uint32_t kOneByteStringTag = 0x8;
-
-// If bit 7 is clear, the low-order 3 bits indicate the representation
-// of the string.
-const uint32_t kStringRepresentationMask = 0x07;
+// For strings, bits 0-2 indicate the representation of the string. In
+// particular, bit 0 indicates whether the string is direct or indirect.
+const uint32_t kStringRepresentationMask = (1 << 3) - 1;
enum StringRepresentationTag {
kSeqStringTag = 0x0,
kConsStringTag = 0x1,
@@ -42,20 +30,32 @@ enum StringRepresentationTag {
kSlicedStringTag = 0x3,
kThinStringTag = 0x5
};
-const uint32_t kIsIndirectStringMask = 0x1;
-const uint32_t kIsIndirectStringTag = 0x1;
-STATIC_ASSERT((kSeqStringTag & kIsIndirectStringMask) == 0); // NOLINT
-STATIC_ASSERT((kExternalStringTag & kIsIndirectStringMask) == 0); // NOLINT
-STATIC_ASSERT((kConsStringTag & kIsIndirectStringMask) ==
- kIsIndirectStringTag); // NOLINT
+const uint32_t kIsIndirectStringMask = 1 << 0;
+const uint32_t kIsIndirectStringTag = 1 << 0;
+STATIC_ASSERT((kSeqStringTag & kIsIndirectStringMask) == 0);
+STATIC_ASSERT((kExternalStringTag & kIsIndirectStringMask) == 0);
+STATIC_ASSERT((kConsStringTag & kIsIndirectStringMask) == kIsIndirectStringTag);
STATIC_ASSERT((kSlicedStringTag & kIsIndirectStringMask) ==
- kIsIndirectStringTag); // NOLINT
+ kIsIndirectStringTag);
STATIC_ASSERT((kThinStringTag & kIsIndirectStringMask) == kIsIndirectStringTag);
-// If bit 6 is clear and string representation indicates an external string,
-// then bit 5 indicates whether the data pointer is cached.
-const uint32_t kUncachedExternalStringMask = 0x10;
-const uint32_t kUncachedExternalStringTag = 0x10;
+// For strings, bit 3 indicates whether the string consists of two-byte
+// characters or one-byte characters.
+const uint32_t kStringEncodingMask = 1 << 3;
+const uint32_t kTwoByteStringTag = 0;
+const uint32_t kOneByteStringTag = 1 << 3;
+
+// For strings, bit 4 indicates whether the data pointer of an external string
+// is cached. Note that the string representation is expected to be
+// kExternalStringTag.
+const uint32_t kUncachedExternalStringMask = 1 << 4;
+const uint32_t kUncachedExternalStringTag = 1 << 4;
+
+// For strings, bit 5 indicates that the string is internalized (if not set) or
+// isn't (if set).
+const uint32_t kIsNotInternalizedMask = 1 << 5;
+const uint32_t kNotInternalizedTag = 1 << 5;
+const uint32_t kInternalizedTag = 0;
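
A compile-time sketch of how the rewritten constants compose. Only the masks mirror the header; the sample instance type is made up for illustration.

#include <cstdint>

constexpr uint32_t kIsNotStringMask = ~((1u << 6) - 1);
constexpr uint32_t kStringTag = 0;
constexpr uint32_t kStringRepresentationMask = (1u << 3) - 1;
constexpr uint32_t kSeqStringTag = 0x0;
constexpr uint32_t kStringEncodingMask = 1u << 3;
constexpr uint32_t kOneByteStringTag = 1u << 3;
constexpr uint32_t kIsNotInternalizedMask = 1u << 5;
constexpr uint32_t kInternalizedTag = 0;

// A sequential, one-byte, internalized string:
constexpr uint32_t kSampleType =
    kSeqStringTag | kOneByteStringTag | kInternalizedTag;

static_assert((kSampleType & kIsNotStringMask) == kStringTag, "is a string");
static_assert((kSampleType & kStringRepresentationMask) == kSeqStringTag,
              "sequential representation");
static_assert((kSampleType & kStringEncodingMask) == kOneByteStringTag,
              "one-byte encoding");
static_assert((kSampleType & kIsNotInternalizedMask) == kInternalizedTag,
              "internalized");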
// A ConsString with an empty string as the right side is a candidate
// for being shortcut by the garbage collector. We don't allocate any
@@ -156,6 +156,7 @@ enum InstanceType : uint16_t {
ASYNC_GENERATOR_REQUEST_TYPE,
CLASS_POSITIONS_TYPE,
DEBUG_INFO_TYPE,
+ ENUM_CACHE_TYPE,
FUNCTION_TEMPLATE_INFO_TYPE,
FUNCTION_TEMPLATE_RARE_DATA_TYPE,
INTERCEPTOR_INFO_TYPE,
@@ -188,6 +189,7 @@ enum InstanceType : uint16_t {
// FixedArrays.
FIXED_ARRAY_TYPE, // FIRST_FIXED_ARRAY_TYPE
OBJECT_BOILERPLATE_DESCRIPTION_TYPE,
+ CLOSURE_FEEDBACK_CELL_ARRAY_TYPE,
HASH_TABLE_TYPE, // FIRST_HASH_TABLE_TYPE
ORDERED_HASH_MAP_TYPE, // FIRST_DICTIONARY_TYPE
ORDERED_HASH_SET_TYPE,
@@ -401,6 +403,7 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
V(CachedTemplateObject, TUPLE3_TYPE) \
V(CodeDataContainer, CODE_DATA_CONTAINER_TYPE) \
V(CoverageInfo, FIXED_ARRAY_TYPE) \
+ V(ClosureFeedbackCellArray, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE) \
V(DescriptorArray, DESCRIPTOR_ARRAY_TYPE) \
V(EmbedderDataArray, EMBEDDER_DATA_ARRAY_TYPE) \
V(EphemeronHashTable, EPHEMERON_HASH_TABLE_TYPE) \
diff --git a/deps/v8/src/objects/intl-objects.cc b/deps/v8/src/objects/intl-objects.cc
index b990a85bed..8a43f36245 100644
--- a/deps/v8/src/objects/intl-objects.cc
+++ b/deps/v8/src/objects/intl-objects.cc
@@ -691,7 +691,8 @@ bool IsTwoLetterLanguage(const std::string& locale) {
bool IsDeprecatedLanguage(const std::string& locale) {
// Check if locale is one of the deprecated language tags:
- return locale == "in" || locale == "iw" || locale == "ji" || locale == "jw";
+ return locale == "in" || locale == "iw" || locale == "ji" || locale == "jw" ||
+ locale == "mo";
}
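
The check only detects deprecated tags. For orientation, a hedged sketch pairing them with their usual modern replacements; the mapping is illustrative context, not something this patch adds.

#include <string>

// Same check as above.
bool IsDeprecatedLanguage(const std::string& locale) {
  return locale == "in" || locale == "iw" || locale == "ji" ||
         locale == "jw" || locale == "mo";
}

// Usual modern replacements: in -> id (Indonesian), iw -> he (Hebrew),
// ji -> yi (Yiddish), jw -> jv (Javanese), mo -> ro (Romanian).
std::string ModernLanguageTag(const std::string& locale) {
  if (locale == "in") return "id";
  if (locale == "iw") return "he";
  if (locale == "ji") return "yi";
  if (locale == "jw") return "jv";
  if (locale == "mo") return "ro";
  return locale;  // not a deprecated tag; keep as-is
}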
// Reference:
@@ -1555,7 +1556,8 @@ std::map<std::string, std::string> LookupAndValidateUnicodeExtensions(
}
}
status = U_ZERO_ERROR;
- icu_locale->setKeywordValue(keyword, nullptr, status);
+ icu_locale->setUnicodeKeywordValue(
+ bcp47_key == nullptr ? keyword : bcp47_key, nullptr, status);
CHECK(U_SUCCESS(status));
}
@@ -1876,5 +1878,61 @@ const std::set<std::string>& Intl::GetAvailableLocalesForDateFormat() {
return available_locales.Pointer()->Get();
}
+Handle<String> Intl::NumberFieldToType(Isolate* isolate,
+ Handle<Object> numeric_obj,
+ int32_t field_id) {
+ DCHECK(numeric_obj->IsNumeric());
+ switch (static_cast<UNumberFormatFields>(field_id)) {
+ case UNUM_INTEGER_FIELD:
+ if (numeric_obj->IsBigInt()) {
+        // Neither NaN nor Infinity can be stored in a BigInt,
+ // so just return integer.
+ return isolate->factory()->integer_string();
+ } else {
+ double number = numeric_obj->Number();
+ if (std::isfinite(number)) return isolate->factory()->integer_string();
+ if (std::isnan(number)) return isolate->factory()->nan_string();
+ return isolate->factory()->infinity_string();
+ }
+ case UNUM_FRACTION_FIELD:
+ return isolate->factory()->fraction_string();
+ case UNUM_DECIMAL_SEPARATOR_FIELD:
+ return isolate->factory()->decimal_string();
+ case UNUM_GROUPING_SEPARATOR_FIELD:
+ return isolate->factory()->group_string();
+ case UNUM_CURRENCY_FIELD:
+ return isolate->factory()->currency_string();
+ case UNUM_PERCENT_FIELD:
+ return isolate->factory()->percentSign_string();
+ case UNUM_SIGN_FIELD:
+ if (numeric_obj->IsBigInt()) {
+ Handle<BigInt> big_int = Handle<BigInt>::cast(numeric_obj);
+ return big_int->IsNegative() ? isolate->factory()->minusSign_string()
+ : isolate->factory()->plusSign_string();
+ } else {
+ double number = numeric_obj->Number();
+ return number < 0 ? isolate->factory()->minusSign_string()
+ : isolate->factory()->plusSign_string();
+ }
+ case UNUM_EXPONENT_SYMBOL_FIELD:
+ case UNUM_EXPONENT_SIGN_FIELD:
+ case UNUM_EXPONENT_FIELD:
+ // We should never get these because we're not using any scientific
+ // formatter.
+ UNREACHABLE();
+ return Handle<String>();
+
+ case UNUM_PERMILL_FIELD:
+ // We're not creating any permill formatter, and it's not even clear how
+ // that would be possible with the ICU API.
+ UNREACHABLE();
+ return Handle<String>();
+
+ default:
+ UNREACHABLE();
+ return Handle<String>();
+ }
+}
+
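
For orientation, a sketch of where NumberFieldToType's answers end up: each ICU field of a formatted number becomes a { type, value } part. The concrete parts below are the expected shape for "-1,234.5" in an "en" number format, an assumption about formatter output rather than something produced by this patch.

struct NumberFormatPart {
  const char* type;
  const char* value;
};

// UNUM_SIGN_FIELD -> "minusSign", UNUM_INTEGER_FIELD -> "integer",
// UNUM_GROUPING_SEPARATOR_FIELD -> "group",
// UNUM_DECIMAL_SEPARATOR_FIELD -> "decimal", UNUM_FRACTION_FIELD -> "fraction".
constexpr NumberFormatPart kPartsForMinus1234Point5[] = {
    {"minusSign", "-"}, {"integer", "1"}, {"group", ","},
    {"integer", "234"}, {"decimal", "."}, {"fraction", "5"},
};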
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/intl-objects.h b/deps/v8/src/objects/intl-objects.h
index 4dcafab793..5adb6fa2c8 100644
--- a/deps/v8/src/objects/intl-objects.h
+++ b/deps/v8/src/objects/intl-objects.h
@@ -20,7 +20,7 @@
#include "unicode/locid.h"
#include "unicode/uversion.h"
-#define V8_MINIMUM_ICU_VERSION 63
+#define V8_MINIMUM_ICU_VERSION 64
namespace U_ICU_NAMESPACE {
class BreakIterator;
@@ -79,7 +79,7 @@ class Intl {
//
// service is a string denoting the type of Intl object; used when
// printing the error message.
- V8_WARN_UNUSED_RESULT static Maybe<bool> GetStringOption(
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Maybe<bool> GetStringOption(
Isolate* isolate, Handle<JSReceiver> options, const char* property,
std::vector<const char*> values, const char* service,
std::unique_ptr<char[]>* result);
@@ -122,7 +122,7 @@ class Intl {
//
// service is a string denoting the type of Intl object; used when
// printing the error message.
- V8_WARN_UNUSED_RESULT static Maybe<bool> GetBoolOption(
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Maybe<bool> GetBoolOption(
Isolate* isolate, Handle<JSReceiver> options, const char* property,
const char* service, bool* result);
@@ -186,6 +186,11 @@ class Intl {
Isolate* isolate, const icu::UnicodeString& string, int32_t begin,
int32_t end);
+ // Helper function to convert number field id to type string.
+ static Handle<String> NumberFieldToType(Isolate* isolate,
+ Handle<Object> numeric_obj,
+ int32_t field_id);
+
// A helper function to implement formatToParts which add element to array as
// $array[$index] = { type: $field_type_string, value: $value }
static void AddElement(Isolate* isolate, Handle<JSArray> array, int index,
diff --git a/deps/v8/src/objects/js-array-buffer-inl.h b/deps/v8/src/objects/js-array-buffer-inl.h
index b1f3ed4ce2..39677093c2 100644
--- a/deps/v8/src/objects/js-array-buffer-inl.h
+++ b/deps/v8/src/objects/js-array-buffer-inl.h
@@ -77,11 +77,7 @@ void* JSArrayBuffer::allocation_base() const {
}
bool JSArrayBuffer::is_wasm_memory() const {
- bool const is_wasm_memory = IsWasmMemoryBit::decode(bit_field());
- DCHECK_EQ(is_wasm_memory,
- GetIsolate()->wasm_engine()->memory_tracker()->IsWasmMemory(
- backing_store()));
- return is_wasm_memory;
+ return IsWasmMemoryBit::decode(bit_field());
}
void JSArrayBuffer::set_is_wasm_memory(bool is_wasm_memory) {
@@ -113,8 +109,6 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, was_detached,
JSArrayBuffer::WasDetachedBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
JSArrayBuffer::IsSharedBit)
-BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_growable,
- JSArrayBuffer::IsGrowableBit)
size_t JSArrayBufferView::byte_offset() const {
return READ_UINTPTR_FIELD(*this, kByteOffsetOffset);
diff --git a/deps/v8/src/objects/js-array-buffer.cc b/deps/v8/src/objects/js-array-buffer.cc
index b5a8ab79a0..f96ae7e752 100644
--- a/deps/v8/src/objects/js-array-buffer.cc
+++ b/deps/v8/src/objects/js-array-buffer.cc
@@ -69,11 +69,7 @@ void JSArrayBuffer::FreeBackingStore(Isolate* isolate, Allocation allocation) {
if (allocation.is_wasm_memory) {
wasm::WasmMemoryTracker* memory_tracker =
isolate->wasm_engine()->memory_tracker();
- if (!memory_tracker->FreeMemoryIfIsWasmMemory(isolate,
- allocation.backing_store)) {
- CHECK(FreePages(GetPlatformPageAllocator(), allocation.allocation_base,
- allocation.length));
- }
+ memory_tracker->FreeMemoryIfIsWasmMemory(isolate, allocation.backing_store);
} else {
isolate->array_buffer_allocator()->Free(allocation.allocation_base,
allocation.length);
diff --git a/deps/v8/src/objects/js-array-buffer.h b/deps/v8/src/objects/js-array-buffer.h
index 3cc42e61f9..b77d1c9877 100644
--- a/deps/v8/src/objects/js-array-buffer.h
+++ b/deps/v8/src/objects/js-array-buffer.h
@@ -52,7 +52,6 @@ class JSArrayBuffer : public JSObject {
V(IsDetachableBit, bool, 1, _) \
V(WasDetachedBit, bool, 1, _) \
V(IsSharedBit, bool, 1, _) \
- V(IsGrowableBit, bool, 1, _) \
V(IsWasmMemoryBit, bool, 1, _)
DEFINE_BIT_FIELDS(JS_ARRAY_BUFFER_BIT_FIELD_FIELDS)
#undef JS_ARRAY_BUFFER_BIT_FIELD_FIELDS
@@ -71,9 +70,6 @@ class JSArrayBuffer : public JSObject {
// [is_shared]: tells whether this is an ArrayBuffer or a SharedArrayBuffer.
DECL_BOOLEAN_ACCESSORS(is_shared)
- // [is_growable]: indicates whether it's possible to grow this buffer.
- DECL_BOOLEAN_ACCESSORS(is_growable)
-
// [is_wasm_memory]: whether the buffer is tracked by the WasmMemoryTracker.
DECL_BOOLEAN_ACCESSORS(is_wasm_memory)
@@ -95,8 +91,9 @@ class JSArrayBuffer : public JSObject {
bool is_wasm_memory;
};
- void FreeBackingStoreFromMainThread();
- static void FreeBackingStore(Isolate* isolate, Allocation allocation);
+ V8_EXPORT_PRIVATE void FreeBackingStoreFromMainThread();
+ V8_EXPORT_PRIVATE static void FreeBackingStore(Isolate* isolate,
+ Allocation allocation);
V8_EXPORT_PRIVATE static void Setup(
Handle<JSArrayBuffer> array_buffer, Isolate* isolate, bool is_external,
@@ -111,7 +108,7 @@ class JSArrayBuffer : public JSObject {
// Returns false if array buffer contents could not be allocated.
// In this case, |array_buffer| will not be set up.
- static bool SetupAllocatingData(
+ V8_EXPORT_PRIVATE static bool SetupAllocatingData(
Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
size_t allocated_length, bool initialize = true,
SharedFlag shared_flag = SharedFlag::kNotShared) V8_WARN_UNUSED_RESULT;
@@ -196,7 +193,7 @@ class JSTypedArray : public JSArrayBufferView {
ExternalArrayType type();
V8_EXPORT_PRIVATE size_t element_size();
- Handle<JSArrayBuffer> GetBuffer();
+ V8_EXPORT_PRIVATE Handle<JSArrayBuffer> GetBuffer();
// Whether the buffer's backing store is on-heap or off-heap.
inline bool is_on_heap() const;
diff --git a/deps/v8/src/objects/js-array.h b/deps/v8/src/objects/js-array.h
index a85af97e4a..23d62c810e 100644
--- a/deps/v8/src/objects/js-array.h
+++ b/deps/v8/src/objects/js-array.h
@@ -36,7 +36,8 @@ class JSArray : public JSObject {
// Initialize the array with the given capacity. The function may
// fail due to out-of-memory situations, but only if the requested
// capacity is non-zero.
- static void Initialize(Handle<JSArray> array, int capacity, int length = 0);
+ V8_EXPORT_PRIVATE static void Initialize(Handle<JSArray> array, int capacity,
+ int length = 0);
// If the JSArray has fast elements, and new_length would result in
// normalization, returns true.
@@ -46,7 +47,8 @@ class JSArray : public JSObject {
// Initializes the array to a certain length.
inline bool AllowsSetLength();
- static void SetLength(Handle<JSArray> array, uint32_t length);
+ V8_EXPORT_PRIVATE static void SetLength(Handle<JSArray> array,
+ uint32_t length);
// Set the content of the array to the content of storage.
static inline void SetContent(Handle<JSArray> array,
@@ -174,16 +176,8 @@ class JSArrayIterator : public JSObject {
inline IterationKind kind() const;
inline void set_kind(IterationKind kind);
- // Layout description.
-#define JS_ARRAY_ITERATOR_FIELDS(V) \
- V(kIteratedObjectOffset, kTaggedSize) \
- V(kNextIndexOffset, kTaggedSize) \
- V(kKindOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_ARRAY_ITERATOR_FIELDS)
-#undef JS_ARRAY_ITERATOR_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSARRAY_ITERATOR_FIELDS)
OBJECT_CONSTRUCTORS(JSArrayIterator, JSObject);
};
diff --git a/deps/v8/src/objects/js-break-iterator.h b/deps/v8/src/objects/js-break-iterator.h
index 03d036c957..3eff347485 100644
--- a/deps/v8/src/objects/js-break-iterator.h
+++ b/deps/v8/src/objects/js-break-iterator.h
@@ -35,7 +35,7 @@ class JSV8BreakIterator : public JSObject {
static Handle<JSObject> ResolvedOptions(
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
static void AdoptText(Isolate* isolate,
Handle<JSV8BreakIterator> break_iterator,
diff --git a/deps/v8/src/objects/js-collator.cc b/deps/v8/src/objects/js-collator.cc
index dd927f02dc..b75468c6f3 100644
--- a/deps/v8/src/objects/js-collator.cc
+++ b/deps/v8/src/objects/js-collator.cc
@@ -134,22 +134,13 @@ Handle<JSObject> JSCollator::ResolvedOptions(Isolate* isolate,
const char* collation = "default";
const char* usage = "sort";
const char* collation_key = "co";
- const char* legacy_collation_key = uloc_toLegacyKey(collation_key);
- DCHECK_NOT_NULL(legacy_collation_key);
-
- char legacy_collation_value[ULOC_FULLNAME_CAPACITY];
status = U_ZERO_ERROR;
- int32_t length =
- icu_locale.getKeywordValue(legacy_collation_key, legacy_collation_value,
- ULOC_FULLNAME_CAPACITY, status);
+ std::string collation_value =
+ icu_locale.getUnicodeKeywordValue<std::string>(collation_key, status);
std::string locale;
- if (length > 0 && U_SUCCESS(status)) {
- const char* collation_value =
- uloc_toUnicodeLocaleType(collation_key, legacy_collation_value);
- CHECK_NOT_NULL(collation_value);
-
- if (strcmp(collation_value, "search") == 0) {
+ if (U_SUCCESS(status)) {
+ if (collation_value == "search") {
usage = "search";
// Search is disallowed as a collation value per spec. Let's
@@ -166,12 +157,12 @@ Handle<JSObject> JSCollator::ResolvedOptions(Isolate* isolate,
// The spec forbids the search as a collation value in the
// locale tag, so let's filter it out.
status = U_ZERO_ERROR;
- new_icu_locale.setKeywordValue(legacy_collation_key, nullptr, status);
+ new_icu_locale.setUnicodeKeywordValue(collation_key, nullptr, status);
CHECK(U_SUCCESS(status));
locale = Intl::ToLanguageTag(new_icu_locale).FromJust();
} else {
- collation = collation_value;
+ collation = collation_value.c_str();
locale = Intl::ToLanguageTag(icu_locale).FromJust();
}
} else {
@@ -348,12 +339,8 @@ MaybeHandle<JSCollator> JSCollator::Initialize(Isolate* isolate,
// This will need to be filtered out when creating the
// resolvedOptions object.
if (usage == Usage::SEARCH) {
- const char* key = uloc_toLegacyKey("co");
- CHECK_NOT_NULL(key);
- const char* value = uloc_toLegacyType(key, "search");
- CHECK_NOT_NULL(value);
UErrorCode status = U_ZERO_ERROR;
- icu_locale.setKeywordValue(key, value, status);
+ icu_locale.setUnicodeKeywordValue("co", "search", status);
CHECK(U_SUCCESS(status));
}
diff --git a/deps/v8/src/objects/js-collator.h b/deps/v8/src/objects/js-collator.h
index f338a5cfb5..e5d223aa24 100644
--- a/deps/v8/src/objects/js-collator.h
+++ b/deps/v8/src/objects/js-collator.h
@@ -40,7 +40,7 @@ class JSCollator : public JSObject {
static Handle<JSObject> ResolvedOptions(Isolate* isolate,
Handle<JSCollator> collator);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
DECL_CAST(JSCollator)
DECL_PRINTER(JSCollator)
diff --git a/deps/v8/src/objects/js-collection-inl.h b/deps/v8/src/objects/js-collection-inl.h
index 79b55fda4a..78b6cc5db3 100644
--- a/deps/v8/src/objects/js-collection-inl.h
+++ b/deps/v8/src/objects/js-collection-inl.h
@@ -51,6 +51,7 @@ ACCESSORS(JSCollectionIterator, index, Object, kIndexOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
+CAST_ACCESSOR(JSCollection)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSMap)
diff --git a/deps/v8/src/objects/js-collection-iterator.h b/deps/v8/src/objects/js-collection-iterator.h
new file mode 100644
index 0000000000..f25753738b
--- /dev/null
+++ b/deps/v8/src/objects/js-collection-iterator.h
@@ -0,0 +1,74 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_JS_COLLECTION_ITERATOR_H_
+#define V8_OBJECTS_JS_COLLECTION_ITERATOR_H_
+
+#include "src/globals.h"
+#include "src/objects.h"
+#include "src/objects/js-objects.h"
+#include "src/objects/smi.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+class JSCollectionIterator : public JSObject {
+ public:
+ // [table]: the backing hash table mapping keys to values.
+ DECL_ACCESSORS(table, Object)
+
+ // [index]: The index into the data table.
+ DECL_ACCESSORS(index, Object)
+
+ void JSCollectionIteratorPrint(std::ostream& os, const char* name);
+
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSCOLLECTION_ITERATOR_FIELDS)
+
+ OBJECT_CONSTRUCTORS(JSCollectionIterator, JSObject);
+};
+
+// OrderedHashTableIterator is an iterator that iterates over the keys and
+// values of an OrderedHashTable.
+//
+// The iterator has a reference to the underlying OrderedHashTable data,
+// [table], as well as the current [index] the iterator is at.
+//
+// When the OrderedHashTable is rehashed it adds a reference from the old table
+// to the new table and stores enough data about the changes so that the
+// iterator [index] can be adjusted accordingly.
+//
+// When the [Next] result from the iterator is requested, the iterator checks if
+// there is a newer table that it needs to transition to.
+template <class Derived, class TableType>
+class OrderedHashTableIterator : public JSCollectionIterator {
+ public:
+ // Whether the iterator has more elements. This needs to be called before
+ // calling |CurrentKey| and/or |CurrentValue|.
+ bool HasMore();
+
+ // Move the index forward one.
+ void MoveNext() { set_index(Smi::FromInt(Smi::ToInt(index()) + 1)); }
+
+ // Returns the current key of the iterator. This should only be called when
+ // |HasMore| returns true.
+ inline Object CurrentKey();
+
+ private:
+  // Transitions the iterator to the non-obsolete backing store. This is a no-op
+ // if the [table] is not obsolete.
+ void Transition();
+
+ OBJECT_CONSTRUCTORS(OrderedHashTableIterator, JSCollectionIterator);
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_JS_COLLECTION_ITERATOR_H_
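
A hedged usage sketch of the iterator contract documented in the new header: HasMore() both transitions past obsolete tables and guards CurrentKey(). The helper below is illustrative, not an actual V8 call site.

// Generic traversal over any iterator exposing the HasMore/CurrentKey/MoveNext
// protocol described above.
template <typename Iterator, typename Callback>
void ForEachKey(Iterator it, Callback callback) {
  while (it.HasMore()) {        // may transition to the newest backing table
    callback(it.CurrentKey());  // only valid after HasMore() returned true
    it.MoveNext();
  }
}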
diff --git a/deps/v8/src/objects/js-collection.h b/deps/v8/src/objects/js-collection.h
index 5a685a8c78..0450de8fb1 100644
--- a/deps/v8/src/objects/js-collection.h
+++ b/deps/v8/src/objects/js-collection.h
@@ -6,7 +6,7 @@
#define V8_OBJECTS_JS_COLLECTION_H_
#include "src/objects.h"
-#include "src/objects/ordered-hash-table.h"
+#include "src/objects/js-collection-iterator.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -14,8 +14,13 @@
namespace v8 {
namespace internal {
+class OrderedHashSet;
+class OrderedHashMap;
+
class JSCollection : public JSObject {
public:
+ DECL_CAST(JSCollection)
+
// [table]: the backing hash table
DECL_ACCESSORS(table, Object)
@@ -97,22 +102,16 @@ class JSWeakCollection : public JSObject {
DECL_ACCESSORS(table, Object)
static void Initialize(Handle<JSWeakCollection> collection, Isolate* isolate);
- static void Set(Handle<JSWeakCollection> collection, Handle<Object> key,
- Handle<Object> value, int32_t hash);
+ V8_EXPORT_PRIVATE static void Set(Handle<JSWeakCollection> collection,
+ Handle<Object> key, Handle<Object> value,
+ int32_t hash);
static bool Delete(Handle<JSWeakCollection> collection, Handle<Object> key,
int32_t hash);
static Handle<JSArray> GetEntries(Handle<JSWeakCollection> holder,
int max_entries);
-// Layout description.
-#define JS_WEAK_COLLECTION_FIELDS(V) \
- V(kTableOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_WEAK_COLLECTION_FIELDS)
-#undef JS_WEAK_COLLECTION_FIELDS
+ TORQUE_GENERATED_JSWEAK_COLLECTION_FIELDS)
static const int kAddFunctionDescriptorIndex = 3;
@@ -120,7 +119,7 @@ class JSWeakCollection : public JSObject {
class BodyDescriptorImpl;
// Visit the whole object.
- typedef BodyDescriptorImpl BodyDescriptor;
+ using BodyDescriptor = BodyDescriptorImpl;
OBJECT_CONSTRUCTORS(JSWeakCollection, JSObject);
};
diff --git a/deps/v8/src/objects/js-date-time-format-inl.h b/deps/v8/src/objects/js-date-time-format-inl.h
index 6bcec9e301..1657241b07 100644
--- a/deps/v8/src/objects/js-date-time-format-inl.h
+++ b/deps/v8/src/objects/js-date-time-format-inl.h
@@ -23,6 +23,8 @@ OBJECT_CONSTRUCTORS_IMPL(JSDateTimeFormat, JSObject)
ACCESSORS(JSDateTimeFormat, icu_locale, Managed<icu::Locale>, kICULocaleOffset)
ACCESSORS(JSDateTimeFormat, icu_simple_date_format,
Managed<icu::SimpleDateFormat>, kICUSimpleDateFormatOffset)
+ACCESSORS(JSDateTimeFormat, icu_date_interval_format,
+ Managed<icu::DateIntervalFormat>, kICUDateIntervalFormatOffset)
ACCESSORS(JSDateTimeFormat, bound_format, Object, kBoundFormatOffset)
SMI_ACCESSORS(JSDateTimeFormat, flags, kFlagsOffset)
diff --git a/deps/v8/src/objects/js-date-time-format.cc b/deps/v8/src/objects/js-date-time-format.cc
index 3c1405f563..eda95f8773 100644
--- a/deps/v8/src/objects/js-date-time-format.cc
+++ b/deps/v8/src/objects/js-date-time-format.cc
@@ -19,7 +19,9 @@
#include "src/objects/js-date-time-format-inl.h"
#include "unicode/calendar.h"
+#include "unicode/dtitvfmt.h"
#include "unicode/dtptngen.h"
+#include "unicode/fieldpos.h"
#include "unicode/gregocal.h"
#include "unicode/smpdtfmt.h"
#include "unicode/unistr.h"
@@ -269,6 +271,7 @@ char LocaleIndependentAsciiToLower(char ch) {
// or ho_cHi_minH -> Ho_Chi_Minh. It is locale-agnostic and only
// deals with ASCII only characters.
// 'of', 'au' and 'es' are special-cased and lowercased.
+// "Antarctica/DumontDUrville" is special-cased as well.
// ICU's timezone parsing is case sensitive, but ECMAScript is case insensitive
std::string ToTitleCaseTimezoneLocation(Isolate* isolate,
const std::string& input) {
@@ -296,6 +299,10 @@ std::string ToTitleCaseTimezoneLocation(Isolate* isolate,
return std::string();
}
}
+ // Special case
+ if (title_cased == "Antarctica/Dumontdurville") {
+ return "Antarctica/DumontDUrville";
+ }
return title_cased;
}
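
A standalone approximation of the title-casing rules described above: title-case each word split on '_' and '/', keep "of"/"au"/"es" lowercase, and apply the DumontDUrville special case at the end. This is a sketch of the behavior, not the V8 helper itself.

#include <cctype>
#include <string>

std::string TitleCaseTimezone(const std::string& input) {
  std::string result;
  std::string word;
  auto flush_word = [&result, &word]() {
    if (word.empty()) return;
    if (word != "of" && word != "au" && word != "es") {
      word[0] =
          static_cast<char>(std::toupper(static_cast<unsigned char>(word[0])));
    }
    result += word;
    word.clear();
  };
  for (char ch : input) {
    if (ch == '_' || ch == '/') {
      flush_word();
      result += ch;  // keep the separator as-is
    } else {
      word += static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
    }
  }
  flush_word();
  if (result == "Antarctica/Dumontdurville") return "Antarctica/DumontDUrville";
  return result;  // e.g. "america/new_york" -> "America/New_York"
}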
@@ -488,16 +495,22 @@ MaybeHandle<JSObject> JSDateTimeFormat::ResolvedOptions(
}
}
- for (const auto& item : GetPatternItems()) {
- for (const auto& pair : item.pairs) {
- if (pattern.find(pair.pattern) != std::string::npos) {
- CHECK(JSReceiver::CreateDataProperty(
- isolate, options,
- factory->NewStringFromAsciiChecked(item.property.c_str()),
- factory->NewStringFromAsciiChecked(pair.value.c_str()),
- Just(kDontThrow))
- .FromJust());
- break;
+  // If dateStyle and timeStyle are undefined, then the internal slots
+  // listed in "Table 1: Components of date and time formats" will be set
+  // in Step 33.f.iii.1 of InitializeDateTimeFormat.
+ if (date_time_format->date_style() == DateTimeStyle::kUndefined &&
+ date_time_format->time_style() == DateTimeStyle::kUndefined) {
+ for (const auto& item : GetPatternItems()) {
+ for (const auto& pair : item.pairs) {
+ if (pattern.find(pair.pattern) != std::string::npos) {
+ CHECK(JSReceiver::CreateDataProperty(
+ isolate, options,
+ factory->NewStringFromAsciiChecked(item.property.c_str()),
+ factory->NewStringFromAsciiChecked(pair.value.c_str()),
+ Just(kDontThrow))
+ .FromJust());
+ break;
+ }
}
}
}
@@ -891,7 +904,8 @@ std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormat(
// has to be discussed. Revisit once the spec is clarified/revised.
icu::UnicodeString pattern;
UErrorCode status = U_ZERO_ERROR;
- pattern = generator.getBestPattern(skeleton, status);
+ pattern = generator.getBestPattern(skeleton, UDATPG_MATCH_HOUR_FIELD_LENGTH,
+ status);
CHECK(U_SUCCESS(status));
// Make formatter from skeleton. Calendar and numbering system are added
@@ -945,6 +959,16 @@ std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormatFromCache(
cache.Pointer()->Create(icu_locale, skeleton, generator));
}
+std::unique_ptr<icu::DateIntervalFormat> CreateICUDateIntervalFormat(
+ const icu::Locale& icu_locale, const icu::UnicodeString& skeleton) {
+ UErrorCode status = U_ZERO_ERROR;
+ std::unique_ptr<icu::DateIntervalFormat> date_interval_format(
+ icu::DateIntervalFormat::createInstance(skeleton, icu_locale, status));
+ if (U_FAILURE(status)) return std::unique_ptr<icu::DateIntervalFormat>();
+ CHECK_NOT_NULL(date_interval_format.get());
+ return date_interval_format;
+}
+
Intl::HourCycle HourCycleFromPattern(const icu::UnicodeString pattern) {
bool in_quote = false;
for (int32_t i = 0; i < pattern.length(); i++) {
@@ -1079,6 +1103,18 @@ std::unique_ptr<icu::SimpleDateFormat> DateTimeStylePattern(
generator);
}
+icu::UnicodeString SkeletonFromDateFormat(
+ const icu::SimpleDateFormat& icu_date_format) {
+ icu::UnicodeString pattern;
+ pattern = icu_date_format.toPattern(pattern);
+
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString skeleton =
+ icu::DateTimePatternGenerator::staticGetSkeleton(pattern, status);
+ CHECK(U_SUCCESS(status));
+ return skeleton;
+}
+
class DateTimePatternGeneratorCache {
public:
  // Return a clone that the caller has to free.
@@ -1261,6 +1297,7 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::Initialize(
DateTimeStyle date_style = DateTimeStyle::kUndefined;
DateTimeStyle time_style = DateTimeStyle::kUndefined;
std::unique_ptr<icu::SimpleDateFormat> icu_date_format;
+ std::unique_ptr<icu::DateIntervalFormat> icu_date_interval_format;
if (FLAG_harmony_intl_datetime_style) {
// 28. Let dateStyle be ? GetOption(options, "dateStyle", "string", «
@@ -1303,6 +1340,10 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::Initialize(
time_style != DateTimeStyle::kUndefined) {
icu_date_format = DateTimeStylePattern(date_style, time_style, icu_locale,
hc, *generator);
+ if (FLAG_harmony_intl_date_format_range) {
+ icu_date_interval_format = CreateICUDateIntervalFormat(
+ icu_locale, SkeletonFromDateFormat(*icu_date_format));
+ }
}
}
// 33. Else,
@@ -1356,6 +1397,10 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::Initialize(
FATAL("Failed to create ICU date format, are ICU data files missing?");
}
}
+ if (FLAG_harmony_intl_date_format_range) {
+ icu_date_interval_format =
+ CreateICUDateIntervalFormat(icu_locale, skeleton_ustr);
+ }
// g. If dateTimeFormat.[[Hour]] is not undefined, then
if (!has_hour_option) {
@@ -1390,7 +1435,7 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::Initialize(
Intl::ToHourCycle(hc_extension_it->second.c_str())) {
// Remove -hc- if it does not agree with what we used.
UErrorCode status = U_ZERO_ERROR;
- icu_locale.setKeywordValue(uloc_toLegacyKey("hc"), nullptr, status);
+ icu_locale.setUnicodeKeywordValue("hc", nullptr, status);
CHECK(U_SUCCESS(status));
}
}
@@ -1404,6 +1449,12 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::Initialize(
Managed<icu::SimpleDateFormat>::FromUniquePtr(isolate, 0,
std::move(icu_date_format));
date_time_format->set_icu_simple_date_format(*managed_format);
+ if (FLAG_harmony_intl_date_format_range) {
+ Handle<Managed<icu::DateIntervalFormat>> managed_interval_format =
+ Managed<icu::DateIntervalFormat>::FromUniquePtr(
+ isolate, 0, std::move(icu_date_interval_format));
+ date_time_format->set_icu_date_interval_format(*managed_interval_format);
+ }
return date_time_format;
}
@@ -1462,7 +1513,7 @@ Handle<String> IcuDateFieldIdToDateType(int32_t field_id, Isolate* isolate) {
} // namespace
-MaybeHandle<Object> JSDateTimeFormat::FormatToParts(
+MaybeHandle<JSArray> JSDateTimeFormat::FormatToParts(
Isolate* isolate, Handle<JSDateTimeFormat> date_time_format,
double date_value) {
Factory* factory = isolate->factory();
@@ -1476,7 +1527,7 @@ MaybeHandle<Object> JSDateTimeFormat::FormatToParts(
UErrorCode status = U_ZERO_ERROR;
format->format(date_value, formatted, &fp_iter, status);
if (U_FAILURE(status)) {
- THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), Object);
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), JSArray);
}
Handle<JSArray> result = factory->NewJSArray(0);
@@ -1494,14 +1545,14 @@ MaybeHandle<Object> JSDateTimeFormat::FormatToParts(
ASSIGN_RETURN_ON_EXCEPTION(
isolate, substring,
Intl::ToString(isolate, formatted, previous_end_pos, begin_pos),
- Object);
+ JSArray);
Intl::AddElement(isolate, result, index,
IcuDateFieldIdToDateType(-1, isolate), substring);
++index;
}
ASSIGN_RETURN_ON_EXCEPTION(
isolate, substring,
- Intl::ToString(isolate, formatted, begin_pos, end_pos), Object);
+ Intl::ToString(isolate, formatted, begin_pos, end_pos), JSArray);
Intl::AddElement(isolate, result, index,
IcuDateFieldIdToDateType(fp.getField(), isolate),
substring);
@@ -1511,7 +1562,7 @@ MaybeHandle<Object> JSDateTimeFormat::FormatToParts(
if (previous_end_pos < length) {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, substring,
- Intl::ToString(isolate, formatted, previous_end_pos, length), Object);
+ Intl::ToString(isolate, formatted, previous_end_pos, length), JSArray);
Intl::AddElement(isolate, result, index,
IcuDateFieldIdToDateType(-1, isolate), substring);
}
@@ -1540,5 +1591,76 @@ Handle<String> JSDateTimeFormat::HourCycleAsString() const {
}
}
+MaybeHandle<String> JSDateTimeFormat::FormatRange(
+ Isolate* isolate, Handle<JSDateTimeFormat> date_time_format, double x,
+ double y) {
+ // TODO(ftang): Merge the following with FormatRangeToParts after
+ // the landing of ICU64 to make it cleaner.
+
+ // #sec-partitiondatetimerangepattern
+ // 1. Let x be TimeClip(x).
+ x = DateCache::TimeClip(x);
+ // 2. If x is NaN, throw a RangeError exception.
+ if (std::isnan(x)) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidTimeValue),
+ String);
+ }
+ // 3. Let y be TimeClip(y).
+ y = DateCache::TimeClip(y);
+ // 4. If y is NaN, throw a RangeError exception.
+ if (std::isnan(y)) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidTimeValue),
+ String);
+ }
+
+ icu::DateIntervalFormat* date_interval_format =
+ date_time_format->icu_date_interval_format()->raw();
+ CHECK_NOT_NULL(date_interval_format);
+ icu::DateInterval interval(x, y);
+
+ icu::UnicodeString result;
+ icu::FieldPosition fpos;
+ UErrorCode status = U_ZERO_ERROR;
+ date_interval_format->format(&interval, result, fpos, status);
+ CHECK(U_SUCCESS(status));
+
+ return Intl::ToString(isolate, result);
+}
+
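
For orientation, a hedged standalone sketch of the ICU 64 calls that FormatRange wires together; the skeleton, locale, and error handling here are simplified assumptions, not what Initialize computes.

#include "unicode/dtitvfmt.h"
#include "unicode/fieldpos.h"
#include "unicode/locid.h"
#include "unicode/unistr.h"
#include <memory>
#include <string>

std::string FormatRangeExample(double start_ms, double end_ms) {
  UErrorCode status = U_ZERO_ERROR;
  std::unique_ptr<icu::DateIntervalFormat> fmt(
      icu::DateIntervalFormat::createInstance(
          icu::UnicodeString("yMMMd"), icu::Locale("en"), status));
  if (U_FAILURE(status) || fmt == nullptr) return std::string();
  icu::DateInterval interval(start_ms, end_ms);  // epoch millis, as after TimeClip
  icu::UnicodeString formatted;
  icu::FieldPosition fpos;
  UErrorCode format_status = U_ZERO_ERROR;
  fmt->format(&interval, formatted, fpos, format_status);
  if (U_FAILURE(format_status)) return std::string();
  std::string utf8;
  formatted.toUTF8String(utf8);
  return utf8;  // roughly "Jan 3 - 5, 2019"; exact text depends on CLDR data
}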
+MaybeHandle<JSArray> JSDateTimeFormat::FormatRangeToParts(
+ Isolate* isolate, Handle<JSDateTimeFormat> date_time_format, double x,
+ double y) {
+  // TODO(ftang): Merge the following with FormatRange after
+  // the landing of ICU64 to make it cleaner.
+
+ // #sec-partitiondatetimerangepattern
+ // 1. Let x be TimeClip(x).
+ x = DateCache::TimeClip(x);
+ // 2. If x is NaN, throw a RangeError exception.
+ if (std::isnan(x)) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidTimeValue),
+ JSArray);
+ }
+ // 3. Let y be TimeClip(y).
+ y = DateCache::TimeClip(y);
+ // 4. If y is NaN, throw a RangeError exception.
+ if (std::isnan(y)) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidTimeValue),
+ JSArray);
+ }
+
+ icu::DateIntervalFormat* date_interval_format =
+ date_time_format->icu_date_interval_format()->raw();
+ CHECK_NOT_NULL(date_interval_format);
+ Factory* factory = isolate->factory();
+ Handle<JSArray> result = factory->NewJSArray(0);
+
+  // TODO(ftang): Implement this once ICU64, which supports
+  // DateIntervalFormat::formatToValue() and FormattedDateInterval, has landed.
+
+ JSObject::ValidateElements(*result);
+ return result;
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/js-date-time-format.h b/deps/v8/src/objects/js-date-time-format.h
index 5909258d84..cf73af2aa8 100644
--- a/deps/v8/src/objects/js-date-time-format.h
+++ b/deps/v8/src/objects/js-date-time-format.h
@@ -21,6 +21,7 @@
#include "src/objects/object-macros.h"
namespace U_ICU_NAMESPACE {
+class DateIntervalFormat;
class Locale;
class SimpleDateFormat;
} // namespace U_ICU_NAMESPACE
@@ -56,10 +57,21 @@ class JSDateTimeFormat : public JSObject {
Isolate* isolate, Handle<JSDateTimeFormat> date_time_format,
Handle<Object> date);
- V8_WARN_UNUSED_RESULT static MaybeHandle<Object> FormatToParts(
+ // ecma402/#sec-Intl.DateTimeFormat.prototype.formatToParts
+ V8_WARN_UNUSED_RESULT static MaybeHandle<JSArray> FormatToParts(
Isolate* isolate, Handle<JSDateTimeFormat> date_time_format,
double date_value);
+ // ecma402/#sec-intl.datetimeformat.prototype.formatRange
+ V8_WARN_UNUSED_RESULT static MaybeHandle<String> FormatRange(
+ Isolate* isolate, Handle<JSDateTimeFormat> date_time_format,
+ double x_date_value, double y_date_value);
+
+ // ecma402/sec-Intl.DateTimeFormat.prototype.formatRangeToParts
+ V8_WARN_UNUSED_RESULT static MaybeHandle<JSArray> FormatRangeToParts(
+ Isolate* isolate, Handle<JSDateTimeFormat> date_time_format,
+ double x_date_value, double y_date_value);
+
// ecma-402/#sec-todatetimeoptions
enum class RequiredOption { kDate, kTime, kAny };
enum class DefaultsOption { kDate, kTime, kAll };
@@ -71,7 +83,7 @@ class JSDateTimeFormat : public JSObject {
Isolate* isolate, Handle<Object> date, Handle<Object> locales,
Handle<Object> options, RequiredOption required, DefaultsOption defaults);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
Handle<String> HourCycleAsString() const;
DECL_CAST(JSDateTimeFormat)
@@ -80,12 +92,13 @@ class JSDateTimeFormat : public JSObject {
enum class DateTimeStyle { kUndefined, kFull, kLong, kMedium, kShort };
// Layout description.
-#define JS_DATE_TIME_FORMAT_FIELDS(V) \
- V(kICULocaleOffset, kTaggedSize) \
- V(kICUSimpleDateFormatOffset, kTaggedSize) \
- V(kBoundFormatOffset, kTaggedSize) \
- V(kFlagsOffset, kTaggedSize) \
- /* Total size. */ \
+#define JS_DATE_TIME_FORMAT_FIELDS(V) \
+ V(kICULocaleOffset, kTaggedSize) \
+ V(kICUSimpleDateFormatOffset, kTaggedSize) \
+ V(kICUDateIntervalFormatOffset, kTaggedSize) \
+ V(kBoundFormatOffset, kTaggedSize) \
+ V(kFlagsOffset, kTaggedSize) \
+ /* Total size. */ \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@@ -130,6 +143,7 @@ class JSDateTimeFormat : public JSObject {
DECL_ACCESSORS(icu_locale, Managed<icu::Locale>)
DECL_ACCESSORS(icu_simple_date_format, Managed<icu::SimpleDateFormat>)
+ DECL_ACCESSORS(icu_date_interval_format, Managed<icu::DateIntervalFormat>)
DECL_ACCESSORS(bound_format, Object)
DECL_INT_ACCESSORS(flags)
diff --git a/deps/v8/src/objects/js-generator.h b/deps/v8/src/objects/js-generator.h
index e2a48810a9..0e99d824ab 100644
--- a/deps/v8/src/objects/js-generator.h
+++ b/deps/v8/src/objects/js-generator.h
@@ -68,19 +68,8 @@ class JSGeneratorObject : public JSObject {
static const int kGeneratorClosed = -1;
// Layout description.
-#define JS_GENERATOR_FIELDS(V) \
- V(kFunctionOffset, kTaggedSize) \
- V(kContextOffset, kTaggedSize) \
- V(kReceiverOffset, kTaggedSize) \
- V(kInputOrDebugPosOffset, kTaggedSize) \
- V(kResumeModeOffset, kTaggedSize) \
- V(kContinuationOffset, kTaggedSize) \
- V(kParametersAndRegistersOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_GENERATOR_FIELDS)
-#undef JS_GENERATOR_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSGENERATOR_OBJECT_FIELDS)
OBJECT_CONSTRUCTORS(JSGeneratorObject, JSObject);
};
@@ -96,14 +85,8 @@ class JSAsyncFunctionObject : public JSGeneratorObject {
DECL_ACCESSORS(promise, JSPromise)
// Layout description.
-#define JS_ASYNC_FUNCTION_FIELDS(V) \
- V(kPromiseOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
DEFINE_FIELD_OFFSET_CONSTANTS(JSGeneratorObject::kSize,
- JS_ASYNC_FUNCTION_FIELDS)
-#undef JS_ASYNC_FUNCTION_FIELDS
+ TORQUE_GENERATED_JSASYNC_FUNCTION_OBJECT_FIELDS)
OBJECT_CONSTRUCTORS(JSAsyncFunctionObject, JSGeneratorObject);
};
@@ -125,14 +108,9 @@ class JSAsyncGeneratorObject : public JSGeneratorObject {
DECL_INT_ACCESSORS(is_awaiting)
// Layout description.
-#define JS_ASYNC_GENERATOR_FIELDS(V) \
- V(kQueueOffset, kTaggedSize) \
- V(kIsAwaitingOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSGeneratorObject::kSize,
- JS_ASYNC_GENERATOR_FIELDS)
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSGeneratorObject::kSize,
+ TORQUE_GENERATED_JSASYNC_GENERATOR_OBJECT_FIELDS)
#undef JS_ASYNC_GENERATOR_FIELDS
OBJECT_CONSTRUCTORS(JSAsyncGeneratorObject, JSGeneratorObject);
diff --git a/deps/v8/src/objects/js-list-format.cc b/deps/v8/src/objects/js-list-format.cc
index dd7ab172af..c4329401a4 100644
--- a/deps/v8/src/objects/js-list-format.cc
+++ b/deps/v8/src/objects/js-list-format.cc
@@ -33,7 +33,10 @@ const char* kStandard = "standard";
const char* kOr = "or";
const char* kUnit = "unit";
const char* kStandardShort = "standard-short";
+const char* kOrShort = "or-short";
const char* kUnitShort = "unit-short";
+const char* kStandardNarrow = "standard-narrow";
+const char* kOrNarrow = "or-narrow";
const char* kUnitNarrow = "unit-narrow";
const char* GetIcuStyleString(JSListFormat::Style style,
@@ -45,29 +48,19 @@ const char* GetIcuStyleString(JSListFormat::Style style,
return kStandard;
case JSListFormat::Style::SHORT:
return kStandardShort;
- // NARROW is now not allowed if type is not unit
- // It is impossible to reach because we've already thrown a RangeError
- // when style is "narrow" and type is not "unit".
case JSListFormat::Style::NARROW:
+ return kStandardNarrow;
case JSListFormat::Style::COUNT:
UNREACHABLE();
}
case JSListFormat::Type::DISJUNCTION:
switch (style) {
- // Currently, ListFormat::createInstance on "or-short"
- // will fail so we use "or" here.
- // See https://unicode.org/cldr/trac/ticket/11254
- // TODO(ftang): change to return kOr or kOrShort depend on
- // style after the above issue fixed in CLDR/ICU.
- // CLDR bug: https://unicode.org/cldr/trac/ticket/11254
- // ICU bug: https://unicode-org.atlassian.net/browse/ICU-20014
case JSListFormat::Style::LONG:
- case JSListFormat::Style::SHORT:
return kOr;
- // NARROW is now not allowed if type is not unit
- // It is impossible to reach because we've already thrown a RangeError
- // when style is "narrow" and type is not "unit".
+ case JSListFormat::Style::SHORT:
+ return kOrShort;
case JSListFormat::Style::NARROW:
+ return kOrNarrow;
case JSListFormat::Style::COUNT:
UNREACHABLE();
}
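
The patch completes the (type, style) to ICU list-pattern mapping. In this sketch the enum and function names are local stand-ins; only the pattern-name strings come from the code above.

#include <string>

enum class ListType { kConjunction, kDisjunction, kUnit };
enum class ListStyle { kLong, kShort, kNarrow };

std::string IcuStyleString(ListType type, ListStyle style) {
  switch (type) {
    case ListType::kConjunction:
      return style == ListStyle::kLong    ? "standard"
             : style == ListStyle::kShort ? "standard-short"
                                          : "standard-narrow";
    case ListType::kDisjunction:
      return style == ListStyle::kLong    ? "or"
             : style == ListStyle::kShort ? "or-short"
                                          : "or-narrow";
    case ListType::kUnit:
      return style == ListStyle::kLong    ? "unit"
             : style == ListStyle::kShort ? "unit-short"
                                          : "unit-narrow";
  }
  return "standard";  // unreachable with the enums above
}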
@@ -181,11 +174,7 @@ MaybeHandle<JSListFormat> JSListFormat::Initialize(
// 13. Set listFormat.[[Type]] to t.
list_format->set_type(type_enum);
- // NOTE: Keep the old way of GetOptions on style for now. I discover a
- // disadvantage of following the lastest spec and propose to rollback that
- // part in https://github.com/tc39/proposal-intl-list-format/pull/40
-
- // Let s be ? GetOption(options, "style", "string",
+ // 14. Let s be ? GetOption(options, "style", "string",
// «"long", "short", "narrow"», "long").
Maybe<Style> maybe_style = Intl::GetStringOption<Style>(
isolate, options, "style", "Intl.ListFormat", {"long", "short", "narrow"},
@@ -193,15 +182,7 @@ MaybeHandle<JSListFormat> JSListFormat::Initialize(
MAYBE_RETURN(maybe_style, MaybeHandle<JSListFormat>());
Style style_enum = maybe_style.FromJust();
- // If _style_ is `"narrow"` and _type_ is not `"unit"`, throw a *RangeError*
- // exception.
- if (style_enum == Style::NARROW && type_enum != Type::UNIT) {
- THROW_NEW_ERROR(
- isolate, NewRangeError(MessageTemplate::kIllegalTypeWhileStyleNarrow),
- JSListFormat);
- }
-
- // 17. Set listFormat.[[Style]] to s.
+ // 15. Set listFormat.[[Style]] to s.
list_format->set_style(style_enum);
icu::Locale icu_locale = r.icu_locale;
@@ -273,78 +254,6 @@ Handle<String> JSListFormat::TypeAsString() const {
namespace {
-MaybeHandle<JSArray> GenerateListFormatParts(
- Isolate* isolate, const icu::UnicodeString& formatted,
- const std::vector<icu::FieldPosition>& positions) {
- Factory* factory = isolate->factory();
- Handle<JSArray> array =
- factory->NewJSArray(static_cast<int>(positions.size()));
- int index = 0;
- int prev_item_end_index = 0;
- Handle<String> substring;
- for (const icu::FieldPosition pos : positions) {
- CHECK(pos.getBeginIndex() >= prev_item_end_index);
- CHECK(pos.getField() == ULISTFMT_ELEMENT_FIELD);
- if (pos.getBeginIndex() != prev_item_end_index) {
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, substring,
- Intl::ToString(isolate, formatted, prev_item_end_index,
- pos.getBeginIndex()),
- JSArray);
- Intl::AddElement(isolate, array, index++, factory->literal_string(),
- substring);
- }
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, substring,
- Intl::ToString(isolate, formatted, pos.getBeginIndex(),
- pos.getEndIndex()),
- JSArray);
- Intl::AddElement(isolate, array, index++, factory->element_string(),
- substring);
- prev_item_end_index = pos.getEndIndex();
- }
- if (prev_item_end_index != formatted.length()) {
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, substring,
- Intl::ToString(isolate, formatted, prev_item_end_index,
- formatted.length()),
- JSArray);
- Intl::AddElement(isolate, array, index++, factory->literal_string(),
- substring);
- }
- return array;
-}
-
-// Get all the FieldPosition into a vector from FieldPositionIterator and return
-// them in output order.
-std::vector<icu::FieldPosition> GenerateFieldPosition(
- icu::FieldPositionIterator iter) {
- std::vector<icu::FieldPosition> positions;
- icu::FieldPosition pos;
- while (iter.next(pos)) {
- // Only take the information of the ULISTFMT_ELEMENT_FIELD field.
- if (pos.getField() == ULISTFMT_ELEMENT_FIELD) {
- positions.push_back(pos);
- }
- }
- // Because the format may reoder the items, ICU FieldPositionIterator
- // keep the order for FieldPosition based on the order of the input items.
- // But the formatToParts API in ECMA402 expects in formatted output order.
- // Therefore we have to sort based on beginIndex of the FieldPosition.
- // Example of such is in the "ur" (Urdu) locale with type: "unit", where the
- // main text flows from right to left, the formatted list of unit should flow
- // from left to right and therefore in the memory the formatted result will
- // put the first item on the last in the result string according the current
- // CLDR patterns.
- // See 'listPattern' pattern in
- // third_party/icu/source/data/locales/ur_IN.txt
- std::sort(positions.begin(), positions.end(),
- [](icu::FieldPosition a, icu::FieldPosition b) {
- return a.getBeginIndex() < b.getBeginIndex();
- });
- return positions;
-}
-
// Extract String from JSArray into array of UnicodeString
Maybe<std::vector<icu::UnicodeString>> ToUnicodeStringArray(
Isolate* isolate, Handle<JSArray> array) {
@@ -384,64 +293,103 @@ Maybe<std::vector<icu::UnicodeString>> ToUnicodeStringArray(
return Just(result);
}
-} // namespace
-
-// ecma402 #sec-formatlist
-MaybeHandle<String> JSListFormat::FormatList(Isolate* isolate,
- Handle<JSListFormat> format,
- Handle<JSArray> list) {
+template <typename T>
+MaybeHandle<T> FormatListCommon(
+ Isolate* isolate, Handle<JSListFormat> format, Handle<JSArray> list,
+ MaybeHandle<T> (*formatToResult)(Isolate*, const icu::FormattedList&)) {
DCHECK(!list->IsUndefined());
// ecma402 #sec-createpartsfromlist
// 2. If list contains any element value such that Type(value) is not String,
// throw a TypeError exception.
Maybe<std::vector<icu::UnicodeString>> maybe_array =
ToUnicodeStringArray(isolate, list);
- MAYBE_RETURN(maybe_array, Handle<String>());
+ MAYBE_RETURN(maybe_array, Handle<T>());
std::vector<icu::UnicodeString> array = maybe_array.FromJust();
icu::ListFormatter* formatter = format->icu_formatter()->raw();
CHECK_NOT_NULL(formatter);
UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString formatted;
- formatter->format(array.data(), static_cast<int32_t>(array.size()), formatted,
- status);
- DCHECK(U_SUCCESS(status));
+ icu::FormattedList formatted = formatter->formatStringsToValue(
+ array.data(), static_cast<int32_t>(array.size()), status);
+ if (U_FAILURE(status)) {
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), T);
+ }
+ return formatToResult(isolate, formatted);
+}
- return Intl::ToString(isolate, formatted);
+// A helper function to convert the FormattedList to a
+// MaybeHandle<String> for the implementation of format.
+MaybeHandle<String> FormattedToString(Isolate* isolate,
+ const icu::FormattedList& formatted) {
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString result = formatted.toString(status);
+ if (U_FAILURE(status)) {
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), String);
+ }
+ return Intl::ToString(isolate, result);
}
-const std::set<std::string>& JSListFormat::GetAvailableLocales() {
- // Since ListFormatter does not have a method to list all supported
- // locales, use the one in icu::Locale per comments in
- // ICU FR at https://unicode-org.atlassian.net/browse/ICU-20015
- return Intl::GetAvailableLocalesForLocale();
+Handle<String> IcuFieldIdToType(Isolate* isolate, int32_t field_id) {
+ switch (field_id) {
+ case ULISTFMT_LITERAL_FIELD:
+ return isolate->factory()->literal_string();
+ case ULISTFMT_ELEMENT_FIELD:
+ return isolate->factory()->element_string();
+ default:
+ UNREACHABLE();
+      // To prevent MSVC from issuing a C4715 warning.
+ return Handle<String>();
+ }
+}
+
+// A helper function to convert the FormattedList to a
+// MaybeHandle<JSArray> for the implementation of formatToParts.
+MaybeHandle<JSArray> FormattedToJSArray(Isolate* isolate,
+ const icu::FormattedList& formatted) {
+ Handle<JSArray> array = isolate->factory()->NewJSArray(0);
+ icu::ConstrainedFieldPosition cfpos;
+ cfpos.constrainCategory(UFIELD_CATEGORY_LIST);
+ int index = 0;
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString string = formatted.toString(status);
+ Handle<String> substring;
+ while (formatted.nextPosition(cfpos, status) && U_SUCCESS(status)) {
+ ASSIGN_RETURN_ON_EXCEPTION(
+ isolate, substring,
+ Intl::ToString(isolate, string, cfpos.getStart(), cfpos.getLimit()),
+ JSArray);
+ Intl::AddElement(isolate, array, index++,
+ IcuFieldIdToType(isolate, cfpos.getField()), substring);
+ }
+ if (U_FAILURE(status)) {
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), JSArray);
+ }
+ JSObject::ValidateElements(*array);
+ return array;
+}
+
+} // namespace
+
+// ecma402 #sec-formatlist
+MaybeHandle<String> JSListFormat::FormatList(Isolate* isolate,
+ Handle<JSListFormat> format,
+ Handle<JSArray> list) {
+ return FormatListCommon<String>(isolate, format, list, FormattedToString);
}
// ecma402 #sec-formatlisttoparts
MaybeHandle<JSArray> JSListFormat::FormatListToParts(
Isolate* isolate, Handle<JSListFormat> format, Handle<JSArray> list) {
- DCHECK(!list->IsUndefined());
- // ecma402 #sec-createpartsfromlist
- // 2. If list contains any element value such that Type(value) is not String,
- // throw a TypeError exception.
- Maybe<std::vector<icu::UnicodeString>> maybe_array =
- ToUnicodeStringArray(isolate, list);
- MAYBE_RETURN(maybe_array, Handle<JSArray>());
- std::vector<icu::UnicodeString> array = maybe_array.FromJust();
-
- icu::ListFormatter* formatter = format->icu_formatter()->raw();
- CHECK_NOT_NULL(formatter);
+ return FormatListCommon<JSArray>(isolate, format, list, FormattedToJSArray);
+}
- UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString formatted;
- icu::FieldPositionIterator iter;
- formatter->format(array.data(), static_cast<int32_t>(array.size()), formatted,
- &iter, status);
- DCHECK(U_SUCCESS(status));
-
- std::vector<icu::FieldPosition> field_positions = GenerateFieldPosition(iter);
- return GenerateListFormatParts(isolate, formatted, field_positions);
+const std::set<std::string>& JSListFormat::GetAvailableLocales() {
+ // Since ListFormatter does not have a method to list all supported
+ // locales, use the one in icu::Locale per comments in
+ // ICU FR at https://unicode-org.atlassian.net/browse/ICU-20015
+ return Intl::GetAvailableLocalesForLocale();
}
+
} // namespace internal
} // namespace v8
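For readers unfamiliar with the ICU API adopted above, here is a minimal standalone sketch (assuming ICU 64 or newer; not part of this patch) of the FormattedList / ConstrainedFieldPosition iteration that the new FormattedToJSArray helper builds on. The locale and list items are arbitrary examples.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

#include "unicode/formattedvalue.h"
#include "unicode/listformatter.h"
#include "unicode/locid.h"
#include "unicode/ulistformatter.h"
#include "unicode/unistr.h"

int main() {
  UErrorCode status = U_ZERO_ERROR;
  std::unique_ptr<icu::ListFormatter> formatter(
      icu::ListFormatter::createInstance(icu::Locale("en"), status));
  if (U_FAILURE(status)) return 1;

  std::vector<icu::UnicodeString> items = {icu::UnicodeString(u"Motorcycle"),
                                           icu::UnicodeString(u"Bus"),
                                           icu::UnicodeString(u"Car")};
  icu::FormattedList formatted = formatter->formatStringsToValue(
      items.data(), static_cast<int32_t>(items.size()), status);
  if (U_FAILURE(status)) return 1;

  // Walk the element/literal fields in formatted-output order, the same way
  // FormattedToJSArray does when building the formatToParts array.
  icu::UnicodeString result = formatted.toString(status);
  icu::ConstrainedFieldPosition cfpos;
  cfpos.constrainCategory(UFIELD_CATEGORY_LIST);
  while (formatted.nextPosition(cfpos, status) && U_SUCCESS(status)) {
    icu::UnicodeString part(result, cfpos.getStart(),
                            cfpos.getLimit() - cfpos.getStart());
    std::string utf8;
    part.toUTF8String(utf8);
    const char* type =
        cfpos.getField() == ULISTFMT_ELEMENT_FIELD ? "element" : "literal";
    std::cout << type << ": " << utf8 << "\n";  // e.g. "element: Motorcycle"
  }
  return U_SUCCESS(status) ? 0 : 1;
}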
diff --git a/deps/v8/src/objects/js-list-format.h b/deps/v8/src/objects/js-list-format.h
index 3880f6faeb..ee576b3ff2 100644
--- a/deps/v8/src/objects/js-list-format.h
+++ b/deps/v8/src/objects/js-list-format.h
@@ -49,7 +49,7 @@ class JSListFormat : public JSObject {
Isolate* isolate, Handle<JSListFormat> format_holder,
Handle<JSArray> list);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
Handle<String> StyleAsString() const;
Handle<String> TypeAsString() const;
@@ -106,7 +106,6 @@ class JSListFormat : public JSObject {
// Layout description.
#define JS_LIST_FORMAT_FIELDS(V) \
- V(kJSListFormatOffset, kTaggedSize) \
V(kLocaleOffset, kTaggedSize) \
V(kICUFormatterOffset, kTaggedSize) \
V(kFlagsOffset, kTaggedSize) \
diff --git a/deps/v8/src/objects/js-locale.cc b/deps/v8/src/objects/js-locale.cc
index 94b4cb2aba..4e35c16b0f 100644
--- a/deps/v8/src/objects/js-locale.cc
+++ b/deps/v8/src/objects/js-locale.cc
@@ -98,15 +98,12 @@ Maybe<bool> InsertOptionsIntoLocale(Isolate* isolate,
}
DCHECK_NOT_NULL(value_str.get());
- // Convert bcp47 key and value into legacy ICU format so we can use
- // uloc_setKeywordValue.
- const char* key = uloc_toLegacyKey(option_to_bcp47.key);
- DCHECK_NOT_NULL(key);
-
// Overwrite existing, or insert new key-value to the locale string.
- const char* value = uloc_toLegacyType(key, value_str.get());
- if (value) {
- icu_locale->setKeywordValue(key, value, status);
+ if (uloc_toLegacyType(uloc_toLegacyKey(option_to_bcp47.key),
+ value_str.get())) {
+ // Only call setUnicodeKeywordValue if that value is a valid one.
+ icu_locale->setUnicodeKeywordValue(option_to_bcp47.key, value_str.get(),
+ status);
if (U_FAILURE(status)) {
return Just(false);
}
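The hunk above keeps the uloc_toLegacyKey / uloc_toLegacyType round trip purely as a validity check and then writes the keyword through the BCP-47 aware Locale::setUnicodeKeywordValue instead of converting to the legacy form. A standalone sketch of the same pattern (assuming ICU 63 or newer; the "kf"/"upper" key/value pair is just an example):

#include <iostream>
#include <string>

#include "unicode/locid.h"
#include "unicode/uloc.h"

int main() {
  UErrorCode status = U_ZERO_ERROR;
  icu::Locale locale("de-DE");

  const char* bcp47_key = "kf";       // collation case-first, example only
  const char* bcp47_value = "upper";

  // Only write the keyword if ICU recognizes the key/value pair, mirroring
  // the InsertOptionsIntoLocale change above.
  if (uloc_toLegacyType(uloc_toLegacyKey(bcp47_key), bcp47_value)) {
    locale.setUnicodeKeywordValue(bcp47_key, bcp47_value, status);
  }

  std::string tag = locale.toLanguageTag<std::string>(status);
  if (U_SUCCESS(status)) std::cout << tag << "\n";  // e.g. de-DE-u-kf-upper
  return U_SUCCESS(status) ? 0 : 1;
}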
diff --git a/deps/v8/src/objects/js-number-format.cc b/deps/v8/src/objects/js-number-format.cc
index fda9a940d6..c490eeef57 100644
--- a/deps/v8/src/objects/js-number-format.cc
+++ b/deps/v8/src/objects/js-number-format.cc
@@ -83,9 +83,6 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
icu::NumberFormat* number_format =
number_format_holder->icu_number_format()->raw();
CHECK_NOT_NULL(number_format);
- icu::DecimalFormat* decimal_format =
- static_cast<icu::DecimalFormat*>(number_format);
- CHECK_NOT_NULL(decimal_format);
Handle<String> locale =
Handle<String>(number_format_holder->locale(), isolate);
@@ -159,6 +156,11 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
factory->NewNumberFromInt(number_format->getMaximumFractionDigits()),
Just(kDontThrow))
.FromJust());
+ CHECK(number_format->getDynamicClassID() ==
+ icu::DecimalFormat::getStaticClassID());
+ icu::DecimalFormat* decimal_format =
+ static_cast<icu::DecimalFormat*>(number_format);
+ CHECK_NOT_NULL(decimal_format);
if (decimal_format->areSignificantDigitsUsed()) {
CHECK(JSReceiver::CreateDataProperty(
isolate, options, factory->minimumSignificantDigits_string(),
@@ -335,22 +337,52 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::Initialize(
UErrorCode status = U_ZERO_ERROR;
std::unique_ptr<icu::NumberFormat> icu_number_format;
+ icu::Locale no_extension_locale(r.icu_locale.getBaseName());
if (style == Style::DECIMAL) {
icu_number_format.reset(
icu::NumberFormat::createInstance(r.icu_locale, status));
+    // If the subclass is not DecimalFormat, fall back to the no-extension
+    // locale because the other subclasses do not yet support format() with
+    // a FieldPositionIterator.
+ if (U_FAILURE(status) || icu_number_format.get() == nullptr ||
+ icu_number_format->getDynamicClassID() !=
+ icu::DecimalFormat::getStaticClassID()) {
+ status = U_ZERO_ERROR;
+ icu_number_format.reset(
+ icu::NumberFormat::createInstance(no_extension_locale, status));
+ }
} else if (style == Style::PERCENT) {
icu_number_format.reset(
icu::NumberFormat::createPercentInstance(r.icu_locale, status));
+    // If the subclass is not DecimalFormat, fall back to the no-extension
+    // locale because the other subclasses do not yet support format() with
+    // a FieldPositionIterator.
+ if (U_FAILURE(status) || icu_number_format.get() == nullptr ||
+ icu_number_format->getDynamicClassID() !=
+ icu::DecimalFormat::getStaticClassID()) {
+ status = U_ZERO_ERROR;
+ icu_number_format.reset(icu::NumberFormat::createPercentInstance(
+ no_extension_locale, status));
+ }
} else {
DCHECK_EQ(style, Style::CURRENCY);
icu_number_format.reset(
icu::NumberFormat::createInstance(r.icu_locale, format_style, status));
+    // If the subclass is not DecimalFormat, fall back to the no-extension
+    // locale because the other subclasses do not yet support format() with
+    // a FieldPositionIterator.
+ if (U_FAILURE(status) || icu_number_format.get() == nullptr ||
+ icu_number_format->getDynamicClassID() !=
+ icu::DecimalFormat::getStaticClassID()) {
+ status = U_ZERO_ERROR;
+ icu_number_format.reset(icu::NumberFormat::createInstance(
+ no_extension_locale, format_style, status));
+ }
}
if (U_FAILURE(status) || icu_number_format.get() == nullptr) {
status = U_ZERO_ERROR;
// Remove extensions and try again.
- icu::Locale no_extension_locale(r.icu_locale.getBaseName());
icu_number_format.reset(
icu::NumberFormat::createInstance(no_extension_locale, status));
@@ -360,6 +392,8 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::Initialize(
}
DCHECK(U_SUCCESS(status));
CHECK_NOT_NULL(icu_number_format.get());
+ CHECK(icu_number_format->getDynamicClassID() ==
+ icu::DecimalFormat::getStaticClassID());
if (style == Style::CURRENCY) {
// 19. If style is "currency", set numberFormat.[[CurrencyDisplay]] to
// currencyDisplay.
@@ -396,6 +430,8 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::Initialize(
}
// 22. Perform ? SetNumberFormatDigitOptions(numberFormat, options,
// mnfdDefault, mxfdDefault).
+ CHECK(icu_number_format->getDynamicClassID() ==
+ icu::DecimalFormat::getStaticClassID());
icu::DecimalFormat* icu_decimal_format =
static_cast<icu::DecimalFormat*>(icu_number_format.get());
Maybe<bool> maybe_set_number_for_digit_options =
@@ -523,64 +559,6 @@ bool cmp_NumberFormatSpan(const NumberFormatSpan& a,
return a.field_id < b.field_id;
}
-// The list comes from third_party/icu/source/i18n/unicode/unum.h.
-// They're mapped to NumberFormat part types mentioned throughout
-// https://tc39.github.io/ecma402/#sec-partitionnumberpattern .
-Handle<String> IcuNumberFieldIdToNumberType(int32_t field_id,
- Handle<Object> numeric_obj,
- Isolate* isolate) {
- DCHECK(numeric_obj->IsNumeric());
- switch (static_cast<UNumberFormatFields>(field_id)) {
- case UNUM_INTEGER_FIELD:
- if (numeric_obj->IsBigInt()) {
- // Neither NaN nor Infinite could be stored into BigInt
- // so just return integer.
- return isolate->factory()->integer_string();
- } else {
- double number = numeric_obj->Number();
- if (std::isfinite(number)) return isolate->factory()->integer_string();
- if (std::isnan(number)) return isolate->factory()->nan_string();
- return isolate->factory()->infinity_string();
- }
- case UNUM_FRACTION_FIELD:
- return isolate->factory()->fraction_string();
- case UNUM_DECIMAL_SEPARATOR_FIELD:
- return isolate->factory()->decimal_string();
- case UNUM_GROUPING_SEPARATOR_FIELD:
- return isolate->factory()->group_string();
- case UNUM_CURRENCY_FIELD:
- return isolate->factory()->currency_string();
- case UNUM_PERCENT_FIELD:
- return isolate->factory()->percentSign_string();
- case UNUM_SIGN_FIELD:
- if (numeric_obj->IsBigInt()) {
- Handle<BigInt> big_int = Handle<BigInt>::cast(numeric_obj);
- return big_int->IsNegative() ? isolate->factory()->minusSign_string()
- : isolate->factory()->plusSign_string();
- } else {
- double number = numeric_obj->Number();
- return number < 0 ? isolate->factory()->minusSign_string()
- : isolate->factory()->plusSign_string();
- }
- case UNUM_EXPONENT_SYMBOL_FIELD:
- case UNUM_EXPONENT_SIGN_FIELD:
- case UNUM_EXPONENT_FIELD:
- // We should never get these because we're not using any scientific
- // formatter.
- UNREACHABLE();
- return Handle<String>();
-
- case UNUM_PERMILL_FIELD:
- // We're not creating any permill formatter, and it's not even clear how
- // that would be possible with the ICU API.
- UNREACHABLE();
- return Handle<String>();
-
- default:
- UNREACHABLE();
- return Handle<String>();
- }
-}
} // namespace
// Flattens a list of possibly-overlapping "regions" to a list of
@@ -712,7 +690,7 @@ Maybe<int> JSNumberFormat::FormatToParts(Isolate* isolate,
Handle<String> field_type_string =
part.field_id == -1
? isolate->factory()->literal_string()
- : IcuNumberFieldIdToNumberType(part.field_id, numeric_obj, isolate);
+ : Intl::NumberFieldToType(isolate, numeric_obj, part.field_id);
Handle<String> substring;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate, substring,
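The repeated getDynamicClassID() checks above guard every downcast from icu::NumberFormat to icu::DecimalFormat and retry with the extension-free base locale whenever the factory returns some other subclass. A condensed standalone sketch of that guard (illustrative only; error handling is reduced to returning nullptr):

#include <memory>

#include "unicode/decimfmt.h"
#include "unicode/locid.h"
#include "unicode/numfmt.h"

// Create a DecimalFormat for |locale|, retrying with the extension-free base
// name when the factory hands back a different NumberFormat subclass.
std::unique_ptr<icu::DecimalFormat> CreateDecimalFormat(
    const icu::Locale& locale) {
  UErrorCode status = U_ZERO_ERROR;
  std::unique_ptr<icu::NumberFormat> format(
      icu::NumberFormat::createInstance(locale, status));
  if (U_FAILURE(status) || format == nullptr ||
      format->getDynamicClassID() != icu::DecimalFormat::getStaticClassID()) {
    status = U_ZERO_ERROR;
    icu::Locale no_extension_locale(locale.getBaseName());
    format.reset(
        icu::NumberFormat::createInstance(no_extension_locale, status));
  }
  if (U_FAILURE(status) || format == nullptr ||
      format->getDynamicClassID() != icu::DecimalFormat::getStaticClassID()) {
    return nullptr;
  }
  // The class-ID check makes the static_cast below safe.
  return std::unique_ptr<icu::DecimalFormat>(
      static_cast<icu::DecimalFormat*>(format.release()));
}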
diff --git a/deps/v8/src/objects/js-number-format.h b/deps/v8/src/objects/js-number-format.h
index 0f0f6342ac..6857989c22 100644
--- a/deps/v8/src/objects/js-number-format.h
+++ b/deps/v8/src/objects/js-number-format.h
@@ -64,7 +64,7 @@ class JSNumberFormat : public JSObject {
Isolate* isolate, const icu::NumberFormat& number_format,
Handle<Object> numeric_obj);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
Handle<String> StyleAsString() const;
Handle<String> CurrencyDisplayAsString() const;
@@ -143,7 +143,7 @@ struct NumberFormatSpan {
: field_id(field_id), begin_pos(begin_pos), end_pos(end_pos) {}
};
-std::vector<NumberFormatSpan> FlattenRegionsToParts(
+V8_EXPORT_PRIVATE std::vector<NumberFormatSpan> FlattenRegionsToParts(
std::vector<NumberFormatSpan>* regions);
} // namespace internal
diff --git a/deps/v8/src/objects/js-objects-inl.h b/deps/v8/src/objects/js-objects-inl.h
index 3b4313b54a..bf7076b517 100644
--- a/deps/v8/src/objects/js-objects-inl.h
+++ b/deps/v8/src/objects/js-objects-inl.h
@@ -80,14 +80,14 @@ Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
return GetDataProperty(&it);
}
-MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
- Handle<JSReceiver> receiver) {
+MaybeHandle<HeapObject> JSReceiver::GetPrototype(Isolate* isolate,
+ Handle<JSReceiver> receiver) {
// We don't expect access checks to be needed on JSProxy objects.
DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
PrototypeIterator::END_AT_NON_HIDDEN);
do {
- if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
+ if (!iter.AdvanceFollowingProxies()) return MaybeHandle<HeapObject>();
} while (!iter.IsAtEnd());
return PrototypeIterator::GetCurrent(iter);
}
@@ -458,7 +458,7 @@ ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)
ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset)
-ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)
+ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset)
ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
@@ -467,6 +467,11 @@ FeedbackVector JSFunction::feedback_vector() const {
return FeedbackVector::cast(raw_feedback_cell()->value());
}
+ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
+ DCHECK(has_closure_feedback_cell_array());
+ return ClosureFeedbackCellArray::cast(raw_feedback_cell()->value());
+}
+
// Code objects that are marked for deoptimization are not considered to be
// optimized. This is because the JSFunction might have been already
// deoptimized but its code() still needs to be unlinked, which will happen on
@@ -536,6 +541,8 @@ AbstractCode JSFunction::abstract_code() {
}
}
+int JSFunction::length() { return shared()->length(); }
+
Code JSFunction::code() const {
return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
}
@@ -584,7 +591,12 @@ void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
bool JSFunction::has_feedback_vector() const {
return shared()->is_compiled() &&
- !raw_feedback_cell()->value()->IsUndefined();
+ raw_feedback_cell()->value()->IsFeedbackVector();
+}
+
+bool JSFunction::has_closure_feedback_cell_array() const {
+ return shared()->is_compiled() &&
+ raw_feedback_cell()->value()->IsClosureFeedbackCellArray();
}
Context JSFunction::context() {
@@ -640,12 +652,12 @@ bool JSFunction::PrototypeRequiresRuntimeLookup() {
return !has_prototype_property() || map()->has_non_instance_prototype();
}
-Object JSFunction::instance_prototype() {
+HeapObject JSFunction::instance_prototype() {
DCHECK(has_instance_prototype());
if (has_initial_map()) return initial_map()->prototype();
// When there is no initial map and the prototype is a JSReceiver, the
// initial map field is used for the prototype field.
- return prototype_or_initial_map();
+ return HeapObject::cast(prototype_or_initial_map());
}
Object JSFunction::prototype() {
@@ -668,8 +680,6 @@ bool JSFunction::is_compiled() const {
}
bool JSFunction::NeedsResetDueToFlushedBytecode() {
- if (!FLAG_flush_bytecode) return false;
-
// Do a raw read for shared and code fields here since this function may be
// called on a concurrent thread and the JSFunction might not be fully
// initialized yet.
@@ -687,7 +697,7 @@ bool JSFunction::NeedsResetDueToFlushedBytecode() {
}
void JSFunction::ResetIfBytecodeFlushed() {
- if (NeedsResetDueToFlushedBytecode()) {
+ if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) {
// Bytecode was flushed and function is now uncompiled, reset JSFunction
// by setting code to CompileLazy and clearing the feedback vector.
set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
@@ -709,11 +719,11 @@ ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)
MessageTemplate JSMessageObject::type() const {
- Object value = READ_FIELD(*this, kTypeOffset);
+ Object value = READ_FIELD(*this, kMessageTypeOffset);
return MessageTemplateFromInt(Smi::ToInt(value));
}
void JSMessageObject::set_type(MessageTemplate value) {
- WRITE_FIELD(*this, kTypeOffset, Smi::FromInt(static_cast<int>(value)));
+ WRITE_FIELD(*this, kMessageTypeOffset, Smi::FromInt(static_cast<int>(value)));
}
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Script, kScriptOffset)
@@ -742,7 +752,8 @@ ElementsKind JSObject::GetElementsKind() const {
DCHECK(fixed_array->IsFixedArray());
DCHECK(fixed_array->IsDictionary());
} else {
- DCHECK(kind > DICTIONARY_ELEMENTS);
+ DCHECK(kind > DICTIONARY_ELEMENTS ||
+ IsPackedFrozenOrSealedElementsKind(kind));
}
DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
(elements()->IsFixedArray() && elements()->length() >= 2));
@@ -785,6 +796,10 @@ bool JSObject::HasPackedElements() {
return GetElementsKind() == PACKED_ELEMENTS;
}
+bool JSObject::HasFrozenOrSealedElements() {
+ return IsPackedFrozenOrSealedElementsKind(GetElementsKind());
+}
+
bool JSObject::HasFastArgumentsElements() {
return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}
@@ -954,7 +969,7 @@ Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
}
bool JSGlobalObject::IsDetached() {
- return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(*this);
+ return global_proxy()->IsDetachedFrom(*this);
}
bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject global) const {
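The PACKED_SEALED_ELEMENTS / PACKED_FROZEN_ELEMENTS support added here and in js-objects.cc boils down to one rule: frozen elements satisfy both integrity levels, sealed elements satisfy SEALED but not FROZEN, and everything else takes the slow per-element path. A toy model of that rule (standalone illustration, not V8 code):

#include <cassert>

enum class ElementsKind {
  kPackedElements,
  kPackedSealedElements,
  kPackedFrozenElements,
};

enum class IntegrityLevel { kSealed, kFrozen };

// Mirrors the early returns added to TestElementsIntegrityLevel.
bool ElementsSatisfyLevel(ElementsKind kind, IntegrityLevel level) {
  switch (kind) {
    case ElementsKind::kPackedFrozenElements:
      return true;  // frozen elements satisfy SEALED and FROZEN
    case ElementsKind::kPackedSealedElements:
      return level == IntegrityLevel::kSealed;  // sealed is not frozen
    case ElementsKind::kPackedElements:
      return false;  // ordinary elements need the per-element check
  }
  return false;
}

int main() {
  assert(ElementsSatisfyLevel(ElementsKind::kPackedFrozenElements,
                              IntegrityLevel::kFrozen));
  assert(ElementsSatisfyLevel(ElementsKind::kPackedSealedElements,
                              IntegrityLevel::kSealed));
  assert(!ElementsSatisfyLevel(ElementsKind::kPackedSealedElements,
                               IntegrityLevel::kFrozen));
  return 0;
}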
diff --git a/deps/v8/src/objects/js-objects.cc b/deps/v8/src/objects/js-objects.cc
index f515a84599..841eec0edf 100644
--- a/deps/v8/src/objects/js-objects.cc
+++ b/deps/v8/src/objects/js-objects.cc
@@ -2024,8 +2024,8 @@ MaybeHandle<JSObject> JSObject::New(Handle<JSFunction> constructor,
ASSIGN_RETURN_ON_EXCEPTION(
isolate, initial_map,
JSFunction::GetDerivedMap(isolate, constructor, new_target), JSObject);
- Handle<JSObject> result =
- isolate->factory()->NewJSObjectFromMap(initial_map, NOT_TENURED, site);
+ Handle<JSObject> result = isolate->factory()->NewJSObjectFromMap(
+ initial_map, AllocationType::kYoung, site);
if (initial_map->is_dictionary_map()) {
Handle<NameDictionary> dictionary =
NameDictionary::New(isolate, NameDictionary::kInitialCapacity);
@@ -2059,7 +2059,8 @@ MaybeHandle<JSObject> JSObject::ObjectCreate(Isolate* isolate,
void JSObject::EnsureWritableFastElements(Handle<JSObject> object) {
DCHECK(object->HasSmiOrObjectElements() ||
- object->HasFastStringWrapperElements());
+ object->HasFastStringWrapperElements() ||
+ object->HasFrozenOrSealedElements());
FixedArray raw_elems = FixedArray::cast(object->elements());
Isolate* isolate = object->GetIsolate();
if (raw_elems->map() != ReadOnlyRoots(isolate).fixed_cow_array_map()) return;
@@ -2553,6 +2554,8 @@ bool JSObject::IsUnmodifiedApiObject(FullObjectSlot o) {
if (!maybe_constructor->IsJSFunction()) return false;
JSFunction constructor = JSFunction::cast(maybe_constructor);
if (js_object->elements()->length() != 0) return false;
+ // Check that the object is not a key in a WeakMap (over-approximation).
+ if (!js_object->GetIdentityHash()->IsUndefined()) return false;
return constructor->initial_map() == heap_object->map();
}
@@ -2811,12 +2814,10 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
index, MutableHeapNumber::cast(value)->value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot.
- heap->ClearRecordedSlot(*object,
- HeapObject::RawField(*object, index.offset()));
+ heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
} else {
#ifdef DEBUG
- heap->VerifyClearedSlot(*object,
- HeapObject::RawField(*object, index.offset()));
+ heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
#endif
}
} else {
@@ -3000,7 +3001,7 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map,
}
void JSObject::ForceSetPrototype(Handle<JSObject> object,
- Handle<Object> proto) {
+ Handle<HeapObject> proto) {
// object.__proto__ = proto;
Handle<Map> old_map = Handle<Map>(object->map(), object->GetIsolate());
Handle<Map> new_map =
@@ -3668,8 +3669,12 @@ bool TestElementsIntegrityLevel(JSObject object, PropertyAttributes level) {
level);
}
if (IsFixedTypedArrayElementsKind(kind)) {
+ if (level == FROZEN && JSArrayBufferView::cast(object)->byte_length() > 0)
+ return false; // TypedArrays with elements can't be frozen.
return TestPropertiesIntegrityLevel(object, level);
}
+ if (IsFrozenElementsKind(kind)) return true;
+ if (IsSealedElementsKind(kind) && level != FROZEN) return true;
ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
// Only DICTIONARY_ELEMENTS and SLOW_SLOPPY_ARGUMENTS_ELEMENTS have
@@ -3766,12 +3771,10 @@ bool JSObject::IsExtensible(Handle<JSObject> object) {
return object->map()->is_extensible();
}
-namespace {
-
template <typename Dictionary>
-void ApplyAttributesToDictionary(Isolate* isolate, ReadOnlyRoots roots,
- Handle<Dictionary> dictionary,
- const PropertyAttributes attributes) {
+void JSObject::ApplyAttributesToDictionary(
+ Isolate* isolate, ReadOnlyRoots roots, Handle<Dictionary> dictionary,
+ const PropertyAttributes attributes) {
int capacity = dictionary->Capacity();
for (int i = 0; i < capacity; i++) {
Object k;
@@ -3789,8 +3792,6 @@ void ApplyAttributesToDictionary(Isolate* isolate, ReadOnlyRoots roots,
}
}
-} // namespace
-
template <PropertyAttributes attrs>
Maybe<bool> JSObject::PreventExtensionsWithTransition(
Handle<JSObject> object, ShouldThrow should_throw) {
@@ -3811,6 +3812,10 @@ Maybe<bool> JSObject::PreventExtensionsWithTransition(
}
if (attrs == NONE && !object->map()->is_extensible()) return Just(true);
+ ElementsKind old_elements_kind = object->map()->elements_kind();
+ if (attrs != FROZEN && IsSealedElementsKind(old_elements_kind))
+ return Just(true);
+ if (old_elements_kind == PACKED_FROZEN_ELEMENTS) return Just(true);
if (object->IsJSGlobalProxy()) {
PrototypeIterator iter(isolate, object);
@@ -3862,13 +3867,15 @@ Maybe<bool> JSObject::PreventExtensionsWithTransition(
}
Handle<Map> old_map(object->map(), isolate);
+ old_map = Map::Update(isolate, old_map);
TransitionsAccessor transitions(isolate, old_map);
Map transition = transitions.SearchSpecial(*transition_marker);
if (!transition.is_null()) {
Handle<Map> transition_map(transition, isolate);
DCHECK(transition_map->has_dictionary_elements() ||
transition_map->has_fixed_typed_array_elements() ||
- transition_map->elements_kind() == SLOW_STRING_WRAPPER_ELEMENTS);
+ transition_map->elements_kind() == SLOW_STRING_WRAPPER_ELEMENTS ||
+ transition_map->has_frozen_or_sealed_elements());
DCHECK(!transition_map->is_extensible());
JSObject::MigrateToMap(object, transition_map);
} else if (transitions.CanHaveMoreTransitions()) {
@@ -3901,15 +3908,21 @@ Maybe<bool> JSObject::PreventExtensionsWithTransition(
if (object->IsJSGlobalObject()) {
Handle<GlobalDictionary> dictionary(
JSGlobalObject::cast(*object)->global_dictionary(), isolate);
- ApplyAttributesToDictionary(isolate, roots, dictionary, attrs);
+ JSObject::ApplyAttributesToDictionary(isolate, roots, dictionary,
+ attrs);
} else {
Handle<NameDictionary> dictionary(object->property_dictionary(),
isolate);
- ApplyAttributesToDictionary(isolate, roots, dictionary, attrs);
+ JSObject::ApplyAttributesToDictionary(isolate, roots, dictionary,
+ attrs);
}
}
}
+ if (object->map()->has_frozen_or_sealed_elements()) {
+ return Just(true);
+ }
+
// Both seal and preventExtensions always go through without modifications to
// typed array elements. Freeze works only if there are no actual elements.
if (object->HasFixedTypedArrayElements()) {
@@ -3934,8 +3947,8 @@ Maybe<bool> JSObject::PreventExtensionsWithTransition(
// Make sure we never go back to the fast case
object->RequireSlowElements(*dictionary);
if (attrs != NONE) {
- ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate), dictionary,
- attrs);
+ JSObject::ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate),
+ dictionary, attrs);
}
}
@@ -3961,6 +3974,8 @@ bool JSObject::HasEnumerableElements() {
switch (object->GetElementsKind()) {
case PACKED_SMI_ELEMENTS:
case PACKED_ELEMENTS:
+ case PACKED_FROZEN_ELEMENTS:
+ case PACKED_SEALED_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS: {
int length = object->IsJSArray()
? Smi::ToInt(JSArray::cast(object)->length())
@@ -4469,7 +4484,8 @@ Maybe<bool> JSObject::SetPrototype(Handle<JSObject> object,
isolate->UpdateNoElementsProtectorOnSetPrototype(real_receiver);
- Handle<Map> new_map = Map::TransitionToPrototype(isolate, map, value);
+ Handle<Map> new_map =
+ Map::TransitionToPrototype(isolate, map, Handle<HeapObject>::cast(value));
DCHECK(new_map->prototype() == *value);
JSObject::MigrateToMap(real_receiver, new_map);
@@ -4714,6 +4730,8 @@ int JSObject::GetFastElementsUsage() {
case PACKED_SMI_ELEMENTS:
case PACKED_DOUBLE_ELEMENTS:
case PACKED_ELEMENTS:
+ case PACKED_FROZEN_ELEMENTS:
+ case PACKED_SEALED_ELEMENTS:
return IsJSArray() ? Smi::ToInt(JSArray::cast(*this)->length())
: store->length();
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
@@ -4842,10 +4860,9 @@ Maybe<int> JSBoundFunction::GetLength(Isolate* isolate,
// accessor.
Handle<JSFunction> target(JSFunction::cast(function->bound_target_function()),
isolate);
- Maybe<int> target_length = JSFunction::GetLength(isolate, target);
- if (target_length.IsNothing()) return target_length;
+ int target_length = target->length();
- int length = Max(0, target_length.FromJust() - nof_bound_arguments);
+ int length = Max(0, target_length - nof_bound_arguments);
return Just(length);
}
@@ -4865,26 +4882,6 @@ Handle<Object> JSFunction::GetName(Isolate* isolate,
}
// static
-Maybe<int> JSFunction::GetLength(Isolate* isolate,
- Handle<JSFunction> function) {
- int length = 0;
- IsCompiledScope is_compiled_scope(function->shared()->is_compiled_scope());
- if (is_compiled_scope.is_compiled()) {
- length = function->shared()->GetLength();
- } else {
- // If the function isn't compiled yet, the length is not computed
- // correctly yet. Compile it now and return the right length.
- if (Compiler::Compile(function, Compiler::KEEP_EXCEPTION,
- &is_compiled_scope)) {
- length = function->shared()->GetLength();
- }
- if (isolate->has_pending_exception()) return Nothing<int>();
- }
- DCHECK_GE(length, 0);
- return Just(length);
-}
-
-// static
Handle<NativeContext> JSFunction::GetFunctionRealm(
Handle<JSFunction> function) {
DCHECK(function->map()->is_constructor());
@@ -4927,26 +4924,65 @@ void JSFunction::MarkForOptimization(ConcurrencyMode mode) {
}
// static
+void JSFunction::EnsureClosureFeedbackCellArray(Handle<JSFunction> function) {
+ Isolate* const isolate = function->GetIsolate();
+ DCHECK(function->shared()->is_compiled());
+ DCHECK(function->shared()->HasFeedbackMetadata());
+ if (function->has_closure_feedback_cell_array() ||
+ function->has_feedback_vector()) {
+ return;
+ }
+ if (function->shared()->HasAsmWasmData()) return;
+
+ Handle<SharedFunctionInfo> shared(function->shared(), isolate);
+ DCHECK(function->shared()->HasBytecodeArray());
+ Handle<HeapObject> feedback_cell_array =
+ ClosureFeedbackCellArray::New(isolate, shared);
+  // The many_closures_cell is used to indicate that there is no dedicated
+  // feedback cell for this function and that a new feedback cell has to be
+  // allocated for it. For example, for eval functions we have to create a
+  // feedback cell and cache it along with the code. It is safe to use the
+  // many_closures_cell for this because in the regular case the function
+  // should already have a feedback vector / feedback cell array allocated.
+ if (function->raw_feedback_cell() == isolate->heap()->many_closures_cell()) {
+ Handle<FeedbackCell> feedback_cell =
+ isolate->factory()->NewOneClosureCell(feedback_cell_array);
+ function->set_raw_feedback_cell(*feedback_cell);
+ } else {
+ function->raw_feedback_cell()->set_value(*feedback_cell_array);
+ }
+}
+
+// static
void JSFunction::EnsureFeedbackVector(Handle<JSFunction> function) {
Isolate* const isolate = function->GetIsolate();
DCHECK(function->shared()->is_compiled());
- DCHECK(FLAG_lite_mode || function->shared()->HasFeedbackMetadata());
- if (!function->has_feedback_vector() &&
- function->shared()->HasFeedbackMetadata()) {
- Handle<SharedFunctionInfo> shared(function->shared(), isolate);
- if (!shared->HasAsmWasmData()) {
- DCHECK(function->shared()->HasBytecodeArray());
- Handle<FeedbackVector> feedback_vector =
- FeedbackVector::New(isolate, shared);
- if (function->raw_feedback_cell() ==
- isolate->heap()->many_closures_cell()) {
- Handle<FeedbackCell> feedback_cell =
- isolate->factory()->NewOneClosureCell(feedback_vector);
- function->set_raw_feedback_cell(*feedback_cell);
- } else {
- function->raw_feedback_cell()->set_value(*feedback_vector);
- }
- }
+ DCHECK(function->shared()->HasFeedbackMetadata());
+ if (function->has_feedback_vector()) return;
+ if (function->shared()->HasAsmWasmData()) return;
+
+ Handle<SharedFunctionInfo> shared(function->shared(), isolate);
+ DCHECK(function->shared()->HasBytecodeArray());
+
+ EnsureClosureFeedbackCellArray(function);
+ Handle<ClosureFeedbackCellArray> closure_feedback_cell_array =
+ handle(function->closure_feedback_cell_array(), isolate);
+ Handle<HeapObject> feedback_vector =
+ FeedbackVector::New(isolate, shared, closure_feedback_cell_array);
+  // EnsureClosureFeedbackCellArray should handle the special case where we
+  // need to allocate a new feedback cell. See the comment in that function
+  // for more details.
+ DCHECK(function->raw_feedback_cell() !=
+ isolate->heap()->many_closures_cell());
+ function->raw_feedback_cell()->set_value(*feedback_vector);
+}
+
+// static
+void JSFunction::InitializeFeedbackCell(Handle<JSFunction> function) {
+ if (FLAG_lazy_feedback_allocation) {
+ EnsureClosureFeedbackCellArray(function);
+ } else {
+ EnsureFeedbackVector(function);
}
}
@@ -5047,7 +5083,7 @@ void JSFunction::SetPrototype(Handle<JSFunction> function,
}
void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
- Handle<Object> prototype) {
+ Handle<HeapObject> prototype) {
if (map->prototype() != *prototype)
Map::SetPrototype(function->GetIsolate(), map, prototype);
function->set_prototype_or_initial_map(*map);
@@ -5088,7 +5124,7 @@ void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
inobject_properties);
// Fetch or allocate prototype.
- Handle<Object> prototype;
+ Handle<HeapObject> prototype;
if (function->has_instance_prototype()) {
prototype = handle(function->instance_prototype(), isolate);
} else {
@@ -5244,7 +5280,7 @@ bool FastInitializeDerivedMap(Isolate* isolate, Handle<JSFunction> new_target,
Map::CopyInitialMap(isolate, constructor_initial_map, instance_size,
in_object_properties, unused_property_fields);
map->set_new_target_is_base(false);
- Handle<Object> prototype(new_target->instance_prototype(), isolate);
+ Handle<HeapObject> prototype(new_target->instance_prototype(), isolate);
JSFunction::SetInitialMap(new_target, map, prototype);
DCHECK(new_target->instance_prototype()->IsJSReceiver());
map->SetConstructor(*constructor);
@@ -5323,7 +5359,7 @@ MaybeHandle<Map> JSFunction::GetDerivedMap(Isolate* isolate,
map->set_new_target_is_base(false);
CHECK(prototype->IsJSReceiver());
if (map->prototype() != *prototype)
- Map::SetPrototype(isolate, map, prototype);
+ Map::SetPrototype(isolate, map, Handle<HeapObject>::cast(prototype));
map->SetConstructor(*constructor);
return map;
}
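Taken together, InitializeFeedbackCell, EnsureClosureFeedbackCellArray and EnsureFeedbackVector move a function's feedback cell through three states: undefined (uncompiled), ClosureFeedbackCellArray (lazy allocation), and FeedbackVector. A toy model of that state machine (standalone illustration, not the V8 API; in V8 the vector additionally keeps a reference to the cell array):

#include <cassert>
#include <variant>

struct Undefined {};
struct ClosureFeedbackCellArray {};
struct FeedbackVector {};

using CellValue =
    std::variant<Undefined, ClosureFeedbackCellArray, FeedbackVector>;

struct FunctionModel {
  CellValue cell{Undefined{}};

  bool has_closure_feedback_cell_array() const {
    return std::holds_alternative<ClosureFeedbackCellArray>(cell);
  }
  bool has_feedback_vector() const {
    return std::holds_alternative<FeedbackVector>(cell);
  }

  void EnsureClosureFeedbackCellArray() {
    if (has_closure_feedback_cell_array() || has_feedback_vector()) return;
    cell = ClosureFeedbackCellArray{};
  }
  void EnsureFeedbackVector() {
    if (has_feedback_vector()) return;
    EnsureClosureFeedbackCellArray();  // mirrors the call order above
    cell = FeedbackVector{};           // the vector replaces the array value
  }
  void InitializeFeedbackCell(bool lazy_feedback_allocation) {
    if (lazy_feedback_allocation) {
      EnsureClosureFeedbackCellArray();  // cheap: closures stay creatable
    } else {
      EnsureFeedbackVector();  // eager: full type feedback right away
    }
  }
};

int main() {
  FunctionModel lazy;
  lazy.InitializeFeedbackCell(true);
  assert(lazy.has_closure_feedback_cell_array() && !lazy.has_feedback_vector());

  FunctionModel eager;
  eager.InitializeFeedbackCell(false);
  assert(eager.has_feedback_vector() &&
         !eager.has_closure_feedback_cell_array());
  return 0;
}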
diff --git a/deps/v8/src/objects/js-objects.h b/deps/v8/src/objects/js-objects.h
index 0d88d564d0..c67f70c207 100644
--- a/deps/v8/src/objects/js-objects.h
+++ b/deps/v8/src/objects/js-objects.h
@@ -84,8 +84,8 @@ class JSReceiver : public HeapObject {
Handle<JSReceiver> receiver);
// Get the first non-hidden prototype.
- static inline MaybeHandle<Object> GetPrototype(Isolate* isolate,
- Handle<JSReceiver> receiver);
+ static inline MaybeHandle<HeapObject> GetPrototype(
+ Isolate* isolate, Handle<JSReceiver> receiver);
V8_WARN_UNUSED_RESULT static Maybe<bool> HasInPrototypeChain(
Isolate* isolate, Handle<JSReceiver> object, Handle<Object> proto);
@@ -107,7 +107,7 @@ class JSReceiver : public HeapObject {
V8_WARN_UNUSED_RESULT static inline Maybe<bool> HasElement(
Handle<JSReceiver> object, uint32_t index);
- V8_WARN_UNUSED_RESULT static Maybe<bool> HasOwnProperty(
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Maybe<bool> HasOwnProperty(
Handle<JSReceiver> object, Handle<Name> name);
V8_WARN_UNUSED_RESULT static inline Maybe<bool> HasOwnProperty(
Handle<JSReceiver> object, uint32_t index);
@@ -120,10 +120,10 @@ class JSReceiver : public HeapObject {
Isolate* isolate, Handle<JSReceiver> receiver, uint32_t index);
// Implementation of ES6 [[Delete]]
- V8_WARN_UNUSED_RESULT static Maybe<bool> DeletePropertyOrElement(
- Handle<JSReceiver> object, Handle<Name> name,
- LanguageMode language_mode = LanguageMode::kSloppy);
- V8_WARN_UNUSED_RESULT static Maybe<bool> DeleteProperty(
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Maybe<bool>
+ DeletePropertyOrElement(Handle<JSReceiver> object, Handle<Name> name,
+ LanguageMode language_mode = LanguageMode::kSloppy);
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Maybe<bool> DeleteProperty(
Handle<JSReceiver> object, Handle<Name> name,
LanguageMode language_mode = LanguageMode::kSloppy);
V8_WARN_UNUSED_RESULT static Maybe<bool> DeleteProperty(
@@ -178,7 +178,7 @@ class JSReceiver : public HeapObject {
V8_WARN_UNUSED_RESULT static Maybe<bool> GetOwnPropertyDescriptor(
LookupIterator* it, PropertyDescriptor* desc);
- typedef PropertyAttributes IntegrityLevel;
+ using IntegrityLevel = PropertyAttributes;
// ES6 7.3.14 (when passed kDontThrow)
// 'level' must be SEALED or FROZEN.
@@ -208,7 +208,7 @@ class JSReceiver : public HeapObject {
// function that was used to instantiate the object).
static Handle<String> GetConstructorName(Handle<JSReceiver> receiver);
- Handle<NativeContext> GetCreationContext();
+ V8_EXPORT_PRIVATE Handle<NativeContext> GetCreationContext();
V8_WARN_UNUSED_RESULT static inline Maybe<PropertyAttributes>
GetPropertyAttributes(Handle<JSReceiver> object, Handle<Name> name);
@@ -232,20 +232,20 @@ class JSReceiver : public HeapObject {
inline static Handle<Object> GetDataProperty(Handle<JSReceiver> object,
Handle<Name> name);
- static Handle<Object> GetDataProperty(LookupIterator* it);
+ V8_EXPORT_PRIVATE static Handle<Object> GetDataProperty(LookupIterator* it);
// Retrieves a permanent object identity hash code. The undefined value might
// be returned in case no hash was created yet.
- Object GetIdentityHash();
+ V8_EXPORT_PRIVATE Object GetIdentityHash();
// Retrieves a permanent object identity hash code. May create and store a
// hash code if needed and none exists.
static Smi CreateIdentityHash(Isolate* isolate, JSReceiver key);
- Smi GetOrCreateIdentityHash(Isolate* isolate);
+ V8_EXPORT_PRIVATE Smi GetOrCreateIdentityHash(Isolate* isolate);
// Stores the hash code. The hash passed in must be masked with
// JSReceiver::kHashMask.
- void SetIdentityHash(int masked_hash);
+ V8_EXPORT_PRIVATE void SetIdentityHash(int masked_hash);
// ES6 [[OwnPropertyKeys]] (modulo return type)
V8_WARN_UNUSED_RESULT static inline MaybeHandle<FixedArray> OwnPropertyKeys(
@@ -286,7 +286,7 @@ class JSObject : public JSReceiver {
public:
static bool IsUnmodifiedApiObject(FullObjectSlot o);
- static V8_WARN_UNUSED_RESULT MaybeHandle<JSObject> New(
+ V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT MaybeHandle<JSObject> New(
Handle<JSFunction> constructor, Handle<JSReceiver> new_target,
Handle<AllocationSite> site);
@@ -303,23 +303,22 @@ class JSObject : public JSReceiver {
// corresponds to a set of object representations of elements that
// have something in common.
//
- // In the fast mode elements is a FixedArray and so each element can
- // be quickly accessed. This fact is used in the generated code. The
- // elements array can have one of three maps in this mode:
- // fixed_array_map, sloppy_arguments_elements_map or
- // fixed_cow_array_map (for copy-on-write arrays). In the latter case
- // the elements array may be shared by a few objects and so before
- // writing to any element the array must be copied. Use
+ // In the fast mode elements is a FixedArray and so each element can be
+ // quickly accessed. The elements array can have one of several maps in this
+ // mode: fixed_array_map, fixed_double_array_map,
+ // sloppy_arguments_elements_map or fixed_cow_array_map (for copy-on-write
+ // arrays). In the latter case the elements array may be shared by a few
+ // objects and so before writing to any element the array must be copied. Use
// EnsureWritableFastElements in this case.
//
- // In the slow mode the elements is either a NumberDictionary, a
+ // In the slow mode the elements is either a NumberDictionary or a
// FixedArray parameter map for a (sloppy) arguments object.
DECL_ACCESSORS(elements, FixedArrayBase)
inline void initialize_elements();
static inline void SetMapAndElements(Handle<JSObject> object, Handle<Map> map,
Handle<FixedArrayBase> elements);
inline ElementsKind GetElementsKind() const;
- ElementsAccessor* GetElementsAccessor();
+ V8_EXPORT_PRIVATE ElementsAccessor* GetElementsAccessor();
// Returns true if an object has elements of PACKED_SMI_ELEMENTS or
// HOLEY_SMI_ELEMENTS ElementsKind.
inline bool HasSmiElements();
@@ -342,8 +341,10 @@ class JSObject : public JSReceiver {
inline bool HasSloppyArgumentsElements();
inline bool HasStringWrapperElements();
inline bool HasDictionaryElements();
+
// Returns true if an object has elements of PACKED_ELEMENTS
inline bool HasPackedElements();
+ inline bool HasFrozenOrSealedElements();
inline bool HasFixedTypedArrayElements();
@@ -390,7 +391,7 @@ class JSObject : public JSReceiver {
Maybe<ShouldThrow> should_throw,
AccessorInfoHandling handling = DONT_FORCE_FIELD);
- V8_WARN_UNUSED_RESULT static MaybeHandle<Object>
+ V8_WARN_UNUSED_RESULT static MaybeHandle<Object> V8_EXPORT_PRIVATE
SetOwnPropertyIgnoreAttributes(Handle<JSObject> object, Handle<Name> name,
Handle<Object> value,
PropertyAttributes attributes);
@@ -402,7 +403,7 @@ class JSObject : public JSReceiver {
// Equivalent to one of the above depending on whether |name| can be converted
// to an array index.
- V8_WARN_UNUSED_RESULT static MaybeHandle<Object>
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static MaybeHandle<Object>
DefinePropertyOrElementIgnoreAttributes(Handle<JSObject> object,
Handle<Name> name,
Handle<Object> value,
@@ -414,18 +415,21 @@ class JSObject : public JSReceiver {
LookupIterator* it, Handle<Object> value,
Maybe<ShouldThrow> should_throw = Just(kDontThrow));
- static void AddProperty(Isolate* isolate, Handle<JSObject> object,
- Handle<Name> name, Handle<Object> value,
- PropertyAttributes attributes);
+ V8_EXPORT_PRIVATE static void AddProperty(Isolate* isolate,
+ Handle<JSObject> object,
+ Handle<Name> name,
+ Handle<Object> value,
+ PropertyAttributes attributes);
// {name} must be a UTF-8 encoded, null-terminated string.
static void AddProperty(Isolate* isolate, Handle<JSObject> object,
const char* name, Handle<Object> value,
PropertyAttributes attributes);
- static void AddDataElement(Handle<JSObject> receiver, uint32_t index,
- Handle<Object> value,
- PropertyAttributes attributes);
+ V8_EXPORT_PRIVATE static void AddDataElement(Handle<JSObject> receiver,
+ uint32_t index,
+ Handle<Object> value,
+ PropertyAttributes attributes);
// Extend the receiver with a single fast property appeared first in the
// passed map. This also extends the property backing store if necessary.
@@ -489,11 +493,9 @@ class JSObject : public JSReceiver {
// Defines an AccessorPair property on the given object.
// TODO(mstarzinger): Rename to SetAccessor().
- static MaybeHandle<Object> DefineAccessor(Handle<JSObject> object,
- Handle<Name> name,
- Handle<Object> getter,
- Handle<Object> setter,
- PropertyAttributes attributes);
+ V8_EXPORT_PRIVATE static MaybeHandle<Object> DefineAccessor(
+ Handle<JSObject> object, Handle<Name> name, Handle<Object> getter,
+ Handle<Object> setter, PropertyAttributes attributes);
static MaybeHandle<Object> DefineAccessor(LookupIterator* it,
Handle<Object> getter,
Handle<Object> setter,
@@ -593,37 +595,40 @@ class JSObject : public JSReceiver {
// map and the ElementsKind set.
static Handle<Map> GetElementsTransitionMap(Handle<JSObject> object,
ElementsKind to_kind);
- static void TransitionElementsKind(Handle<JSObject> object,
- ElementsKind to_kind);
+ V8_EXPORT_PRIVATE static void TransitionElementsKind(Handle<JSObject> object,
+ ElementsKind to_kind);
// Always use this to migrate an object to a new map.
// |expected_additional_properties| is only used for fast-to-slow transitions
// and ignored otherwise.
- static void MigrateToMap(Handle<JSObject> object, Handle<Map> new_map,
- int expected_additional_properties = 0);
+ V8_EXPORT_PRIVATE static void MigrateToMap(
+ Handle<JSObject> object, Handle<Map> new_map,
+ int expected_additional_properties = 0);
// Forces a prototype without any of the checks that the regular SetPrototype
// would do.
- static void ForceSetPrototype(Handle<JSObject> object, Handle<Object> proto);
+ static void ForceSetPrototype(Handle<JSObject> object,
+ Handle<HeapObject> proto);
// Convert the object to use the canonical dictionary
// representation. If the object is expected to have additional properties
// added this number can be indicated to have the backing store allocated to
// an initial capacity for holding these properties.
- static void NormalizeProperties(Handle<JSObject> object,
- PropertyNormalizationMode mode,
- int expected_additional_properties,
- const char* reason);
+ V8_EXPORT_PRIVATE static void NormalizeProperties(
+ Handle<JSObject> object, PropertyNormalizationMode mode,
+ int expected_additional_properties, const char* reason);
// Convert and update the elements backing store to be a
// NumberDictionary dictionary. Returns the backing after conversion.
- static Handle<NumberDictionary> NormalizeElements(Handle<JSObject> object);
+ V8_EXPORT_PRIVATE static Handle<NumberDictionary> NormalizeElements(
+ Handle<JSObject> object);
void RequireSlowElements(NumberDictionary dictionary);
// Transform slow named properties to fast variants.
- static void MigrateSlowToFast(Handle<JSObject> object,
- int unused_property_fields, const char* reason);
+ V8_EXPORT_PRIVATE static void MigrateSlowToFast(Handle<JSObject> object,
+ int unused_property_fields,
+ const char* reason);
inline bool IsUnboxedDoubleField(FieldIndex index);
@@ -723,7 +728,7 @@ class JSObject : public JSReceiver {
// If a GC was caused while constructing this object, the elements pointer
// may point to a one pointer filler map. The object won't be rooted, but
// our heap verification code could stumble across it.
- bool ElementsAreSafeToExamine() const;
+ V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine() const;
#endif
Object SlowReverseLookup(Object value);
@@ -792,6 +797,11 @@ class JSObject : public JSReceiver {
static bool AllCanRead(LookupIterator* it);
static bool AllCanWrite(LookupIterator* it);
+ template <typename Dictionary>
+ static void ApplyAttributesToDictionary(Isolate* isolate, ReadOnlyRoots roots,
+ Handle<Dictionary> dictionary,
+ const PropertyAttributes attributes);
+
private:
friend class JSReceiver;
friend class Object;
@@ -846,18 +856,8 @@ class JSAccessorPropertyDescriptor : public JSObject {
// FromPropertyDescriptor function for regular data properties.
class JSDataPropertyDescriptor : public JSObject {
public:
- // Layout description.
-#define JS_DATA_PROPERTY_DESCRIPTOR_FIELDS(V) \
- V(kValueOffset, kTaggedSize) \
- V(kWritableOffset, kTaggedSize) \
- V(kEnumerableOffset, kTaggedSize) \
- V(kConfigurableOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_DATA_PROPERTY_DESCRIPTOR_FIELDS)
-#undef JS_DATA_PROPERTY_DESCRIPTOR_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSObject::kHeaderSize, TORQUE_GENERATED_JSDATA_PROPERTY_DESCRIPTOR_FIELDS)
// Indices of in-object properties.
static const int kValueIndex = 0;
@@ -958,9 +958,9 @@ class JSFunction : public JSObject {
inline void set_context(Object context);
inline JSGlobalProxy global_proxy();
inline NativeContext native_context();
+ inline int length();
static Handle<Object> GetName(Isolate* isolate, Handle<JSFunction> function);
- static Maybe<int> GetLength(Isolate* isolate, Handle<JSFunction> function);
static Handle<NativeContext> GetFunctionRealm(Handle<JSFunction> function);
// [code]: The generated code object for this function. Executed
@@ -1034,10 +1034,27 @@ class JSFunction : public JSObject {
// the JSFunction's bytecode being flushed.
DECL_ACCESSORS(raw_feedback_cell, FeedbackCell)
- // feedback_vector() can be used once the function is compiled.
+  // Functions related to the feedback vector. feedback_vector() can be used
+  // once the function has a feedback vector allocated. Feedback vectors may
+  // not be available right after compilation when they are allocated lazily.
inline FeedbackVector feedback_vector() const;
inline bool has_feedback_vector() const;
- static void EnsureFeedbackVector(Handle<JSFunction> function);
+ V8_EXPORT_PRIVATE static void EnsureFeedbackVector(
+ Handle<JSFunction> function);
+
+  // Functions related to the closure feedback cell array that holds the
+  // feedback cells used to create closures from this function. Closure
+  // feedback cell arrays are allocated after compilation when feedback
+  // vectors are to be allocated lazily.
+ inline bool has_closure_feedback_cell_array() const;
+ inline ClosureFeedbackCellArray closure_feedback_cell_array() const;
+ static void EnsureClosureFeedbackCellArray(Handle<JSFunction> function);
+
+  // Initializes the feedback cell of |function|. In lite mode, the cell is
+  // initialized to the closure feedback cell array that holds the feedback
+  // cells for the closures created from this function. In the regular mode,
+  // this allocates a feedback vector.
+ static void InitializeFeedbackCell(Handle<JSFunction> function);
// Unconditionally clear the type feedback vector.
void ClearTypeFeedbackInfo();
@@ -1051,9 +1068,10 @@ class JSFunction : public JSObject {
// The initial map for an object created by this constructor.
inline Map initial_map();
static void SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
- Handle<Object> prototype);
+ Handle<HeapObject> prototype);
inline bool has_initial_map();
- static void EnsureHasInitialMap(Handle<JSFunction> function);
+ V8_EXPORT_PRIVATE static void EnsureHasInitialMap(
+ Handle<JSFunction> function);
// Creates a map that matches the constructor's initial map, but with
// [[prototype]] being new.target.prototype. Because new.target can be a
@@ -1069,7 +1087,7 @@ class JSFunction : public JSObject {
inline bool has_prototype();
inline bool has_instance_prototype();
inline Object prototype();
- inline Object instance_prototype();
+ inline HeapObject instance_prototype();
inline bool has_prototype_property();
inline bool PrototypeRequiresRuntimeLookup();
static void SetPrototype(Handle<JSFunction> function, Handle<Object> value);
@@ -1169,7 +1187,7 @@ class JSGlobalObject : public JSObject {
DECL_ACCESSORS(native_context, NativeContext)
// [global proxy]: the global proxy object of the context
- DECL_ACCESSORS(global_proxy, JSObject)
+ DECL_ACCESSORS(global_proxy, JSGlobalProxy)
// Gets global object properties.
inline GlobalDictionary global_dictionary();
@@ -1303,22 +1321,8 @@ class JSDate : public JSObject {
kTimezoneOffset
};
- // Layout description.
-#define JS_DATE_FIELDS(V) \
- V(kValueOffset, kTaggedSize) \
- V(kYearOffset, kTaggedSize) \
- V(kMonthOffset, kTaggedSize) \
- V(kDayOffset, kTaggedSize) \
- V(kWeekdayOffset, kTaggedSize) \
- V(kHourOffset, kTaggedSize) \
- V(kMinOffset, kTaggedSize) \
- V(kSecOffset, kTaggedSize) \
- V(kCacheStampOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_DATE_FIELDS)
-#undef JS_DATE_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSDATE_FIELDS)
private:
inline Object DoGetField(FieldIndex index);
@@ -1362,10 +1366,10 @@ class JSMessageObject : public JSObject {
// Returns the line number for the error message (1-based), or
// Message::kNoLineNumberInfo if the line cannot be determined.
- int GetLineNumber() const;
+ V8_EXPORT_PRIVATE int GetLineNumber() const;
// Returns the offset of the given position within the containing line.
- int GetColumnNumber() const;
+ V8_EXPORT_PRIVATE int GetColumnNumber() const;
// Returns the source code line containing the given source
// position, or the empty string if the position is invalid.
@@ -1380,28 +1384,13 @@ class JSMessageObject : public JSObject {
DECL_PRINTER(JSMessageObject)
DECL_VERIFIER(JSMessageObject)
- // Layout description.
-#define JS_MESSAGE_FIELDS(V) \
- /* Tagged fields. */ \
- V(kTypeOffset, kTaggedSize) \
- V(kArgumentsOffset, kTaggedSize) \
- V(kScriptOffset, kTaggedSize) \
- V(kStackFramesOffset, kTaggedSize) \
- V(kPointerFieldsEndOffset, 0) \
- /* Raw data fields. */ \
- /* TODO(ishell): store as int32 instead of Smi. */ \
- V(kStartPositionOffset, kTaggedSize) \
- V(kEndPositionOffset, kTaggedSize) \
- V(kErrorLevelOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_MESSAGE_FIELDS)
-#undef JS_MESSAGE_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSMESSAGE_OBJECT_FIELDS)
+ // TODO(v8:8989): [torque] Support marker constants.
+ static const int kPointerFieldsEndOffset = kStartPositionOffset;
- typedef FixedBodyDescriptor<HeapObject::kMapOffset, kPointerFieldsEndOffset,
- kSize>
- BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<HeapObject::kMapOffset,
+ kPointerFieldsEndOffset, kSize>;
OBJECT_CONSTRUCTORS(JSMessageObject, JSObject);
};
@@ -1428,16 +1417,8 @@ class JSAsyncFromSyncIterator : public JSObject {
// subsequent "next" invocations.
DECL_ACCESSORS(next, Object)
- // Layout description.
-#define JS_ASYNC_FROM_SYNC_ITERATOR_FIELDS(V) \
- V(kSyncIteratorOffset, kTaggedSize) \
- V(kNextOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_ASYNC_FROM_SYNC_ITERATOR_FIELDS)
-#undef JS_ASYNC_FROM_SYNC_ITERATOR_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSObject::kHeaderSize, TORQUE_GENERATED_JSASYNC_FROM_SYNC_ITERATOR_FIELDS)
OBJECT_CONSTRUCTORS(JSAsyncFromSyncIterator, JSObject);
};
@@ -1457,16 +1438,8 @@ class JSStringIterator : public JSObject {
inline int index() const;
inline void set_index(int value);
- // Layout description.
-#define JS_STRING_ITERATOR_FIELDS(V) \
- V(kStringOffset, kTaggedSize) \
- V(kNextIndexOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_STRING_ITERATOR_FIELDS)
-#undef JS_STRING_ITERATOR_FIELDS
+ TORQUE_GENERATED_JSSTRING_ITERATOR_FIELDS)
OBJECT_CONSTRUCTORS(JSStringIterator, JSObject);
};
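Several classes in this header trade handwritten V(name, size) field lists for Torque-generated ones; both forms are consumed by DEFINE_FIELD_OFFSET_CONSTANTS. A toy illustration of how such a list expands into cumulative offset constants (simplified; these are not V8's actual macros):

#include <cstdio>

#define DEMO_JS_DATE_FIELDS(V) \
  V(kValueOffset, 8)           \
  V(kYearOffset, 8)            \
  V(kMonthOffset, 8)           \
  V(kSize, 0)

// Each field contributes its name at the running offset plus an *End marker.
#define DEMO_FIELD(Name, Size) Name, Name##End = Name + (Size)-1,
#define DEMO_DEFINE_FIELD_OFFSETS(StartOffset, LIST) \
  enum { LIST##_StartOffset = (StartOffset)-1, LIST(DEMO_FIELD) };

// Fields start right after a hypothetical 16-byte header.
DEMO_DEFINE_FIELD_OFFSETS(16, DEMO_JS_DATE_FIELDS)

int main() {
  std::printf("kValueOffset=%d kYearOffset=%d kMonthOffset=%d kSize=%d\n",
              kValueOffset, kYearOffset, kMonthOffset, kSize);
  // Prints: kValueOffset=16 kYearOffset=24 kMonthOffset=32 kSize=40
  return 0;
}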
diff --git a/deps/v8/src/objects/js-plural-rules.h b/deps/v8/src/objects/js-plural-rules.h
index 30b4fd1d03..70c63a9a8f 100644
--- a/deps/v8/src/objects/js-plural-rules.h
+++ b/deps/v8/src/objects/js-plural-rules.h
@@ -40,7 +40,7 @@ class JSPluralRules : public JSObject {
V8_WARN_UNUSED_RESULT static MaybeHandle<String> ResolvePlural(
Isolate* isolate, Handle<JSPluralRules> plural_rules, double number);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
// [[Type]] is one of the values "cardinal" or "ordinal",
// identifying the plural rules used.
diff --git a/deps/v8/src/objects/js-promise.h b/deps/v8/src/objects/js-promise.h
index 181ebc0a85..bbe6f724d1 100644
--- a/deps/v8/src/objects/js-promise.h
+++ b/deps/v8/src/objects/js-promise.h
@@ -49,7 +49,7 @@ class JSPromise : public JSObject {
void set_async_task_id(int id);
static const char* Status(Promise::PromiseState status);
- Promise::PromiseState status() const;
+ V8_EXPORT_PRIVATE Promise::PromiseState status() const;
void set_status(Promise::PromiseState status);
// ES section #sec-fulfillpromise
diff --git a/deps/v8/src/objects/js-proxy.h b/deps/v8/src/objects/js-proxy.h
index a933e6598a..68fbb333b9 100644
--- a/deps/v8/src/objects/js-proxy.h
+++ b/deps/v8/src/objects/js-proxy.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_JS_PROXY_H_
#include "src/objects/js-objects.h"
+#include "torque-generated/builtin-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -33,7 +34,7 @@ class JSProxy : public JSReceiver {
static void Revoke(Handle<JSProxy> proxy);
// ES6 9.5.1
- static MaybeHandle<Object> GetPrototype(Handle<JSProxy> receiver);
+ static MaybeHandle<HeapObject> GetPrototype(Handle<JSProxy> receiver);
// ES6 9.5.2
V8_WARN_UNUSED_RESULT static Maybe<bool> SetPrototype(
@@ -116,8 +117,8 @@ class JSProxy : public JSReceiver {
STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
static_cast<int>(JSProxy::kTargetOffset));
- typedef FixedBodyDescriptor<JSReceiver::kPropertiesOrHashOffset, kSize, kSize>
- BodyDescriptor;
+ using BodyDescriptor =
+ FixedBodyDescriptor<JSReceiver::kPropertiesOrHashOffset, kSize, kSize>;
static Maybe<bool> SetPrivateSymbol(Isolate* isolate, Handle<JSProxy> proxy,
Handle<Symbol> private_name,
diff --git a/deps/v8/src/objects/js-regexp-string-iterator.h b/deps/v8/src/objects/js-regexp-string-iterator.h
index 005d10dfc6..871b724966 100644
--- a/deps/v8/src/objects/js-regexp-string-iterator.h
+++ b/deps/v8/src/objects/js-regexp-string-iterator.h
@@ -37,16 +37,9 @@ class JSRegExpStringIterator : public JSObject {
DECL_VERIFIER(JSRegExpStringIterator)
// Layout description.
-#define JS_REGEXP_STRING_ITERATOR_FIELDS(V) \
- V(kIteratingRegExpOffset, kTaggedSize) \
- V(kIteratedStringOffset, kTaggedSize) \
- V(kFlagsOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_REGEXP_STRING_ITERATOR_FIELDS)
-#undef JS_REGEXP_STRING_ITERATOR_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSREG_EXP_STRING_ITERATOR_FIELDS)
static const int kDoneBit = 0;
static const int kGlobalBit = 1;
diff --git a/deps/v8/src/objects/js-regexp.h b/deps/v8/src/objects/js-regexp.h
index 5012e2c01b..a48900d81b 100644
--- a/deps/v8/src/objects/js-regexp.h
+++ b/deps/v8/src/objects/js-regexp.h
@@ -56,7 +56,7 @@ class JSRegExp : public JSObject {
// Update FlagCount when adding new flags.
kInvalid = 1 << FlagShiftBit::kInvalid, // Not included in FlagCount.
};
- typedef base::Flags<Flag> Flags;
+ using Flags = base::Flags<Flag>;
static constexpr int FlagCount() { return 6; }
static int FlagShiftBits(Flag flag) {
@@ -122,18 +122,12 @@ class JSRegExp : public JSObject {
DECL_PRINTER(JSRegExp)
DECL_VERIFIER(JSRegExp)
-// Layout description.
-#define JS_REGEXP_FIELDS(V) \
- V(kDataOffset, kTaggedSize) \
- V(kSourceOffset, kTaggedSize) \
- V(kFlagsOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0) \
- /* This is already an in-object field. */ \
- V(kLastIndexOffset, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_REGEXP_FIELDS)
-#undef JS_REGEXP_FIELDS
+ // Layout description.
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
+ TORQUE_GENERATED_JSREG_EXP_FIELDS)
+ /* This is already an in-object field. */
+ // TODO(v8:8944): improve handling of in-object fields
+ static constexpr int kLastIndexOffset = kSize;
// Indices in the data array.
static const int kTagIndex = 0;
@@ -194,16 +188,9 @@ DEFINE_OPERATORS_FOR_FLAGS(JSRegExp::Flags)
// After creation the result must be treated as a JSArray in all regards.
class JSRegExpResult : public JSArray {
public:
-// Layout description.
-#define REG_EXP_RESULT_FIELDS(V) \
- V(kIndexOffset, kTaggedSize) \
- V(kInputOffset, kTaggedSize) \
- V(kGroupsOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSArray::kSize, REG_EXP_RESULT_FIELDS)
-#undef REG_EXP_RESULT_FIELDS
+ // Layout description.
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSArray::kSize,
+ TORQUE_GENERATED_JSREG_EXP_RESULT_FIELDS)
// Indices of in-object properties.
static const int kIndexIndex = 0;
diff --git a/deps/v8/src/objects/js-relative-time-format.cc b/deps/v8/src/objects/js-relative-time-format.cc
index 02039fa9e0..5b89e83057 100644
--- a/deps/v8/src/objects/js-relative-time-format.cc
+++ b/deps/v8/src/objects/js-relative-time-format.cc
@@ -236,66 +236,6 @@ Handle<String> UnitAsString(Isolate* isolate, URelativeDateTimeUnit unit_enum) {
}
}
-MaybeHandle<JSArray> GenerateRelativeTimeFormatParts(
- Isolate* isolate, const icu::UnicodeString& formatted,
- const icu::UnicodeString& integer_part, URelativeDateTimeUnit unit_enum,
- double number, const icu::NumberFormat& nf) {
- Factory* factory = isolate->factory();
- Handle<JSArray> array = factory->NewJSArray(0);
- int32_t found = formatted.indexOf(integer_part);
-
- Handle<String> substring;
- if (found < 0) {
- // Cannot find the integer_part in the formatted.
- // Return [{'type': 'literal', 'value': formatted}]
- ASSIGN_RETURN_ON_EXCEPTION(isolate, substring,
- Intl::ToString(isolate, formatted), JSArray);
- Intl::AddElement(isolate, array,
- 0, // index
- factory->literal_string(), // field_type_string
- substring);
- } else {
- // Found the formatted integer in the result.
- int index = 0;
-
- // array.push({
- // 'type': 'literal',
- // 'value': formatted.substring(0, found)})
- if (found > 0) {
- ASSIGN_RETURN_ON_EXCEPTION(isolate, substring,
- Intl::ToString(isolate, formatted, 0, found),
- JSArray);
- Intl::AddElement(isolate, array, index++,
- factory->literal_string(), // field_type_string
- substring);
- }
-
- Handle<String> unit = UnitAsString(isolate, unit_enum);
-
- Handle<Object> number_obj = factory->NewNumber(number);
- Maybe<int> maybe_format_to_parts = JSNumberFormat::FormatToParts(
- isolate, array, index, nf, number_obj, unit);
- MAYBE_RETURN(maybe_format_to_parts, Handle<JSArray>());
- index = maybe_format_to_parts.FromJust();
-
- // array.push({
- // 'type': 'literal',
- // 'value': formatted.substring(
- // found + integer_part.length, formatted.length)})
- if (found + integer_part.length() < formatted.length()) {
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, substring,
- Intl::ToString(isolate, formatted, found + integer_part.length(),
- formatted.length()),
- JSArray);
- Intl::AddElement(isolate, array, index,
- factory->literal_string(), // field_type_string
- substring);
- }
- }
- return array;
-}
-
bool GetURelativeDateTimeUnit(Handle<String> unit,
URelativeDateTimeUnit* unit_enum) {
std::unique_ptr<char[]> unit_str = unit->ToCString();
@@ -329,37 +269,32 @@ bool GetURelativeDateTimeUnit(Handle<String> unit,
return true;
}
-} // namespace
-
-MaybeHandle<Object> JSRelativeTimeFormat::Format(
- Isolate* isolate, Handle<Object> value_obj, Handle<Object> unit_obj,
- Handle<JSRelativeTimeFormat> format_holder, const char* func_name,
- bool to_parts) {
- Factory* factory = isolate->factory();
-
+template <typename T>
+MaybeHandle<T> FormatCommon(
+ Isolate* isolate, Handle<JSRelativeTimeFormat> format,
+ Handle<Object> value_obj, Handle<Object> unit_obj, const char* func_name,
+ MaybeHandle<T> (*formatToResult)(Isolate*,
+ const icu::FormattedRelativeDateTime&,
+ Handle<Object>, Handle<String>)) {
// 3. Let value be ? ToNumber(value).
Handle<Object> value;
ASSIGN_RETURN_ON_EXCEPTION(isolate, value,
- Object::ToNumber(isolate, value_obj), Object);
+ Object::ToNumber(isolate, value_obj), T);
double number = value->Number();
// 4. Let unit be ? ToString(unit).
Handle<String> unit;
ASSIGN_RETURN_ON_EXCEPTION(isolate, unit, Object::ToString(isolate, unit_obj),
- Object);
-
+ T);
// 4. If isFinite(value) is false, then throw a RangeError exception.
if (!std::isfinite(number)) {
THROW_NEW_ERROR(
isolate,
NewRangeError(MessageTemplate::kNotFiniteNumber,
isolate->factory()->NewStringFromAsciiChecked(func_name)),
- Object);
+ T);
}
-
- icu::RelativeDateTimeFormatter* formatter =
- format_holder->icu_formatter()->raw();
+ icu::RelativeDateTimeFormatter* formatter = format->icu_formatter()->raw();
CHECK_NOT_NULL(formatter);
-
URelativeDateTimeUnit unit_enum;
if (!GetURelativeDateTimeUnit(unit, &unit_enum)) {
THROW_NEW_ERROR(
@@ -367,45 +302,137 @@ MaybeHandle<Object> JSRelativeTimeFormat::Format(
NewRangeError(MessageTemplate::kInvalidUnit,
isolate->factory()->NewStringFromAsciiChecked(func_name),
unit),
- Object);
+ T);
}
-
UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString formatted;
-
- if (format_holder->numeric() == JSRelativeTimeFormat::Numeric::ALWAYS) {
- formatter->formatNumeric(number, unit_enum, formatted, status);
- } else {
- DCHECK_EQ(JSRelativeTimeFormat::Numeric::AUTO, format_holder->numeric());
- formatter->format(number, unit_enum, formatted, status);
+ icu::FormattedRelativeDateTime formatted =
+ (format->numeric() == JSRelativeTimeFormat::Numeric::ALWAYS)
+ ? formatter->formatNumericToValue(number, unit_enum, status)
+ : formatter->formatToValue(number, unit_enum, status);
+ if (U_FAILURE(status)) {
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), T);
}
+ return formatToResult(isolate, formatted, value,
+ UnitAsString(isolate, unit_enum));
+}
+MaybeHandle<String> FormatToString(
+ Isolate* isolate, const icu::FormattedRelativeDateTime& formatted,
+ Handle<Object> value, Handle<String> unit) {
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString result = formatted.toString(status);
if (U_FAILURE(status)) {
- THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), Object);
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), String);
}
+ return Intl::ToString(isolate, result);
+}
- if (to_parts) {
- icu::UnicodeString number_str;
- icu::FieldPosition pos;
- double abs_number = std::abs(number);
- formatter->getNumberFormat().format(abs_number, number_str, pos, status);
- if (U_FAILURE(status)) {
- THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError),
- Object);
- }
+Maybe<bool> AddLiteral(Isolate* isolate, Handle<JSArray> array,
+ const icu::UnicodeString& string, int32_t index,
+ int32_t start, int32_t limit) {
+ Handle<String> substring;
+ ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+ isolate, substring, Intl::ToString(isolate, string, start, limit),
+ Nothing<bool>());
+ Intl::AddElement(isolate, array, index, isolate->factory()->literal_string(),
+ substring);
+ return Just(true);
+}
+
+Maybe<bool> AddUnit(Isolate* isolate, Handle<JSArray> array,
+ const icu::UnicodeString& string, int32_t index,
+ int32_t start, int32_t limit, int32_t field_id,
+ Handle<Object> value, Handle<String> unit) {
+ Handle<String> substring;
+ ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+ isolate, substring, Intl::ToString(isolate, string, start, limit),
+ Nothing<bool>());
+ Intl::AddElement(isolate, array, index,
+ Intl::NumberFieldToType(isolate, value, field_id), substring,
+ isolate->factory()->unit_string(), unit);
+ return Just(true);
+}
- Handle<JSArray> elements;
- ASSIGN_RETURN_ON_EXCEPTION(isolate, elements,
- GenerateRelativeTimeFormatParts(
- isolate, formatted, number_str, unit_enum,
- abs_number, formatter->getNumberFormat()),
- Object);
- return elements;
+MaybeHandle<JSArray> FormatToJSArray(
+ Isolate* isolate, const icu::FormattedRelativeDateTime& formatted,
+ Handle<Object> value, Handle<String> unit) {
+ UErrorCode status = U_ZERO_ERROR;
+ icu::UnicodeString string = formatted.toString(status);
+
+ Factory* factory = isolate->factory();
+ Handle<JSArray> array = factory->NewJSArray(0);
+ icu::ConstrainedFieldPosition cfpos;
+ cfpos.constrainCategory(UFIELD_CATEGORY_NUMBER);
+ int32_t index = 0;
+
+ int32_t previous_end = 0;
+ Handle<String> substring;
+ std::vector<std::pair<int32_t, int32_t>> groups;
+ while (formatted.nextPosition(cfpos, status) && U_SUCCESS(status)) {
+ int32_t category = cfpos.getCategory();
+ int32_t field = cfpos.getField();
+ int32_t start = cfpos.getStart();
+ int32_t limit = cfpos.getLimit();
+ if (category == UFIELD_CATEGORY_NUMBER) {
+ if (field == UNUM_GROUPING_SEPARATOR_FIELD) {
+ groups.push_back(std::pair<int32_t, int32_t>(start, limit));
+ continue;
+ }
+ if (start > previous_end) {
+ Maybe<bool> maybe_added =
+ AddLiteral(isolate, array, string, index++, previous_end, start);
+ MAYBE_RETURN(maybe_added, Handle<JSArray>());
+ }
+ if (field == UNUM_INTEGER_FIELD) {
+ for (auto start_limit : groups) {
+ if (start_limit.first > start) {
+ Maybe<bool> maybe_added =
+ AddUnit(isolate, array, string, index++, start,
+ start_limit.first, field, value, unit);
+ MAYBE_RETURN(maybe_added, Handle<JSArray>());
+ maybe_added = AddUnit(isolate, array, string, index++,
+ start_limit.first, start_limit.second,
+ UNUM_GROUPING_SEPARATOR_FIELD, value, unit);
+ MAYBE_RETURN(maybe_added, Handle<JSArray>());
+ start = start_limit.second;
+ }
+ }
+ }
+ Maybe<bool> maybe_added = AddUnit(isolate, array, string, index++, start,
+ limit, field, value, unit);
+ MAYBE_RETURN(maybe_added, Handle<JSArray>());
+ previous_end = limit;
+ }
+ }
+ if (U_FAILURE(status)) {
+ THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), JSArray);
+ }
+ if (string.length() > previous_end) {
+ Maybe<bool> maybe_added = AddLiteral(isolate, array, string, index,
+ previous_end, string.length());
+ MAYBE_RETURN(maybe_added, Handle<JSArray>());
}
- return factory->NewStringFromTwoByte(Vector<const uint16_t>(
- reinterpret_cast<const uint16_t*>(formatted.getBuffer()),
- formatted.length()));
+ JSObject::ValidateElements(*array);
+ return array;
+}
+
+} // namespace
+
+MaybeHandle<String> JSRelativeTimeFormat::Format(
+ Isolate* isolate, Handle<Object> value_obj, Handle<Object> unit_obj,
+ Handle<JSRelativeTimeFormat> format) {
+ return FormatCommon<String>(isolate, format, value_obj, unit_obj,
+ "Intl.RelativeTimeFormat.prototype.format",
+ FormatToString);
+}
+
+MaybeHandle<JSArray> JSRelativeTimeFormat::FormatToParts(
+ Isolate* isolate, Handle<Object> value_obj, Handle<Object> unit_obj,
+ Handle<JSRelativeTimeFormat> format) {
+ return FormatCommon<JSArray>(
+ isolate, format, value_obj, unit_obj,
+ "Intl.RelativeTimeFormat.prototype.formatToParts", FormatToJSArray);
}
const std::set<std::string>& JSRelativeTimeFormat::GetAvailableLocales() {
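
The new FormatToJSArray above walks ICU's constrained field positions, emitting a typed part for every reported number field and "literal" parts for the gaps before, between, and after them. A standalone sketch of that splitting loop, with hypothetical types, no ICU dependency, and grouping-separator handling omitted:

#include <cstdint>
#include <string>
#include <vector>

struct Field { int32_t start; int32_t limit; std::string type; };
struct Part { std::string type; std::string value; };

std::vector<Part> SplitIntoParts(const std::string& formatted,
                                 const std::vector<Field>& fields) {
  std::vector<Part> parts;
  int32_t previous_end = 0;
  for (const Field& f : fields) {  // assumed sorted by start position
    if (f.start > previous_end) {  // gap before this field -> literal part
      parts.push_back(
          {"literal", formatted.substr(previous_end, f.start - previous_end)});
    }
    parts.push_back({f.type, formatted.substr(f.start, f.limit - f.start)});
    previous_end = f.limit;
  }
  if (static_cast<int32_t>(formatted.size()) > previous_end) {
    parts.push_back({"literal", formatted.substr(previous_end)});
  }
  return parts;
}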
diff --git a/deps/v8/src/objects/js-relative-time-format.h b/deps/v8/src/objects/js-relative-time-format.h
index 4bdaee9dfc..8c8ef7bbce 100644
--- a/deps/v8/src/objects/js-relative-time-format.h
+++ b/deps/v8/src/objects/js-relative-time-format.h
@@ -44,13 +44,16 @@ class JSRelativeTimeFormat : public JSObject {
Handle<String> NumericAsString() const;
// ecma402/#sec-Intl.RelativeTimeFormat.prototype.format
+ V8_WARN_UNUSED_RESULT static MaybeHandle<String> Format(
+ Isolate* isolate, Handle<Object> value_obj, Handle<Object> unit_obj,
+ Handle<JSRelativeTimeFormat> format);
+
// ecma402/#sec-Intl.RelativeTimeFormat.prototype.formatToParts
- V8_WARN_UNUSED_RESULT static MaybeHandle<Object> Format(
+ V8_WARN_UNUSED_RESULT static MaybeHandle<JSArray> FormatToParts(
Isolate* isolate, Handle<Object> value_obj, Handle<Object> unit_obj,
- Handle<JSRelativeTimeFormat> format_holder, const char* func_name,
- bool to_parts);
+ Handle<JSRelativeTimeFormat> format);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
DECL_CAST(JSRelativeTimeFormat)
@@ -107,7 +110,6 @@ class JSRelativeTimeFormat : public JSObject {
// Layout description.
#define JS_RELATIVE_TIME_FORMAT_FIELDS(V) \
- V(kJSRelativeTimeFormatOffset, kTaggedSize) \
V(kLocaleOffset, kTaggedSize) \
V(kICUFormatterOffset, kTaggedSize) \
V(kFlagsOffset, kTaggedSize) \
diff --git a/deps/v8/src/objects/js-segmenter.h b/deps/v8/src/objects/js-segmenter.h
index b54e042d17..4fd509eb0c 100644
--- a/deps/v8/src/objects/js-segmenter.h
+++ b/deps/v8/src/objects/js-segmenter.h
@@ -39,7 +39,7 @@ class JSSegmenter : public JSObject {
V8_WARN_UNUSED_RESULT static Handle<JSObject> ResolvedOptions(
Isolate* isolate, Handle<JSSegmenter> segmenter_holder);
- static const std::set<std::string>& GetAvailableLocales();
+ V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
Handle<String> GranularityAsString() const;
diff --git a/deps/v8/src/objects/js-weak-refs.h b/deps/v8/src/objects/js-weak-refs.h
index 975d8beca9..b846c2e608 100644
--- a/deps/v8/src/objects/js-weak-refs.h
+++ b/deps/v8/src/objects/js-weak-refs.h
@@ -22,7 +22,7 @@ class WeakCell;
class JSFinalizationGroup : public JSObject {
public:
DECL_PRINTER(JSFinalizationGroup)
- DECL_VERIFIER(JSFinalizationGroup)
+ EXPORT_DECL_VERIFIER(JSFinalizationGroup)
DECL_CAST(JSFinalizationGroup)
DECL_ACCESSORS(native_context, NativeContext)
@@ -86,7 +86,7 @@ class JSFinalizationGroup : public JSObject {
class WeakCell : public HeapObject {
public:
DECL_PRINTER(WeakCell)
- DECL_VERIFIER(WeakCell)
+ EXPORT_DECL_VERIFIER(WeakCell)
DECL_CAST(WeakCell)
DECL_ACCESSORS(finalization_group, Object)
@@ -141,7 +141,7 @@ class WeakCell : public HeapObject {
class JSWeakRef : public JSObject {
public:
DECL_PRINTER(JSWeakRef)
- DECL_VERIFIER(JSWeakRef)
+ EXPORT_DECL_VERIFIER(JSWeakRef)
DECL_CAST(JSWeakRef)
DECL_ACCESSORS(target, HeapObject)
diff --git a/deps/v8/src/objects/literal-objects.cc b/deps/v8/src/objects/literal-objects.cc
index 024d6845b9..d699ac7345 100644
--- a/deps/v8/src/objects/literal-objects.cc
+++ b/deps/v8/src/objects/literal-objects.cc
@@ -71,7 +71,7 @@ namespace {
inline int EncodeComputedEntry(ClassBoilerplate::ValueKind value_kind,
unsigned key_index) {
- typedef ClassBoilerplate::ComputedEntryFlags Flags;
+ using Flags = ClassBoilerplate::ComputedEntryFlags;
int flags = Flags::ValueKindBits::encode(value_kind) |
Flags::KeyIndexBits::encode(key_index);
return flags;
@@ -255,9 +255,15 @@ void AddToDictionaryTemplate(Isolate* isolate, Handle<Dictionary> dictionary,
}
}
} else {
- // Overwrite existing value if it was defined before the computed one.
- int existing_value_index = Smi::ToInt(existing_value);
- if (existing_value_index < key_index) {
+ // Overwrite existing value if it was defined before the computed one
+ // (AccessorInfo "length" property is always defined before).
+ DCHECK_IMPLIES(!existing_value->IsSmi(),
+ existing_value->IsAccessorInfo());
+ DCHECK_IMPLIES(!existing_value->IsSmi(),
+ AccessorInfo::cast(existing_value)->name() ==
+ *isolate->factory()->length_string());
+ if (!existing_value->IsSmi() ||
+ Smi::ToInt(existing_value) < key_index) {
PropertyDetails details(kData, DONT_ENUM, PropertyCellType::kNoCell,
enum_order);
dictionary->DetailsAtPut(isolate, entry, details);
@@ -269,6 +275,7 @@ void AddToDictionaryTemplate(Isolate* isolate, Handle<Dictionary> dictionary,
? ACCESSOR_GETTER
: ACCESSOR_SETTER;
if (existing_value->IsAccessorPair()) {
+ // Update respective component of existing AccessorPair.
AccessorPair current_pair = AccessorPair::cast(existing_value);
int existing_component_index =
@@ -278,6 +285,7 @@ void AddToDictionaryTemplate(Isolate* isolate, Handle<Dictionary> dictionary,
}
} else {
+ // Overwrite existing value with new AccessorPair.
Handle<AccessorPair> pair(isolate->factory()->NewAccessorPair());
pair->set(component, value);
PropertyDetails details(kAccessor, DONT_ENUM, PropertyCellType::kNoCell,
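
The dictionary-template change above boils down to an ordering rule: a computed class member replaces an existing entry only if that entry was defined earlier, and the AccessorInfo "length" property always counts as earlier. A tiny standalone restatement of that predicate (hypothetical helper, not V8 API):

#include <optional>

// Returns true when the computed member should replace the existing entry.
bool ShouldOverwrite(std::optional<int> existing_definition_index,
                     int computed_key_index) {
  // No index means the entry is the pre-installed "length" accessor, which
  // always predates computed members, so the computed value wins.
  if (!existing_definition_index.has_value()) return true;
  // Otherwise the later definition wins.
  return *existing_definition_index < computed_key_index;
}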
diff --git a/deps/v8/src/objects/managed.h b/deps/v8/src/objects/managed.h
index 9842ef7c0d..f1d42380dc 100644
--- a/deps/v8/src/objects/managed.h
+++ b/deps/v8/src/objects/managed.h
@@ -36,7 +36,8 @@ struct ManagedPtrDestructor {
// The GC finalizer of a managed object, which does not depend on
// the template parameter.
-void ManagedObjectFinalizer(const v8::WeakCallbackInfo<void>& data);
+V8_EXPORT_PRIVATE void ManagedObjectFinalizer(
+ const v8::WeakCallbackInfo<void>& data);
// {Managed<T>} is essentially a {std::shared_ptr<T>} allocated on the heap
// that can be used to manage the lifetime of C++ objects that are shared
@@ -55,8 +56,8 @@ class Managed : public Foreign {
// Get a raw pointer to the C++ object.
V8_INLINE CppType* raw() { return GetSharedPtrPtr()->get(); }
- // Get a copy of the shared pointer to the C++ object.
- V8_INLINE std::shared_ptr<CppType> get() { return *GetSharedPtrPtr(); }
+ // Get a reference to the shared pointer to the C++ object.
+ V8_INLINE const std::shared_ptr<CppType>& get() { return *GetSharedPtrPtr(); }
static Managed cast(Object obj) { return Managed(obj->ptr()); }
static Managed unchecked_cast(Object obj) { return bit_cast<Managed>(obj); }
@@ -66,9 +67,9 @@ class Managed : public Foreign {
static Handle<Managed<CppType>> Allocate(Isolate* isolate,
size_t estimated_size,
Args&&... args) {
- CppType* ptr = new CppType(std::forward<Args>(args)...);
- return FromSharedPtr(isolate, estimated_size,
- std::shared_ptr<CppType>(ptr));
+ return FromSharedPtr(
+ isolate, estimated_size,
+ std::make_shared<CppType>(std::forward<Args>(args)...));
}
// Create a {Managed<CppType>} from an existing raw {CppType*}. The returned
@@ -77,7 +78,7 @@ class Managed : public Foreign {
size_t estimated_size,
CppType* ptr) {
return FromSharedPtr(isolate, estimated_size,
- std::shared_ptr<CppType>(ptr));
+ std::shared_ptr<CppType>{ptr});
}
// Create a {Managed<CppType>} from an existing {std::unique_ptr<CppType>}.
@@ -92,11 +93,11 @@ class Managed : public Foreign {
// Create a {Managed<CppType>} from an existing {std::shared_ptr<CppType>}.
static Handle<Managed<CppType>> FromSharedPtr(
Isolate* isolate, size_t estimated_size,
- std::shared_ptr<CppType> shared_ptr) {
+ const std::shared_ptr<CppType>& shared_ptr) {
reinterpret_cast<v8::Isolate*>(isolate)
->AdjustAmountOfExternalAllocatedMemory(estimated_size);
auto destructor = new ManagedPtrDestructor(
- estimated_size, new std::shared_ptr<CppType>(shared_ptr), Destructor);
+ estimated_size, new std::shared_ptr<CppType>{shared_ptr}, Destructor);
Handle<Managed<CppType>> handle = Handle<Managed<CppType>>::cast(
isolate->factory()->NewForeign(reinterpret_cast<Address>(destructor)));
Handle<Object> global_handle = isolate->global_handles()->Create(*handle);
@@ -118,8 +119,8 @@ class Managed : public Foreign {
destructor->shared_ptr_ptr_);
}
- // Called by either isolate shutdown or the {ManagedObjectFinalizer} in
- // order to actually delete the shared pointer (i.e. decrement its refcount).
+ // Called by either isolate shutdown or the {ManagedObjectFinalizer} in order
+ // to actually delete the shared pointer and decrement the shared refcount.
static void Destructor(void* ptr) {
auto shared_ptr_ptr = reinterpret_cast<std::shared_ptr<CppType>*>(ptr);
delete shared_ptr_ptr;
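
The Managed<T> changes above keep the same ownership trick: a heap-allocated copy of the shared_ptr is stashed behind a void* so a GC finalizer can later delete it and drop the refcount. A standalone sketch of that pattern, using placeholder names rather than the V8 classes:

#include <memory>

struct Resource {};  // stands in for the shared C++ object

// Heap-allocate an extra shared_ptr copy; its address is what the GC'd
// wrapper and its finalizer carry around as an opaque pointer.
void* RetainForFinalizer(const std::shared_ptr<Resource>& shared) {
  return new std::shared_ptr<Resource>(shared);
}

// Mirrors the Destructor above: deleting the copy decrements the refcount and
// destroys the Resource once no other owners remain.
void ReleaseFromFinalizer(void* data) {
  delete static_cast<std::shared_ptr<Resource>*>(data);
}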
diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h
index c91902f130..792e12d126 100644
--- a/deps/v8/src/objects/map-inl.h
+++ b/deps/v8/src/objects/map-inl.h
@@ -54,7 +54,11 @@ SYNCHRONIZED_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)
// |bit_field| fields.
-BIT_FIELD_ACCESSORS(Map, bit_field, has_non_instance_prototype,
+// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
+// is explicitly whitelisted here. The former is never modified after the map
+// is set up, but it is read by the concurrent marker when pointer compression
+// is enabled. The latter bit can be modified on live objects.
+BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
@@ -65,7 +69,7 @@ BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
-BIT_FIELD_ACCESSORS(Map, bit_field, has_prototype_slot,
+BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
Map::HasPrototypeSlotBit)
// |bit_field2| fields.
@@ -422,6 +426,14 @@ void Map::set_bit_field(byte value) {
WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}
+byte Map::relaxed_bit_field() const {
+ return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
+}
+
+void Map::set_relaxed_bit_field(byte value) {
+ RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
+}
+
byte Map::bit_field2() const {
return READ_BYTE_FIELD(*this, kBitField2Offset);
}
@@ -488,6 +500,18 @@ bool Map::has_dictionary_elements() const {
return IsDictionaryElementsKind(elements_kind());
}
+bool Map::has_frozen_or_sealed_elements() const {
+ return IsPackedFrozenOrSealedElementsKind(elements_kind());
+}
+
+bool Map::has_sealed_elements() const {
+ return IsSealedElementsKind(elements_kind());
+}
+
+bool Map::has_frozen_elements() const {
+ return IsFrozenElementsKind(elements_kind());
+}
+
void Map::set_is_dictionary_map(bool value) {
uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
@@ -552,9 +576,11 @@ bool Map::IsPrimitiveMap() const {
return instance_type() <= LAST_PRIMITIVE_TYPE;
}
-Object Map::prototype() const { return READ_FIELD(*this, kPrototypeOffset); }
+HeapObject Map::prototype() const {
+ return HeapObject::cast(READ_FIELD(*this, kPrototypeOffset));
+}
-void Map::set_prototype(Object value, WriteBarrierMode mode) {
+void Map::set_prototype(HeapObject value, WriteBarrierMode mode) {
DCHECK(value->IsNull() || value->IsJSReceiver());
WRITE_FIELD(*this, kPrototypeOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kPrototypeOffset, value, mode);
@@ -666,10 +692,10 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
#endif
}
-Object Map::GetBackPointer() const {
+HeapObject Map::GetBackPointer() const {
Object object = constructor_or_backpointer();
if (object->IsMap()) {
- return object;
+ return Map::cast(object);
}
return GetReadOnlyRoots().undefined_value();
}
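
The relaxed_bit_field accessors introduced above exist so the whitelisted concurrent reads and writes go through atomic, unordered accesses instead of plain loads and stores. A standalone sketch of the equivalent using std::atomic (illustrative only, not V8's RELAXED_*_BYTE_FIELD macros):

#include <atomic>
#include <cstdint>

struct BitFieldHolder {
  std::atomic<uint8_t> bit_field{0};

  // Relaxed accesses give atomicity but no ordering; that is acceptable here
  // because readers only need a consistent byte, not fresh flag values.
  uint8_t relaxed_bit_field() const {
    return bit_field.load(std::memory_order_relaxed);
  }
  void set_relaxed_bit_field(uint8_t value) {
    bit_field.store(value, std::memory_order_relaxed);
  }
};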
diff --git a/deps/v8/src/objects/map.cc b/deps/v8/src/objects/map.cc
index 1e40d32650..80ea74a176 100644
--- a/deps/v8/src/objects/map.cc
+++ b/deps/v8/src/objects/map.cc
@@ -56,6 +56,18 @@ MaybeHandle<JSFunction> Map::GetConstructorFunction(
return MaybeHandle<JSFunction>();
}
+bool Map::IsMapOfGlobalProxy(Handle<NativeContext> native_context) const {
+ DisallowHeapAllocation no_gc;
+ if (IsJSGlobalProxyMap()) {
+ Object maybe_constructor = GetConstructor();
+ // Detached global proxies have |null| as their constructor.
+ return maybe_constructor.IsJSFunction() &&
+ JSFunction::cast(maybe_constructor).native_context() ==
+ *native_context;
+ }
+ return false;
+}
+
void Map::PrintReconfiguration(Isolate* isolate, FILE* file, int modify_index,
PropertyKind kind,
PropertyAttributes attributes) {
@@ -121,6 +133,7 @@ VisitorId Map::GetVisitorId(Map map) {
case FIXED_ARRAY_TYPE:
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
+ case CLOSURE_FEEDBACK_CELL_ARRAY_TYPE:
case HASH_TABLE_TYPE:
case ORDERED_HASH_MAP_TYPE:
case ORDERED_HASH_SET_TYPE:
@@ -735,16 +748,14 @@ void Map::UpdateFieldType(Isolate* isolate, int descriptor, Handle<Name> name,
DescriptorArray descriptors = current->instance_descriptors();
PropertyDetails details = descriptors->GetDetails(descriptor);
- // Currently constness change implies map change.
- DCHECK_IMPLIES(new_constness != details.constness(),
- FLAG_modify_map_inplace);
-
- // It is allowed to change representation here only from None to something.
+ // It is allowed to change representation here only from None
+ // to something or from Smi or HeapObject to Tagged.
DCHECK(details.representation().Equals(new_representation) ||
- details.representation().IsNone());
+ details.representation().CanBeInPlaceChangedTo(new_representation));
// Skip if already updated the shared descriptor.
- if ((FLAG_modify_map_inplace && new_constness != details.constness()) ||
+ if (new_constness != details.constness() ||
+ !new_representation.Equals(details.representation()) ||
descriptors->GetFieldType(descriptor) != *new_wrapped_type.object()) {
DCHECK_IMPLIES(!FLAG_track_constant_fields,
new_constness == PropertyConstness::kMutable);
@@ -792,9 +803,7 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map, int modify_index,
// Return if the current map is general enough to hold requested constness and
// representation/field type.
- if (((FLAG_modify_map_inplace &&
- IsGeneralizableTo(new_constness, old_constness)) ||
- (!FLAG_modify_map_inplace && (old_constness == new_constness))) &&
+ if (IsGeneralizableTo(new_constness, old_constness) &&
old_representation.Equals(new_representation) &&
!FieldTypeIsCleared(new_representation, *new_field_type) &&
// Checking old_field_type for being cleared is not necessary because
@@ -815,9 +824,8 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map, int modify_index,
new_field_type =
Map::GeneralizeFieldType(old_representation, old_field_type,
new_representation, new_field_type, isolate);
- if (FLAG_modify_map_inplace) {
- new_constness = GeneralizeConstness(old_constness, new_constness);
- }
+
+ new_constness = GeneralizeConstness(old_constness, new_constness);
PropertyDetails details = descriptors->GetDetails(modify_index);
Handle<Name> name(descriptors->GetKey(modify_index), isolate);
@@ -1005,9 +1013,10 @@ Map Map::TryUpdateSlow(Isolate* isolate, Map old_map) {
// Bail out if there were some private symbol transitions mixed up
// with the integrity level transitions.
if (!info.has_integrity_level_transition) return Map();
- // Make sure replay the original elements kind transitions, before
+ // Make sure to replay the original elements kind transitions, before
// the integrity level transition sets the elements to dictionary mode.
DCHECK(to_kind == DICTIONARY_ELEMENTS ||
+ to_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
IsFixedTypedArrayElementsKind(to_kind));
to_kind = info.integrity_level_source_map->elements_kind();
}
@@ -1029,8 +1038,10 @@ Map Map::TryUpdateSlow(Isolate* isolate, Map old_map) {
.SearchSpecial(info.integrity_level_symbol);
}
- DCHECK_EQ(old_map->elements_kind(), result->elements_kind());
- DCHECK_EQ(old_map->instance_type(), result->instance_type());
+ DCHECK_IMPLIES(!result.is_null(),
+ old_map->elements_kind() == result->elements_kind());
+ DCHECK_IMPLIES(!result.is_null(),
+ old_map->instance_type() == result->instance_type());
return result;
}
@@ -1464,7 +1475,7 @@ Handle<Map> Map::RawCopy(Isolate* isolate, Handle<Map> map, int instance_size,
Handle<Map> result = isolate->factory()->NewMap(
map->instance_type(), instance_size, TERMINAL_FAST_ELEMENTS_KIND,
inobject_properties);
- Handle<Object> prototype(map->prototype(), isolate);
+ Handle<HeapObject> prototype(map->prototype(), isolate);
Map::SetPrototype(isolate, result, prototype);
result->set_constructor_or_backpointer(map->GetConstructor());
result->set_bit_field(map->bit_field());
@@ -1515,12 +1526,9 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
MaybeObject::FromObject(Smi::kZero));
STATIC_ASSERT(kDescriptorsOffset ==
kTransitionsOrPrototypeInfoOffset + kTaggedSize);
- DCHECK_EQ(
- 0,
- memcmp(
- HeapObject::RawField(*fresh, kDescriptorsOffset).ToVoidPtr(),
- HeapObject::RawField(*new_map, kDescriptorsOffset).ToVoidPtr(),
- kDependentCodeOffset - kDescriptorsOffset));
+ DCHECK_EQ(0, memcmp(fresh->RawField(kDescriptorsOffset).ToVoidPtr(),
+ new_map->RawField(kDescriptorsOffset).ToVoidPtr(),
+ kDependentCodeOffset - kDescriptorsOffset));
} else {
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
reinterpret_cast<void*>(new_map->address()),
@@ -2035,6 +2043,24 @@ Handle<Map> Map::CopyForPreventExtensions(Isolate* isolate, Handle<Map> map,
ElementsKind new_kind = IsStringWrapperElementsKind(map->elements_kind())
? SLOW_STRING_WRAPPER_ELEMENTS
: DICTIONARY_ELEMENTS;
+ if (FLAG_enable_sealed_frozen_elements_kind) {
+ switch (map->elements_kind()) {
+ case PACKED_ELEMENTS:
+ if (attrs_to_add == SEALED) {
+ new_kind = PACKED_SEALED_ELEMENTS;
+ } else if (attrs_to_add == FROZEN) {
+ new_kind = PACKED_FROZEN_ELEMENTS;
+ }
+ break;
+ case PACKED_SEALED_ELEMENTS:
+ if (attrs_to_add == FROZEN) {
+ new_kind = PACKED_FROZEN_ELEMENTS;
+ }
+ break;
+ default:
+ break;
+ }
+ }
new_map->set_elements_kind(new_kind);
}
return new_map;
@@ -2175,7 +2201,7 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
result = Map::Normalize(isolate, initial_map, CLEAR_INOBJECT_PROPERTIES,
reason);
initial_map->DeprecateTransitionTree(isolate);
- Handle<Object> prototype(result->prototype(), isolate);
+ Handle<HeapObject> prototype(result->prototype(), isolate);
JSFunction::SetInitialMap(constructor, result, prototype);
// Deoptimize all code that embeds the previous initial map.
@@ -2618,7 +2644,7 @@ bool Map::IsPrototypeChainInvalidated(Map map) {
// static
void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
- Handle<Object> prototype,
+ Handle<HeapObject> prototype,
bool enable_prototype_setup_mode) {
RuntimeCallTimerScope stats_scope(isolate, *map,
RuntimeCallCounterId::kMap_SetPrototype);
@@ -2640,6 +2666,8 @@ void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
FunctionTemplateInfo::cast(maybe_constructor)->hidden_prototype() ||
prototype->IsJSGlobalObject();
}
+ } else {
+ DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy());
}
map->set_has_hidden_prototype(is_hidden);
@@ -2655,7 +2683,7 @@ void Map::StartInobjectSlackTracking() {
}
Handle<Map> Map::TransitionToPrototype(Isolate* isolate, Handle<Map> map,
- Handle<Object> prototype) {
+ Handle<HeapObject> prototype) {
Handle<Map> new_map =
TransitionsAccessor(isolate, map).GetPrototypeTransition(prototype);
if (new_map.is_null()) {
@@ -2669,7 +2697,7 @@ Handle<Map> Map::TransitionToPrototype(Isolate* isolate, Handle<Map> map,
Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
Handle<WeakFixedArray> array(
- isolate->factory()->NewWeakFixedArray(kEntries, TENURED));
+ isolate->factory()->NewWeakFixedArray(kEntries, AllocationType::kOld));
return Handle<NormalizedMapCache>::cast(array);
}
diff --git a/deps/v8/src/objects/map.h b/deps/v8/src/objects/map.h
index 7424ee4181..96c09e1664 100644
--- a/deps/v8/src/objects/map.h
+++ b/deps/v8/src/objects/map.h
@@ -93,7 +93,7 @@ enum class ObjectFields {
kMaybePointers,
};
-typedef std::vector<Handle<Map>> MapHandles;
+using MapHandles = std::vector<Handle<Map>>;
// All heap objects have a Map that describes their structure.
// A Map contains information about:
@@ -240,6 +240,8 @@ class Map : public HeapObject {
// Bit field.
//
DECL_PRIMITIVE_ACCESSORS(bit_field, byte)
+ // Atomic accessors, used for whitelisting legitimate concurrent accesses.
+ DECL_PRIMITIVE_ACCESSORS(relaxed_bit_field, byte)
// Bit positions for |bit_field|.
#define MAP_BIT_FIELD_FIELDS(V, _) \
@@ -353,7 +355,7 @@ class Map : public HeapObject {
// Completes inobject slack tracking for the transition tree starting at this
// initial map.
- void CompleteInobjectSlackTracking(Isolate* isolate);
+ V8_EXPORT_PRIVATE void CompleteInobjectSlackTracking(Isolate* isolate);
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
@@ -419,6 +421,9 @@ class Map : public HeapObject {
inline bool has_fast_string_wrapper_elements() const;
inline bool has_fixed_typed_array_elements() const;
inline bool has_dictionary_elements() const;
+ inline bool has_frozen_or_sealed_elements() const;
+ inline bool has_sealed_elements() const;
+ inline bool has_frozen_elements() const;
// Returns true if the current map doesn't have DICTIONARY_ELEMENTS but if a
// map with DICTIONARY_ELEMENTS was found in the prototype chain.
@@ -461,8 +466,8 @@ class Map : public HeapObject {
// Return the map of the root of object's prototype chain.
Map GetPrototypeChainRootMap(Isolate* isolate) const;
- Map FindRootMap(Isolate* isolate) const;
- Map FindFieldOwner(Isolate* isolate, int descriptor) const;
+ V8_EXPORT_PRIVATE Map FindRootMap(Isolate* isolate) const;
+ V8_EXPORT_PRIVATE Map FindFieldOwner(Isolate* isolate, int descriptor) const;
inline int GetInObjectPropertyOffset(int index) const;
@@ -525,25 +530,22 @@ class Map : public HeapObject {
PropertyConstness* constness, Representation* representation,
Handle<FieldType>* field_type);
- static Handle<Map> ReconfigureProperty(Isolate* isolate, Handle<Map> map,
- int modify_index,
- PropertyKind new_kind,
- PropertyAttributes new_attributes,
- Representation new_representation,
- Handle<FieldType> new_field_type);
+ V8_EXPORT_PRIVATE static Handle<Map> ReconfigureProperty(
+ Isolate* isolate, Handle<Map> map, int modify_index,
+ PropertyKind new_kind, PropertyAttributes new_attributes,
+ Representation new_representation, Handle<FieldType> new_field_type);
- static Handle<Map> ReconfigureElementsKind(Isolate* isolate, Handle<Map> map,
- ElementsKind new_elements_kind);
+ V8_EXPORT_PRIVATE static Handle<Map> ReconfigureElementsKind(
+ Isolate* isolate, Handle<Map> map, ElementsKind new_elements_kind);
- static Handle<Map> PrepareForDataProperty(Isolate* isolate,
- Handle<Map> old_map,
- int descriptor_number,
- PropertyConstness constness,
- Handle<Object> value);
+ V8_EXPORT_PRIVATE static Handle<Map> PrepareForDataProperty(
+ Isolate* isolate, Handle<Map> old_map, int descriptor_number,
+ PropertyConstness constness, Handle<Object> value);
- static Handle<Map> Normalize(Isolate* isolate, Handle<Map> map,
- PropertyNormalizationMode mode,
- const char* reason);
+ V8_EXPORT_PRIVATE static Handle<Map> Normalize(Isolate* isolate,
+ Handle<Map> map,
+ PropertyNormalizationMode mode,
+ const char* reason);
// Tells whether the map is used for JSObjects in dictionary mode (ie
// normalized objects, ie objects for which HasFastProperties returns false).
@@ -556,11 +558,11 @@ class Map : public HeapObject {
DECL_BOOLEAN_ACCESSORS(is_access_check_needed)
// [prototype]: implicit prototype object.
- DECL_ACCESSORS(prototype, Object)
+ DECL_ACCESSORS(prototype, HeapObject)
// TODO(jkummerow): make set_prototype private.
- static void SetPrototype(Isolate* isolate, Handle<Map> map,
- Handle<Object> prototype,
- bool enable_prototype_setup_mode = true);
+ V8_EXPORT_PRIVATE static void SetPrototype(
+ Isolate* isolate, Handle<Map> map, Handle<HeapObject> prototype,
+ bool enable_prototype_setup_mode = true);
// [constructor]: points back to the function or FunctionTemplateInfo
// responsible for this map.
@@ -576,15 +578,16 @@ class Map : public HeapObject {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// [back pointer]: points back to the parent map from which a transition
// leads to this map. The field overlaps with the constructor (see above).
- inline Object GetBackPointer() const;
+ inline HeapObject GetBackPointer() const;
inline void SetBackPointer(Object value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// [instance descriptors]: describes the object.
inline DescriptorArray instance_descriptors() const;
inline DescriptorArray synchronized_instance_descriptors() const;
- void SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
- int number_of_own_descriptors);
+ V8_EXPORT_PRIVATE void SetInstanceDescriptors(Isolate* isolate,
+ DescriptorArray descriptors,
+ int number_of_own_descriptors);
// [layout descriptor]: describes the object layout.
DECL_ACCESSORS(layout_descriptor, LayoutDescriptor)
@@ -666,49 +669,52 @@ class Map : public HeapObject {
// is found by re-transitioning from the root of the transition tree using the
// descriptor array of the map. Returns MaybeHandle<Map>() if no updated map
// is found.
- static MaybeHandle<Map> TryUpdate(Isolate* isolate,
- Handle<Map> map) V8_WARN_UNUSED_RESULT;
- static Map TryUpdateSlow(Isolate* isolate, Map map) V8_WARN_UNUSED_RESULT;
+ V8_EXPORT_PRIVATE static MaybeHandle<Map> TryUpdate(
+ Isolate* isolate, Handle<Map> map) V8_WARN_UNUSED_RESULT;
+ V8_EXPORT_PRIVATE static Map TryUpdateSlow(Isolate* isolate,
+ Map map) V8_WARN_UNUSED_RESULT;
// Returns a non-deprecated version of the input. This method may deprecate
// existing maps along the way if encodings conflict. Not for use while
// gathering type feedback. Use TryUpdate in those cases instead.
- static Handle<Map> Update(Isolate* isolate, Handle<Map> map);
+ V8_EXPORT_PRIVATE static Handle<Map> Update(Isolate* isolate,
+ Handle<Map> map);
static inline Handle<Map> CopyInitialMap(Isolate* isolate, Handle<Map> map);
- static Handle<Map> CopyInitialMap(Isolate* isolate, Handle<Map> map,
- int instance_size, int in_object_properties,
- int unused_property_fields);
+ V8_EXPORT_PRIVATE static Handle<Map> CopyInitialMap(
+ Isolate* isolate, Handle<Map> map, int instance_size,
+ int in_object_properties, int unused_property_fields);
static Handle<Map> CopyInitialMapNormalized(
Isolate* isolate, Handle<Map> map,
PropertyNormalizationMode mode = CLEAR_INOBJECT_PROPERTIES);
static Handle<Map> CopyDropDescriptors(Isolate* isolate, Handle<Map> map);
- static Handle<Map> CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
- Descriptor* descriptor,
- TransitionFlag flag);
+ V8_EXPORT_PRIVATE static Handle<Map> CopyInsertDescriptor(
+ Isolate* isolate, Handle<Map> map, Descriptor* descriptor,
+ TransitionFlag flag);
static MaybeObjectHandle WrapFieldType(Isolate* isolate,
Handle<FieldType> type);
- static FieldType UnwrapFieldType(MaybeObject wrapped_type);
+ V8_EXPORT_PRIVATE static FieldType UnwrapFieldType(MaybeObject wrapped_type);
- V8_WARN_UNUSED_RESULT static MaybeHandle<Map> CopyWithField(
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static MaybeHandle<Map> CopyWithField(
Isolate* isolate, Handle<Map> map, Handle<Name> name,
Handle<FieldType> type, PropertyAttributes attributes,
PropertyConstness constness, Representation representation,
TransitionFlag flag);
- V8_WARN_UNUSED_RESULT static MaybeHandle<Map> CopyWithConstant(
- Isolate* isolate, Handle<Map> map, Handle<Name> name,
- Handle<Object> constant, PropertyAttributes attributes,
- TransitionFlag flag);
+ V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static MaybeHandle<Map>
+ CopyWithConstant(Isolate* isolate, Handle<Map> map, Handle<Name> name,
+ Handle<Object> constant, PropertyAttributes attributes,
+ TransitionFlag flag);
// Returns a new map with all transitions dropped from the given map and
// the ElementsKind set.
static Handle<Map> TransitionElementsTo(Isolate* isolate, Handle<Map> map,
ElementsKind to_kind);
- static Handle<Map> AsElementsKind(Isolate* isolate, Handle<Map> map,
- ElementsKind kind);
+ V8_EXPORT_PRIVATE static Handle<Map> AsElementsKind(Isolate* isolate,
+ Handle<Map> map,
+ ElementsKind kind);
static Handle<Map> CopyAsElementsKind(Isolate* isolate, Handle<Map> map,
ElementsKind kind, TransitionFlag flag);
@@ -716,10 +722,9 @@ class Map : public HeapObject {
static Handle<Map> AsLanguageMode(Isolate* isolate, Handle<Map> initial_map,
Handle<SharedFunctionInfo> shared_info);
- static Handle<Map> CopyForPreventExtensions(Isolate* isolate, Handle<Map> map,
- PropertyAttributes attrs_to_add,
- Handle<Symbol> transition_marker,
- const char* reason);
+ V8_EXPORT_PRIVATE static Handle<Map> CopyForPreventExtensions(
+ Isolate* isolate, Handle<Map> map, PropertyAttributes attrs_to_add,
+ Handle<Symbol> transition_marker, const char* reason);
static Handle<Map> FixProxy(Handle<Map> map, InstanceType type, int size);
@@ -727,21 +732,17 @@ class Map : public HeapObject {
// transitions to avoid an explosion in the number of maps for objects used as
// dictionaries.
inline bool TooManyFastProperties(StoreOrigin store_origin) const;
- static Handle<Map> TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
- Handle<Name> name,
- Handle<Object> value,
- PropertyAttributes attributes,
- PropertyConstness constness,
- StoreOrigin store_origin);
- static Handle<Map> TransitionToAccessorProperty(
+ V8_EXPORT_PRIVATE static Handle<Map> TransitionToDataProperty(
+ Isolate* isolate, Handle<Map> map, Handle<Name> name,
+ Handle<Object> value, PropertyAttributes attributes,
+ PropertyConstness constness, StoreOrigin store_origin);
+ V8_EXPORT_PRIVATE static Handle<Map> TransitionToAccessorProperty(
Isolate* isolate, Handle<Map> map, Handle<Name> name, int descriptor,
Handle<Object> getter, Handle<Object> setter,
PropertyAttributes attributes);
- static Handle<Map> ReconfigureExistingProperty(Isolate* isolate,
- Handle<Map> map,
- int descriptor,
- PropertyKind kind,
- PropertyAttributes attributes);
+ V8_EXPORT_PRIVATE static Handle<Map> ReconfigureExistingProperty(
+ Isolate* isolate, Handle<Map> map, int descriptor, PropertyKind kind,
+ PropertyAttributes attributes);
inline void AppendDescriptor(Isolate* isolate, Descriptor* desc);
@@ -755,7 +756,8 @@ class Map : public HeapObject {
// instance descriptors.
static Handle<Map> Copy(Isolate* isolate, Handle<Map> map,
const char* reason);
- static Handle<Map> Create(Isolate* isolate, int inobject_properties);
+ V8_EXPORT_PRIVATE static Handle<Map> Create(Isolate* isolate,
+ int inobject_properties);
// Returns the next free property index (only valid for FAST MODE).
int NextFreePropertyIndex() const;
@@ -767,8 +769,9 @@ class Map : public HeapObject {
static inline int SlackForArraySize(int old_size, int size_limit);
- static void EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map,
- int slack);
+ V8_EXPORT_PRIVATE static void EnsureDescriptorSlack(Isolate* isolate,
+ Handle<Map> map,
+ int slack);
// Returns the map to be used for instances when the given {prototype} is
// passed to an Object.create call. Might transition the given {prototype}.
@@ -786,8 +789,8 @@ class Map : public HeapObject {
// Returns the transitioned map for this map with the most generic
// elements_kind that's found in |candidates|, or |nullptr| if no match is
// found at all.
- Map FindElementsKindTransitionedMap(Isolate* isolate,
- MapHandles const& candidates);
+ V8_EXPORT_PRIVATE Map FindElementsKindTransitionedMap(
+ Isolate* isolate, MapHandles const& candidates);
inline bool CanTransition() const;
@@ -818,8 +821,8 @@ class Map : public HeapObject {
: ObjectFields::kMaybePointers;
}
- static Handle<Map> TransitionToPrototype(Isolate* isolate, Handle<Map> map,
- Handle<Object> prototype);
+ V8_EXPORT_PRIVATE static Handle<Map> TransitionToPrototype(
+ Isolate* isolate, Handle<Map> map, Handle<HeapObject> prototype);
static Handle<Map> TransitionToImmutableProto(Isolate* isolate,
Handle<Map> map);
@@ -881,7 +884,7 @@ class Map : public HeapObject {
// the descriptor array.
inline void NotifyLeafMapLayoutChange(Isolate* isolate);
- static VisitorId GetVisitorId(Map map);
+ V8_EXPORT_PRIVATE static VisitorId GetVisitorId(Map map);
// Returns true if objects with given instance type are allowed to have
// fast transitionable elements kinds. This predicate is used to ensure
@@ -893,6 +896,9 @@ class Map : public HeapObject {
InstanceType instance_type);
inline bool CanHaveFastTransitionableElementsKind() const;
+ // Whether this is the map of the given native context's global proxy.
+ bool IsMapOfGlobalProxy(Handle<NativeContext> native_context) const;
+
private:
// This byte encodes either the instance size without the in-object slack or
// the slack size in properties backing store.
@@ -929,7 +935,7 @@ class Map : public HeapObject {
static Handle<Map> ShareDescriptor(Isolate* isolate, Handle<Map> map,
Handle<DescriptorArray> descriptors,
Descriptor* descriptor);
- static Handle<Map> AddMissingTransitions(
+ V8_EXPORT_PRIVATE static Handle<Map> AddMissingTransitions(
Isolate* isolate, Handle<Map> map, Handle<DescriptorArray> descriptors,
Handle<LayoutDescriptor> full_layout_descriptor);
static void InstallDescriptors(
diff --git a/deps/v8/src/objects/name.h b/deps/v8/src/objects/name.h
index c3c7fd68da..c17f73f775 100644
--- a/deps/v8/src/objects/name.h
+++ b/deps/v8/src/objects/name.h
@@ -7,6 +7,7 @@
#include "src/objects.h"
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -67,9 +68,10 @@ class Name : public HeapObject {
void NameShortPrint();
int NameShortPrint(Vector<char> str);
- // Layout description.
- static const int kHashFieldOffset = HeapObject::kHeaderSize;
- static const int kHeaderSize = kHashFieldOffset + kInt32Size;
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_NAME_FIELDS)
+
+ static const int kHeaderSize = kSize;
// Mask constant for checking if a name has a computed hash code
// and if it is a string that is an array index. The least significant bit
@@ -175,15 +177,8 @@ class Symbol : public Name {
DECL_PRINTER(Symbol)
DECL_VERIFIER(Symbol)
- // Layout description.
-#define SYMBOL_FIELDS(V) \
- V(kFlagsOffset, kInt32Size) \
- V(kNameOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(Name::kHeaderSize, SYMBOL_FIELDS)
-#undef SYMBOL_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(Name::kHeaderSize,
+ TORQUE_GENERATED_SYMBOL_FIELDS)
// Flags layout.
#define FLAGS_BIT_FIELDS(V, _) \
@@ -196,7 +191,7 @@ class Symbol : public Name {
DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
- typedef FixedBodyDescriptor<kNameOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kNameOffset, kSize, kSize>;
void SymbolShortPrint(std::ostream& os);
diff --git a/deps/v8/src/objects/object-macros.h b/deps/v8/src/objects/object-macros.h
index 941c68a6a2..da5c157bbc 100644
--- a/deps/v8/src/objects/object-macros.h
+++ b/deps/v8/src/objects/object-macros.h
@@ -290,9 +290,18 @@
GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
} while (false)
+#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ EphemeronHashTable table = EphemeronHashTable::cast(object); \
+ MarkingBarrier(object, (object)->RawField(offset), value); \
+ GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), value); \
+ } while (false)
+
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
do { \
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
MarkingBarrier(object, (object)->RawField(offset), value); \
@@ -304,6 +313,7 @@
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
do { \
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
@@ -312,6 +322,20 @@
} \
} while (false)
+#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode) \
+ do { \
+ DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
+ DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
+ EphemeronHashTable table = EphemeronHashTable::cast(object); \
+ if (mode != SKIP_WRITE_BARRIER) { \
+ if (mode == UPDATE_WRITE_BARRIER) { \
+ MarkingBarrier(object, (object)->RawField(offset), value); \
+ } \
+ GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), \
+ value); \
+ } \
+ } while (false)
+
#define READ_DOUBLE_FIELD(p, offset) ReadDoubleValue(FIELD_ADDR(p, offset))
#define WRITE_DOUBLE_FIELD(p, offset, value) \
@@ -471,8 +495,11 @@
#ifdef VERIFY_HEAP
#define DECL_VERIFIER(Name) void Name##Verify(Isolate* isolate);
+#define EXPORT_DECL_VERIFIER(Name) \
+ V8_EXPORT_PRIVATE void Name##Verify(Isolate* isolate);
#else
#define DECL_VERIFIER(Name)
+#define EXPORT_DECL_VERIFIER(Name)
#endif
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
diff --git a/deps/v8/src/objects/oddball.h b/deps/v8/src/objects/oddball.h
index 8f6adf9743..f608a76a2f 100644
--- a/deps/v8/src/objects/oddball.h
+++ b/deps/v8/src/objects/oddball.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_ODDBALL_H_
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -47,22 +48,11 @@ class Oddball : public HeapObject {
const char* to_string, Handle<Object> to_number,
const char* type_of, byte kind);
- // Layout description.
-#define ODDBALL_FIELDS(V) \
- V(kToNumberRawOffset, kDoubleSize) \
- /* Tagged fields. */ \
- V(kTaggedFieldsStartOffset, 0) \
- V(kToStringOffset, kTaggedSize) \
- V(kToNumberOffset, kTaggedSize) \
- V(kTypeOfOffset, kTaggedSize) \
- V(kTaggedFieldsEndOffset, 0) \
- /* Raw data but still encoded as Smi. */ \
- V(kKindOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ODDBALL_FIELDS)
-#undef ODDBALL_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_ODDBALL_FIELDS)
+ // TODO(v8:8989): [torque] Support marker constants.
+ static const int kTaggedFieldsStartOffset = kToStringOffset;
+ static const int kTaggedFieldsEndOffset = kKindOffset;
static const byte kFalse = 0;
static const byte kTrue = 1;
@@ -78,9 +68,8 @@ class Oddball : public HeapObject {
static const byte kStaleRegister = 10;
static const byte kSelfReferenceMarker = 10;
- typedef FixedBodyDescriptor<kTaggedFieldsStartOffset, kTaggedFieldsEndOffset,
- kSize>
- BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kTaggedFieldsStartOffset,
+ kTaggedFieldsEndOffset, kSize>;
STATIC_ASSERT(kKindOffset == Internals::kOddballKindOffset);
STATIC_ASSERT(kNull == Internals::kNullOddballKind);
diff --git a/deps/v8/src/objects/ordered-hash-table-inl.h b/deps/v8/src/objects/ordered-hash-table-inl.h
index 5e2dd6a9fe..1ab26ca8ab 100644
--- a/deps/v8/src/objects/ordered-hash-table-inl.h
+++ b/deps/v8/src/objects/ordered-hash-table-inl.h
@@ -11,6 +11,7 @@
#include "src/objects-inl.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/fixed-array-inl.h"
+#include "src/objects/js-collection-iterator.h"
#include "src/objects/slots.h"
// Has to be the last include (doesn't have include guards):
diff --git a/deps/v8/src/objects/ordered-hash-table.cc b/deps/v8/src/objects/ordered-hash-table.cc
index 3581b344d4..149a5c5715 100644
--- a/deps/v8/src/objects/ordered-hash-table.cc
+++ b/deps/v8/src/objects/ordered-hash-table.cc
@@ -15,7 +15,7 @@ namespace internal {
template <class Derived, int entrysize>
Handle<Derived> OrderedHashTable<Derived, entrysize>::Allocate(
- Isolate* isolate, int capacity, PretenureFlag pretenure) {
+ Isolate* isolate, int capacity, AllocationType allocation) {
// Capacity must be a power of two, since we depend on being able
// to divide and multiple by 2 (kLoadFactor) to derive capacity
// from number of buckets. If we decide to change kLoadFactor
@@ -28,7 +28,8 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Allocate(
int num_buckets = capacity / kLoadFactor;
Handle<FixedArray> backing_store = isolate->factory()->NewFixedArrayWithMap(
Derived::GetMapRootIndex(),
- HashTableStartIndex() + num_buckets + (capacity * kEntrySize), pretenure);
+ HashTableStartIndex() + num_buckets + (capacity * kEntrySize),
+ allocation);
Handle<Derived> table = Handle<Derived>::cast(backing_store);
for (int i = 0; i < num_buckets; ++i) {
table->set(HashTableStartIndex() + i, Smi::FromInt(kNotFound));
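
For reference, the backing-store length requested by Allocate above follows directly from the comment about kLoadFactor: buckets are capacity / kLoadFactor, and each entry takes kEntrySize slots after the bucket area. A standalone sketch with illustrative constants (the real prefix size comes from HashTableStartIndex()):

constexpr int kLoadFactor = 2;           // capacity / buckets, as noted above
constexpr int kHashTableStartIndex = 3;  // e.g. #elements, #deleted, #buckets

constexpr int BackingStoreLength(int capacity, int entry_size) {
  return kHashTableStartIndex + capacity / kLoadFactor +
         capacity * entry_size;
}

// A map-like table (entry_size == 2) with capacity 16 needs 3 + 8 + 32 slots.
static_assert(BackingStoreLength(16, 2) == 43, "illustrative layout");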
@@ -73,7 +74,8 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Clear(
Handle<Derived> new_table =
Allocate(isolate, kMinCapacity,
- Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);
+ Heap::InYoungGeneration(*table) ? AllocationType::kYoung
+ : AllocationType::kOld);
table->SetNextTable(*new_table);
table->SetNumberOfDeletedElements(kClearedTableSentinel);
@@ -188,9 +190,10 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Rehash(
Isolate* isolate, Handle<Derived> table, int new_capacity) {
DCHECK(!table->IsObsolete());
- Handle<Derived> new_table = Derived::Allocate(
- isolate, new_capacity,
- Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);
+ Handle<Derived> new_table =
+ Derived::Allocate(isolate, new_capacity,
+ Heap::InYoungGeneration(*table) ? AllocationType::kYoung
+ : AllocationType::kOld);
int nof = table->NumberOfElements();
int nod = table->NumberOfDeletedElements();
int new_buckets = new_table->NumberOfBuckets();
@@ -319,8 +322,8 @@ Handle<OrderedHashMap> OrderedHashMap::Add(Isolate* isolate,
}
template <>
-int OrderedHashTable<OrderedNameDictionary, 3>::FindEntry(Isolate* isolate,
- Object key) {
+V8_EXPORT_PRIVATE int OrderedHashTable<OrderedNameDictionary, 3>::FindEntry(
+ Isolate* isolate, Object key) {
DisallowHeapAllocation no_gc;
DCHECK(key->IsUniqueName());
@@ -403,67 +406,67 @@ Handle<OrderedNameDictionary> OrderedNameDictionary::DeleteEntry(
}
Handle<OrderedHashSet> OrderedHashSet::Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
return OrderedHashTable<OrderedHashSet, 1>::Allocate(isolate, capacity,
- pretenure);
+ allocation);
}
Handle<OrderedHashMap> OrderedHashMap::Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
return OrderedHashTable<OrderedHashMap, 2>::Allocate(isolate, capacity,
- pretenure);
+ allocation);
}
Handle<OrderedNameDictionary> OrderedNameDictionary::Allocate(
- Isolate* isolate, int capacity, PretenureFlag pretenure) {
+ Isolate* isolate, int capacity, AllocationType allocation) {
Handle<OrderedNameDictionary> table =
OrderedHashTable<OrderedNameDictionary, 3>::Allocate(isolate, capacity,
- pretenure);
+ allocation);
table->SetHash(PropertyArray::kNoHashSentinel);
return table;
}
-template Handle<OrderedHashSet>
+template V8_EXPORT_PRIVATE Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, 1>::EnsureGrowable(
Isolate* isolate, Handle<OrderedHashSet> table);
-template Handle<OrderedHashSet> OrderedHashTable<OrderedHashSet, 1>::Shrink(
- Isolate* isolate, Handle<OrderedHashSet> table);
+template V8_EXPORT_PRIVATE Handle<OrderedHashSet>
+OrderedHashTable<OrderedHashSet, 1>::Shrink(Isolate* isolate,
+ Handle<OrderedHashSet> table);
-template Handle<OrderedHashSet> OrderedHashTable<OrderedHashSet, 1>::Clear(
- Isolate* isolate, Handle<OrderedHashSet> table);
+template V8_EXPORT_PRIVATE Handle<OrderedHashSet>
+OrderedHashTable<OrderedHashSet, 1>::Clear(Isolate* isolate,
+ Handle<OrderedHashSet> table);
-template bool OrderedHashTable<OrderedHashSet, 1>::HasKey(Isolate* isolate,
- OrderedHashSet table,
- Object key);
+template V8_EXPORT_PRIVATE bool OrderedHashTable<OrderedHashSet, 1>::HasKey(
+ Isolate* isolate, OrderedHashSet table, Object key);
-template bool OrderedHashTable<OrderedHashSet, 1>::Delete(Isolate* isolate,
- OrderedHashSet table,
- Object key);
+template V8_EXPORT_PRIVATE bool OrderedHashTable<OrderedHashSet, 1>::Delete(
+ Isolate* isolate, OrderedHashSet table, Object key);
-template int OrderedHashTable<OrderedHashSet, 1>::FindEntry(Isolate* isolate,
- Object key);
+template V8_EXPORT_PRIVATE int OrderedHashTable<OrderedHashSet, 1>::FindEntry(
+ Isolate* isolate, Object key);
-template Handle<OrderedHashMap>
+template V8_EXPORT_PRIVATE Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, 2>::EnsureGrowable(
Isolate* isolate, Handle<OrderedHashMap> table);
-template Handle<OrderedHashMap> OrderedHashTable<OrderedHashMap, 2>::Shrink(
- Isolate* isolate, Handle<OrderedHashMap> table);
+template V8_EXPORT_PRIVATE Handle<OrderedHashMap>
+OrderedHashTable<OrderedHashMap, 2>::Shrink(Isolate* isolate,
+ Handle<OrderedHashMap> table);
-template Handle<OrderedHashMap> OrderedHashTable<OrderedHashMap, 2>::Clear(
- Isolate* isolate, Handle<OrderedHashMap> table);
+template V8_EXPORT_PRIVATE Handle<OrderedHashMap>
+OrderedHashTable<OrderedHashMap, 2>::Clear(Isolate* isolate,
+ Handle<OrderedHashMap> table);
-template bool OrderedHashTable<OrderedHashMap, 2>::HasKey(Isolate* isolate,
- OrderedHashMap table,
- Object key);
+template V8_EXPORT_PRIVATE bool OrderedHashTable<OrderedHashMap, 2>::HasKey(
+ Isolate* isolate, OrderedHashMap table, Object key);
-template bool OrderedHashTable<OrderedHashMap, 2>::Delete(Isolate* isolate,
- OrderedHashMap table,
- Object key);
+template V8_EXPORT_PRIVATE bool OrderedHashTable<OrderedHashMap, 2>::Delete(
+ Isolate* isolate, OrderedHashMap table, Object key);
-template int OrderedHashTable<OrderedHashMap, 2>::FindEntry(Isolate* isolate,
- Object key);
+template V8_EXPORT_PRIVATE int OrderedHashTable<OrderedHashMap, 2>::FindEntry(
+ Isolate* isolate, Object key);
template Handle<OrderedNameDictionary>
OrderedHashTable<OrderedNameDictionary, 3>::Shrink(
@@ -475,25 +478,24 @@ OrderedHashTable<OrderedNameDictionary, 3>::EnsureGrowable(
template <>
Handle<SmallOrderedHashSet>
-SmallOrderedHashTable<SmallOrderedHashSet>::Allocate(Isolate* isolate,
- int capacity,
- PretenureFlag pretenure) {
- return isolate->factory()->NewSmallOrderedHashSet(capacity, pretenure);
+SmallOrderedHashTable<SmallOrderedHashSet>::Allocate(
+ Isolate* isolate, int capacity, AllocationType allocation) {
+ return isolate->factory()->NewSmallOrderedHashSet(capacity, allocation);
}
template <>
Handle<SmallOrderedHashMap>
-SmallOrderedHashTable<SmallOrderedHashMap>::Allocate(Isolate* isolate,
- int capacity,
- PretenureFlag pretenure) {
- return isolate->factory()->NewSmallOrderedHashMap(capacity, pretenure);
+SmallOrderedHashTable<SmallOrderedHashMap>::Allocate(
+ Isolate* isolate, int capacity, AllocationType allocation) {
+ return isolate->factory()->NewSmallOrderedHashMap(capacity, allocation);
}
template <>
Handle<SmallOrderedNameDictionary>
SmallOrderedHashTable<SmallOrderedNameDictionary>::Allocate(
- Isolate* isolate, int capacity, PretenureFlag pretenure) {
- return isolate->factory()->NewSmallOrderedNameDictionary(capacity, pretenure);
+ Isolate* isolate, int capacity, AllocationType allocation) {
+ return isolate->factory()->NewSmallOrderedNameDictionary(capacity,
+ allocation);
}
template <class Derived>
@@ -572,6 +574,16 @@ MaybeHandle<SmallOrderedHashSet> SmallOrderedHashSet::Add(
return table;
}
+bool SmallOrderedHashSet::Delete(Isolate* isolate, SmallOrderedHashSet table,
+ Object key) {
+ return SmallOrderedHashTable<SmallOrderedHashSet>::Delete(isolate, table,
+ key);
+}
+
+bool SmallOrderedHashSet::HasKey(Isolate* isolate, Handle<Object> key) {
+ return SmallOrderedHashTable<SmallOrderedHashSet>::HasKey(isolate, key);
+}
+
MaybeHandle<SmallOrderedHashMap> SmallOrderedHashMap::Add(
Isolate* isolate, Handle<SmallOrderedHashMap> table, Handle<Object> key,
Handle<Object> value) {
@@ -606,9 +618,20 @@ MaybeHandle<SmallOrderedHashMap> SmallOrderedHashMap::Add(
return table;
}
+bool SmallOrderedHashMap::Delete(Isolate* isolate, SmallOrderedHashMap table,
+ Object key) {
+ return SmallOrderedHashTable<SmallOrderedHashMap>::Delete(isolate, table,
+ key);
+}
+
+bool SmallOrderedHashMap::HasKey(Isolate* isolate, Handle<Object> key) {
+ return SmallOrderedHashTable<SmallOrderedHashMap>::HasKey(isolate, key);
+}
+
template <>
-int SmallOrderedHashTable<SmallOrderedNameDictionary>::FindEntry(
- Isolate* isolate, Object key) {
+int V8_EXPORT_PRIVATE
+SmallOrderedHashTable<SmallOrderedNameDictionary>::FindEntry(Isolate* isolate,
+ Object key) {
DisallowHeapAllocation no_gc;
DCHECK(key->IsUniqueName());
Name raw_key = Name::cast(key);
@@ -732,7 +755,8 @@ Handle<Derived> SmallOrderedHashTable<Derived>::Rehash(Isolate* isolate,
Handle<Derived> new_table = SmallOrderedHashTable<Derived>::Allocate(
isolate, new_capacity,
- Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);
+ Heap::InYoungGeneration(*table) ? AllocationType::kYoung
+ : AllocationType::kOld);
int nof = table->NumberOfElements();
int nod = table->NumberOfDeletedElements();
int new_entry = 0;
@@ -838,42 +862,50 @@ int SmallOrderedHashTable<Derived>::FindEntry(Isolate* isolate, Object key) {
return kNotFound;
}
-template bool SmallOrderedHashTable<SmallOrderedHashSet>::HasKey(
- Isolate* isolate, Handle<Object> key);
-template Handle<SmallOrderedHashSet>
+template bool EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
+ SmallOrderedHashTable<SmallOrderedHashSet>::HasKey(Isolate* isolate,
+ Handle<Object> key);
+template V8_EXPORT_PRIVATE Handle<SmallOrderedHashSet>
SmallOrderedHashTable<SmallOrderedHashSet>::Rehash(
Isolate* isolate, Handle<SmallOrderedHashSet> table, int new_capacity);
-template Handle<SmallOrderedHashSet>
+template V8_EXPORT_PRIVATE Handle<SmallOrderedHashSet>
SmallOrderedHashTable<SmallOrderedHashSet>::Shrink(
Isolate* isolate, Handle<SmallOrderedHashSet> table);
-template MaybeHandle<SmallOrderedHashSet>
+template V8_EXPORT_PRIVATE MaybeHandle<SmallOrderedHashSet>
SmallOrderedHashTable<SmallOrderedHashSet>::Grow(
Isolate* isolate, Handle<SmallOrderedHashSet> table);
-template void SmallOrderedHashTable<SmallOrderedHashSet>::Initialize(
- Isolate* isolate, int capacity);
-
-template bool SmallOrderedHashTable<SmallOrderedHashMap>::HasKey(
- Isolate* isolate, Handle<Object> key);
-template Handle<SmallOrderedHashMap>
+template V8_EXPORT_PRIVATE void
+SmallOrderedHashTable<SmallOrderedHashSet>::Initialize(Isolate* isolate,
+ int capacity);
+template V8_EXPORT_PRIVATE bool
+SmallOrderedHashTable<SmallOrderedHashSet>::Delete(Isolate* isolate,
+ SmallOrderedHashSet table,
+ Object key);
+
+template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) bool SmallOrderedHashTable<
+ SmallOrderedHashMap>::HasKey(Isolate* isolate, Handle<Object> key);
+template V8_EXPORT_PRIVATE Handle<SmallOrderedHashMap>
SmallOrderedHashTable<SmallOrderedHashMap>::Rehash(
Isolate* isolate, Handle<SmallOrderedHashMap> table, int new_capacity);
-template Handle<SmallOrderedHashMap>
+template V8_EXPORT_PRIVATE Handle<SmallOrderedHashMap>
SmallOrderedHashTable<SmallOrderedHashMap>::Shrink(
Isolate* isolate, Handle<SmallOrderedHashMap> table);
-template MaybeHandle<SmallOrderedHashMap>
+template V8_EXPORT_PRIVATE MaybeHandle<SmallOrderedHashMap>
SmallOrderedHashTable<SmallOrderedHashMap>::Grow(
Isolate* isolate, Handle<SmallOrderedHashMap> table);
-template void SmallOrderedHashTable<SmallOrderedHashMap>::Initialize(
- Isolate* isolate, int capacity);
-
-template bool SmallOrderedHashTable<SmallOrderedHashMap>::Delete(
- Isolate* isolate, SmallOrderedHashMap table, Object key);
-template bool SmallOrderedHashTable<SmallOrderedHashSet>::Delete(
- Isolate* isolate, SmallOrderedHashSet table, Object key);
-
-template void SmallOrderedHashTable<SmallOrderedNameDictionary>::Initialize(
- Isolate* isolate, int capacity);
-template Handle<SmallOrderedNameDictionary>
+template V8_EXPORT_PRIVATE void
+SmallOrderedHashTable<SmallOrderedHashMap>::Initialize(Isolate* isolate,
+ int capacity);
+
+template V8_EXPORT_PRIVATE bool
+SmallOrderedHashTable<SmallOrderedHashMap>::Delete(Isolate* isolate,
+ SmallOrderedHashMap table,
+ Object key);
+
+template V8_EXPORT_PRIVATE void
+SmallOrderedHashTable<SmallOrderedNameDictionary>::Initialize(Isolate* isolate,
+ int capacity);
+template V8_EXPORT_PRIVATE Handle<SmallOrderedNameDictionary>
SmallOrderedHashTable<SmallOrderedNameDictionary>::Shrink(
Isolate* isolate, Handle<SmallOrderedNameDictionary> table);
@@ -887,13 +919,13 @@ Handle<HeapObject> OrderedHashTableHandler<SmallTable, LargeTable>::Allocate(
return LargeTable::Allocate(isolate, capacity);
}
-template Handle<HeapObject>
+template V8_EXPORT_PRIVATE Handle<HeapObject>
OrderedHashTableHandler<SmallOrderedHashSet, OrderedHashSet>::Allocate(
Isolate* isolate, int capacity);
-template Handle<HeapObject>
+template V8_EXPORT_PRIVATE Handle<HeapObject>
OrderedHashTableHandler<SmallOrderedHashMap, OrderedHashMap>::Allocate(
Isolate* isolate, int capacity);
-template Handle<HeapObject>
+template V8_EXPORT_PRIVATE Handle<HeapObject>
OrderedHashTableHandler<SmallOrderedNameDictionary,
OrderedNameDictionary>::Allocate(Isolate* isolate,
int capacity);
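
The hunks above migrate the ordered hash tables from the old PretenureFlag (NOT_TENURED/TENURED) to the AllocationType enum (kYoung/kOld), threading the value through Allocate(), Clear() and Rehash() so a replacement table stays in the same generation as the table it replaces (via Heap::InYoungGeneration). A minimal standalone sketch of that calling pattern follows; Table, Allocate and Rehash here are placeholders, not V8's real heap or factory API.

    #include <cstdio>
    #include <memory>
    #include <vector>

    // Placeholder stand-ins for the real V8 types; only the calling
    // convention mirrors the patch above.
    enum class AllocationType { kYoung, kOld };

    struct Table {
      AllocationType allocation;
      std::vector<int> slots;
    };

    // Allocate() now takes an AllocationType instead of a PretenureFlag.
    std::unique_ptr<Table> Allocate(int capacity, AllocationType allocation) {
      auto table = std::make_unique<Table>();
      table->allocation = allocation;
      table->slots.assign(capacity, -1);  // -1 plays the role of kNotFound
      return table;
    }

    bool InYoungGeneration(const Table& t) {
      return t.allocation == AllocationType::kYoung;
    }

    // Rehash() keeps the new table in the generation of the old one, matching
    // the `InYoungGeneration(*table) ? kYoung : kOld` pattern in the diff.
    std::unique_ptr<Table> Rehash(const Table& old_table, int new_capacity) {
      return Allocate(new_capacity, InYoungGeneration(old_table)
                                        ? AllocationType::kYoung
                                        : AllocationType::kOld);
    }

    int main() {
      auto young = Allocate(4, AllocationType::kYoung);
      auto grown = Rehash(*young, 8);
      std::printf("still young: %d\n", InYoungGeneration(*grown) ? 1 : 0);
    }
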
diff --git a/deps/v8/src/objects/ordered-hash-table.h b/deps/v8/src/objects/ordered-hash-table.h
index 48300fb7ba..6afbb6b662 100644
--- a/deps/v8/src/objects/ordered-hash-table.h
+++ b/deps/v8/src/objects/ordered-hash-table.h
@@ -5,6 +5,7 @@
#ifndef V8_OBJECTS_ORDERED_HASH_TABLE_H_
#define V8_OBJECTS_ORDERED_HASH_TABLE_H_
+#include "src/base/export-template.h"
#include "src/globals.h"
#include "src/objects/fixed-array.h"
#include "src/objects/js-objects.h"
@@ -196,8 +197,9 @@ class OrderedHashTable : public FixedArray {
protected:
// Returns an OrderedHashTable with a capacity of at least |capacity|.
- static Handle<Derived> Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure = NOT_TENURED);
+ static Handle<Derived> Allocate(
+ Isolate* isolate, int capacity,
+ AllocationType allocation = AllocationType::kYoung);
static Handle<Derived> Rehash(Isolate* isolate, Handle<Derived> table,
int new_capacity);
@@ -228,7 +230,8 @@ class OrderedHashTable : public FixedArray {
friend class OrderedNameDictionaryHandler;
};
-class OrderedHashSet : public OrderedHashTable<OrderedHashSet, 1> {
+class V8_EXPORT_PRIVATE OrderedHashSet
+ : public OrderedHashTable<OrderedHashSet, 1> {
public:
DECL_CAST(OrderedHashSet)
@@ -241,8 +244,9 @@ class OrderedHashSet : public OrderedHashTable<OrderedHashSet, 1> {
static Handle<OrderedHashSet> Rehash(Isolate* isolate,
Handle<OrderedHashSet> table,
int new_capacity);
- static Handle<OrderedHashSet> Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure = NOT_TENURED);
+ static Handle<OrderedHashSet> Allocate(
+ Isolate* isolate, int capacity,
+ AllocationType allocation = AllocationType::kYoung);
static HeapObject GetEmpty(ReadOnlyRoots ro_roots);
static inline RootIndex GetMapRootIndex();
static inline bool Is(Handle<HeapObject> table);
@@ -251,7 +255,8 @@ class OrderedHashSet : public OrderedHashTable<OrderedHashSet, 1> {
OBJECT_CONSTRUCTORS(OrderedHashSet, OrderedHashTable<OrderedHashSet, 1>);
};
-class OrderedHashMap : public OrderedHashTable<OrderedHashMap, 2> {
+class V8_EXPORT_PRIVATE OrderedHashMap
+ : public OrderedHashTable<OrderedHashMap, 2> {
public:
DECL_CAST(OrderedHashMap)
@@ -261,8 +266,9 @@ class OrderedHashMap : public OrderedHashTable<OrderedHashMap, 2> {
Handle<OrderedHashMap> table,
Handle<Object> key, Handle<Object> value);
- static Handle<OrderedHashMap> Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure = NOT_TENURED);
+ static Handle<OrderedHashMap> Allocate(
+ Isolate* isolate, int capacity,
+ AllocationType allocation = AllocationType::kYoung);
static Handle<OrderedHashMap> Rehash(Isolate* isolate,
Handle<OrderedHashMap> table,
int new_capacity);
@@ -333,16 +339,17 @@ template <class Derived>
class SmallOrderedHashTable : public HeapObject {
public:
// Offset points to a relative location in the table
- typedef int Offset;
+ using Offset = int;
// ByteIndex points to an index in the table that needs to be
// converted to an Offset.
- typedef int ByteIndex;
+ using ByteIndex = int;
void Initialize(Isolate* isolate, int capacity);
- static Handle<Derived> Allocate(Isolate* isolate, int capacity,
- PretenureFlag pretenure = NOT_TENURED);
+ static Handle<Derived> Allocate(
+ Isolate* isolate, int capacity,
+ AllocationType allocation = AllocationType::kYoung);
// Returns true if the OrderedHashTable contains the key
bool HasKey(Isolate* isolate, Handle<Object> key);
@@ -570,7 +577,7 @@ class SmallOrderedHashSet : public SmallOrderedHashTable<SmallOrderedHashSet> {
DECL_CAST(SmallOrderedHashSet)
DECL_PRINTER(SmallOrderedHashSet)
- DECL_VERIFIER(SmallOrderedHashSet)
+ EXPORT_DECL_VERIFIER(SmallOrderedHashSet)
static const int kKeyIndex = 0;
static const int kEntrySize = 1;
@@ -579,9 +586,12 @@ class SmallOrderedHashSet : public SmallOrderedHashTable<SmallOrderedHashSet> {
// Adds |value| to |table|. If the capacity isn't enough, a new
// table is created. The original |table| is returned if there is
// capacity to store |value|; otherwise the new table is returned.
- static MaybeHandle<SmallOrderedHashSet> Add(Isolate* isolate,
- Handle<SmallOrderedHashSet> table,
- Handle<Object> key);
+ V8_EXPORT_PRIVATE static MaybeHandle<SmallOrderedHashSet> Add(
+ Isolate* isolate, Handle<SmallOrderedHashSet> table, Handle<Object> key);
+ V8_EXPORT_PRIVATE static bool Delete(Isolate* isolate,
+ SmallOrderedHashSet table, Object key);
+ V8_EXPORT_PRIVATE bool HasKey(Isolate* isolate, Handle<Object> key);
+
static inline bool Is(Handle<HeapObject> table);
static inline RootIndex GetMapRootIndex();
static Handle<SmallOrderedHashSet> Rehash(Isolate* isolate,
@@ -591,12 +601,15 @@ class SmallOrderedHashSet : public SmallOrderedHashTable<SmallOrderedHashSet> {
SmallOrderedHashTable<SmallOrderedHashSet>);
};
+STATIC_ASSERT(kSmallOrderedHashSetMinCapacity ==
+ SmallOrderedHashSet::kMinCapacity);
+
class SmallOrderedHashMap : public SmallOrderedHashTable<SmallOrderedHashMap> {
public:
DECL_CAST(SmallOrderedHashMap)
DECL_PRINTER(SmallOrderedHashMap)
- DECL_VERIFIER(SmallOrderedHashMap)
+ EXPORT_DECL_VERIFIER(SmallOrderedHashMap)
static const int kKeyIndex = 0;
static const int kValueIndex = 1;
@@ -606,10 +619,12 @@ class SmallOrderedHashMap : public SmallOrderedHashTable<SmallOrderedHashMap> {
// Adds |value| to |table|. If the capacity isn't enough, a new
// table is created. The original |table| is returned if there is
// capacity to store |value|; otherwise the new table is returned.
- static MaybeHandle<SmallOrderedHashMap> Add(Isolate* isolate,
- Handle<SmallOrderedHashMap> table,
- Handle<Object> key,
- Handle<Object> value);
+ V8_EXPORT_PRIVATE static MaybeHandle<SmallOrderedHashMap> Add(
+ Isolate* isolate, Handle<SmallOrderedHashMap> table, Handle<Object> key,
+ Handle<Object> value);
+ V8_EXPORT_PRIVATE static bool Delete(Isolate* isolate,
+ SmallOrderedHashMap table, Object key);
+ V8_EXPORT_PRIVATE bool HasKey(Isolate* isolate, Handle<Object> key);
static inline bool Is(Handle<HeapObject> table);
static inline RootIndex GetMapRootIndex();
@@ -621,14 +636,17 @@ class SmallOrderedHashMap : public SmallOrderedHashTable<SmallOrderedHashMap> {
SmallOrderedHashTable<SmallOrderedHashMap>);
};
+STATIC_ASSERT(kSmallOrderedHashMapMinCapacity ==
+ SmallOrderedHashMap::kMinCapacity);
+
// TODO(gsathya): Rename this to OrderedHashTable, after we rename
// OrderedHashTable to LargeOrderedHashTable. Also set up an
// OrderedHashSetBase class as a base class for the two tables and use
// that instead of a HeapObject here.
template <class SmallTable, class LargeTable>
-class OrderedHashTableHandler {
+class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) OrderedHashTableHandler {
public:
- typedef int Entry;
+ using Entry = int;
static Handle<HeapObject> Allocate(Isolate* isolate, int capacity);
static bool Delete(Handle<HeapObject> table, Handle<Object> key);
@@ -640,7 +658,10 @@ class OrderedHashTableHandler {
SmallOrderedHashTable<SmallTable>::kGrowthHack << 1;
};
-class OrderedHashMapHandler
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ OrderedHashTableHandler<SmallOrderedHashMap, OrderedHashMap>;
+
+class V8_EXPORT_PRIVATE OrderedHashMapHandler
: public OrderedHashTableHandler<SmallOrderedHashMap, OrderedHashMap> {
public:
static Handle<HeapObject> Add(Isolate* isolate, Handle<HeapObject> table,
@@ -649,7 +670,10 @@ class OrderedHashMapHandler
Isolate* isolate, Handle<SmallOrderedHashMap> table);
};
-class OrderedHashSetHandler
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ OrderedHashTableHandler<SmallOrderedHashSet, OrderedHashSet>;
+
+class V8_EXPORT_PRIVATE OrderedHashSetHandler
: public OrderedHashTableHandler<SmallOrderedHashSet, OrderedHashSet> {
public:
static Handle<HeapObject> Add(Isolate* isolate, Handle<HeapObject> table,
@@ -663,20 +687,19 @@ class OrderedNameDictionary
public:
DECL_CAST(OrderedNameDictionary)
- static Handle<OrderedNameDictionary> Add(Isolate* isolate,
- Handle<OrderedNameDictionary> table,
- Handle<Name> key,
- Handle<Object> value,
- PropertyDetails details);
+ V8_EXPORT_PRIVATE static Handle<OrderedNameDictionary> Add(
+ Isolate* isolate, Handle<OrderedNameDictionary> table, Handle<Name> key,
+ Handle<Object> value, PropertyDetails details);
- void SetEntry(Isolate* isolate, int entry, Object key, Object value,
- PropertyDetails details);
+ V8_EXPORT_PRIVATE void SetEntry(Isolate* isolate, int entry, Object key,
+ Object value, PropertyDetails details);
- static Handle<OrderedNameDictionary> DeleteEntry(
+ V8_EXPORT_PRIVATE static Handle<OrderedNameDictionary> DeleteEntry(
Isolate* isolate, Handle<OrderedNameDictionary> table, int entry);
static Handle<OrderedNameDictionary> Allocate(
- Isolate* isolate, int capacity, PretenureFlag pretenure = NOT_TENURED);
+ Isolate* isolate, int capacity,
+ AllocationType allocation = AllocationType::kYoung);
static Handle<OrderedNameDictionary> Rehash(
Isolate* isolate, Handle<OrderedNameDictionary> table, int new_capacity);
@@ -707,7 +730,10 @@ class OrderedNameDictionary
OrderedHashTable<OrderedNameDictionary, 3>);
};
-class OrderedNameDictionaryHandler
+extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+ OrderedHashTableHandler<SmallOrderedNameDictionary, OrderedNameDictionary>;
+
+class V8_EXPORT_PRIVATE OrderedNameDictionaryHandler
: public OrderedHashTableHandler<SmallOrderedNameDictionary,
OrderedNameDictionary> {
public:
@@ -764,7 +790,7 @@ class SmallOrderedNameDictionary
Isolate* isolate, Handle<SmallOrderedNameDictionary> table,
int new_capacity);
- static Handle<SmallOrderedNameDictionary> DeleteEntry(
+ V8_EXPORT_PRIVATE static Handle<SmallOrderedNameDictionary> DeleteEntry(
Isolate* isolate, Handle<SmallOrderedNameDictionary> table, int entry);
// Set the value for entry.
@@ -788,12 +814,12 @@ class SmallOrderedNameDictionary
// Adds |value| to |table|. If the capacity isn't enough, a new
// table is created. The original |table| is returned if there is
// capacity to store |value|; otherwise the new table is returned.
- static MaybeHandle<SmallOrderedNameDictionary> Add(
+ V8_EXPORT_PRIVATE static MaybeHandle<SmallOrderedNameDictionary> Add(
Isolate* isolate, Handle<SmallOrderedNameDictionary> table,
Handle<Name> key, Handle<Object> value, PropertyDetails details);
- void SetEntry(Isolate* isolate, int entry, Object key, Object value,
- PropertyDetails details);
+ V8_EXPORT_PRIVATE void SetEntry(Isolate* isolate, int entry, Object key,
+ Object value, PropertyDetails details);
static inline RootIndex GetMapRootIndex();
@@ -801,64 +827,6 @@ class SmallOrderedNameDictionary
SmallOrderedHashTable<SmallOrderedNameDictionary>);
};
-class JSCollectionIterator : public JSObject {
- public:
- // [table]: the backing hash table mapping keys to values.
- DECL_ACCESSORS(table, Object)
-
- // [index]: The index into the data table.
- DECL_ACCESSORS(index, Object)
-
- void JSCollectionIteratorPrint(std::ostream& os, const char* name);
-
-// Layout description.
-#define JS_COLLECTION_ITERATOR_FIELDS(V) \
- V(kTableOffset, kTaggedSize) \
- V(kIndexOffset, kTaggedSize) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- JS_COLLECTION_ITERATOR_FIELDS)
-#undef JS_COLLECTION_ITERATOR_FIELDS
-
- OBJECT_CONSTRUCTORS(JSCollectionIterator, JSObject);
-};
-
-// OrderedHashTableIterator is an iterator that iterates over the keys and
-// values of an OrderedHashTable.
-//
-// The iterator has a reference to the underlying OrderedHashTable data,
-// [table], as well as the current [index] the iterator is at.
-//
-// When the OrderedHashTable is rehashed it adds a reference from the old table
-// to the new table as well as storing enough data about the changes so that the
-// iterator [index] can be adjusted accordingly.
-//
-// When the [Next] result from the iterator is requested, the iterator checks if
-// there is a newer table that it needs to transition to.
-template <class Derived, class TableType>
-class OrderedHashTableIterator : public JSCollectionIterator {
- public:
- // Whether the iterator has more elements. This needs to be called before
- // calling |CurrentKey| and/or |CurrentValue|.
- bool HasMore();
-
- // Move the index forward one.
- void MoveNext() { set_index(Smi::FromInt(Smi::ToInt(index()) + 1)); }
-
- // Returns the current key of the iterator. This should only be called when
- // |HasMore| returns true.
- inline Object CurrentKey();
-
- private:
- // Transitions the iterator to the non obsolete backing store. This is a NOP
- // if the [table] is not obsolete.
- void Transition();
-
- OBJECT_CONSTRUCTORS(OrderedHashTableIterator, JSCollectionIterator);
-};
-
} // namespace internal
} // namespace v8
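
Most of the changes to ordered-hash-table.{h,cc} above add V8_EXPORT_PRIVATE to classes and to explicit template instantiations, and wrap the OrderedHashTableHandler declarations in EXPORT_TEMPLATE_DECLARE paired with extern template declarations, so the instantiated symbols are exported from the V8 library (V8_EXPORT_PRIVATE is V8's internal export annotation, typically so tests and other components can link against them). The macros themselves come from the newly included src/base/export-template.h and exist mainly to place the export attribute correctly per compiler; the sketch below shows only the underlying extern-template / explicit-instantiation pattern, with an empty stand-in for the attribute.

    #include <cstdio>

    // Stand-in for V8_EXPORT_PRIVATE; a real build would map this to a
    // compiler-specific visibility/dllexport attribute.
    #define MY_EXPORT

    template <class Derived, int kEntrySize>
    class HashTableBase {
     public:
      int EntrySize() const { return kEntrySize; }
    };

    class SetLike : public HashTableBase<SetLike, 1> {};
    class MapLike : public HashTableBase<MapLike, 2> {};

    // Header side: an explicit instantiation *declaration* promises that the
    // instantiation exists in some translation unit, so includers neither
    // re-instantiate nor inline it.
    extern template class MY_EXPORT HashTableBase<SetLike, 1>;
    extern template class MY_EXPORT HashTableBase<MapLike, 2>;

    // .cc side (inlined here to keep the sketch in one file): the explicit
    // instantiation *definition* that actually emits, and exports, the code.
    template class HashTableBase<SetLike, 1>;
    template class HashTableBase<MapLike, 2>;

    int main() {
      SetLike s;
      MapLike m;
      std::printf("%d %d\n", s.EntrySize(), m.EntrySize());  // 1 2
    }
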
diff --git a/deps/v8/src/objects/promise.h b/deps/v8/src/objects/promise.h
index 122ee7157f..0b15546d03 100644
--- a/deps/v8/src/objects/promise.h
+++ b/deps/v8/src/objects/promise.h
@@ -33,18 +33,8 @@ class PromiseReactionJobTask : public Microtask {
// a PromiseCapability (general case), or undefined (in case of await).
DECL_ACCESSORS(promise_or_capability, HeapObject)
-// Layout description.
-#define PROMISE_REACTION_JOB_FIELDS(V) \
- V(kArgumentOffset, kTaggedSize) \
- V(kContextOffset, kTaggedSize) \
- V(kHandlerOffset, kTaggedSize) \
- V(kPromiseOrCapabilityOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(Microtask::kHeaderSize,
- PROMISE_REACTION_JOB_FIELDS)
-#undef PROMISE_REACTION_JOB_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ Microtask::kHeaderSize, TORQUE_GENERATED_PROMISE_REACTION_JOB_TASK_FIELDS)
// Dispatched behavior.
DECL_CAST(PromiseReactionJobTask)
@@ -83,18 +73,9 @@ class PromiseResolveThenableJobTask : public Microtask {
DECL_ACCESSORS(then, JSReceiver)
DECL_ACCESSORS(thenable, JSReceiver)
-// Layout description.
-#define PROMISE_RESOLVE_THENABLE_JOB_FIELDS(V) \
- V(kContextOffset, kTaggedSize) \
- V(kPromiseToResolveOffset, kTaggedSize) \
- V(kThenOffset, kTaggedSize) \
- V(kThenableOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(Microtask::kHeaderSize,
- PROMISE_RESOLVE_THENABLE_JOB_FIELDS)
-#undef PROMISE_RESOLVE_THENABLE_JOB_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ Microtask::kHeaderSize,
+ TORQUE_GENERATED_PROMISE_RESOLVE_THENABLE_JOB_TASK_FIELDS)
// Dispatched behavior.
DECL_CAST(PromiseResolveThenableJobTask)
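
The promise.h change above, and several header changes below (property-array.h, property-cell.h, prototype-info.h, regexp-match-info.h, script.h, shared-function-info.h), follow the same pattern: the hand-written layout X-macro lists are deleted and DEFINE_FIELD_OFFSET_CONSTANTS is pointed at a TORQUE_GENERATED_*_FIELDS list emitted by the Torque compiler instead. Either way, the macro expands a list of (name, size) pairs into consecutive offset constants. Below is a simplified, self-contained sketch of that expansion; the helper macro and field names are illustrative stand-ins, not the real object-macros.h machinery or the generated lists.

    #include <cstdio>

    constexpr int kTaggedSize = 8;
    constexpr int kHeaderSize = 8;  // pretend parent-class header size

    // Simplified stand-in for the expansion helper: each (Name, Size) entry
    // becomes an enumerator starting right after the previous field's end.
    #define DEFINE_ONE_FIELD_OFFSET(Name, Size) \
      Name, Name##End = Name + (Size) - 1,

    // Illustrative field list in the same V(kFooOffset, size) shape as the
    // deleted PROMISE_REACTION_JOB_FIELDS list.
    #define EXAMPLE_TASK_FIELDS(V)    \
      V(kArgumentOffset, kTaggedSize) \
      V(kContextOffset, kTaggedSize)  \
      V(kHandlerOffset, kTaggedSize)  \
      /* Total size. */               \
      V(kSize, 0)

    enum {
      kExampleStartOffset = kHeaderSize - 1,
      EXAMPLE_TASK_FIELDS(DEFINE_ONE_FIELD_OFFSET)
    };

    int main() {
      std::printf("%d %d %d %d\n", kArgumentOffset, kContextOffset,
                  kHandlerOffset, kSize);  // 8 16 24 32
    }
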
diff --git a/deps/v8/src/objects/property-array.h b/deps/v8/src/objects/property-array.h
index c1ac27fb72..1112de4ae6 100644
--- a/deps/v8/src/objects/property-array.h
+++ b/deps/v8/src/objects/property-array.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_PROPERTY_ARRAY_H_
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -47,17 +48,12 @@ class PropertyArray : public HeapObject {
DECL_PRINTER(PropertyArray)
DECL_VERIFIER(PropertyArray)
-// Layout description.
-#define PROPERTY_ARRAY_FIELDS(V) \
- V(kLengthAndHashOffset, kTaggedSize) \
- /* Header size. */ \
- V(kHeaderSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, PROPERTY_ARRAY_FIELDS)
-#undef PROPERTY_ARRAY_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_PROPERTY_ARRAY_FIELDS)
+ static const int kHeaderSize = kSize;
// Garbage collection support.
- typedef FlexibleBodyDescriptor<kHeaderSize> BodyDescriptor;
+ using BodyDescriptor = FlexibleBodyDescriptor<kHeaderSize>;
static const int kLengthFieldSize = 10;
class LengthField : public BitField<int, 0, kLengthFieldSize> {};
diff --git a/deps/v8/src/objects/property-cell-inl.h b/deps/v8/src/objects/property-cell-inl.h
index d6600234ad..581708951d 100644
--- a/deps/v8/src/objects/property-cell-inl.h
+++ b/deps/v8/src/objects/property-cell-inl.h
@@ -22,7 +22,7 @@ CAST_ACCESSOR(PropertyCell)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, name, Name, kNameOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)
-ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
+ACCESSORS(PropertyCell, property_details_raw, Smi, kPropertyDetailsRawOffset)
PropertyDetails PropertyCell::property_details() const {
return PropertyDetails(Smi::cast(property_details_raw()));
diff --git a/deps/v8/src/objects/property-cell.h b/deps/v8/src/objects/property-cell.h
index c71a3e787e..7bdcfb8e49 100644
--- a/deps/v8/src/objects/property-cell.h
+++ b/deps/v8/src/objects/property-cell.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_PROPERTY_CELL_H_
#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -18,7 +19,7 @@ class PropertyCell : public HeapObject {
// [name]: the name of the global property.
DECL_ACCESSORS(name, Name)
// [property_details]: details of the global property.
- DECL_ACCESSORS(property_details_raw, Object)
+ DECL_ACCESSORS(property_details_raw, Smi)
// [value]: value of the global property.
DECL_ACCESSORS(value, Object)
// [dependent_code]: dependent code that depends on the type of the global
@@ -56,19 +57,10 @@ class PropertyCell : public HeapObject {
DECL_PRINTER(PropertyCell)
DECL_VERIFIER(PropertyCell)
-// Layout description.
-#define PROPERTY_CELL_FIELDS(V) \
- V(kDetailsOffset, kTaggedSize) \
- V(kNameOffset, kTaggedSize) \
- V(kValueOffset, kTaggedSize) \
- V(kDependentCodeOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_PROPERTY_CELL_FIELDS)
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, PROPERTY_CELL_FIELDS)
-#undef PROPERTY_CELL_FIELDS
-
- typedef FixedBodyDescriptor<kNameOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kNameOffset, kSize, kSize>;
OBJECT_CONSTRUCTORS(PropertyCell, HeapObject);
};
diff --git a/deps/v8/src/objects/prototype-info-inl.h b/deps/v8/src/objects/prototype-info-inl.h
index ddcb50fe90..56104ba4c1 100644
--- a/deps/v8/src/objects/prototype-info-inl.h
+++ b/deps/v8/src/objects/prototype-info-inl.h
@@ -39,7 +39,7 @@ bool PrototypeInfo::HasObjectCreateMap() {
return cache->IsWeak();
}
-ACCESSORS(PrototypeInfo, module_namespace, Object, kJSModuleNamespaceOffset)
+ACCESSORS(PrototypeInfo, module_namespace, Object, kJsModuleNamespaceOffset)
ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
WEAK_ACCESSORS(PrototypeInfo, object_create_map, kObjectCreateMapOffset)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
diff --git a/deps/v8/src/objects/prototype-info.h b/deps/v8/src/objects/prototype-info.h
index 5f0be22613..4e6ba68cde 100644
--- a/deps/v8/src/objects/prototype-info.h
+++ b/deps/v8/src/objects/prototype-info.h
@@ -53,19 +53,8 @@ class PrototypeInfo : public Struct {
DECL_PRINTER(PrototypeInfo)
DECL_VERIFIER(PrototypeInfo)
-// Layout description.
-#define PROTOTYPE_INFO_FIELDS(V) \
- V(kJSModuleNamespaceOffset, kTaggedSize) \
- V(kPrototypeUsersOffset, kTaggedSize) \
- V(kRegistrySlotOffset, kTaggedSize) \
- V(kValidityCellOffset, kTaggedSize) \
- V(kObjectCreateMapOffset, kTaggedSize) \
- V(kBitFieldOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, PROTOTYPE_INFO_FIELDS)
-#undef PROTOTYPE_INFO_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_PROTOTYPE_INFO_FIELDS)
// Bit field usage.
static const int kShouldBeFastBit = 0;
@@ -80,7 +69,7 @@ class PrototypeInfo : public Struct {
// A growing array with an additional API for marking slots "empty". When adding
// new elements, we reuse the empty slots instead of growing the array.
-class PrototypeUsers : public WeakArrayList {
+class V8_EXPORT_PRIVATE PrototypeUsers : public WeakArrayList {
public:
static Handle<WeakArrayList> Add(Isolate* isolate,
Handle<WeakArrayList> array,
@@ -91,11 +80,11 @@ class PrototypeUsers : public WeakArrayList {
// The callback is called when a weak pointer to HeapObject "object" is moved
// from index "from_index" to index "to_index" during compaction. The callback
// must not cause GC.
- typedef void (*CompactionCallback)(HeapObject object, int from_index,
- int to_index);
- static WeakArrayList Compact(Handle<WeakArrayList> array, Heap* heap,
- CompactionCallback callback,
- PretenureFlag pretenure = NOT_TENURED);
+ using CompactionCallback = void (*)(HeapObject object, int from_index,
+ int to_index);
+ static WeakArrayList Compact(
+ Handle<WeakArrayList> array, Heap* heap, CompactionCallback callback,
+ AllocationType allocation = AllocationType::kYoung);
#ifdef VERIFY_HEAP
static void Verify(WeakArrayList array);
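
Two small modernizations show up in PrototypeUsers above and recur through the rest of the patch: typedef declarations become using-aliases (clearer for function-pointer types such as CompactionCallback), and Compact() switches from PretenureFlag to AllocationType. A short sketch of the alias change, with placeholder names and a hypothetical Compact():

    #include <cstdio>
    #include <type_traits>

    // typedef form: the alias name is buried inside the declarator.
    typedef void (*OldStyleCallback)(int object, int from_index, int to_index);

    // using form (as in the patch): alias name first, full type after '='.
    using CompactionCallback = void (*)(int object, int from_index,
                                        int to_index);

    static_assert(std::is_same<OldStyleCallback, CompactionCallback>::value,
                  "both spellings name the same function-pointer type");

    // Hypothetical Compact() that reports each slot it moves, mirroring the
    // "called when ... moved from from_index to to_index" contract above.
    void Compact(int used_slots, CompactionCallback callback) {
      for (int i = 0; i < used_slots; ++i) {
        callback(/*object=*/100 + i, /*from_index=*/i, /*to_index=*/i / 2);
      }
    }

    int main() {
      Compact(3, [](int object, int from_index, int to_index) {
        std::printf("moved %d: %d -> %d\n", object, from_index, to_index);
      });
    }
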
diff --git a/deps/v8/src/objects/regexp-match-info.h b/deps/v8/src/objects/regexp-match-info.h
index d973e62a6b..32828e9591 100644
--- a/deps/v8/src/objects/regexp-match-info.h
+++ b/deps/v8/src/objects/regexp-match-info.h
@@ -57,16 +57,8 @@ class V8_EXPORT_PRIVATE RegExpMatchInfo : NON_EXPORTED_BASE(public FixedArray) {
static const int kFirstCaptureIndex = 3;
static const int kLastMatchOverhead = kFirstCaptureIndex;
-// Layout description.
-#define REG_EXP_MATCH_INFO_FIELDS(V) \
- V(kNumberOfCapturesOffset, kTaggedSize) \
- V(kLastSubjectOffset, kTaggedSize) \
- V(kLastInputOffset, kTaggedSize) \
- V(kFirstCaptureOffset, 0)
-
DEFINE_FIELD_OFFSET_CONSTANTS(FixedArray::kHeaderSize,
- REG_EXP_MATCH_INFO_FIELDS)
-#undef REG_EXP_MATCH_INFO_FIELDS
+ TORQUE_GENERATED_REG_EXP_MATCH_INFO_FIELDS)
// Every match info is guaranteed to have enough space to store two captures.
static const int kInitialCaptureIndices = 2;
diff --git a/deps/v8/src/objects/scope-info.cc b/deps/v8/src/objects/scope-info.cc
index b9084137a4..f3b3a15ab7 100644
--- a/deps/v8/src/objects/scope-info.cc
+++ b/deps/v8/src/objects/scope-info.cc
@@ -508,14 +508,14 @@ int ScopeInfo::ContextLength() const {
bool function_name_context_slot =
FunctionVariableField::decode(Flags()) == CONTEXT;
bool force_context = ForceContextAllocationField::decode(Flags());
- bool has_context = context_locals > 0 || force_context ||
- function_name_context_slot ||
- scope_type() == WITH_SCOPE ||
- (scope_type() == BLOCK_SCOPE && CallsSloppyEval() &&
- is_declaration_scope()) ||
- (scope_type() == FUNCTION_SCOPE && CallsSloppyEval()) ||
- (scope_type() == FUNCTION_SCOPE && IsAsmModule()) ||
- scope_type() == MODULE_SCOPE;
+ bool has_context =
+ context_locals > 0 || force_context || function_name_context_slot ||
+ scope_type() == WITH_SCOPE || scope_type() == CLASS_SCOPE ||
+ (scope_type() == BLOCK_SCOPE && CallsSloppyEval() &&
+ is_declaration_scope()) ||
+ (scope_type() == FUNCTION_SCOPE && CallsSloppyEval()) ||
+ (scope_type() == FUNCTION_SCOPE && IsAsmModule()) ||
+ scope_type() == MODULE_SCOPE;
if (has_context) {
return Context::MIN_CONTEXT_SLOTS + context_locals +
@@ -871,8 +871,9 @@ Handle<ModuleInfoEntry> ModuleInfoEntry::New(Isolate* isolate,
Handle<Object> import_name,
int module_request, int cell_index,
int beg_pos, int end_pos) {
- Handle<ModuleInfoEntry> result = Handle<ModuleInfoEntry>::cast(
- isolate->factory()->NewStruct(MODULE_INFO_ENTRY_TYPE, TENURED));
+ Handle<ModuleInfoEntry> result =
+ Handle<ModuleInfoEntry>::cast(isolate->factory()->NewStruct(
+ MODULE_INFO_ENTRY_TYPE, AllocationType::kOld));
result->set_export_name(*export_name);
result->set_local_name(*local_name);
result->set_import_name(*import_name);
diff --git a/deps/v8/src/objects/scope-info.h b/deps/v8/src/objects/scope-info.h
index 38d2318f6d..b5fb1d1a7c 100644
--- a/deps/v8/src/objects/scope-info.h
+++ b/deps/v8/src/objects/scope-info.h
@@ -47,6 +47,9 @@ class ScopeInfo : public FixedArray {
// True if this scope is a (var) declaration scope.
bool is_declaration_scope() const;
+ // True if this scope is a class scope.
+ bool is_class_scope() const;
+
// Does this scope make a sloppy eval call?
bool CallsSloppyEval() const;
@@ -70,12 +73,12 @@ class ScopeInfo : public FixedArray {
bool HasNewTarget() const;
// Is this scope the scope of a named function expression?
- bool HasFunctionName() const;
+ V8_EXPORT_PRIVATE bool HasFunctionName() const;
// See SharedFunctionInfo::HasSharedName.
- bool HasSharedFunctionName() const;
+ V8_EXPORT_PRIVATE bool HasSharedFunctionName() const;
- bool HasInferredFunctionName() const;
+ V8_EXPORT_PRIVATE bool HasInferredFunctionName() const;
void SetFunctionName(Object name);
void SetInferredFunctionName(String name);
@@ -92,7 +95,7 @@ class ScopeInfo : public FixedArray {
inline bool HasSimpleParameters() const;
// Return the function_name if present.
- Object FunctionName() const;
+ V8_EXPORT_PRIVATE Object FunctionName() const;
// The function's name if it is non-empty, otherwise the inferred name or an
// empty string.
@@ -100,7 +103,7 @@ class ScopeInfo : public FixedArray {
// Return the function's inferred name if present.
// See SharedFunctionInfo::function_identifier.
- Object InferredFunctionName() const;
+ V8_EXPORT_PRIVATE Object InferredFunctionName() const;
// Position information accessors.
int StartPosition() const;
diff --git a/deps/v8/src/objects/script-inl.h b/deps/v8/src/objects/script-inl.h
index 0ab5b2dfc4..33c794e4a5 100644
--- a/deps/v8/src/objects/script-inl.h
+++ b/deps/v8/src/objects/script-inl.h
@@ -29,7 +29,7 @@ SMI_ACCESSORS(Script, id, kIdOffset)
SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
-SMI_ACCESSORS(Script, type, kTypeOffset)
+SMI_ACCESSORS(Script, type, kScriptTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS_CHECKED(Script, eval_from_shared_or_wrapped_arguments, Object,
kEvalFromSharedOrWrappedArgumentsOffset,
diff --git a/deps/v8/src/objects/script.h b/deps/v8/src/objects/script.h
index 12c9a4eeac..315ab038a8 100644
--- a/deps/v8/src/objects/script.h
+++ b/deps/v8/src/objects/script.h
@@ -13,6 +13,11 @@
#include "src/objects/object-macros.h"
namespace v8 {
+
+namespace tracing {
+class TracedValue;
+}
+
namespace internal {
// Script describes a script which has been added to the VM.
@@ -128,13 +133,13 @@ class Script : public Struct {
Object GetNameOrSourceURL();
// Retrieve source position from where eval was called.
- int GetEvalPosition();
+ static int GetEvalPosition(Isolate* isolate, Handle<Script> script);
// Check if the script contains any Asm modules.
bool ContainsAsmModule();
// Init line_ends array with source code positions of line ends.
- static void InitLineEnds(Handle<Script> script);
+ V8_EXPORT_PRIVATE static void InitLineEnds(Handle<Script> script);
// Carries information about a source position.
struct PositionInfo {
@@ -158,15 +163,16 @@ class Script : public Struct {
// callsites.
static bool GetPositionInfo(Handle<Script> script, int position,
PositionInfo* info, OffsetFlag offset_flag);
- bool GetPositionInfo(int position, PositionInfo* info,
- OffsetFlag offset_flag) const;
+ V8_EXPORT_PRIVATE bool GetPositionInfo(int position, PositionInfo* info,
+ OffsetFlag offset_flag) const;
bool IsUserJavaScript();
// Wrappers for GetPositionInfo
static int GetColumnNumber(Handle<Script> script, int code_offset);
int GetColumnNumber(int code_pos) const;
- static int GetLineNumber(Handle<Script> script, int code_offset);
+ V8_EXPORT_PRIVATE static int GetLineNumber(Handle<Script> script,
+ int code_offset);
int GetLineNumber(int code_pos) const;
// Look through the list of existing shared function infos to find one
@@ -174,8 +180,20 @@ class Script : public Struct {
MaybeHandle<SharedFunctionInfo> FindSharedFunctionInfo(
Isolate* isolate, const FunctionLiteral* fun);
+ // Returns the Script in a format tracing can support.
+ std::unique_ptr<v8::tracing::TracedValue> ToTracedValue();
+
+ // The tracing scope for Script objects.
+ static const char* kTraceScope;
+
+ // Returns the unique TraceID for this Script (within the kTraceScope).
+ uint64_t TraceID() const;
+
+ // Returns the unique trace ID reference for this Script.
+ std::unique_ptr<v8::tracing::TracedValue> TraceIDRef() const;
+
// Iterate over all script objects on the heap.
- class Iterator {
+ class V8_EXPORT_PRIVATE Iterator {
public:
explicit Iterator(Isolate* isolate);
Script Next();
@@ -189,28 +207,8 @@ class Script : public Struct {
DECL_PRINTER(Script)
DECL_VERIFIER(Script)
-// Layout description.
-#define SCRIPTS_FIELDS(V) \
- V(kSourceOffset, kTaggedSize) \
- V(kNameOffset, kTaggedSize) \
- V(kLineOffsetOffset, kTaggedSize) \
- V(kColumnOffsetOffset, kTaggedSize) \
- V(kContextOffset, kTaggedSize) \
- V(kTypeOffset, kTaggedSize) \
- V(kLineEndsOffset, kTaggedSize) \
- V(kIdOffset, kTaggedSize) \
- V(kEvalFromSharedOrWrappedArgumentsOffset, kTaggedSize) \
- V(kEvalFromPositionOffset, kTaggedSize) \
- V(kSharedFunctionInfosOffset, kTaggedSize) \
- V(kFlagsOffset, kTaggedSize) \
- V(kSourceUrlOffset, kTaggedSize) \
- V(kSourceMappingUrlOffset, kTaggedSize) \
- V(kHostDefinedOptionsOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, SCRIPTS_FIELDS)
-#undef SCRIPTS_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_SCRIPT_FIELDS)
private:
// Bit positions in the flags field.
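
Script gains kTraceScope, TraceID(), TraceIDRef() and ToTracedValue() above (SharedFunctionInfo receives the same set later in this patch), so trace events can refer to a script by a scoped id instead of embedding it. Only the declarations are visible in the diff, so the following is merely a guess at the general shape of such a scoped reference, using plain standard-library types in place of v8::tracing::TracedValue.

    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Purely illustrative: a trace reference pairing a scope string with an
    // id that is unique within that scope (here, simply the script id).
    constexpr const char* kTraceScope = "v8::internal::Script";  // placeholder

    struct ScriptModel {
      int id;

      uint64_t TraceID() const { return static_cast<uint64_t>(id); }

      // A trace event elsewhere can carry this (scope, id) pair to refer back
      // to the script instead of duplicating its contents.
      std::string TraceIDRef() const {
        return std::string(kTraceScope) + "/" + std::to_string(TraceID());
      }
    };

    int main() {
      ScriptModel script{42};
      std::printf("%s\n", script.TraceIDRef().c_str());
      // prints: v8::internal::Script/42
    }
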
diff --git a/deps/v8/src/objects/shared-function-info-inl.h b/deps/v8/src/objects/shared-function-info-inl.h
index 137e0d9e02..1187db2d94 100644
--- a/deps/v8/src/objects/shared-function-info-inl.h
+++ b/deps/v8/src/objects/shared-function-info-inl.h
@@ -131,10 +131,8 @@ ACCESSORS(SharedFunctionInfo, script_or_debug_info, Object,
UINT16_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
UINT16_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
kFormalParameterCountOffset)
-UINT8_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
- kExpectedNofPropertiesOffset)
-UINT8_ACCESSORS(SharedFunctionInfo, raw_builtin_function_id,
- kBuiltinFunctionIdOffset)
+UINT16_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
+ kExpectedNofPropertiesOffset)
UINT16_ACCESSORS(SharedFunctionInfo, raw_function_token_offset,
kFunctionTokenOffsetOffset)
RELAXED_INT32_ACCESSORS(SharedFunctionInfo, flags, kFlagsOffset)
@@ -225,8 +223,9 @@ BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags, is_named_expression,
SharedFunctionInfo::IsNamedExpressionBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags, is_toplevel,
SharedFunctionInfo::IsTopLevelBit)
-BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags, is_oneshot_iife,
- SharedFunctionInfo::IsOneshotIIFEBit)
+BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags,
+ is_oneshot_iife_or_properties_are_final,
+ SharedFunctionInfo::IsOneshotIIFEOrPropertiesAreFinalBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags,
is_safe_to_skip_arguments_adaptor,
SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit)
@@ -429,16 +428,6 @@ IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
DCHECK_IMPLIES(!retain_bytecode_.is_null(), is_compiled());
}
-uint16_t SharedFunctionInfo::GetLength() const {
- DCHECK(is_compiled());
- DCHECK(HasLength());
- return length();
-}
-
-bool SharedFunctionInfo::HasLength() const {
- return length() != kInvalidLength;
-}
-
bool SharedFunctionInfo::has_simple_parameters() {
return scope_info()->HasSimpleParameters();
}
@@ -496,8 +485,8 @@ void SharedFunctionInfo::set_bytecode_array(BytecodeArray bytecode) {
set_function_data(bytecode);
}
-bool SharedFunctionInfo::ShouldFlushBytecode() {
- if (!FLAG_flush_bytecode) return false;
+bool SharedFunctionInfo::ShouldFlushBytecode(BytecodeFlushMode mode) {
+ if (mode == BytecodeFlushMode::kDoNotFlushBytecode) return false;
// TODO(rmcilroy): Enable bytecode flushing for resumable functions.
if (IsResumableFunction(kind()) || !allows_lazy_compilation()) {
@@ -510,7 +499,7 @@ bool SharedFunctionInfo::ShouldFlushBytecode() {
Object data = function_data();
if (!data->IsBytecodeArray()) return false;
- if (FLAG_stress_flush_bytecode) return true;
+ if (mode == BytecodeFlushMode::kStressFlushBytecode) return true;
BytecodeArray bytecode = BytecodeArray::cast(data);
@@ -708,18 +697,6 @@ void SharedFunctionInfo::SetDebugInfo(DebugInfo debug_info) {
set_script_or_debug_info(debug_info);
}
-bool SharedFunctionInfo::HasBuiltinFunctionId() {
- return builtin_function_id() != BuiltinFunctionId::kInvalidBuiltinFunctionId;
-}
-
-BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
- return static_cast<BuiltinFunctionId>(raw_builtin_function_id());
-}
-
-void SharedFunctionInfo::set_builtin_function_id(BuiltinFunctionId id) {
- set_raw_builtin_function_id(static_cast<uint8_t>(id));
-}
-
bool SharedFunctionInfo::HasInferredName() {
Object scope_info = name_or_scope_info();
if (scope_info->IsScopeInfo()) {
@@ -733,7 +710,7 @@ String SharedFunctionInfo::inferred_name() {
if (maybe_scope_info->IsScopeInfo()) {
ScopeInfo scope_info = ScopeInfo::cast(maybe_scope_info);
if (scope_info->HasInferredFunctionName()) {
- Object name = ScopeInfo::cast(maybe_scope_info)->InferredFunctionName();
+ Object name = scope_info->InferredFunctionName();
if (name->IsString()) return String::cast(name);
}
} else if (HasUncompiledData()) {
@@ -759,6 +736,33 @@ bool SharedFunctionInfo::CanDiscardCompiled() const {
return can_decompile;
}
+bool SharedFunctionInfo::is_class_constructor() const {
+ return IsClassConstructorBit::decode(flags());
+}
+
+bool SharedFunctionInfo::is_oneshot_iife() const {
+ bool bit = is_oneshot_iife_or_properties_are_final();
+ return bit && !is_class_constructor();
+}
+
+void SharedFunctionInfo::set_is_oneshot_iife(bool value) {
+ DCHECK(!value || !is_class_constructor());
+ if (!is_class_constructor()) {
+ set_is_oneshot_iife_or_properties_are_final(value);
+ }
+}
+
+void SharedFunctionInfo::set_are_properties_final(bool value) {
+ if (is_class_constructor()) {
+ set_is_oneshot_iife_or_properties_are_final(value);
+ }
+}
+
+bool SharedFunctionInfo::are_properties_final() const {
+ bool bit = is_oneshot_iife_or_properties_are_final();
+ return bit && is_class_constructor();
+}
+
} // namespace internal
} // namespace v8
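
ShouldFlushBytecode() above stops reading FLAG_flush_bytecode / FLAG_stress_flush_bytecode itself and instead takes a BytecodeFlushMode parameter; per the header comment later in the patch, the predicate is used by the concurrent marker, so the mode is sampled once and passed in rather than read from global flags on a worker thread. A reduced sketch of that pattern with placeholder types (the real checks on resumability, lazy compilation and bytecode age are condensed into two booleans):

    #include <cstdio>
    #include <thread>
    #include <vector>

    // Stand-in for the real BytecodeFlushMode.
    enum class BytecodeFlushMode {
      kDoNotFlushBytecode,
      kFlushBytecode,
      kStressFlushBytecode,
    };

    struct FunctionInfo {
      bool resumable = false;
      bool old_enough = false;
    };

    // The predicate takes the mode explicitly instead of reading global
    // flags, so a concurrent marker thread never touches mutable global
    // state.
    bool ShouldFlush(const FunctionInfo& fn, BytecodeFlushMode mode) {
      if (mode == BytecodeFlushMode::kDoNotFlushBytecode) return false;
      if (fn.resumable) return false;
      if (mode == BytecodeFlushMode::kStressFlushBytecode) return true;
      return fn.old_enough;
    }

    int main() {
      // Sampled once before the worker starts (the real code derives it
      // from flags on the main thread).
      const BytecodeFlushMode mode = BytecodeFlushMode::kFlushBytecode;
      std::vector<FunctionInfo> heap = {{false, true}, {true, true},
                                        {false, false}};
      int flushable = 0;
      std::thread marker([&] {
        for (const auto& fn : heap)
          if (ShouldFlush(fn, mode)) ++flushable;
      });
      marker.join();
      std::printf("flushable: %d\n", flushable);  // 1
    }
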
diff --git a/deps/v8/src/objects/shared-function-info.h b/deps/v8/src/objects/shared-function-info.h
index b115866e24..5b79098fc0 100644
--- a/deps/v8/src/objects/shared-function-info.h
+++ b/deps/v8/src/objects/shared-function-info.h
@@ -8,18 +8,23 @@
#include "src/bailout-reason.h"
#include "src/function-kind.h"
#include "src/objects.h"
-#include "src/objects/builtin-function-id.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/script.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/struct.h"
+#include "testing/gtest/include/gtest/gtest_prod.h"
#include "torque-generated/class-definitions-from-dsl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
+
+namespace tracing {
+class TracedValue;
+}
+
namespace internal {
class AsmWasmData;
@@ -70,16 +75,9 @@ class PreparseData : public HeapObject {
DECL_PRINTER(PreparseData)
DECL_VERIFIER(PreparseData)
-// Layout description.
-#define PREPARSE_DATA_FIELDS(V) \
- V(kDataLengthOffset, kInt32Size) \
- V(kInnerLengthOffset, kInt32Size) \
- /* Header size. */ \
- V(kDataStartOffset, 0) \
- V(kHeaderSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, PREPARSE_DATA_FIELDS)
-#undef PREPARSE_DATA_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_PREPARSE_DATA_FIELDS)
+ static const int kDataStartOffset = kSize;
class BodyDescriptor;
@@ -134,9 +132,8 @@ class UncompiledData : public HeapObject {
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, UNCOMPILED_DATA_FIELDS)
#undef UNCOMPILED_DATA_FIELDS
- typedef FixedBodyDescriptor<kStartOfPointerFieldsOffset,
- kEndOfTaggedFieldsOffset, kSize>
- BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kStartOfPointerFieldsOffset,
+ kEndOfTaggedFieldsOffset, kSize>;
// Clear uninitialized padding space.
inline void clear_padding();
@@ -156,7 +153,7 @@ class UncompiledDataWithoutPreparseData : public UncompiledData {
static const int kSize = UncompiledData::kSize;
// No extra fields compared to UncompiledData.
- typedef UncompiledData::BodyDescriptor BodyDescriptor;
+ using BodyDescriptor = UncompiledData::BodyDescriptor;
OBJECT_CONSTRUCTORS(UncompiledDataWithoutPreparseData, UncompiledData);
};
@@ -195,11 +192,10 @@ class UncompiledDataWithPreparseData : public UncompiledData {
// Make sure the size is aligned
STATIC_ASSERT(IsAligned(kSize, kTaggedSize));
- typedef SubclassBodyDescriptor<
+ using BodyDescriptor = SubclassBodyDescriptor<
UncompiledData::BodyDescriptor,
FixedBodyDescriptor<kStartOfPointerFieldsOffset, kEndOfTaggedFieldsOffset,
- kSize>>
- BodyDescriptor;
+ kSize>>;
OBJECT_CONSTRUCTORS(UncompiledDataWithPreparseData, UncompiledData);
};
@@ -209,15 +205,8 @@ class InterpreterData : public Struct {
DECL_ACCESSORS(bytecode_array, BytecodeArray)
DECL_ACCESSORS(interpreter_trampoline, Code)
-// Layout description.
-#define INTERPRETER_DATA_FIELDS(V) \
- V(kBytecodeArrayOffset, kTaggedSize) \
- V(kInterpreterTrampolineOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize, INTERPRETER_DATA_FIELDS)
-#undef INTERPRETER_DATA_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
+ TORQUE_GENERATED_INTERPRETER_DATA_FIELDS)
DECL_CAST(InterpreterData)
DECL_PRINTER(InterpreterData)
@@ -231,14 +220,16 @@ class InterpreterData : public Struct {
class SharedFunctionInfo : public HeapObject {
public:
NEVER_READ_ONLY_SPACE
- static constexpr Object const kNoSharedNameSentinel = Smi::kZero;
+
+ V8_EXPORT_PRIVATE static constexpr Object const kNoSharedNameSentinel =
+ Smi::kZero;
// [name]: Returns shared name if it exists or an empty string otherwise.
inline String Name() const;
inline void SetName(String name);
// Get the code object which represents the execution of this function.
- Code GetCode() const;
+ V8_EXPORT_PRIVATE Code GetCode() const;
// Get the abstract code associated with the function, which will either be
// a Code object or a BytecodeArray.
@@ -265,7 +256,6 @@ class SharedFunctionInfo : public HeapObject {
static const int kInitialLength = kEntriesStart + kEntryLength;
static const int kNotFound = -1;
- static const uint16_t kInvalidLength = static_cast<uint16_t>(-1);
// [scope_info]: Scope info.
DECL_ACCESSORS(scope_info, ScopeInfo)
@@ -308,8 +298,7 @@ class SharedFunctionInfo : public HeapObject {
// Use up to 2^16-2 parameters (16 bits of values, where one is reserved for
// kDontAdaptArgumentsSentinel). The value is only reliable when the function
// has been compiled.
- inline uint16_t GetLength() const;
- inline bool HasLength() const;
+ inline uint16_t length() const;
inline void set_length(int value);
// [internal formal parameter count]: The declared number of parameters.
@@ -323,7 +312,7 @@ class SharedFunctionInfo : public HeapObject {
// [expected_nof_properties]: Expected number of properties for the
// function. The value is only reliable when the function has been compiled.
- DECL_UINT8_ACCESSORS(expected_nof_properties)
+ DECL_UINT16_ACCESSORS(expected_nof_properties)
// [function data]: This field holds some additional data for function.
// Currently it has one of:
@@ -341,6 +330,7 @@ class SharedFunctionInfo : public HeapObject {
DECL_ACCESSORS(function_data, Object)
inline bool IsApiFunction() const;
+ inline bool is_class_constructor() const;
inline FunctionTemplateInfo get_api_func_data();
inline void set_api_func_data(FunctionTemplateInfo data);
inline bool HasBytecodeArray() const;
@@ -356,11 +346,7 @@ class SharedFunctionInfo : public HeapObject {
inline AsmWasmData asm_wasm_data() const;
inline void set_asm_wasm_data(AsmWasmData data);
- // A brief note to clear up possible confusion:
- // builtin_id corresponds to the auto-generated
- // Builtins::Name id, while builtin_function_id corresponds to
- // BuiltinFunctionId (a manually maintained list of 'interesting' functions
- // mainly used during optimization).
+ // builtin_id corresponds to the auto-generated Builtins::Name id.
inline bool HasBuiltinId() const;
inline int builtin_id() const;
inline void set_builtin_id(int builtin_id);
@@ -380,18 +366,6 @@ class SharedFunctionInfo : public HeapObject {
// turning it into UncompiledDataWithoutPreparseData.
inline void ClearPreparseData();
- // [raw_builtin_function_id]: The id of the built-in function this function
- // represents, used during optimization to improve code generation.
- // TODO(leszeks): Once there are no more JS builtins, this can be replaced
- // by BuiltinId.
- DECL_UINT8_ACCESSORS(raw_builtin_function_id)
- inline bool HasBuiltinFunctionId();
- inline BuiltinFunctionId builtin_function_id();
- inline void set_builtin_function_id(BuiltinFunctionId id);
- // Make sure BuiltinFunctionIds fit in a uint8_t
- STATIC_ASSERT((std::is_same<std::underlying_type<BuiltinFunctionId>::type,
- uint8_t>::value));
-
// The inferred_name is inferred from variable or property assignment of this
// function. It is used to facilitate debugging and profiling of JavaScript
// code written in OO style, where almost all functions are anonymous but are
@@ -404,7 +378,7 @@ class SharedFunctionInfo : public HeapObject {
// Break infos are contained in DebugInfo; this is a convenience method
// to simplify access.
- bool HasBreakInfo() const;
+ V8_EXPORT_PRIVATE bool HasBreakInfo() const;
bool BreakAtEntry() const;
// Coverage infos are contained in DebugInfo; this is a convenience method
@@ -494,6 +468,9 @@ class SharedFunctionInfo : public HeapObject {
// is only executed once.
DECL_BOOLEAN_ACCESSORS(is_oneshot_iife)
+ // Whether or not the number of expected properties may change.
+ DECL_BOOLEAN_ACCESSORS(are_properties_final)
+
// Indicates that the function represented by the shared function info
// cannot observe the actual parameters passed at a call site, which
// means the function doesn't use the arguments object, doesn't use
@@ -560,8 +537,8 @@ class SharedFunctionInfo : public HeapObject {
// Flush compiled data from this function, setting it back to CompileLazy and
// clearing any compiled metadata.
- static void DiscardCompiled(Isolate* isolate,
- Handle<SharedFunctionInfo> shared_info);
+ V8_EXPORT_PRIVATE static void DiscardCompiled(
+ Isolate* isolate, Handle<SharedFunctionInfo> shared_info);
// Discard the compiled metadata. If called during GC then
// |gc_notify_updated_slot| should be used to record any slot updates.
@@ -571,8 +548,10 @@ class SharedFunctionInfo : public HeapObject {
gc_notify_updated_slot =
[](HeapObject object, ObjectSlot slot, HeapObject target) {});
- // Returns true if the function has old bytecode that could be flushed.
- inline bool ShouldFlushBytecode();
+ // Returns true if the function has old bytecode that could be flushed. This
+ // function shouldn't access any flags as it is used by the concurrent marker.
+ // Hence it takes the mode as an argument.
+ inline bool ShouldFlushBytecode(BytecodeFlushMode mode);
// Check whether or not this function is inlineable.
bool IsInlineable();
@@ -589,8 +568,10 @@ class SharedFunctionInfo : public HeapObject {
static void InitFromFunctionLiteral(Handle<SharedFunctionInfo> shared_info,
FunctionLiteral* lit, bool is_toplevel);
- // Sets the expected number of properties based on estimate from parser.
- void SetExpectedNofPropertiesFromEstimate(FunctionLiteral* literal);
+ // Updates the expected number of properties based on estimate from parser.
+ void UpdateExpectedNofPropertiesFromEstimate(FunctionLiteral* literal);
+ void UpdateAndFinalizeExpectedNofPropertiesFromEstimate(
+ FunctionLiteral* literal);
// Sets the FunctionTokenOffset field based on the given token position and
// start position.
@@ -601,7 +582,7 @@ class SharedFunctionInfo : public HeapObject {
Isolate* isolate, Handle<SharedFunctionInfo> shared_info);
// Hash based on function literal id and script id.
- uint32_t Hash();
+ V8_EXPORT_PRIVATE uint32_t Hash();
inline bool construct_as_builtin() const;
@@ -619,13 +600,28 @@ class SharedFunctionInfo : public HeapObject {
void PrintSourceCode(std::ostream& os);
#endif
+ // Returns the SharedFunctionInfo in a format tracing can support.
+ std::unique_ptr<v8::tracing::TracedValue> ToTracedValue();
+
+ // The tracing scope for SharedFunctionInfo objects.
+ static const char* kTraceScope;
+
+ // Returns the unique TraceID for this SharedFunctionInfo (within the
+ // kTraceScope, works only for functions that have a Script and start/end
+ // position).
+ uint64_t TraceID() const;
+
+ // Returns the unique trace ID reference for this SharedFunctionInfo
+ // (based on the |TraceID()| above).
+ std::unique_ptr<v8::tracing::TracedValue> TraceIDRef() const;
+
// Iterate over all shared function infos in a given script.
class ScriptIterator {
public:
- ScriptIterator(Isolate* isolate, Script script);
+ V8_EXPORT_PRIVATE ScriptIterator(Isolate* isolate, Script script);
ScriptIterator(Isolate* isolate,
Handle<WeakFixedArray> shared_function_infos);
- SharedFunctionInfo Next();
+ V8_EXPORT_PRIVATE SharedFunctionInfo Next();
int CurrentIndex() const { return index_ - 1; }
// Reset the iterator to run on |script|.
@@ -641,8 +637,8 @@ class SharedFunctionInfo : public HeapObject {
// Iterate over all shared function infos on the heap.
class GlobalIterator {
public:
- explicit GlobalIterator(Isolate* isolate);
- SharedFunctionInfo Next();
+ V8_EXPORT_PRIVATE explicit GlobalIterator(Isolate* isolate);
+ V8_EXPORT_PRIVATE SharedFunctionInfo Next();
private:
Script::Iterator script_iterator_;
@@ -690,7 +686,7 @@ class SharedFunctionInfo : public HeapObject {
V(HasReportedBinaryCoverageBit, bool, 1, _) \
V(IsNamedExpressionBit, bool, 1, _) \
V(IsTopLevelBit, bool, 1, _) \
- V(IsOneshotIIFEBit, bool, 1, _) \
+ V(IsOneshotIIFEOrPropertiesAreFinalBit, bool, 1, _) \
V(IsSafeToSkipArgumentsAdaptorBit, bool, 1, _)
DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
@@ -723,16 +719,24 @@ class SharedFunctionInfo : public HeapObject {
// function.
DECL_ACCESSORS(outer_scope_info, HeapObject)
+ // [is_oneshot_iife_or_properties_are_final]: This bit is used to track
+ // two mutually exclusive cases. Either this SharedFunctionInfo is
+ // a oneshot_iife or we have finished parsing its properties. These cases
+ // are mutually exclusive because the properties final bit is only used by
+ // class constructors to handle lazily parsed properties and class
+ // constructors can never be oneshot iifes.
+ DECL_BOOLEAN_ACCESSORS(is_oneshot_iife_or_properties_are_final)
+
inline void set_kind(FunctionKind kind);
inline void set_needs_home_object(bool value);
+ inline uint16_t get_property_estimate_from_literal(FunctionLiteral* literal);
+
friend class Factory;
friend class V8HeapExplorer;
FRIEND_TEST(PreParserTest, LazyFunctionLength);
- inline uint16_t length() const;
-
// Find the index of this function in the parent script. Slow path of
// FunctionLiteralId.
int FindIndexInScript(Isolate* isolate) const;
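The renamed IsOneshotIIFEOrPropertiesAreFinalBit above packs two mutually exclusive states into one flag bit, discriminated by whether the function is a class constructor. A minimal standalone sketch of that idea, using hypothetical names rather than V8's real accessors:

// Illustrative only: one bit, two mutually exclusive meanings, selected by
// whether the function is a class constructor (all names are hypothetical).
class SketchSharedInfo {
 public:
  explicit SketchSharedInfo(bool is_class_constructor)
      : is_class_constructor_(is_class_constructor) {}

  // Meaningful only for non-class-constructors.
  bool is_oneshot_iife() const { return !is_class_constructor_ && bit_; }
  void set_is_oneshot_iife(bool value) {
    if (!is_class_constructor_) bit_ = value;
  }

  // Meaningful only for class constructors (lazily parsed properties).
  bool are_properties_final() const { return is_class_constructor_ && bit_; }
  void set_are_properties_final(bool value) {
    if (is_class_constructor_) bit_ = value;
  }

 private:
  const bool is_class_constructor_;
  bool bit_ = false;
};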
diff --git a/deps/v8/src/objects/slots-atomic-inl.h b/deps/v8/src/objects/slots-atomic-inl.h
index 0ef1232eec..57da18dd66 100644
--- a/deps/v8/src/objects/slots-atomic-inl.h
+++ b/deps/v8/src/objects/slots-atomic-inl.h
@@ -23,7 +23,7 @@ namespace internal {
// // Decompress a and b if necessary.
// return my_comparison(a, b);
// });
-// Note how the comparator operates on Address values, representing the raw
+// Note how the comparator operates on Tagged_t values, representing the raw
// data found at the given heap location, so you probably want to construct
// an Object from it.
class AtomicSlot : public SlotBase<AtomicSlot, Tagged_t> {
@@ -71,11 +71,11 @@ class AtomicSlot : public SlotBase<AtomicSlot, Tagged_t> {
// The rest of this class follows C++'s "RandomAccessIterator" requirements.
// Most of the heavy lifting is inherited from SlotBase.
- typedef int difference_type;
- typedef Tagged_t value_type;
- typedef Reference reference;
- typedef void* pointer; // Must be present, but should not be used.
- typedef std::random_access_iterator_tag iterator_category;
+ using difference_type = int;
+ using value_type = Tagged_t;
+ using reference = Reference;
+ using pointer = void*; // Must be present, but should not be used.
+ using iterator_category = std::random_access_iterator_tag;
AtomicSlot() : SlotBase(kNullAddress) {}
explicit AtomicSlot(Address address) : SlotBase(address) {}
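The updated comment above already sketches the intended STL usage of AtomicSlot; here is a hedged, slightly fuller version of it. The helper name and the comparator body are stand-ins, and the V8-internal types (Address, Tagged_t, AtomicSlot, kTaggedSize) are assumed to come from the usual headers:

#include <algorithm>

// Hypothetical helper: sort |length| tagged slots starting at |start|.
void SortTaggedRange(Address start, int length) {
  AtomicSlot begin(start);
  AtomicSlot end(start + length * kTaggedSize);
  std::sort(begin, end, [](Tagged_t a, Tagged_t b) {
    // Decompress a and b here if pointer compression is enabled.
    return a < b;  // stand-in for a real comparison
  });
}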
diff --git a/deps/v8/src/objects/slots.h b/deps/v8/src/objects/slots.h
index 3af615a695..18c8152f5b 100644
--- a/deps/v8/src/objects/slots.h
+++ b/deps/v8/src/objects/slots.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_SLOTS_H_
#include "src/globals.h"
+#include "src/v8memory.h"
namespace v8 {
namespace internal {
@@ -174,6 +175,84 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
inline void StoreHeapObject(HeapObject value) const;
};
+// TODO(ishell, v8:8875): When pointer compression is enabled, the [u]intptr_t
+// and double fields are only kTaggedSize-aligned, so in order to avoid
+// undefined behavior in C++ code we use this iterator adaptor when using STL
+// algorithms with unaligned pointers.
+// It will be removed once v8:8875 is fixed and all the full-pointer and
+// double values in the compressed V8 heap are properly aligned.
+template <typename T>
+class UnalignedSlot : public SlotBase<UnalignedSlot<T>, T, 1> {
+ public:
+ // This class is a stand-in for "T&" that uses custom read/write operations
+ // for the actual memory accesses.
+ class Reference {
+ public:
+ explicit Reference(Address address) : address_(address) {}
+ Reference(const Reference&) V8_NOEXCEPT = default;
+
+ Reference& operator=(const Reference& other) V8_NOEXCEPT {
+ WriteUnalignedValue<T>(address_, other.value());
+ return *this;
+ }
+ Reference& operator=(T value) {
+ WriteUnalignedValue<T>(address_, value);
+ return *this;
+ }
+
+ // Values of type UnalignedSlot::reference must be implicitly convertible
+ // to UnalignedSlot::value_type.
+ operator T() const { return value(); }
+
+ void swap(Reference& other) {
+ T tmp = value();
+ WriteUnalignedValue<T>(address_, other.value());
+ WriteUnalignedValue<T>(other.address_, tmp);
+ }
+
+ bool operator<(const Reference& other) const {
+ return value() < other.value();
+ }
+
+ bool operator==(const Reference& other) const {
+ return value() == other.value();
+ }
+
+ private:
+ T value() const { return ReadUnalignedValue<T>(address_); }
+
+ Address address_;
+ };
+
+ // The rest of this class follows C++'s "RandomAccessIterator" requirements.
+ // Most of the heavy lifting is inherited from SlotBase.
+ using difference_type = int;
+ using value_type = T;
+ using reference = Reference;
+ using pointer = T*;
+ using iterator_category = std::random_access_iterator_tag;
+
+ UnalignedSlot() : SlotBase<UnalignedSlot<T>, T, 1>(kNullAddress) {}
+ explicit UnalignedSlot(Address address)
+ : SlotBase<UnalignedSlot<T>, T, 1>(address) {}
+ explicit UnalignedSlot(T* address)
+ : SlotBase<UnalignedSlot<T>, T, 1>(reinterpret_cast<Address>(address)) {}
+
+ Reference operator*() const {
+ return Reference(SlotBase<UnalignedSlot<T>, T, 1>::address());
+ }
+ Reference operator[](difference_type i) const {
+ return Reference(SlotBase<UnalignedSlot<T>, T, 1>::address() +
+ i * sizeof(T));
+ }
+
+ friend void swap(Reference lhs, Reference rhs) { lhs.swap(rhs); }
+
+ friend difference_type operator-(UnalignedSlot a, UnalignedSlot b) {
+ return static_cast<int>(a.address() - b.address()) / sizeof(T);
+ }
+};
+
} // namespace internal
} // namespace v8
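A hedged usage sketch for the UnalignedSlot adaptor added above, assuming a buffer of doubles that may only be kTaggedSize-aligned; the helper itself is hypothetical:

#include <algorithm>

// std::sort only touches the data through UnalignedSlot::Reference, which
// reads and writes via ReadUnalignedValue/WriteUnalignedValue, so the
// algorithm never performs a misaligned double access directly.
void SortPossiblyUnalignedDoubles(Address start, int count) {
  UnalignedSlot<double> begin(start);
  UnalignedSlot<double> end(start + count * sizeof(double));
  std::sort(begin, end);
}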
diff --git a/deps/v8/src/objects/smi.h b/deps/v8/src/objects/smi.h
index 301d5093c0..938fc5504d 100644
--- a/deps/v8/src/objects/smi.h
+++ b/deps/v8/src/objects/smi.h
@@ -87,7 +87,8 @@ class Smi : public Object {
// 1 if x > y.
// Returns the result (a tagged Smi) as a raw Address for ExternalReference
// usage.
- static Address LexicographicCompare(Isolate* isolate, Smi x, Smi y);
+ V8_EXPORT_PRIVATE static Address LexicographicCompare(Isolate* isolate, Smi x,
+ Smi y);
DECL_CAST(Smi)
diff --git a/deps/v8/src/objects/stack-frame-info-inl.h b/deps/v8/src/objects/stack-frame-info-inl.h
index 4bcf3a5672..1007c78b18 100644
--- a/deps/v8/src/objects/stack-frame-info-inl.h
+++ b/deps/v8/src/objects/stack-frame-info-inl.h
@@ -35,7 +35,6 @@ SMI_ACCESSORS(StackFrameInfo, flag, kFlagOffset)
BOOL_ACCESSORS(StackFrameInfo, flag, is_eval, kIsEvalBit)
BOOL_ACCESSORS(StackFrameInfo, flag, is_constructor, kIsConstructorBit)
BOOL_ACCESSORS(StackFrameInfo, flag, is_wasm, kIsWasmBit)
-SMI_ACCESSORS(StackFrameInfo, id, kIdOffset)
OBJECT_CONSTRUCTORS_IMPL(StackTraceFrame, Struct)
NEVER_READ_ONLY_SPACE_IMPL(StackTraceFrame)
diff --git a/deps/v8/src/objects/stack-frame-info.h b/deps/v8/src/objects/stack-frame-info.h
index cb67637119..cf1d4b0e2d 100644
--- a/deps/v8/src/objects/stack-frame-info.h
+++ b/deps/v8/src/objects/stack-frame-info.h
@@ -28,7 +28,6 @@ class StackFrameInfo : public Struct {
DECL_BOOLEAN_ACCESSORS(is_constructor)
DECL_BOOLEAN_ACCESSORS(is_wasm)
DECL_INT_ACCESSORS(flag)
- DECL_INT_ACCESSORS(id)
DECL_CAST(StackFrameInfo)
@@ -67,17 +66,8 @@ class StackTraceFrame : public Struct {
DECL_PRINTER(StackTraceFrame)
DECL_VERIFIER(StackTraceFrame)
- // Layout description.
-#define STACK_FRAME_FIELDS(V) \
- V(kFrameArrayOffset, kTaggedSize) \
- V(kFrameIndexOffset, kTaggedSize) \
- V(kFrameInfoOffset, kTaggedSize) \
- V(kIdOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize, STACK_FRAME_FIELDS)
-#undef STACK_FRAME_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
+ TORQUE_GENERATED_STACK_TRACE_FRAME_FIELDS)
static int GetLineNumber(Handle<StackTraceFrame> frame);
static int GetColumnNumber(Handle<StackTraceFrame> frame);
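For readers following the Torque migration in this and the following headers: the TORQUE_GENERATED_*_FIELDS macros are expected to expand to (Name, Size) field lists of the same shape as the hand-written lists they replace. Illustrative only, with the shape inferred from the removed STACK_FRAME_FIELDS list rather than taken from generated code:

// Not the actual generated macro; a sketch of its expected shape.
#define TORQUE_GENERATED_STACK_TRACE_FRAME_FIELDS_SKETCH(V) \
  V(kFrameArrayOffset, kTaggedSize)                         \
  V(kFrameIndexOffset, kTaggedSize)                         \
  V(kFrameInfoOffset, kTaggedSize)                          \
  V(kIdOffset, kTaggedSize)                                 \
  V(kSize, 0)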
diff --git a/deps/v8/src/objects/string-inl.h b/deps/v8/src/objects/string-inl.h
index 440266ced1..f3a4f5908b 100644
--- a/deps/v8/src/objects/string-inl.h
+++ b/deps/v8/src/objects/string-inl.h
@@ -303,13 +303,13 @@ bool String::Equals(Isolate* isolate, Handle<String> one, Handle<String> two) {
}
Handle<String> String::Flatten(Isolate* isolate, Handle<String> string,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
if (string->IsConsString()) {
Handle<ConsString> cons = Handle<ConsString>::cast(string);
if (cons->IsFlat()) {
string = handle(cons->first(), isolate);
} else {
- return SlowFlatten(isolate, cons, pretenure);
+ return SlowFlatten(isolate, cons, allocation);
}
}
if (string->IsThinString()) {
@@ -750,11 +750,11 @@ SubStringRange::SubStringRange(String string,
class SubStringRange::iterator final {
public:
- typedef std::forward_iterator_tag iterator_category;
- typedef int difference_type;
- typedef uc16 value_type;
- typedef uc16* pointer;
- typedef uc16& reference;
+ using iterator_category = std::forward_iterator_tag;
+ using difference_type = int;
+ using value_type = uc16;
+ using pointer = uc16*;
+ using reference = uc16&;
iterator(const iterator& other) = default;
diff --git a/deps/v8/src/objects/string-table.h b/deps/v8/src/objects/string-table.h
index 5e6d012e6b..e71a3a1341 100644
--- a/deps/v8/src/objects/string-table.h
+++ b/deps/v8/src/objects/string-table.h
@@ -78,8 +78,8 @@ class StringTable : public HashTable<StringTable, StringTableShape> {
// {raw_string} must be a tagged String pointer.
// Returns a tagged pointer: either an internalized string, or a Smi
// sentinel.
- static Address LookupStringIfExists_NoAllocate(Isolate* isolate,
- Address raw_string);
+ V8_EXPORT_PRIVATE static Address LookupStringIfExists_NoAllocate(
+ Isolate* isolate, Address raw_string);
static void EnsureCapacityForDeserialization(Isolate* isolate, int expected);
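A hedged caller sketch for the newly exported lookup; the helper is hypothetical and only restates the contract from the comment above (a non-Smi result means an internalized string was found):

// Hypothetical helper, not code from this patch.
bool LookupProducedInternalizedString(Isolate* isolate, Address raw_string) {
  Address result =
      StringTable::LookupStringIfExists_NoAllocate(isolate, raw_string);
  // Per the contract above, the result is either an internalized string or a
  // Smi sentinel.
  return !Object(result).IsSmi();
}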
diff --git a/deps/v8/src/objects/string.cc b/deps/v8/src/objects/string.cc
index a735d038fd..22157a3500 100644
--- a/deps/v8/src/objects/string.cc
+++ b/deps/v8/src/objects/string.cc
@@ -23,7 +23,7 @@ namespace v8 {
namespace internal {
Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
- PretenureFlag pretenure) {
+ AllocationType allocation) {
DCHECK_NE(cons->second()->length(), 0);
// TurboFan can create cons strings with empty first parts.
@@ -40,19 +40,22 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
DCHECK(AllowHeapAllocation::IsAllowed());
int length = cons->length();
- PretenureFlag tenure = ObjectInYoungGeneration(*cons) ? pretenure : TENURED;
+ allocation =
+ ObjectInYoungGeneration(*cons) ? allocation : AllocationType::kOld;
Handle<SeqString> result;
if (cons->IsOneByteRepresentation()) {
- Handle<SeqOneByteString> flat = isolate->factory()
- ->NewRawOneByteString(length, tenure)
- .ToHandleChecked();
+ Handle<SeqOneByteString> flat =
+ isolate->factory()
+ ->NewRawOneByteString(length, allocation)
+ .ToHandleChecked();
DisallowHeapAllocation no_gc;
WriteToFlat(*cons, flat->GetChars(no_gc), 0, length);
result = flat;
} else {
- Handle<SeqTwoByteString> flat = isolate->factory()
- ->NewRawTwoByteString(length, tenure)
- .ToHandleChecked();
+ Handle<SeqTwoByteString> flat =
+ isolate->factory()
+ ->NewRawTwoByteString(length, allocation)
+ .ToHandleChecked();
DisallowHeapAllocation no_gc;
WriteToFlat(*cons, flat->GetChars(no_gc), 0, length);
result = flat;
@@ -997,7 +1000,7 @@ MaybeHandle<String> String::GetSubstitution(Isolate* isolate, Match* match,
break;
}
case '<': { // $<name> - named capture
- typedef String::Match::CaptureState CaptureState;
+ using CaptureState = String::Match::CaptureState;
if (!match->HasNamedCaptures()) {
builder.AppendCharacter('$');
@@ -1522,5 +1525,8 @@ String ConsStringIterator::NextLeaf(bool* blew_stack) {
UNREACHABLE();
}
+template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void String::WriteToFlat(
+ String source, uint16_t* sink, int from, int to);
+
} // namespace internal
} // namespace v8
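The PretenureFlag-to-AllocationType migration in this file (and in template-objects.cc further down) maps TENURED to AllocationType::kOld and NOT_TENURED to AllocationType::kYoung, the latter being the new default of String::Flatten. A hedged caller-side sketch, with a hypothetical wrapper name:

// Hypothetical caller: flatten a (possibly cons) string and request that the
// flat result be allocated in old space.
Handle<String> FlattenIntoOldSpace(Isolate* isolate, Handle<String> str) {
  return String::Flatten(isolate, str, AllocationType::kOld);
}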
diff --git a/deps/v8/src/objects/string.h b/deps/v8/src/objects/string.h
index e91f913c2b..7c6616a6f6 100644
--- a/deps/v8/src/objects/string.h
+++ b/deps/v8/src/objects/string.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_STRING_H_
#include "src/base/bits.h"
+#include "src/base/export-template.h"
#include "src/objects/instance-type.h"
#include "src/objects/name.h"
#include "src/objects/smi.h"
@@ -189,15 +190,17 @@ class String : public Name {
// Degenerate cons strings are handled specially by the garbage
// collector (see IsShortcutCandidate).
- static inline Handle<String> Flatten(Isolate* isolate, Handle<String> string,
- PretenureFlag pretenure = NOT_TENURED);
+ static inline Handle<String> Flatten(
+ Isolate* isolate, Handle<String> string,
+ AllocationType allocation = AllocationType::kYoung);
// Tries to return the content of a flat string as a structure holding either
// a flat vector of char or of uc16.
// If the string isn't flat, and therefore doesn't have flat content, the
// returned structure will report so, and can't provide a vector of either
// kind.
- FlatContent GetFlatContent(const DisallowHeapAllocation& no_gc);
+ V8_EXPORT_PRIVATE FlatContent
+ GetFlatContent(const DisallowHeapAllocation& no_gc);
// Returns the parent of a sliced string or first part of a flat cons string.
// Requires: StringShape(this).IsIndirect() && this->IsFlat()
@@ -265,13 +268,14 @@ class String : public Name {
inline bool Equals(String other);
inline static bool Equals(Isolate* isolate, Handle<String> one,
Handle<String> two);
- bool IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match = false);
+ V8_EXPORT_PRIVATE bool IsUtf8EqualTo(Vector<const char> str,
+ bool allow_prefix_match = false);
// Dispatches to Is{One,Two}ByteEqualTo.
template <typename Char>
bool IsEqualTo(Vector<const Char> str);
- bool IsOneByteEqualTo(Vector<const uint8_t> str);
+ V8_EXPORT_PRIVATE bool IsOneByteEqualTo(Vector<const uint8_t> str);
bool IsTwoByteEqualTo(Vector<const uc16> str);
// Return a UTF8 representation of the string. The string is null
@@ -285,7 +289,7 @@ class String : public Name {
std::unique_ptr<char[]> ToCString(AllowNullsFlag allow_nulls,
RobustnessFlag robustness_flag, int offset,
int length, int* length_output = nullptr);
- std::unique_ptr<char[]> ToCString(
+ V8_EXPORT_PRIVATE std::unique_ptr<char[]> ToCString(
AllowNullsFlag allow_nulls = DISALLOW_NULLS,
RobustnessFlag robustness_flag = FAST_STRING_TRAVERSAL,
int* length_output = nullptr);
@@ -293,8 +297,10 @@ class String : public Name {
bool ComputeArrayIndex(uint32_t* index);
// Externalization.
- bool MakeExternal(v8::String::ExternalStringResource* resource);
- bool MakeExternal(v8::String::ExternalOneByteStringResource* resource);
+ V8_EXPORT_PRIVATE bool MakeExternal(
+ v8::String::ExternalStringResource* resource);
+ V8_EXPORT_PRIVATE bool MakeExternal(
+ v8::String::ExternalOneByteStringResource* resource);
bool SupportsExternalization();
// Conversion.
@@ -308,7 +314,7 @@ class String : public Name {
DECL_CAST(String)
- void PrintOn(FILE* out);
+ V8_EXPORT_PRIVATE void PrintOn(FILE* out);
// For use during stack traces. Performs rudimentary sanity check.
bool LooksValid();
@@ -324,9 +330,10 @@ class String : public Name {
inline bool IsFlat();
- // Layout description.
- static const int kLengthOffset = Name::kHeaderSize;
- static const int kHeaderSize = kLengthOffset + kInt32Size;
+ DEFINE_FIELD_OFFSET_CONSTANTS(Name::kHeaderSize,
+ TORQUE_GENERATED_STRING_FIELDS)
+
+ static const int kHeaderSize = kSize;
// Max char codes.
static const int32_t kMaxOneByteCharCode = unibrow::Latin1::kMaxChar;
@@ -356,6 +363,7 @@ class String : public Name {
// Helper function for flattening strings.
template <typename sinkchar>
+ EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
static void WriteToFlat(String source, sinkchar* sink, int from, int to);
// The return value may point to the first aligned word containing the first
@@ -431,25 +439,30 @@ class String : public Name {
friend class StringTableInsertionKey;
friend class InternalizedStringKey;
- static Handle<String> SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
- PretenureFlag tenure);
+ V8_EXPORT_PRIVATE static Handle<String> SlowFlatten(
+ Isolate* isolate, Handle<ConsString> cons, AllocationType allocation);
// Slow case of String::Equals. This implementation works on any strings
// but it is most efficient on strings that are almost flat.
- bool SlowEquals(String other);
+ V8_EXPORT_PRIVATE bool SlowEquals(String other);
- static bool SlowEquals(Isolate* isolate, Handle<String> one,
- Handle<String> two);
+ V8_EXPORT_PRIVATE static bool SlowEquals(Isolate* isolate, Handle<String> one,
+ Handle<String> two);
// Slow case of AsArrayIndex.
V8_EXPORT_PRIVATE bool SlowAsArrayIndex(uint32_t* index);
// Compute and set the hash code.
- uint32_t ComputeAndSetHash();
+ V8_EXPORT_PRIVATE uint32_t ComputeAndSetHash();
OBJECT_CONSTRUCTORS(String, Name);
};
+// clang-format off
+extern template EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+void String::WriteToFlat(String source, uint16_t* sink, int from, int to);
+// clang-format on
+
class SubStringRange {
public:
inline SubStringRange(String string, const DisallowHeapAllocation& no_gc,
@@ -601,20 +614,13 @@ class ConsString : public String {
DECL_CAST(ConsString)
- // Layout description.
-#define CONS_STRING_FIELDS(V) \
- V(kFirstOffset, kTaggedSize) \
- V(kSecondOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize, CONS_STRING_FIELDS)
-#undef CONS_STRING_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize,
+ TORQUE_GENERATED_CONS_STRING_FIELDS)
// Minimum length for a cons string.
static const int kMinLength = 13;
- typedef FixedBodyDescriptor<kFirstOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kFirstOffset, kSize, kSize>;
DECL_VERIFIER(ConsString)
@@ -641,16 +647,10 @@ class ThinString : public String {
DECL_CAST(ThinString)
DECL_VERIFIER(ThinString)
- // Layout description.
-#define THIN_STRING_FIELDS(V) \
- V(kActualOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
+ DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize,
+ TORQUE_GENERATED_THIN_STRING_FIELDS)
- DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize, THIN_STRING_FIELDS)
-#undef THIN_STRING_FIELDS
-
- typedef FixedBodyDescriptor<kActualOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kActualOffset, kSize, kSize>;
OBJECT_CONSTRUCTORS(ThinString, String);
};
@@ -680,20 +680,13 @@ class SlicedString : public String {
DECL_CAST(SlicedString)
- // Layout description.
-#define SLICED_STRING_FIELDS(V) \
- V(kParentOffset, kTaggedSize) \
- V(kOffsetOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize, SLICED_STRING_FIELDS)
-#undef SLICED_STRING_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize,
+ TORQUE_GENERATED_SLICED_STRING_FIELDS)
// Minimum length for a sliced string.
static const int kMinLength = 13;
- typedef FixedBodyDescriptor<kParentOffset, kSize, kSize> BodyDescriptor;
+ using BodyDescriptor = FixedBodyDescriptor<kParentOffset, kSize, kSize>;
DECL_VERIFIER(SlicedString)
@@ -713,17 +706,12 @@ class ExternalString : public String {
public:
DECL_CAST(ExternalString)
- // Layout description.
-#define EXTERNAL_STRING_FIELDS(V) \
- V(kResourceOffset, kSystemPointerSize) \
- /* Size of uncached external strings. */ \
- V(kUncachedSize, 0) \
- V(kResourceDataOffset, kSystemPointerSize) \
- /* Total size. */ \
- V(kSize, 0)
+ DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize,
+ TORQUE_GENERATED_EXTERNAL_STRING_FIELDS)
- DEFINE_FIELD_OFFSET_CONSTANTS(String::kHeaderSize, EXTERNAL_STRING_FIELDS)
-#undef EXTERNAL_STRING_FIELDS
+ // Size of uncached external strings.
+ static const int kUncachedSize =
+ kResourceOffset + FIELD_SIZE(kResourceOffset);
// Return whether the external string data pointer is not cached.
inline bool is_uncached() const;
@@ -750,7 +738,7 @@ class ExternalOneByteString : public ExternalString {
public:
static const bool kHasOneByteEncoding = true;
- typedef v8::String::ExternalOneByteStringResource Resource;
+ using Resource = v8::String::ExternalOneByteStringResource;
// The underlying resource.
inline const Resource* resource();
@@ -785,7 +773,7 @@ class ExternalTwoByteString : public ExternalString {
public:
static const bool kHasOneByteEncoding = false;
- typedef v8::String::ExternalStringResource Resource;
+ using Resource = v8::String::ExternalStringResource;
// The underlying string resource.
inline const Resource* resource();
@@ -820,7 +808,7 @@ class ExternalTwoByteString : public ExternalString {
// A flat string reader provides random access to the contents of a
// string independent of the character width of the string. The handle
// must be valid as long as the reader is being used.
-class FlatStringReader : public Relocatable {
+class V8_EXPORT_PRIVATE FlatStringReader : public Relocatable {
public:
FlatStringReader(Isolate* isolate, Handle<String> str);
FlatStringReader(Isolate* isolate, Vector<const char> input);
@@ -872,8 +860,8 @@ class ConsStringIterator {
inline void AdjustMaximumDepth();
inline void Pop();
inline bool StackBlown() { return maximum_depth_ - depth_ == kStackSize; }
- void Initialize(ConsString cons_string, int offset);
- String Continue(int* offset_out);
+ V8_EXPORT_PRIVATE void Initialize(ConsString cons_string, int offset);
+ V8_EXPORT_PRIVATE String Continue(int* offset_out);
String NextLeaf(bool* blew_stack);
String Search(int* offset_out);
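The EXPORT_TEMPLATE_DECLARE/EXPORT_TEMPLATE_DEFINE pair and the extern template declaration above follow the standard C++ explicit-instantiation pattern; stripped of V8's export macros it looks roughly like this (names are illustrative, not V8's):

#include <cstdint>

// Header (illustrative): declare the template and suppress implicit
// instantiation of the uint16_t specialization in every translation unit.
template <typename Sink>
void WriteToFlatLike(const char* src, Sink* sink, int from, int to);
extern template void WriteToFlatLike<uint16_t>(const char*, uint16_t*, int,
                                               int);

// Exactly one .cc file (illustrative): the explicit instantiation definition
// whose symbols the export macro then makes visible across the DLL boundary.
template void WriteToFlatLike<uint16_t>(const char*, uint16_t*, int, int);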
diff --git a/deps/v8/src/objects/struct.h b/deps/v8/src/objects/struct.h
index f702022ebf..cab41665bd 100644
--- a/deps/v8/src/objects/struct.h
+++ b/deps/v8/src/objects/struct.h
@@ -93,15 +93,9 @@ class AccessorPair : public Struct {
DECL_PRINTER(AccessorPair)
DECL_VERIFIER(AccessorPair)
-// Layout description.
-#define ACCESSOR_PAIR_FIELDS(V) \
- V(kGetterOffset, kTaggedSize) \
- V(kSetterOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ACCESSOR_PAIR_FIELDS)
-#undef ACCESSOR_PAIR_FIELDS
+ // Layout description.
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_ACCESSOR_PAIR_FIELDS)
OBJECT_CONSTRUCTORS(AccessorPair, Struct);
};
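Throughout these headers, DEFINE_FIELD_OFFSET_CONSTANTS turns a (Name, Size) field list into accumulated offset constants starting at the given header size. A simplified, self-contained illustration of that idea, reusing the removed AccessorPair field names; this is not V8's actual macro:

constexpr int kSketchTaggedSize = 8;  // stand-in for kTaggedSize
constexpr int kSketchHeaderSize = 8;  // stand-in for HeapObject::kHeaderSize

#define SKETCH_ACCESSOR_PAIR_FIELDS(V) \
  V(kGetterOffset, kSketchTaggedSize)  \
  V(kSetterOffset, kSketchTaggedSize)  \
  V(kSize, 0)

// Each entry yields "Name" at the running offset; "NameEnd" advances the
// running offset by Size, so the next field starts right after it.
#define SKETCH_FIELD(Name, Size) Name, Name##End = Name + (Size)-1,
enum SketchAccessorPairOffsets {
  kSketchStart = kSketchHeaderSize - 1,
  SKETCH_ACCESSOR_PAIR_FIELDS(SKETCH_FIELD)
};  // kGetterOffset == 8, kSetterOffset == 16, kSize == 24
#undef SKETCH_FIELD
#undef SKETCH_ACCESSOR_PAIR_FIELDS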
diff --git a/deps/v8/src/objects/template-objects.cc b/deps/v8/src/objects/template-objects.cc
index de9be911e2..448d54fb9d 100644
--- a/deps/v8/src/objects/template-objects.cc
+++ b/deps/v8/src/objects/template-objects.cc
@@ -42,12 +42,14 @@ Handle<JSArray> TemplateObjectDescription::GetTemplateObject(
// Create the raw object from the {raw_strings}.
Handle<FixedArray> raw_strings(description->raw_strings(), isolate);
Handle<JSArray> raw_object = isolate->factory()->NewJSArrayWithElements(
- raw_strings, PACKED_ELEMENTS, raw_strings->length(), TENURED);
+ raw_strings, PACKED_ELEMENTS, raw_strings->length(),
+ AllocationType::kOld);
// Create the template object from the {cooked_strings}.
Handle<FixedArray> cooked_strings(description->cooked_strings(), isolate);
Handle<JSArray> template_object = isolate->factory()->NewJSArrayWithElements(
- cooked_strings, PACKED_ELEMENTS, cooked_strings->length(), TENURED);
+ cooked_strings, PACKED_ELEMENTS, cooked_strings->length(),
+ AllocationType::kOld);
// Freeze the {raw_object}.
JSObject::SetIntegrityLevel(raw_object, FROZEN, kThrowOnError).ToChecked();
@@ -85,7 +87,7 @@ Handle<CachedTemplateObject> CachedTemplateObject::New(
DCHECK(next->IsCachedTemplateObject() || next->IsTheHole());
Factory* factory = isolate->factory();
Handle<CachedTemplateObject> result = Handle<CachedTemplateObject>::cast(
- factory->NewStruct(TUPLE3_TYPE, TENURED));
+ factory->NewStruct(TUPLE3_TYPE, AllocationType::kOld));
result->set_slot_id(slot_id);
result->set_template_object(*template_object);
result->set_next(*next);
diff --git a/deps/v8/src/objects/templates-inl.h b/deps/v8/src/objects/templates-inl.h
index 90b1f05c6c..19739be91a 100644
--- a/deps/v8/src/objects/templates-inl.h
+++ b/deps/v8/src/objects/templates-inl.h
@@ -26,7 +26,7 @@ NEVER_READ_ONLY_SPACE_IMPL(TemplateInfo)
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, serial_number, Object, kSerialNumberOffset)
-SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
+SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfPropertiesOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)
diff --git a/deps/v8/src/objects/templates.h b/deps/v8/src/objects/templates.h
index 6348120a25..bd55821c7d 100644
--- a/deps/v8/src/objects/templates.h
+++ b/deps/v8/src/objects/templates.h
@@ -26,18 +26,8 @@ class TemplateInfo : public Struct {
DECL_CAST(TemplateInfo)
- // Layout description.
-#define TEMPLATE_INFO_FIELDS(V) \
- V(kTagOffset, kTaggedSize) \
- V(kSerialNumberOffset, kTaggedSize) \
- V(kNumberOfProperties, kTaggedSize) \
- V(kPropertyListOffset, kTaggedSize) \
- V(kPropertyAccessorsOffset, kTaggedSize) \
- /* Header size. */ \
- V(kHeaderSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, TEMPLATE_INFO_FIELDS)
-#undef TEMPLATE_INFO_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
+ TORQUE_GENERATED_TEMPLATE_INFO_FIELDS)
static const int kFastTemplateInstantiationsCacheSize = 1 * KB;
@@ -68,21 +58,9 @@ class FunctionTemplateRareData : public Struct {
DECL_PRINTER(FunctionTemplateRareData)
DECL_VERIFIER(FunctionTemplateRareData)
- // Layout description.
-#define SYMBOL_FIELDS(V) \
- V(kPrototypeTemplateOffset, kTaggedSize) \
- V(kPrototypeProviderTemplateOffset, kTaggedSize) \
- V(kParentTemplateOffset, kTaggedSize) \
- V(kNamedPropertyHandlerOffset, kTaggedSize) \
- V(kIndexedPropertyHandlerOffset, kTaggedSize) \
- V(kInstanceTemplateOffset, kTaggedSize) \
- V(kInstanceCallHandlerOffset, kTaggedSize) \
- V(kAccessCheckInfoOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, SYMBOL_FIELDS)
-#undef SYMBOL_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ HeapObject::kHeaderSize,
+ TORQUE_GENERATED_FUNCTION_TEMPLATE_RARE_DATA_FIELDS)
OBJECT_CONSTRUCTORS(FunctionTemplateRareData, Struct);
};
@@ -191,22 +169,8 @@ class FunctionTemplateInfo : public TemplateInfo {
static const int kInvalidSerialNumber = 0;
- // Layout description.
-#define FUNCTION_TEMPLATE_INFO_FIELDS(V) \
- V(kCallCodeOffset, kTaggedSize) \
- V(kClassNameOffset, kTaggedSize) \
- V(kSignatureOffset, kTaggedSize) \
- V(kFunctionTemplateRareDataOffset, kTaggedSize) \
- V(kSharedFunctionInfoOffset, kTaggedSize) \
- V(kFlagOffset, kTaggedSize) \
- V(kLengthOffset, kTaggedSize) \
- V(kCachedPropertyNameOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(TemplateInfo::kHeaderSize,
- FUNCTION_TEMPLATE_INFO_FIELDS)
-#undef FUNCTION_TEMPLATE_INFO_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(TemplateInfo::kSize,
+ TORQUE_GENERATED_FUNCTION_TEMPLATE_INFO_FIELDS)
static Handle<SharedFunctionInfo> GetOrCreateSharedFunctionInfo(
Isolate* isolate, Handle<FunctionTemplateInfo> info,
@@ -257,16 +221,8 @@ class ObjectTemplateInfo : public TemplateInfo {
DECL_VERIFIER(ObjectTemplateInfo)
// Layout description.
-#define OBJECT_TEMPLATE_INFO_FIELDS(V) \
- V(kConstructorOffset, kTaggedSize) \
- /* LSB is for immutable_proto, higher bits for embedder_field_count */ \
- V(kDataOffset, kTaggedSize) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(TemplateInfo::kHeaderSize,
- OBJECT_TEMPLATE_INFO_FIELDS)
-#undef OBJECT_TEMPLATE_INFO_FIELDS
+ DEFINE_FIELD_OFFSET_CONSTANTS(TemplateInfo::kSize,
+ TORQUE_GENERATED_OBJECT_TEMPLATE_INFO_FIELDS)
// Starting from the given object template's constructor, walk up the
// inheritance chain until a function template that has an instance template
// is found.