summaryrefslogtreecommitdiff
path: root/deps/v8/src/objects/code.h
diff options
context:
space:
mode:
authorMichaël Zasso <targos@protonmail.com>2019-03-12 09:01:49 +0100
committerMichaël Zasso <targos@protonmail.com>2019-03-14 18:49:21 +0100
commit7b48713334469818661fe276cf571de9c7899f2d (patch)
tree4dbda49ac88db76ce09dc330a0cb587e68e139ba /deps/v8/src/objects/code.h
parent8549ac09b256666cf5275224ec58fab9939ff32e (diff)
downloadnode-new-7b48713334469818661fe276cf571de9c7899f2d.tar.gz
deps: update V8 to 7.3.492.25
PR-URL: https://github.com/nodejs/node/pull/25852
Reviewed-By: Ujjwal Sharma <usharma1998@gmail.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Diffstat (limited to 'deps/v8/src/objects/code.h')
-rw-r--r--  deps/v8/src/objects/code.h  284
1 file changed, 163 insertions, 121 deletions
diff --git a/deps/v8/src/objects/code.h b/deps/v8/src/objects/code.h
index 1f1d4b71d6..6239ef9a0b 100644
--- a/deps/v8/src/objects/code.h
+++ b/deps/v8/src/objects/code.h
@@ -5,9 +5,12 @@
#ifndef V8_OBJECTS_CODE_H_
#define V8_OBJECTS_CODE_H_
+#include "src/contexts.h"
#include "src/handler-table.h"
#include "src/objects.h"
#include "src/objects/fixed-array.h"
+#include "src/objects/heap-object.h"
+#include "src/objects/struct.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -25,8 +28,9 @@ class Register;
}
// Code describes objects with on-the-fly generated machine code.
-class Code : public HeapObject, public NeverReadOnlySpaceObject {
+class Code : public HeapObject {
public:
+ NEVER_READ_ONLY_SPACE
// Opaque data type for encapsulating code flags like kind, inline
// cache state, and arguments count.
typedef uint32_t Flags;
@@ -54,7 +58,6 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
#ifdef ENABLE_DISASSEMBLER
const char* GetName(Isolate* isolate) const;
- void PrintBuiltinCode(Isolate* isolate, const char* name);
void Disassemble(const char* name, std::ostream& os,
Address current_pc = kNullAddress);
#endif
@@ -74,7 +77,9 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
// [relocation_info]: Code relocation information
DECL_ACCESSORS(relocation_info, ByteArray)
- void InvalidateEmbeddedObjects(Heap* heap);
+
+ // This function should be called only from GC.
+ void ClearEmbeddedObjects(Heap* heap);
// [deoptimization_data]: Array containing data for deopt.
DECL_ACCESSORS(deoptimization_data, FixedArray)
@@ -82,34 +87,35 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
// [source_position_table]: ByteArray for the source positions table or
// SourcePositionTableWithFrameCache.
DECL_ACCESSORS(source_position_table, Object)
- inline ByteArray* SourcePositionTable() const;
+ inline ByteArray SourcePositionTable() const;
// [code_data_container]: A container indirection for all mutable fields.
DECL_ACCESSORS(code_data_container, CodeDataContainer)
- // [stub_key]: The major/minor key of a code stub.
- inline uint32_t stub_key() const;
- inline void set_stub_key(uint32_t key);
-
// [next_code_link]: Link for lists of optimized or deoptimized code.
// Note that this field is stored in the {CodeDataContainer} to be mutable.
- inline Object* next_code_link() const;
- inline void set_next_code_link(Object* value);
+ inline Object next_code_link() const;
+ inline void set_next_code_link(Object value);
// [constant_pool offset]: Offset of the constant pool.
// Valid for FLAG_enable_embedded_constant_pool only
inline int constant_pool_offset() const;
inline void set_constant_pool_offset(int offset);
+ inline int constant_pool_size() const;
+
+ // [code_comments_offset]: Offset of the code comment section.
+ inline int code_comments_offset() const;
+ inline void set_code_comments_offset(int offset);
+ inline Address code_comments() const;
// Unchecked accessors to be used during GC.
- inline ByteArray* unchecked_relocation_info() const;
+ inline ByteArray unchecked_relocation_info() const;
inline int relocation_size() const;
// [kind]: Access to specific code kind.
inline Kind kind() const;
- inline bool is_stub() const;
inline bool is_optimized_code() const;
inline bool is_wasm_code() const;
@@ -132,12 +138,6 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
inline bool can_have_weak_objects() const;
inline void set_can_have_weak_objects(bool value);
- // [is_construct_stub]: For kind BUILTIN, tells whether the code object
- // represents a hand-written construct stub
- // (e.g., NumberConstructor_ConstructStub).
- inline bool is_construct_stub() const;
- inline void set_is_construct_stub(bool value);
-
// [builtin_index]: For builtins, tells which builtin index the code object
// has. The builtin index is a non-negative integer for builtins, and -1
// otherwise.
@@ -162,10 +162,16 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
inline void set_handler_table_offset(int offset);
// [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
- // the code is going to be deoptimized because of dead embedded maps.
+ // the code is going to be deoptimized.
inline bool marked_for_deoptimization() const;
inline void set_marked_for_deoptimization(bool flag);
+ // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
+ // the embedded objects in the code marked for deoptimization were cleared.
+ // Note that embedded_objects_cleared() implies marked_for_deoptimization().
+ inline bool embedded_objects_cleared() const;
+ inline void set_embedded_objects_cleared(bool flag);
+
// [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
// the code was already deoptimized.
inline bool deopt_already_counted() const;
@@ -212,13 +218,10 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
bool is_off_heap_trampoline);
// Convert a target address into a code object.
- static inline Code* GetCodeFromTargetAddress(Address address);
+ static inline Code GetCodeFromTargetAddress(Address address);
// Convert an entry address into an object.
- static inline Object* GetObjectFromEntryAddress(Address location_of_address);
-
- // Convert a code entry into an object.
- static inline Object* GetObjectFromCodeEntry(Address code_entry);
+ static inline Code GetObjectFromEntryAddress(Address location_of_address);
// Returns the address of the first instruction.
inline Address raw_instruction_start() const;
@@ -308,8 +311,13 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
// Migrate code from desc without flushing the instruction cache.
void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);
+ // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
+ // exactly the same size as the RelocInfo in |desc|.
+ static inline void CopyRelocInfoToByteArray(ByteArray dest,
+ const CodeDesc& desc);
+
// Flushes the instruction cache for the executable instructions of this code
- // object.
+ // object. Make sure to call this while the code is still writable.
void FlushICache() const;
// Returns the object size for a given body (used for allocation).
@@ -337,69 +345,73 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();
-#ifdef DEBUG
- enum VerifyMode { kNoContextSpecificPointers, kNoContextRetainingPointers };
- void VerifyEmbeddedObjects(Isolate* isolate,
- VerifyMode mode = kNoContextRetainingPointers);
-#endif // DEBUG
-
bool IsIsolateIndependent(Isolate* isolate);
inline bool CanContainWeakObjects();
- inline bool IsWeakObject(Object* object);
+ inline bool IsWeakObject(HeapObject object);
- static inline bool IsWeakObjectInOptimizedCode(Object* object);
+ static inline bool IsWeakObjectInOptimizedCode(HeapObject object);
// Return true if the function is inlined in the code.
- bool Inlines(SharedFunctionInfo* sfi);
+ bool Inlines(SharedFunctionInfo sfi);
- class OptimizedCodeIterator {
- public:
- explicit OptimizedCodeIterator(Isolate* isolate);
- Code* Next();
-
- private:
- Context* next_context_;
- Code* current_code_;
- Isolate* isolate_;
-
- DisallowHeapAllocation no_gc;
- DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator)
- };
-
- static const int kConstantPoolSize =
- FLAG_enable_embedded_constant_pool ? kIntSize : 0;
+ class OptimizedCodeIterator;
// Layout description.
- static const int kRelocationInfoOffset = HeapObject::kHeaderSize;
- static const int kDeoptimizationDataOffset =
- kRelocationInfoOffset + kPointerSize;
- static const int kSourcePositionTableOffset =
- kDeoptimizationDataOffset + kPointerSize;
- static const int kCodeDataContainerOffset =
- kSourcePositionTableOffset + kPointerSize;
- static const int kInstructionSizeOffset =
- kCodeDataContainerOffset + kPointerSize;
- static const int kFlagsOffset = kInstructionSizeOffset + kIntSize;
- static const int kSafepointTableOffsetOffset = kFlagsOffset + kIntSize;
- static const int kHandlerTableOffsetOffset =
- kSafepointTableOffsetOffset + kIntSize;
- static const int kStubKeyOffset = kHandlerTableOffsetOffset + kIntSize;
- static const int kConstantPoolOffset = kStubKeyOffset + kIntSize;
- static const int kBuiltinIndexOffset =
- kConstantPoolOffset + kConstantPoolSize;
- static const int kHeaderPaddingStart = kBuiltinIndexOffset + kIntSize;
-
- // Add padding to align the instruction start following right after
- // the Code object header.
- static const int kHeaderSize =
- (kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask;
-
- // Data or code not directly visited by GC directly starts here.
- // The serializer needs to copy bytes starting from here verbatim.
- // Objects embedded into code is visited via reloc info.
- static const int kDataStart = kInstructionSizeOffset;
+#define CODE_FIELDS(V) \
+ V(kRelocationInfoOffset, kTaggedSize) \
+ V(kDeoptimizationDataOffset, kTaggedSize) \
+ V(kSourcePositionTableOffset, kTaggedSize) \
+ V(kCodeDataContainerOffset, kTaggedSize) \
+ /* Data or code not directly visited by GC directly starts here. */ \
+ /* The serializer needs to copy bytes starting from here verbatim. */ \
+ /* Objects embedded into code is visited via reloc info. */ \
+ V(kDataStart, 0) \
+ V(kInstructionSizeOffset, kIntSize) \
+ V(kFlagsOffset, kIntSize) \
+ V(kSafepointTableOffsetOffset, kIntSize) \
+ V(kHandlerTableOffsetOffset, kIntSize) \
+ V(kConstantPoolOffset, FLAG_enable_embedded_constant_pool ? kIntSize : 0) \
+ V(kBuiltinIndexOffset, kIntSize) \
+ V(kCodeCommentsOffset, kIntSize) \
+ /* Add padding to align the instruction start following right after */ \
+ /* the Code object header. */ \
+ V(kHeaderPaddingStart, CODE_POINTER_PADDING(kHeaderPaddingStart)) \
+ V(kHeaderSize, 0)
+
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
+#undef CODE_FIELDS
+
+ // This documents the amount of free space we have in each Code object header
+ // due to padding for code alignment.
+#if V8_TARGET_ARCH_ARM64
+ static constexpr int kHeaderPaddingSize = 0;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_MIPS64
+ static constexpr int kHeaderPaddingSize = 0;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_X64
+ static constexpr int kHeaderPaddingSize = 0;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_ARM
+ static constexpr int kHeaderPaddingSize = 20;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_IA32
+ static constexpr int kHeaderPaddingSize = 20;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_MIPS
+ static constexpr int kHeaderPaddingSize = 20;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#elif V8_TARGET_ARCH_PPC64
+ // No static assert possible since padding size depends on the
+ // FLAG_enable_embedded_constant_pool runtime flag.
+#elif V8_TARGET_ARCH_S390X
+ static constexpr int kHeaderPaddingSize = 0;
+ STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
+#else
+#error Unknown architecture.
+#endif
inline int GetUnwindingInfoSizeOffset() const;
@@ -421,9 +433,9 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
// KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
V(MarkedForDeoptimizationField, bool, 1, _) \
+ V(EmbeddedObjectsClearedField, bool, 1, _) \
V(DeoptAlreadyCountedField, bool, 1, _) \
V(CanHaveWeakObjectsField, bool, 1, _) \
- V(IsConstructStubField, bool, 1, _) \
V(IsPromiseRejectionField, bool, 1, _) \
V(IsExceptionCaughtField, bool, 1, _)
DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
@@ -444,7 +456,21 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
bool is_promise_rejection() const;
bool is_exception_caught() const;
- DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
+ OBJECT_CONSTRUCTORS(Code, HeapObject);
+};
+
+class Code::OptimizedCodeIterator {
+ public:
+ explicit OptimizedCodeIterator(Isolate* isolate);
+ Code Next();
+
+ private:
+ Context next_context_;
+ Code current_code_;
+ Isolate* isolate_;
+
+ DISALLOW_HEAP_ALLOCATION(no_gc);
+ DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
};
// CodeDataContainer is a container for all mutable fields associated with its
@@ -452,8 +478,9 @@ class Code : public HeapObject, public NeverReadOnlySpaceObject {
// pages within the heap, its header fields need to be immutable. There always
// is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
// field {Code::code_data_container} itself is immutable.
-class CodeDataContainer : public HeapObject, public NeverReadOnlySpaceObject {
+class CodeDataContainer : public HeapObject {
public:
+ NEVER_READ_ONLY_SPACE
DECL_ACCESSORS(next_code_link, Object)
DECL_INT_ACCESSORS(kind_specific_flags)
@@ -467,24 +494,29 @@ class CodeDataContainer : public HeapObject, public NeverReadOnlySpaceObject {
DECL_PRINTER(CodeDataContainer)
DECL_VERIFIER(CodeDataContainer)
- static const int kNextCodeLinkOffset = HeapObject::kHeaderSize;
- static const int kKindSpecificFlagsOffset =
- kNextCodeLinkOffset + kPointerSize;
- static const int kUnalignedSize = kKindSpecificFlagsOffset + kIntSize;
- static const int kSize = OBJECT_POINTER_ALIGN(kUnalignedSize);
-
- // During mark compact we need to take special care for weak fields.
- static const int kPointerFieldsStrongEndOffset = kNextCodeLinkOffset;
- static const int kPointerFieldsWeakEndOffset = kKindSpecificFlagsOffset;
+// Layout description.
+#define CODE_DATA_FIELDS(V) \
+ /* Weak pointer fields. */ \
+ V(kPointerFieldsStrongEndOffset, 0) \
+ V(kNextCodeLinkOffset, kTaggedSize) \
+ V(kPointerFieldsWeakEndOffset, 0) \
+ /* Raw data fields. */ \
+ V(kKindSpecificFlagsOffset, kIntSize) \
+ V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
+ /* Total size. */ \
+ V(kSize, 0)
+
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
+#undef CODE_DATA_FIELDS
class BodyDescriptor;
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(CodeDataContainer);
+ OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};
-class AbstractCode : public HeapObject, public NeverReadOnlySpaceObject {
+class AbstractCode : public HeapObject {
public:
+ NEVER_READ_ONLY_SPACE
// All code kinds and INTERPRETED_FUNCTION.
enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
@@ -526,9 +558,9 @@ class AbstractCode : public HeapObject, public NeverReadOnlySpaceObject {
inline int InstructionSize();
// Return the source position table.
- inline ByteArray* source_position_table();
+ inline ByteArray source_position_table();
- inline Object* stack_frame_cache();
+ inline Object stack_frame_cache();
static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
Handle<SimpleNumberDictionary> cache);
void DropStackFrameCache();
@@ -547,12 +579,14 @@ class AbstractCode : public HeapObject, public NeverReadOnlySpaceObject {
inline int ExecutableSize();
DECL_CAST(AbstractCode)
- inline Code* GetCode();
- inline BytecodeArray* GetBytecodeArray();
+ inline Code GetCode();
+ inline BytecodeArray GetBytecodeArray();
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;
+
+ OBJECT_CONSTRUCTORS(AbstractCode, HeapObject)
};
// Dependent code is a singly linked list of weak fixed arrays. Each array
@@ -615,15 +649,15 @@ class DependentCode : public WeakFixedArray {
// The following low-level accessors are exposed only for tests.
inline DependencyGroup group();
- inline MaybeObject* object_at(int i);
+ inline MaybeObject object_at(int i);
inline int count();
- inline DependentCode* next_link();
+ inline DependentCode next_link();
private:
static const char* DependencyGroupName(DependencyGroup group);
// Get/Set {object}'s {DependentCode}.
- static DependentCode* GetDependentCode(Handle<HeapObject> object);
+ static DependentCode GetDependentCode(Handle<HeapObject> object);
static void SetDependentCode(Handle<HeapObject> object,
Handle<DependentCode> dep);
@@ -651,9 +685,9 @@ class DependentCode : public WeakFixedArray {
static const int kFlagsIndex = 1;
static const int kCodesStartIndex = 2;
- inline void set_next_link(DependentCode* next);
+ inline void set_next_link(DependentCode next);
inline void set_count(int value);
- inline void set_object_at(int i, MaybeObject* object);
+ inline void set_object_at(int i, MaybeObject object);
inline void clear_at(int i);
inline void copy(int from, int to);
@@ -662,6 +696,8 @@ class DependentCode : public WeakFixedArray {
class GroupField : public BitField<int, 0, 3> {};
class CountField : public BitField<int, 3, 27> {};
STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
+
+ OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray)
};
// BytecodeArray represents a sequence of interpreter bytecodes.
@@ -681,7 +717,7 @@ class BytecodeArray : public FixedArrayBase {
kIsOldBytecodeAge = kSexagenarianBytecodeAge
};
- static int SizeFor(int length) {
+ static constexpr int SizeFor(int length) {
return OBJECT_POINTER_ALIGN(kHeaderSize + length);
}
@@ -732,7 +768,7 @@ class BytecodeArray : public FixedArrayBase {
// offset and source position or SourcePositionTableWithFrameCache.
DECL_ACCESSORS(source_position_table, Object)
- inline ByteArray* SourcePositionTable();
+ inline ByteArray SourcePositionTable();
inline void ClearFrameCacheFromSourcePositionTable();
DECL_CAST(BytecodeArray)
@@ -754,7 +790,7 @@ class BytecodeArray : public FixedArrayBase {
void Disassemble(std::ostream& os);
- void CopyBytecodesTo(BytecodeArray* to);
+ void CopyBytecodesTo(BytecodeArray to);
// Bytecode aging
bool IsOld() const;
@@ -767,9 +803,9 @@ class BytecodeArray : public FixedArrayBase {
// Layout description.
#define BYTECODE_ARRAY_FIELDS(V) \
/* Pointer fields. */ \
- V(kConstantPoolOffset, kPointerSize) \
- V(kHandlerTableOffset, kPointerSize) \
- V(kSourcePositionTableOffset, kPointerSize) \
+ V(kConstantPoolOffset, kTaggedSize) \
+ V(kHandlerTableOffset, kTaggedSize) \
+ V(kSourcePositionTableOffset, kTaggedSize) \
V(kFrameSizeOffset, kIntSize) \
V(kParameterSizeOffset, kIntSize) \
V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
@@ -790,8 +826,7 @@ class BytecodeArray : public FixedArrayBase {
class BodyDescriptor;
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(BytecodeArray);
+ OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
};
// DeoptimizationData is a fixed array used to hold the deoptimization data for
@@ -821,8 +856,8 @@ class DeoptimizationData : public FixedArray {
// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
- inline type* name(); \
- inline void Set##name(type* value);
+ inline type name() const; \
+ inline void Set##name(type value);
DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
@@ -837,8 +872,8 @@ class DeoptimizationData : public FixedArray {
// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
- inline type* name(int i); \
- inline void Set##name(int i, type* value);
+ inline type name(int i) const; \
+ inline void Set##name(int i, type value);
DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
@@ -856,7 +891,7 @@ class DeoptimizationData : public FixedArray {
// Returns the inlined function at the given position in LiteralArray, or the
// outer function if index == kNotInlinedIndex.
- class SharedFunctionInfo* GetInlinedFunction(int index);
+ class SharedFunctionInfo GetInlinedFunction(int index);
// Allocates a DeoptimizationData.
static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
@@ -877,6 +912,8 @@ class DeoptimizationData : public FixedArray {
}
static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
+
+ OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray)
};
class SourcePositionTableWithFrameCache : public Tuple2 {
@@ -886,13 +923,18 @@ class SourcePositionTableWithFrameCache : public Tuple2 {
DECL_CAST(SourcePositionTableWithFrameCache)
- static const int kSourcePositionTableIndex = Struct::kHeaderSize;
- static const int kStackFrameCacheIndex =
- kSourcePositionTableIndex + kPointerSize;
- static const int kSize = kStackFrameCacheIndex + kPointerSize;
+// Layout description.
+#define SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS(V) \
+ V(kSourcePositionTableIndex, kTaggedSize) \
+ V(kStackFrameCacheIndex, kTaggedSize) \
+ /* Total size. */ \
+ V(kSize, 0)
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(SourcePositionTableWithFrameCache);
+ DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
+ SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS)
+#undef SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS
+
+ OBJECT_CONSTRUCTORS(SourcePositionTableWithFrameCache, Tuple2);
};
} // namespace internal