Diffstat (limited to 'deps/v8/src/objects.h')
-rw-r--r--  deps/v8/src/objects.h | 363
1 file changed, 346 insertions, 17 deletions
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index b52bac27be..1827ab0114 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -585,6 +585,7 @@ struct ValueInfo : public Malloced {
// A template-ized version of the IsXXX functions.
template <class C> static inline bool Is(Object* obj);
+
class MaybeObject BASE_EMBEDDED {
public:
inline bool IsFailure();
@@ -654,6 +655,8 @@ class Object : public MaybeObject {
inline bool IsMap();
inline bool IsFixedArray();
inline bool IsDescriptorArray();
+ inline bool IsDeoptimizationInputData();
+ inline bool IsDeoptimizationOutputData();
inline bool IsContext();
inline bool IsCatchContext();
inline bool IsGlobalContext();
@@ -2886,6 +2889,122 @@ class ExternalFloatArray: public ExternalArray {
};
+// DeoptimizationInputData is a fixed array used to hold the deoptimization
+// data for code generated by the Hydrogen/Lithium compiler. It also
+// contains information about functions that were inlined. If N different
+// functions were inlined, then the first N elements of the literal array
+// will contain these functions.
+//
+// It can be empty.
+class DeoptimizationInputData: public FixedArray {
+ public:
+ // Layout description. Indices in the array.
+ static const int kTranslationByteArrayIndex = 0;
+ static const int kInlinedFunctionCountIndex = 1;
+ static const int kLiteralArrayIndex = 2;
+ static const int kOsrAstIdIndex = 3;
+ static const int kOsrPcOffsetIndex = 4;
+ static const int kFirstDeoptEntryIndex = 5;
+
+ // Offsets of deopt entry elements relative to the start of the entry.
+ static const int kAstIdOffset = 0;
+ static const int kTranslationIndexOffset = 1;
+ static const int kArgumentsStackHeightOffset = 2;
+ static const int kDeoptEntrySize = 3;
+
+ // Simple element accessors.
+#define DEFINE_ELEMENT_ACCESSORS(name, type) \
+ type* name() { \
+ return type::cast(get(k##name##Index)); \
+ } \
+ void Set##name(type* value) { \
+ set(k##name##Index, value); \
+ }
+
+ DEFINE_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
+ DEFINE_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
+ DEFINE_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
+ DEFINE_ELEMENT_ACCESSORS(OsrAstId, Smi)
+ DEFINE_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
+
+ // Unchecked accessor to be used during GC.
+ FixedArray* UncheckedLiteralArray() {
+ return reinterpret_cast<FixedArray*>(get(kLiteralArrayIndex));
+ }
+
+#undef DEFINE_ELEMENT_ACCESSORS
+
+ // Accessors for elements of the ith deoptimization entry.
+#define DEFINE_ENTRY_ACCESSORS(name, type) \
+ type* name(int i) { \
+ return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
+ } \
+ void Set##name(int i, type* value) { \
+ set(IndexForEntry(i) + k##name##Offset, value); \
+ }
+
+ DEFINE_ENTRY_ACCESSORS(AstId, Smi)
+ DEFINE_ENTRY_ACCESSORS(TranslationIndex, Smi)
+ DEFINE_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
+
+#undef DEFINE_ENTRY_ACCESSORS
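Editorial aside, not part of the patch: for readers unfamiliar with the macro pattern, this is roughly what one expansion of DEFINE_ENTRY_ACCESSORS produces.

  // DEFINE_ENTRY_ACCESSORS(AstId, Smi) expands to approximately:
  //
  //   Smi* AstId(int i) {
  //     return Smi::cast(get(IndexForEntry(i) + kAstIdOffset));
  //   }
  //   void SetAstId(int i, Smi* value) {
  //     set(IndexForEntry(i) + kAstIdOffset, value);
  //   }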
+
+ int DeoptCount() {
+ return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
+ }
+
+ // Allocates a DeoptimizationInputData.
+ MUST_USE_RESULT static MaybeObject* Allocate(int deopt_entry_count,
+ PretenureFlag pretenure);
+
+ // Casting.
+ static inline DeoptimizationInputData* cast(Object* obj);
+
+#ifdef DEBUG
+ void DeoptimizationInputDataPrint();
+#endif
+
+ private:
+ static int IndexForEntry(int i) {
+ return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
+ }
+
+ static int LengthFor(int entry_count) {
+ return IndexForEntry(entry_count);
+ }
+};
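Editorial aside, not part of the patch: a minimal sketch of how the per-entry accessors above are meant to be combined. It assumes only the index/offset constants and accessors declared in the class, plus V8's PrintF helper.

  // Walk every deoptimization entry and print its three Smi-encoded fields.
  static void DumpDeoptEntries(DeoptimizationInputData* data) {
    for (int i = 0; i < data->DeoptCount(); i++) {
      int ast_id = data->AstId(i)->value();
      int translation_index = data->TranslationIndex(i)->value();
      int stack_height = data->ArgumentsStackHeight(i)->value();
      // translation_index points into TranslationByteArray();
      // stack_height is the arguments stack height recorded for the entry.
      PrintF("deopt %d: ast=%d translation=%d height=%d\n",
             i, ast_id, translation_index, stack_height);
    }
  }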
+
+
+// DeoptimizationOutputData is a fixed array used to hold the deoptimization
+// data for code generated by the full compiler.
+// The format of these objects is as follows:
+// [i * 2]: Ast ID for the ith deoptimization.
+// [i * 2 + 1]: PC and state of the ith deoptimization.
+class DeoptimizationOutputData: public FixedArray {
+ public:
+ int DeoptPoints() { return length() / 2; }
+ Smi* AstId(int index) { return Smi::cast(get(index * 2)); }
+ void SetAstId(int index, Smi* id) { set(index * 2, id); }
+ Smi* PcAndState(int index) { return Smi::cast(get(1 + index * 2)); }
+ void SetPcAndState(int index, Smi* offset) { set(1 + index * 2, offset); }
+
+ static int LengthOfFixedArray(int deopt_points) {
+ return deopt_points * 2;
+ }
+
+ // Allocates a DeoptimizationOutputData.
+ MUST_USE_RESULT static MaybeObject* Allocate(int number_of_deopt_points,
+ PretenureFlag pretenure);
+
+ // Casting.
+ static inline DeoptimizationOutputData* cast(Object* obj);
+
+#ifdef DEBUG
+ void DeoptimizationOutputDataPrint();
+#endif
+};
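Editorial aside, not part of the patch: the full-codegen table is a flat array of (ast id, pc-and-state) Smi pairs, so a table for n deopt points needs LengthOfFixedArray(n) == 2 * n elements. A matching sketch, under the same assumptions as the one above:

  static void DumpDeoptPoints(DeoptimizationOutputData* data) {
    for (int i = 0; i < data->DeoptPoints(); i++) {
      PrintF("deopt point %d: ast=%d pc_and_state=%d\n",
             i,
             data->AstId(i)->value(),
             data->PcAndState(i)->value());
    }
  }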
+
+
// Code describes objects with on-the-fly generated machine code.
class Code: public HeapObject {
public:
@@ -2900,6 +3019,7 @@ class Code: public HeapObject {
enum Kind {
FUNCTION,
+ OPTIMIZED_FUNCTION,
STUB,
BUILTIN,
LOAD_IC,
@@ -2909,13 +3029,15 @@ class Code: public HeapObject {
STORE_IC,
KEYED_STORE_IC,
BINARY_OP_IC,
+ TYPE_RECORDING_BINARY_OP_IC,
+ COMPARE_IC,
// No more than 16 kinds. The value currently encoded in four bits in
// Flags.
// Pseudo-kinds.
REGEXP = BUILTIN,
FIRST_IC_KIND = LOAD_IC,
- LAST_IC_KIND = BINARY_OP_IC
+ LAST_IC_KIND = COMPARE_IC
};
enum {
@@ -2936,9 +3058,14 @@ class Code: public HeapObject {
// [relocation_info]: Code relocation information
DECL_ACCESSORS(relocation_info, ByteArray)
+ void InvalidateRelocation();
- // Unchecked accessor to be used during GC.
+ // [deoptimization_data]: Array containing data for deopt.
+ DECL_ACCESSORS(deoptimization_data, FixedArray)
+
+ // Unchecked accessors to be used during GC.
inline ByteArray* unchecked_relocation_info();
+ inline FixedArray* unchecked_deoptimization_data();
inline int relocation_size();
@@ -2961,10 +3088,77 @@ class Code: public HeapObject {
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; }
+ inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
+ inline bool is_type_recording_binary_op_stub() {
+ return kind() == TYPE_RECORDING_BINARY_OP_IC;
+ }
+ inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
// [major_key]: For kind STUB or BINARY_OP_IC, the major key.
inline int major_key();
- inline void set_major_key(int major);
+ inline void set_major_key(int value);
+
+ // [optimizable]: For FUNCTION kind, tells if it is optimizable.
+ inline bool optimizable();
+ inline void set_optimizable(bool value);
+
+ // [has_deoptimization_support]: For FUNCTION kind, tells if it has
+ // deoptimization support.
+ inline bool has_deoptimization_support();
+ inline void set_has_deoptimization_support(bool value);
+
+ // [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for
+ // how long the function has been marked for OSR and therefore which
+ // level of loop nesting we are willing to do on-stack replacement
+ // for.
+ inline void set_allow_osr_at_loop_nesting_level(int level);
+ inline int allow_osr_at_loop_nesting_level();
+
+ // [stack_slots]: For kind OPTIMIZED_FUNCTION, the number of stack slots
+ // reserved in the code prologue.
+ inline unsigned stack_slots();
+ inline void set_stack_slots(unsigned slots);
+
+ // [safepoint_table_start]: For kind OPTIMIZED_FUNCTION, the offset in
+ // the instruction stream where the safepoint table starts.
+ inline unsigned safepoint_table_start();
+ inline void set_safepoint_table_start(unsigned offset);
+
+ // [stack_check_table_start]: For kind FUNCTION, the offset in the
+ // instruction stream where the stack check table starts.
+ inline unsigned stack_check_table_start();
+ inline void set_stack_check_table_start(unsigned offset);
+
+ // [check type]: For kind CALL_IC, tells how to check if the
+ // receiver is valid for the given call.
+ inline CheckType check_type();
+ inline void set_check_type(CheckType value);
+
+ // [binary op type]: For all BINARY_OP_IC.
+ inline byte binary_op_type();
+ inline void set_binary_op_type(byte value);
+
+ // [type-recording binary op type]: For all TYPE_RECORDING_BINARY_OP_IC.
+ inline byte type_recording_binary_op_type();
+ inline void set_type_recording_binary_op_type(byte value);
+ inline byte type_recording_binary_op_result_type();
+ inline void set_type_recording_binary_op_result_type(byte value);
+
+ // [compare state]: For kind compare IC stubs, tells what state the
+ // stub is in.
+ inline byte compare_state();
+ inline void set_compare_state(byte value);
+
+ // Get the safepoint entry for the given pc. Returns NULL for
+ // non-safepoint pcs.
+ uint8_t* GetSafepointEntry(Address pc);
+
+ // Mark this code object as not having a stack check table. Assumes kind
+ // is FUNCTION.
+ void SetNoStackCheckTable();
+
+ // Find the first map in an IC stub.
+ Map* FindFirstMap();
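Editorial aside, not part of the patch: a hypothetical caller of GetSafepointEntry above, shown only to illustrate the NULL contract.

  // uint8_t* entry = code->GetSafepointEntry(return_address);
  // if (entry == NULL) {
  //   // return_address is not a safepoint pc in this code object.
  // }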
// Flags operations.
static inline Flags ComputeFlags(Kind kind,
@@ -3052,18 +3246,45 @@ class Code: public HeapObject {
void CodePrint();
void CodeVerify();
#endif
+
+ // Max loop nesting marker used to postpone OSR. We don't take loop
+ // nesting that is deeper than 5 levels into account.
+ static const int kMaxLoopNestingMarker = 6;
+
// Layout description.
static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
static const int kRelocationInfoOffset = kInstructionSizeOffset + kIntSize;
- static const int kFlagsOffset = kRelocationInfoOffset + kPointerSize;
+ static const int kDeoptimizationDataOffset =
+ kRelocationInfoOffset + kPointerSize;
+ static const int kFlagsOffset = kDeoptimizationDataOffset + kPointerSize;
static const int kKindSpecificFlagsOffset = kFlagsOffset + kIntSize;
+
+ static const int kKindSpecificFlagsSize = 2 * kIntSize;
+
+ static const int kHeaderPaddingStart = kKindSpecificFlagsOffset +
+ kKindSpecificFlagsSize;
+
// Add padding to align the instruction start following right after
// the Code object header.
static const int kHeaderSize =
- CODE_POINTER_ALIGN(kKindSpecificFlagsOffset + kIntSize);
+ (kHeaderPaddingStart + kCodeAlignmentMask) & ~kCodeAlignmentMask;
// Byte offsets within kKindSpecificFlagsOffset.
- static const int kStubMajorKeyOffset = kKindSpecificFlagsOffset + 1;
+ static const int kStubMajorKeyOffset = kKindSpecificFlagsOffset;
+ static const int kOptimizableOffset = kKindSpecificFlagsOffset;
+ static const int kStackSlotsOffset = kKindSpecificFlagsOffset;
+ static const int kCheckTypeOffset = kKindSpecificFlagsOffset;
+
+ static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
+ static const int kBinaryOpTypeOffset = kStubMajorKeyOffset + 1;
+ static const int kHasDeoptimizationSupportOffset = kOptimizableOffset + 1;
+
+ static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
+ static const int kAllowOSRAtLoopNestingLevelOffset =
+ kHasDeoptimizationSupportOffset + 1;
+
+ static const int kSafepointTableStartOffset = kStackSlotsOffset + kIntSize;
+ static const int kStackCheckTableStartOffset = kStackSlotsOffset + kIntSize;
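Editorial aside, not part of the patch: the offsets above deliberately alias the same two-word area; which bytes are meaningful depends on kind(). Summarizing the constants:

  // Kind-specific flags area (kKindSpecificFlagsSize == 2 * kIntSize):
  //   STUB and IC kinds:    byte 0 = stub major key,
  //                         byte 1 = compare state / binary op type,
  //                         byte 2 = binary op result type
  //   CALL_IC:              byte 0 = check type
  //   FUNCTION:             byte 0 = optimizable flag,
  //                         byte 1 = has-deoptimization-support flag,
  //                         byte 2 = allow-OSR-at-loop-nesting-level,
  //                         word 1 = stack check table start
  //   OPTIMIZED_FUNCTION:   word 0 = stack slots,
  //                         word 1 = safepoint table start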
// Flags layout.
static const int kFlagsICStateShift = 0;
@@ -3239,6 +3460,13 @@ class Map: public HeapObject {
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
+ // Look up the name in the map's instance descriptors and fill out the
+ // result with the given holder if the name is found. The holder may be
+ // NULL when this function is used from the compiler.
+ void LookupInDescriptors(JSObject* holder,
+ String* name,
+ LookupResult* result);
+
MUST_USE_RESULT MaybeObject* CopyDropDescriptors();
MUST_USE_RESULT MaybeObject* CopyNormalized(PropertyNormalizationMode mode,
@@ -3486,6 +3714,25 @@ class Script: public Struct {
};
+enum MathFunctionId {
+ kNotSpecialMathFunction = 0,
+ // These numbers must be kept in sync with the ones in math.js.
+ kMathFloor = 1,
+ kMathRound = 2,
+ kMathCeil = 3,
+ kMathAbs = 4,
+ kMathLog = 5,
+ kMathSin = 6,
+ kMathCos = 7,
+ kMathTan = 8,
+ kMathASin = 9,
+ kMathACos = 0xa,
+ kMathATan = 0xb,
+ kMathExp = 0xc,
+ kMathSqrt = 0xd
+};
+
+
// SharedFunctionInfo describes the JSFunction information that can be
// shared by multiple instances of the function.
class SharedFunctionInfo: public HeapObject {
@@ -3687,6 +3934,11 @@ class SharedFunctionInfo: public HeapObject {
inline int compiler_hints();
inline void set_compiler_hints(int value);
+ // A counter used to determine when to stress the deoptimizer with a
+ // deopt.
+ inline Smi* deopt_counter();
+ inline void set_deopt_counter(Smi* counter);
+
// Add information on assignments of the form this.x = ...;
void SetThisPropertyAssignmentsInfo(
bool has_only_simple_this_property_assignments,
@@ -3716,6 +3968,24 @@ class SharedFunctionInfo: public HeapObject {
inline int code_age();
inline void set_code_age(int age);
+ // Indicates whether optimizations have been disabled for this
+ // shared function info. If a function is repeatedly optimized or if
+ // we cannot optimize the function we disable optimization to avoid
+ // spending time attempting to optimize it again.
+ inline bool optimization_disabled();
+ inline void set_optimization_disabled(bool value);
+
+ // Indicates whether or not the code in the shared function supports
+ // deoptimization.
+ inline bool has_deoptimization_support();
+
+ // Enable deoptimization support through recompiled code.
+ void EnableDeoptimizationSupport(Code* recompiled);
+
+ // Look up the bailout ID and ASSERT that it exists in the non-optimized
+ // code. Returns whether it asserted (i.e., always true if assertions are
+ // disabled).
+ bool VerifyBailoutId(int id);
// Check whether an inlined constructor can be generated with the given
// prototype.
@@ -3739,6 +4009,12 @@ class SharedFunctionInfo: public HeapObject {
bool HasSourceCode();
Object* GetSourceCode();
+ inline int opt_count();
+ inline void set_opt_count(int opt_count);
+
+ // Source size of this function.
+ int SourceSize();
+
// Calculate the instance size.
int CalculateInstanceSize();
@@ -3776,10 +4052,12 @@ class SharedFunctionInfo: public HeapObject {
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
+ static const int kDeoptCounterOffset =
+ kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kThisPropertyAssignmentsOffset + kPointerSize;
+ kDeoptCounterOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -3795,8 +4073,10 @@ class SharedFunctionInfo: public HeapObject {
kFunctionTokenPositionOffset + kPointerSize;
static const int kThisPropertyAssignmentsCountOffset =
kCompilerHintsOffset + kPointerSize;
+ static const int kOptCountOffset =
+ kThisPropertyAssignmentsCountOffset + kPointerSize;
// Total size.
- static const int kSize = kThisPropertyAssignmentsCountOffset + kPointerSize;
+ static const int kSize = kOptCountOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
@@ -3808,7 +4088,7 @@ class SharedFunctionInfo: public HeapObject {
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kThisPropertyAssignmentsOffset + kPointerSize;
+ kDeoptCounterOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -3829,9 +4109,11 @@ class SharedFunctionInfo: public HeapObject {
static const int kThisPropertyAssignmentsCountOffset =
kCompilerHintsOffset + kIntSize;
+ static const int kOptCountOffset =
+ kThisPropertyAssignmentsCountOffset + kIntSize;
// Total size.
- static const int kSize = kThisPropertyAssignmentsCountOffset + kIntSize;
+ static const int kSize = kOptCountOffset + kIntSize;
#endif
@@ -3848,6 +4130,12 @@ class SharedFunctionInfo: public HeapObject {
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
+ // Get/set a special tag on the functions from math.js so we can inline
+ // efficient versions of them in the code.
+ inline MathFunctionId math_function_id();
+ inline void set_math_function_id(int id);
+ static inline int max_math_id_number() { return kMathFunctionMask; }
+
typedef FixedBodyDescriptor<kNameOffset,
kThisPropertyAssignmentsOffset + kPointerSize,
kSize> BodyDescriptor;
@@ -3865,9 +4153,12 @@ class SharedFunctionInfo: public HeapObject {
static const int kHasOnlySimpleThisPropertyAssignments = 0;
static const int kTryFullCodegen = 1;
static const int kAllowLazyCompilation = 2;
- static const int kLiveObjectsMayExist = 3;
- static const int kCodeAgeShift = 4;
- static const int kCodeAgeMask = 7;
+ static const int kMathFunctionShift = 3;
+ static const int kMathFunctionMask = 0xf;
+ static const int kLiveObjectsMayExist = 7;
+ static const int kCodeAgeShift = 8;
+ static const int kCodeAgeMask = 0x7;
+ static const int kOptimizationDisabled = 11;
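Editorial aside, not part of the patch: the compiler_hints bit layout implied by the constants above, plus a hedged guess at how the math-id getter could look (the real inline body lives elsewhere and may differ).

  // bit  0      has_only_simple_this_property_assignments
  // bit  1      try_full_codegen
  // bit  2      allow_lazy_compilation
  // bits 3..6   math function id (kMathFunctionShift, kMathFunctionMask = 0xf)
  // bit  7      live_objects_may_exist
  // bits 8..10  code age (kCodeAgeShift, kCodeAgeMask = 0x7)
  // bit  11     optimization_disabled
  //
  // Plausible accessor shape (assumption, not the actual implementation):
  //   MathFunctionId math_function_id() {
  //     return static_cast<MathFunctionId>(
  //         (compiler_hints() >> kMathFunctionShift) & kMathFunctionMask);
  //   }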
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
};
@@ -3895,13 +4186,34 @@ class JSFunction: public JSObject {
// [[Call]] and [[Construct]] description in ECMA-262, section
// 8.6.2, page 27.
inline Code* code();
- inline void set_code(Code* value);
+ inline void set_code(Code* code);
+ inline void ReplaceCode(Code* code);
inline Code* unchecked_code();
// Tells whether this function is builtin.
inline bool IsBuiltin();
+ // Tells whether or not the function needs arguments adaption.
+ inline bool NeedsArgumentsAdaption();
+
+ // Tells whether or not this function has been optimized.
+ inline bool IsOptimized();
+
+ // Mark this function for lazy recompilation. The function will be
+ // recompiled the next time it is executed.
+ void MarkForLazyRecompilation();
+
+ // Tells whether or not the function is already marked for lazy
+ // recompilation.
+ inline bool IsMarkedForLazyRecompilation();
+
+ // Compute a hash code for the source code of this function.
+ uint32_t SourceHash();
+
+ // Check whether or not this function is inlineable.
+ bool IsInlineable();
+
// [literals]: Fixed array holding the materialized literals.
//
// If the function contains object, regexp or array literals, the
@@ -3948,6 +4260,13 @@ class JSFunction: public JSObject {
// Returns if this function has been compiled to native code yet.
inline bool is_compiled();
+ // [next_function_link]: Field for linking functions. This list is treated as
+ // a weak list by the GC.
+ DECL_ACCESSORS(next_function_link, Object)
+
+ // Prints the name of the function using PrintF.
+ void PrintName();
+
// Casting.
static inline JSFunction* cast(Object* obj);
@@ -3967,7 +4286,8 @@ class JSFunction: public JSObject {
// Retrieve the global context from a function's literal array.
static Context* GlobalContextFromLiterals(FixedArray* literals);
- // Layout descriptors.
+ // Layout descriptors. The last property (from kNonWeakFieldsEndOffset to
+ // kSize) is weak and has special handling during garbage collection.
static const int kCodeEntryOffset = JSObject::kHeaderSize;
static const int kPrototypeOrInitialMapOffset =
kCodeEntryOffset + kPointerSize;
@@ -3975,7 +4295,9 @@ class JSFunction: public JSObject {
kPrototypeOrInitialMapOffset + kPointerSize;
static const int kContextOffset = kSharedFunctionInfoOffset + kPointerSize;
static const int kLiteralsOffset = kContextOffset + kPointerSize;
- static const int kSize = kLiteralsOffset + kPointerSize;
+ static const int kNonWeakFieldsEndOffset = kLiteralsOffset + kPointerSize;
+ static const int kNextFunctionLinkOffset = kNonWeakFieldsEndOffset;
+ static const int kSize = kNextFunctionLinkOffset + kPointerSize;
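Editorial aside, not part of the patch: the resulting JSFunction layout, summarized from the offsets above. Everything up to kNonWeakFieldsEndOffset is a normal strong field; the trailing next_function_link field is the weak part.

  // kCodeEntryOffset              code entry
  // kPrototypeOrInitialMapOffset  prototype or initial map
  // kSharedFunctionInfoOffset     shared function info
  // kContextOffset                context
  // kLiteralsOffset               literals
  // ---- kNonWeakFieldsEndOffset ----
  // kNextFunctionLinkOffset       next_function_link (weak, GC-special-cased)
  // ---- kSize ----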
// Layout of the literals array.
static const int kLiteralsPrefixSize = 1;
@@ -4020,6 +4342,7 @@ class JSGlobalProxy : public JSObject {
// Forward declaration.
class JSBuiltinsObject;
+class JSGlobalPropertyCell;
// Common super class for JavaScript global objects and the special
// builtins global objects.
@@ -4035,7 +4358,7 @@ class GlobalObject: public JSObject {
DECL_ACCESSORS(global_receiver, JSObject)
// Retrieve the property cell used to store a property.
- Object* GetPropertyCell(LookupResult* result);
+ JSGlobalPropertyCell* GetPropertyCell(LookupResult* result);
// This is like GetProperty, but is used when you know the lookup won't fail
// by throwing an exception. This is for the debug and builtins global
@@ -4297,6 +4620,9 @@ class CompilationCacheTable: public HashTable<CompilationCacheShape,
MaybeObject* PutEval(String* src, Context* context, Object* value);
MaybeObject* PutRegExp(String* src, JSRegExp::Flags flags, FixedArray* value);
+ // Remove the given value from the cache.
+ void Remove(Object* value);
+
static inline CompilationCacheTable* cast(Object* obj);
private:
@@ -5705,6 +6031,9 @@ class ObjectVisitor BASE_EMBEDDED {
// Visits a code entry in a JS function.
virtual void VisitCodeEntry(Address entry_address);
+ // Visits a global property cell reference in the instruction stream.
+ virtual void VisitGlobalPropertyCell(RelocInfo* rinfo);
+
// Visits a runtime entry in the instruction stream.
virtual void VisitRuntimeEntry(RelocInfo* rinfo) {}