author    Ben Noordhuis <info@bnoordhuis.nl>  2014-03-31 14:38:28 +0200
committer Fedor Indutny <fedor@indutny.com>   2014-04-02 00:05:24 +0400
commit    67e078094b53861a5aa7e9354e33487d0bd4f73b (patch)
tree      09a706adee1ddb59c1507ee3320de9cb6896135b /deps/v8/src/heap.h
parent    f984555d47298cfb01b3e55c2861066379306fc3 (diff)
deps: upgrade v8 to 3.25.30
Diffstat (limited to 'deps/v8/src/heap.h')
-rw-r--r--  deps/v8/src/heap.h  203
1 file changed, 127 insertions(+), 76 deletions(-)
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index 266cdb9684..0f586e9284 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -78,7 +78,6 @@ namespace internal {
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
- V(Smi, stack_limit, StackLimit) \
V(Oddball, arguments_marker, ArgumentsMarker) \
/* The roots above this line should be boring from a GC point of view. */ \
/* This means they are never in new space and never on a page that is */ \
@@ -165,7 +164,17 @@ namespace internal {
V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
- V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap) \
+ V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
+ V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
+ V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
+ V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
+ V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
+ V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
+ V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
+ V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
+ V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
+ EmptyFixedUint8ClampedArray) \
+ V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
V(Map, function_context_map, FunctionContextMap) \
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
@@ -186,27 +195,37 @@ namespace internal {
V(Code, js_entry_code, JsEntryCode) \
V(Code, js_construct_entry_code, JsConstructEntryCode) \
V(FixedArray, natives_source_cache, NativesSourceCache) \
- V(Smi, last_script_id, LastScriptId) \
V(Script, empty_script, EmptyScript) \
- V(Smi, real_stack_limit, RealStackLimit) \
V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
- V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
- V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
- V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
- V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) \
V(Cell, undefined_cell, UndefineCell) \
V(JSObject, observation_state, ObservationState) \
V(Map, external_map, ExternalMap) \
+ V(Object, symbol_registry, SymbolRegistry) \
V(Symbol, frozen_symbol, FrozenSymbol) \
+ V(Symbol, nonexistent_symbol, NonExistentSymbol) \
V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
V(SeededNumberDictionary, empty_slow_element_dictionary, \
EmptySlowElementDictionary) \
V(Symbol, observed_symbol, ObservedSymbol) \
+ V(Symbol, uninitialized_symbol, UninitializedSymbol) \
+ V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
V(FixedArray, materialized_objects, MaterializedObjects) \
- V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad)
+ V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
+ V(JSObject, microtask_state, MicrotaskState)
+
+// Entries in this list are limited to Smis and are not visited during GC.
+#define SMI_ROOT_LIST(V) \
+ V(Smi, stack_limit, StackLimit) \
+ V(Smi, real_stack_limit, RealStackLimit) \
+ V(Smi, last_script_id, LastScriptId) \
+ V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
+ V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
+ V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
+ V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
+ SMI_ROOT_LIST(V) \
V(StringTable, string_table, StringTable)
// Heap roots that are known to be immortal immovable, for which we can safely
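
A compilable miniature of the X-macro pattern these lists rely on (demo names, not V8's): because one list definition expands into both the enum indices and the accessors, moving the Smi entries into their own SMI_ROOT_LIST regroups their indices without any hand-maintained table.

#include <cstdio>

// Demo list in the same V(type, name, CamelName) shape as SMI_ROOT_LIST.
#define DEMO_SMI_ROOT_LIST(V)          \
  V(int, stack_limit, StackLimit)      \
  V(int, last_script_id, LastScriptId)

enum DemoRootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
  DEMO_SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
  kDemoRootListLength  // == 2: one index per list entry
};

int main() {
  std::printf("%d smi roots\n", static_cast<int>(kDemoRootListLength));
  return 0;
}
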
@@ -242,7 +261,7 @@ namespace internal {
V(empty_constant_pool_array) \
V(arguments_marker) \
V(symbol_map) \
- V(non_strict_arguments_elements_map) \
+ V(sloppy_arguments_elements_map) \
V(function_context_map) \
V(catch_context_map) \
V(with_context_map) \
@@ -297,6 +316,11 @@ namespace internal {
V(String_string, "String") \
V(symbol_string, "symbol") \
V(Symbol_string, "Symbol") \
+ V(for_string, "for") \
+ V(for_api_string, "for_api") \
+ V(for_intern_string, "for_intern") \
+ V(private_api_string, "private_api") \
+ V(private_intern_string, "private_intern") \
V(Date_string, "Date") \
V(this_string, "this") \
V(to_string_string, "toString") \
@@ -325,10 +349,6 @@ namespace internal {
V(MakeReferenceError_string, "MakeReferenceError") \
V(MakeSyntaxError_string, "MakeSyntaxError") \
V(MakeTypeError_string, "MakeTypeError") \
- V(invalid_lhs_in_assignment_string, "invalid_lhs_in_assignment") \
- V(invalid_lhs_in_for_in_string, "invalid_lhs_in_for_in") \
- V(invalid_lhs_in_postfix_op_string, "invalid_lhs_in_postfix_op") \
- V(invalid_lhs_in_prefix_op_string, "invalid_lhs_in_prefix_op") \
V(illegal_return_string, "illegal_return") \
V(illegal_break_string, "illegal_break") \
V(illegal_continue_string, "illegal_continue") \
@@ -678,14 +698,13 @@ class Heap {
// constructor.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
+ // If allocation_site is non-null, then a memento is emitted after the object
+ // that points to the site.
// Please note this does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* AllocateJSObject(
JSFunction* constructor,
- PretenureFlag pretenure = NOT_TENURED);
-
- MUST_USE_RESULT MaybeObject* AllocateJSObjectWithAllocationSite(
- JSFunction* constructor,
- Handle<AllocationSite> allocation_site);
+ PretenureFlag pretenure = NOT_TENURED,
+ AllocationSite* allocation_site = NULL);
MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
ScopeInfo* scope_info);
@@ -765,21 +784,21 @@ class Heap {
// Allocates and initializes a new JavaScript object based on a map.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
+ // Passing an allocation site means that a memento will be created that
+ // points to the site.
// Please note this does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
- Map* map, PretenureFlag pretenure = NOT_TENURED, bool alloc_props = true);
-
- MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMapWithAllocationSite(
- Map* map, Handle<AllocationSite> allocation_site);
+ Map* map,
+ PretenureFlag pretenure = NOT_TENURED,
+ bool alloc_props = true,
+ AllocationSite* allocation_site = NULL);
// Allocates a heap object based on the map.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
// Please note this function does not perform a garbage collection.
- MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
-
- MUST_USE_RESULT MaybeObject* AllocateWithAllocationSite(Map* map,
- AllocationSpace space, Handle<AllocationSite> allocation_site);
+ MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space,
+ AllocationSite* allocation_site = NULL);
// Allocates a JS Map in the heap.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
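
The three hunks above apply one refactoring pattern: each ...WithAllocationSite overload is folded into its base function behind a defaulted AllocationSite* parameter, and, as the new comments say, a memento pointing at the site is emitted only when the pointer is non-NULL. A standalone sketch of the shape of that change (all names hypothetical):

struct Map;
struct AllocationSite;

// Before: Allocate(...) plus AllocateWithAllocationSite(...), kept in sync
// by hand. After: one entry point with a defaulted trailing parameter.
void* Allocate(Map* map, int space, AllocationSite* allocation_site = 0) {
  void* object = 0;
  // ... allocate the object itself ...
  if (allocation_site != 0) {
    // ... emit an AllocationMemento directly after the object, pointing
    // at the site, as the comments above describe ...
  }
  return object;
}
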
@@ -972,6 +991,10 @@ class Heap {
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
+ // Make a copy of src and return it. Returns
+ // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
+ MUST_USE_RESULT MaybeObject* CopyAndTenureFixedCOWArray(FixedArray* src);
+
// Make a copy of src, set the map, and return the copy. Returns
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
@@ -1005,9 +1028,10 @@ class Heap {
PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray(
- int first_int64_index,
- int first_ptr_index,
- int first_int32_index);
+ int number_of_int64_entries,
+ int number_of_code_ptr_entries,
+ int number_of_heap_ptr_entries,
+ int number_of_int32_entries);
// Allocates a fixed double array with uninitialized values. Returns
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
@@ -1070,15 +1094,15 @@ class Heap {
Object* prototype,
PretenureFlag pretenure = TENURED);
- // Arguments object size.
- static const int kArgumentsObjectSize =
+ // Sloppy mode arguments object size.
+ static const int kSloppyArgumentsObjectSize =
JSObject::kHeaderSize + 2 * kPointerSize;
// Strict mode arguments has no callee so it is smaller.
- static const int kArgumentsObjectSizeStrict =
+ static const int kStrictArgumentsObjectSize =
JSObject::kHeaderSize + 1 * kPointerSize;
// Indices for direct access into argument objects.
static const int kArgumentsLengthIndex = 0;
- // callee is only valid in non-strict mode.
+ // callee is only valid in sloppy mode.
static const int kArgumentsCalleeIndex = 1;
// Allocates an arguments object - optionally with an elements array.
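
For context, the arithmetic behind the renamed constants, as a minimal standalone sketch (the header and pointer sizes are stand-ins; V8 sets them per architecture): a sloppy-mode arguments object carries two in-object properties (length and callee), a strict-mode one only length.

#include <cassert>

const int kPointerSize = 8;                        // stand-in value
const int kJSObjectHeaderSize = 3 * kPointerSize;  // stand-in value

const int kSloppyArgumentsObjectSize = kJSObjectHeaderSize + 2 * kPointerSize;
const int kStrictArgumentsObjectSize = kJSObjectHeaderSize + 1 * kPointerSize;

int main() {
  // Dropping callee saves exactly one pointer-sized slot.
  assert(kSloppyArgumentsObjectSize - kStrictArgumentsObjectSize == kPointerSize);
  return 0;
}
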
@@ -1134,7 +1158,6 @@ class Heap {
int start_position,
int end_position,
Object* script,
- Object* stack_trace,
Object* stack_frames);
// Allocate a new external string object, which is backed by a string
@@ -1164,6 +1187,13 @@ class Heap {
// when shortening objects.
void CreateFillerObjectAt(Address addr, int size);
+ bool CanMoveObjectStart(HeapObject* object);
+
+ enum InvocationMode { FROM_GC, FROM_MUTATOR };
+
+ // Maintain marking consistency for IncrementalMarking.
+ void AdjustLiveBytes(Address address, int by, InvocationMode mode);
+
// Makes a new native code object
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed. On success, the pointer to the Code object is stored in the
@@ -1255,10 +1285,6 @@ class Heap {
// Notify the heap that a context has been disposed.
int NotifyContextDisposed();
- // Utility to invoke the scavenger. This is needed in test code to
- // ensure correct callback for weak global handles.
- void PerformScavenge();
-
inline void increment_scan_on_scavenge_pages() {
scan_on_scavenge_pages_++;
if (FLAG_gc_verbose) {
@@ -1347,6 +1373,9 @@ class Heap {
void IterateRoots(ObjectVisitor* v, VisitMode mode);
// Iterates over all strong roots in the heap.
void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
+ // Iterates over entries in the smi roots list. Only interesting to the
+ // serializer/deserializer, since GC does not care about smis.
+ void IterateSmiRoots(ObjectVisitor* v);
// Iterates over all the other roots in the heap.
void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
@@ -1485,10 +1514,6 @@ class Heap {
allocation_timeout_ = timeout;
}
- bool disallow_allocation_failure() {
- return disallow_allocation_failure_;
- }
-
void TracePathToObjectFrom(Object* target, Object* root);
void TracePathToObject(Object* target);
void TracePathToGlobal();
@@ -1501,10 +1526,16 @@ class Heap {
static inline void ScavengePointer(HeapObject** p);
static inline void ScavengeObject(HeapObject** p, HeapObject* object);
+ enum ScratchpadSlotMode {
+ IGNORE_SCRATCHPAD_SLOT,
+ RECORD_SCRATCHPAD_SLOT
+ };
+
// An object may have an AllocationSite associated with it through a trailing
// AllocationMemento. Its feedback should be updated when objects are found
// in the heap.
- static inline void UpdateAllocationSiteFeedback(HeapObject* object);
+ static inline void UpdateAllocationSiteFeedback(
+ HeapObject* object, ScratchpadSlotMode mode);
// Support for partial snapshots. After calling this we have a linear
// space to write objects in each space.
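
The new enum threads the caller's intent down to AddAllocationSiteToScratchpad (changed further below) instead of having the callee guess. A hypothetical, self-contained sketch of that mode-parameter pattern, not V8's actual body:

enum DemoScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };

void UpdateFeedbackDemo(bool has_memento, DemoScratchpadSlotMode mode) {
  if (!has_memento) return;
  // ... update the allocation site's feedback counters ...
  if (mode == RECORD_SCRATCHPAD_SLOT) {
    // ... also record the scratchpad slot so it can be revisited later ...
  }
}
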
@@ -1582,7 +1613,7 @@ class Heap {
// Implements the corresponding V8 API function.
bool IdleNotification(int hint);
- // Declare all the root indices.
+ // Declare all the root indices. This defines the root list order.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
@@ -1598,8 +1629,14 @@ class Heap {
#undef DECLARE_STRUCT_MAP
kStringTableRootIndex,
+
+#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
+ SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
+#undef ROOT_INDEX_DECLARATION
+
+ kRootListLength,
kStrongRootListLength = kStringTableRootIndex,
- kRootListLength
+ kSmiRootsStart = kStringTableRootIndex + 1
};
STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
@@ -1628,7 +1665,9 @@ class Heap {
ExternalArrayType array_type);
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
+ RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
ExternalArray* EmptyExternalArrayForMap(Map* map);
+ FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
void RecordStats(HeapStats* stats, bool take_snapshot = false);
@@ -1834,6 +1873,8 @@ class Heap {
return amount_of_external_allocated_memory_;
}
+ void DeoptMarkedAllocationSites();
+
// ObjectStats are kept in two arrays, counts and sizes. Related stats are
// stored in a contiguous linear buffer. Stats groups are stored one after
// another.
@@ -1879,16 +1920,12 @@ class Heap {
class RelocationLock {
public:
explicit RelocationLock(Heap* heap) : heap_(heap) {
- if (FLAG_concurrent_recompilation) {
- heap_->relocation_mutex_->Lock();
- }
+ heap_->relocation_mutex_.Lock();
}
~RelocationLock() {
- if (FLAG_concurrent_recompilation) {
- heap_->relocation_mutex_->Unlock();
- }
+ heap_->relocation_mutex_.Unlock();
}
private:
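
The guard can now lock unconditionally because the mutex is a by-value member that always exists (see the relocation_mutex_ hunk near the end of this diff). A standalone analogy using std::mutex in place of v8::internal::Mutex:

#include <mutex>

class DemoHeap {
 public:
  // RAII guard: lock in the constructor, unlock in the destructor; no
  // FLAG_concurrent_recompilation check and no null pointer to manage.
  class RelocationLock {
   public:
    explicit RelocationLock(DemoHeap* heap) : heap_(heap) {
      heap_->relocation_mutex_.lock();
    }
    ~RelocationLock() { heap_->relocation_mutex_.unlock(); }
   private:
    DemoHeap* heap_;
  };

 private:
  std::mutex relocation_mutex_;  // held by value, like the patched heap.h
};
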
@@ -1984,10 +2021,6 @@ class Heap {
// variable holds the value indicating the number of allocations
// remain until the next failure and garbage collection.
int allocation_timeout_;
-
- // Do we expect to be able to handle allocation failure at this
- // time?
- bool disallow_allocation_failure_;
#endif // DEBUG
// Indicates that the new space should be kept small due to high promotion
@@ -2120,6 +2153,11 @@ class Heap {
GarbageCollector SelectGarbageCollector(AllocationSpace space,
const char** reason);
+ // Make sure there is a filler value behind the top of the new space
+ // so that the GC does not confuse some uninitialized/stale memory
+ // with the allocation memento of the object at the top.
+ void EnsureFillerObjectAtTop();
+
// Performs garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
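
Only the declaration appears in this header; a hedged sketch of what EnsureFillerObjectAtTop might do, reusing CreateFillerObjectAt from earlier in this file (member names are approximations and the real body in heap.cc may differ):

void Heap::EnsureFillerObjectAtTop() {
  // Format the slack between the current new-space top and its limit as a
  // filler object, so a memento scan sees a valid map word, never stale bits.
  Address top = new_space_.top();
  Address limit = new_space_.limit();
  if (top < limit) {
    CreateFillerObjectAt(top, static_cast<int>(limit - top));
  }
}
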
@@ -2195,6 +2233,10 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray(
ExternalArrayType array_type);
+ // Allocate empty fixed typed array of given type.
+ MUST_USE_RESULT MaybeObject* AllocateEmptyFixedTypedArray(
+ ExternalArrayType array_type);
+
// Allocate empty fixed double array.
MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
@@ -2296,7 +2338,8 @@ class Heap {
void InitializeAllocationSitesScratchpad();
// Adds an allocation site to the scratchpad if there is space left.
- void AddAllocationSiteToScratchpad(AllocationSite* site);
+ void AddAllocationSiteToScratchpad(AllocationSite* site,
+ ScratchpadSlotMode mode);
void UpdateSurvivalRateTrend(int start_new_space_size);
@@ -2489,14 +2532,12 @@ class Heap {
MemoryChunk* chunks_queued_for_free_;
- Mutex* relocation_mutex_;
-#ifdef DEBUG
- bool relocation_mutex_locked_by_optimizer_thread_;
-#endif // DEBUG;
+ Mutex relocation_mutex_;
+
+ int gc_callbacks_depth_;
friend class Factory;
friend class GCTracer;
- friend class DisallowAllocationFailure;
friend class AlwaysAllocateScope;
friend class Page;
friend class Isolate;
@@ -2506,6 +2547,7 @@ class Heap {
#ifdef VERIFY_HEAP
friend class NoWeakObjectVerificationScope;
#endif
+ friend class GCCallbacksScope;
DISALLOW_COPY_AND_ASSIGN(Heap);
};
@@ -2546,26 +2588,15 @@ class HeapStats {
};
-class DisallowAllocationFailure {
- public:
- inline DisallowAllocationFailure();
- inline ~DisallowAllocationFailure();
-
-#ifdef DEBUG
- private:
- bool old_state_;
-#endif
-};
-
-
class AlwaysAllocateScope {
public:
- inline AlwaysAllocateScope();
+ explicit inline AlwaysAllocateScope(Isolate* isolate);
inline ~AlwaysAllocateScope();
private:
// Implicitly disable artificial allocation failures.
- DisallowAllocationFailure disallow_allocation_failure_;
+ Heap* heap_;
+ DisallowAllocationFailure daf_;
};
@@ -2578,6 +2609,18 @@ class NoWeakObjectVerificationScope {
#endif
+class GCCallbacksScope {
+ public:
+ explicit inline GCCallbacksScope(Heap* heap);
+ inline ~GCCallbacksScope();
+
+ inline bool CheckReenter();
+
+ private:
+ Heap* heap_;
+};
+
+
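
The header gives no bodies for GCCallbacksScope; a hypothetical sketch of how such a scope could pair with the gc_callbacks_depth_ counter added earlier in this diff (the real inline implementation may differ):

class DemoGCCallbacksScope {
 public:
  explicit DemoGCCallbacksScope(int* depth) : depth_(depth) { ++*depth_; }
  ~DemoGCCallbacksScope() { --*depth_; }
  // True only for the outermost entry, so callbacks are not re-dispatched
  // when a callback itself triggers another GC.
  bool CheckReenter() const { return *depth_ == 1; }
 private:
  int* depth_;
};
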
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
@@ -2589,6 +2632,13 @@ class VerifyPointersVisitor: public ObjectVisitor {
};
+// Verify that all objects are Smis.
+class VerifySmisVisitor: public ObjectVisitor {
+ public:
+ inline void VisitPointers(Object** start, Object** end);
+};
+
+
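
VerifySmisVisitor's body is defined elsewhere; a plausible sketch, assuming V8's CHECK macro and Object::IsSmi() (neither part of this diff), is simply:

void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());  // smi roots must never hold heap objects
  }
}
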
// Space iterator for iterating over all spaces of the heap. Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
@@ -2829,6 +2879,7 @@ class GCTracer BASE_EMBEDDED {
MC_MARK,
MC_SWEEP,
MC_SWEEP_NEWSPACE,
+ MC_SWEEP_OLDSPACE,
MC_EVACUATE_PAGES,
MC_UPDATE_NEW_TO_NEW_POINTERS,
MC_UPDATE_ROOT_TO_NEW_POINTERS,