Diffstat (limited to 'deps/v8/src/heap.h')
-rw-r--r--  deps/v8/src/heap.h  139
1 file changed, 45 insertions(+), 94 deletions(-)
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index b24b0b3608..da10efcee5 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -31,6 +31,7 @@
#include <cmath>
#include "allocation.h"
+#include "assert-scope.h"
#include "globals.h"
#include "incremental-marking.h"
#include "list.h"
@@ -58,6 +59,7 @@ namespace internal {
V(Oddball, null_value, NullValue) \
V(Oddball, true_value, TrueValue) \
V(Oddball, false_value, FalseValue) \
+ V(Oddball, uninitialized_value, UninitializedValue) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \
V(Map, shared_function_info_map, SharedFunctionInfoMap) \
V(Map, meta_map, MetaMap) \
@@ -181,7 +183,10 @@ namespace internal {
V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) \
V(JSObject, observation_state, ObservationState) \
- V(Map, external_map, ExternalMap)
+ V(Map, external_map, ExternalMap) \
+ V(Symbol, frozen_symbol, FrozenSymbol) \
+ V(SeededNumberDictionary, empty_slow_element_dictionary, \
+ EmptySlowElementDictionary)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -288,10 +293,10 @@ namespace internal {
V(hidden_stack_trace_string, "v8::hidden_stack_trace") \
V(query_colon_string, "(?:)") \
V(Generator_string, "Generator") \
- V(send_string, "send") \
V(throw_string, "throw") \
V(done_string, "done") \
- V(value_string, "value")
+ V(value_string, "value") \
+ V(next_string, "next")
// Forward declarations.
class GCTracer;
@@ -547,7 +552,7 @@ class Heap {
int InitialSemiSpaceSize() { return initial_semispace_size_; }
intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
intptr_t MaxExecutableSize() { return max_executable_size_; }
- int MaxNewSpaceAllocationSize() { return InitialSemiSpaceSize() * 3/4; }
+ int MaxRegularSpaceAllocationSize() { return InitialSemiSpaceSize() * 3/4; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
@@ -934,6 +939,10 @@ class Heap {
// Please note this does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);
+ // Allocate Box.
+ MUST_USE_RESULT MaybeObject* AllocateBox(Object* value,
+ PretenureFlag pretenure);
+
// Allocates a fixed array initialized with undefined values
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
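
As a usage note: in this era of V8, allocation functions return a MaybeObject*
that may encode a retry-after-GC failure, which every caller must check and
propagate. A minimal sketch of calling the new AllocateBox, assuming a
hypothetical helper (AllocateBoxedValue is not part of this patch):

  MUST_USE_RESULT MaybeObject* AllocateBoxedValue(Heap* heap, Object* value) {
    Object* box;
    { MaybeObject* maybe_box = heap->AllocateBox(value, NOT_TENURED);
      // Propagate failures (e.g. Failure::RetryAfterGC) to the caller.
      if (!maybe_box->ToObject(&box)) return maybe_box;
    }
    return box;  // Object* converts to a successful MaybeObject*.
  }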
@@ -1343,6 +1352,12 @@ class Heap {
}
Object* native_contexts_list() { return native_contexts_list_; }
+ void set_array_buffers_list(Object* object) {
+ array_buffers_list_ = object;
+ }
+ Object* array_buffers_list() { return array_buffers_list_; }
+
+
// Number of mark-sweeps.
unsigned int ms_count() { return ms_count_; }
@@ -1493,10 +1508,6 @@ class Heap {
inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
#ifdef DEBUG
- bool IsAllocationAllowed() { return allocation_allowed_; }
- inline void set_allow_allocation(bool allocation_allowed);
- inline bool allow_allocation(bool enable);
-
bool disallow_allocation_failure() {
return disallow_allocation_failure_;
}
@@ -1546,7 +1557,12 @@ class Heap {
// Predicate that governs global pre-tenuring decisions based on observed
// promotion rates of previous collections.
inline bool ShouldGloballyPretenure() {
- return new_space_high_promotion_mode_active_;
+ return FLAG_pretenuring && new_space_high_promotion_mode_active_;
+ }
+
+ // This is only needed for testing high promotion mode.
+ void SetNewSpaceHighPromotionModeActive(bool mode) {
+ new_space_high_promotion_mode_active_ = mode;
}
inline PretenureFlag GetPretenureMode() {
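
A hedged sketch of how the new test hook and the sharpened predicate interact
(test-only illustration, not code from this patch): forcing high promotion
mode makes ShouldGloballyPretenure() track --pretenuring, which before this
change was ignored.

  heap->SetNewSpaceHighPromotionModeActive(true);
  // True only when --pretenuring is enabled.
  bool pretenure = heap->ShouldGloballyPretenure();
  // Presumably TENURED when the predicate holds, NOT_TENURED otherwise.
  PretenureFlag flag = heap->GetPretenureMode();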
@@ -1561,44 +1577,23 @@ class Heap {
return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
}
- // True if we have reached the allocation limit in the old generation that
- // should force the next GC (caused normally) to be a full one.
- inline bool OldGenerationPromotionLimitReached() {
- return PromotedTotalSize() > old_gen_promotion_limit_;
- }
-
inline intptr_t OldGenerationSpaceAvailable() {
- return old_gen_allocation_limit_ - PromotedTotalSize();
+ return old_generation_allocation_limit_ - PromotedTotalSize();
}
inline intptr_t OldGenerationCapacityAvailable() {
return max_old_generation_size_ - PromotedTotalSize();
}
- static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
- static const intptr_t kMinimumAllocationLimit =
+ static const intptr_t kMinimumOldGenerationAllocationLimit =
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
- intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
+ intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
const int divisor = FLAG_stress_compaction ? 10 :
new_space_high_promotion_mode_active_ ? 1 : 3;
intptr_t limit =
- Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
- limit += new_space_.Capacity();
-    // TODO(hpayer): Can be removed when pretenuring is supported for all
- // allocation sites.
- if (IsHighSurvivalRate() && IsStableOrIncreasingSurvivalTrend()) {
- limit *= 2;
- }
- intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
- return Min(limit, halfway_to_the_max);
- }
-
- intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
- const int divisor = FLAG_stress_compaction ? 8 :
- new_space_high_promotion_mode_active_ ? 1 : 2;
- intptr_t limit =
- Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
+ Max(old_gen_size + old_gen_size / divisor,
+ kMinimumOldGenerationAllocationLimit);
limit += new_space_.Capacity();
    // TODO(hpayer): Can be removed when pretenuring is supported for all
// allocation sites.
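
A worked example of the merged limit, with assumed numbers (1 MB pages, so
kMinimumOldGenerationAllocationLimit is 8 MB; old_gen_size = 64 MB; normal
mode, so divisor = 3; an 8 MB new-space capacity):

  intptr_t limit = Max(64 + 64 / 3, 8);  // 85 (all values in MB)
  limit += 8;                            // + new_space_.Capacity() -> 93
  // Assuming the survival-rate doubling and the halfway cap from the removed
  // OldGenPromotionLimit are retained below this hunk: doubling yields
  // 186 MB, and with max_old_generation_size_ = 700 MB the cap is
  // (64 + 700) / 2 = 382 MB, so the doubled 186 MB limit stands.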
@@ -1679,22 +1674,14 @@ class Heap {
if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
- intptr_t total_promoted = PromotedTotalSize();
-
- intptr_t adjusted_promotion_limit =
- old_gen_promotion_limit_ - new_space_.Capacity();
-
- if (total_promoted >= adjusted_promotion_limit) return true;
-
intptr_t adjusted_allocation_limit =
- old_gen_allocation_limit_ - new_space_.Capacity() / 5;
+ old_generation_allocation_limit_ - new_space_.Capacity();
- if (PromotedSpaceSizeOfObjects() >= adjusted_allocation_limit) return true;
+ if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
return false;
}
-
void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
@@ -2000,8 +1987,6 @@ class Heap {
#undef ROOT_ACCESSOR
#ifdef DEBUG
- bool allocation_allowed_;
-
// If the --gc-interval flag is set to a positive value, this
// variable holds the value indicating the number of allocations
// remain until the next failure and garbage collection.
@@ -2019,13 +2004,9 @@ class Heap {
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
- // which collector to invoke.
- intptr_t old_gen_promotion_limit_;
-
- // Limit that triggers a global GC as soon as is reasonable. This is
- // checked before expanding a paged space in the old generation and on
- // every allocation in large object space.
- intptr_t old_gen_allocation_limit_;
+ // which collector to invoke, before expanding a paged space in the old
+ // generation and on every allocation in large object space.
+ intptr_t old_generation_allocation_limit_;
// Used to adjust the limits that control the timing of the next GC.
intptr_t size_of_old_gen_at_last_old_space_gc_;
@@ -2043,10 +2024,12 @@ class Heap {
// Indicates that an allocation has failed in the old generation since the
// last GC.
- int old_gen_exhausted_;
+ bool old_gen_exhausted_;
Object* native_contexts_list_;
+ Object* array_buffers_list_;
+
StoreBufferRebuilder store_buffer_rebuilder_;
struct StringTypeTable {
@@ -2190,6 +2173,9 @@ class Heap {
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
+ void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
+ void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
+
// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
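
For orientation, a simplified sketch of the weak-list walk these two hooks
perform (an assumed shape, not this patch's code): each list is threaded
through the objects themselves, and the WeakObjectRetainer decides which
entries survive. GetNext/SetNext are hypothetical link helpers, and the
record_slots bookkeeping is omitted.

  static Object* VisitWeakList(Heap* heap, Object* list,
                               WeakObjectRetainer* retainer) {
    Object* head = heap->undefined_value();
    Object* tail = NULL;
    while (list != heap->undefined_value()) {
      Object* retained = retainer->RetainAs(list);
      if (retained != NULL) {
        if (head == heap->undefined_value()) {
          head = retained;           // first survivor becomes the new head
        } else {
          SetNext(tail, retained);   // relink past dropped entries
        }
        tail = retained;
      }
      list = GetNext(list);          // advance along the old chain
    }
    return head;
  }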
@@ -2724,43 +2710,6 @@ class DescriptorLookupCache {
};
-// A helper class to document/test C++ scopes where we do not
-// expect a GC. Usage:
-//
-// /* Allocation not allowed: we cannot handle a GC in this scope. */
-// { AssertNoAllocation nogc;
-// ...
-// }
-
-#ifdef DEBUG
-inline bool EnterAllocationScope(Isolate* isolate, bool allow_allocation);
-inline void ExitAllocationScope(Isolate* isolate, bool last_state);
-#endif
-
-
-class AssertNoAllocation {
- public:
- inline AssertNoAllocation();
- inline ~AssertNoAllocation();
-
-#ifdef DEBUG
- private:
- bool last_state_;
-#endif
-};
-
-
-class DisableAssertNoAllocation {
- public:
- inline DisableAssertNoAllocation();
- inline ~DisableAssertNoAllocation();
-
-#ifdef DEBUG
- private:
- bool last_state_;
-#endif
-};
-
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
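
The deleted helpers are superseded by the scope classes in assert-scope.h,
now included at the top of this file. Usage mirrors the old comment; a
minimal sketch, using the DisallowHeapAllocation name this patch adopts
elsewhere:

  {
    DisallowHeapAllocation no_allocation;  // a GC in this scope is an error
    // ... manipulate raw heap-object pointers safely ...
  }  // allocation (and thus GC) is permitted again once the scope closes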
@@ -2780,6 +2729,8 @@ class GCTracer BASE_EMBEDDED {
MC_UPDATE_POINTERS_TO_EVACUATED,
MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
MC_UPDATE_MISC_POINTERS,
+ MC_WEAKMAP_PROCESS,
+ MC_WEAKMAP_CLEAR,
MC_FLUSH_CODE,
kNumberOfScopes
};
@@ -3075,7 +3026,7 @@ class PathTracer : public ObjectVisitor {
what_to_find_(what_to_find),
visit_mode_(visit_mode),
object_stack_(20),
- no_alloc() {}
+ no_allocation() {}
virtual void VisitPointers(Object** start, Object** end);
@@ -3104,7 +3055,7 @@ class PathTracer : public ObjectVisitor {
VisitMode visit_mode_;
List<Object*> object_stack_;
- AssertNoAllocation no_alloc; // i.e. no gc allowed.
+ DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);