Diffstat (limited to 'chromium/v8/src/heap.cc')
-rw-r--r--  chromium/v8/src/heap.cc  1021
1 file changed, 458 insertions(+), 563 deletions(-)
diff --git a/chromium/v8/src/heap.cc b/chromium/v8/src/heap.cc
index f4cc421b079..1e9091b30d8 100644
--- a/chromium/v8/src/heap.cc
+++ b/chromium/v8/src/heap.cc
@@ -67,33 +67,19 @@ namespace internal {
Heap::Heap()
: isolate_(NULL),
+ code_range_size_(kIs64BitArch ? 512 * MB : 0),
// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
-#if V8_TARGET_ARCH_X64
-#define LUMP_OF_MEMORY (2 * MB)
- code_range_size_(512*MB),
-#else
-#define LUMP_OF_MEMORY MB
- code_range_size_(0),
-#endif
-#if defined(ANDROID) || V8_TARGET_ARCH_MIPS
- reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- initial_semispace_size_(Page::kPageSize),
- max_old_generation_size_(192*MB),
- max_executable_size_(max_old_generation_size_),
-#else
- reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+ reserved_semispace_size_(8 * (kPointerSize / 4) * MB),
+ max_semispace_size_(8 * (kPointerSize / 4) * MB),
initial_semispace_size_(Page::kPageSize),
- max_old_generation_size_(700ul * LUMP_OF_MEMORY),
- max_executable_size_(256l * LUMP_OF_MEMORY),
-#endif
-
+ max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
+ max_executable_size_(256ul * (kPointerSize / 4) * MB),
// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
// Will be 4 * reserved_semispace_size_ to ensure that young
// generation can be aligned to its size.
+ maximum_committed_(0),
survived_since_last_expansion_(0),
sweep_generation_(0),
always_allocate_scope_depth_(0),
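The (kPointerSize / 4) factor above replaces the old LUMP_OF_MEMORY #ifdef: it evaluates to 1 on 32-bit targets and 2 on 64-bit targets, so the limits reproduce the former per-architecture values (the Android/MIPS special case is dropped). A worked example of the resulting defaults, assuming MB == 1024 * 1024 as in globals.h:

// Illustrative arithmetic only, not part of the patch.
// 32-bit (kPointerSize == 4, factor 1):
//   semispaces      =   8 * 1 * MB =    8 MB
//   old generation  = 700 * 1 * MB =  700 MB
//   executable code = 256 * 1 * MB =  256 MB
// 64-bit (kPointerSize == 8, factor 2):
//   semispaces      =   8 * 2 * MB =   16 MB
//   old generation  = 700 * 2 * MB = 1400 MB
//   executable code = 256 * 2 * MB =  512 MB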
@@ -127,10 +113,9 @@ Heap::Heap()
amount_of_external_allocated_memory_(0),
amount_of_external_allocated_memory_at_last_global_gc_(0),
old_gen_exhausted_(false),
+ inline_allocation_disabled_(false),
store_buffer_rebuilder_(store_buffer()),
hidden_string_(NULL),
- global_gc_prologue_callback_(NULL),
- global_gc_epilogue_callback_(NULL),
gc_safe_size_of_old_object_(NULL),
total_regexp_code_generated_(0),
tracer_(NULL),
@@ -157,9 +142,11 @@ Heap::Heap()
mark_sweeps_since_idle_round_started_(0),
gc_count_at_last_idle_gc_(0),
scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+ full_codegen_bytes_generated_(0),
+ crankshaft_codegen_bytes_generated_(0),
gcs_since_last_deopt_(0),
#ifdef VERIFY_HEAP
- no_weak_embedded_maps_verification_scope_depth_(0),
+ no_weak_object_verification_scope_depth_(0),
#endif
promotion_queue_(this),
configured_(false),
@@ -172,6 +159,9 @@ Heap::Heap()
max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
#endif
+ // Ensure old_generation_size_ is a multiple of kPageSize.
+ ASSERT(MB >= Page::kPageSize);
+
intptr_t max_virtual = OS::MaxVirtualMemory();
if (max_virtual > 0) {
@@ -243,6 +233,16 @@ intptr_t Heap::CommittedMemoryExecutable() {
}
+void Heap::UpdateMaximumCommitted() {
+ if (!HasBeenSetUp()) return;
+
+ intptr_t current_committed_memory = CommittedMemory();
+ if (current_committed_memory > maximum_committed_) {
+ maximum_committed_ = current_committed_memory;
+ }
+}
+
+
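UpdateMaximumCommitted() keeps a simple high-water mark of committed memory; this patch calls it from the GC prologue, the GC epilogue and TearDown(), and the value is exposed through MaximumCommittedMemory() for the new heap_sample_maximum_committed counter and the --print-max-heap-committed output. A minimal sketch of a reader, assuming a hypothetical caller outside this file:

// Hypothetical reporting helper (not part of the patch).
void ReportPeakCommitted(Heap* heap) {
  // maximum_committed_ is only refreshed at GC boundaries and teardown,
  // so this is a lower bound between collections.
  PrintPID("Peak committed memory: %6" V8_PTR_PREFIX "d KB\n",
           heap->MaximumCommittedMemory() / KB);
}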
intptr_t Heap::Available() {
if (!HasBeenSetUp()) return 0;
@@ -411,7 +411,7 @@ void Heap::PrintShortHeapStatistics() {
this->Available() / KB,
this->CommittedMemory() / KB);
PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n",
- amount_of_external_allocated_memory_ / KB);
+ static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB));
PrintPID("Total time spent in GC : %.1f ms\n", total_gc_time_ms_);
}
@@ -452,6 +452,8 @@ void Heap::GarbageCollectionPrologue() {
#endif
}
+ UpdateMaximumCommitted();
+
#ifdef DEBUG
ASSERT(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
@@ -461,6 +463,10 @@ void Heap::GarbageCollectionPrologue() {
#endif // DEBUG
store_buffer()->GCPrologue();
+
+ if (isolate()->concurrent_osr_enabled()) {
+ isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs();
+ }
}
@@ -474,6 +480,20 @@ intptr_t Heap::SizeOfObjects() {
}
+void Heap::ClearAllICsByKind(Code::Kind kind) {
+ HeapObjectIterator it(code_space());
+
+ for (Object* object = it.Next(); object != NULL; object = it.Next()) {
+ Code* code = Code::cast(object);
+ Code::Kind current_kind = code->kind();
+ if (current_kind == Code::FUNCTION ||
+ current_kind == Code::OPTIMIZED_FUNCTION) {
+ code->ClearInlineCaches(kind);
+ }
+ }
+}
+
+
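ClearAllICsByKind() visits every Code object in code space and clears inline caches of the requested kind in full-codegen and optimized code. A hedged usage sketch; the call site and motivation are assumptions, only the method and the Code::Kind value come from V8:

// Hypothetical example: drop every keyed-store IC, e.g. because an
// invariant those ICs were specialized on no longer holds.
void DropKeyedStoreICs(Heap* heap) {
  heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
}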
void Heap::RepairFreeListsAfterBoot() {
PagedSpaces spaces(this);
for (PagedSpace* space = spaces.next();
@@ -485,6 +505,40 @@ void Heap::RepairFreeListsAfterBoot() {
void Heap::GarbageCollectionEpilogue() {
+ if (FLAG_allocation_site_pretenuring) {
+ int tenure_decisions = 0;
+ int dont_tenure_decisions = 0;
+ int allocation_mementos_found = 0;
+
+ Object* cur = allocation_sites_list();
+ while (cur->IsAllocationSite()) {
+ AllocationSite* casted = AllocationSite::cast(cur);
+ allocation_mementos_found += casted->memento_found_count()->value();
+ if (casted->DigestPretenuringFeedback()) {
+ if (casted->GetPretenureMode() == TENURED) {
+ tenure_decisions++;
+ } else {
+ dont_tenure_decisions++;
+ }
+ }
+ cur = casted->weak_next();
+ }
+
+ // TODO(mvstanton): Pretenure decisions are only made once for an allocation
+ // site. Find a sane way to decide about revisiting the decision later.
+
+ if (FLAG_trace_track_allocation_sites &&
+ (allocation_mementos_found > 0 ||
+ tenure_decisions > 0 ||
+ dont_tenure_decisions > 0)) {
+ PrintF("GC: (#mementos, #tenure decisions, #donttenure decisions) "
+ "(%d, %d, %d)\n",
+ allocation_mementos_found,
+ tenure_decisions,
+ dont_tenure_decisions);
+ }
+ }
+
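The pretenuring loop above uses the standard weak-list idiom: allocation sites are chained through weak_next() off allocation_sites_list(), and the walk ends at the first value that is not an AllocationSite (the undefined sentinel). A minimal sketch of that traversal, using only accessors that appear in this patch:

// Sketch: count the allocation sites currently on the weak list.
static int CountAllocationSites(Heap* heap) {
  int count = 0;
  Object* cur = heap->allocation_sites_list();
  while (cur->IsAllocationSite()) {
    count++;
    cur = AllocationSite::cast(cur)->weak_next();
  }
  return count;
}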
store_buffer()->GCEpilogue();
// In release mode, we only zap the from space under heap verification.
@@ -513,6 +567,8 @@ void Heap::GarbageCollectionEpilogue() {
}
}
+ UpdateMaximumCommitted();
+
isolate_->counters()->alive_after_last_gc()->Set(
static_cast<int>(SizeOfObjects()));
@@ -521,10 +577,31 @@ void Heap::GarbageCollectionEpilogue() {
isolate_->counters()->number_of_symbols()->Set(
string_table()->NumberOfElements());
+ if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) {
+ isolate_->counters()->codegen_fraction_crankshaft()->AddSample(
+ static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) /
+ (crankshaft_codegen_bytes_generated_
+ + full_codegen_bytes_generated_)));
+ }
+
if (CommittedMemory() > 0) {
isolate_->counters()->external_fragmentation_total()->AddSample(
static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
+ isolate_->counters()->heap_fraction_new_space()->
+ AddSample(static_cast<int>(
+ (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+ isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
+ static_cast<int>(
+ (old_pointer_space()->CommittedMemory() * 100.0) /
+ CommittedMemory()));
+ isolate_->counters()->heap_fraction_old_data_space()->AddSample(
+ static_cast<int>(
+ (old_data_space()->CommittedMemory() * 100.0) /
+ CommittedMemory()));
+ isolate_->counters()->heap_fraction_code_space()->
+ AddSample(static_cast<int>(
+ (code_space()->CommittedMemory() * 100.0) / CommittedMemory()));
isolate_->counters()->heap_fraction_map_space()->AddSample(
static_cast<int>(
(map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
@@ -535,6 +612,9 @@ void Heap::GarbageCollectionEpilogue() {
AddSample(static_cast<int>(
(property_cell_space()->CommittedMemory() * 100.0) /
CommittedMemory()));
+ isolate_->counters()->heap_fraction_lo_space()->
+ AddSample(static_cast<int>(
+ (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
isolate_->counters()->heap_sample_total_committed()->AddSample(
static_cast<int>(CommittedMemory() / KB));
@@ -548,6 +628,11 @@ void Heap::GarbageCollectionEpilogue() {
heap_sample_property_cell_space_committed()->
AddSample(static_cast<int>(
property_cell_space()->CommittedMemory() / KB));
+ isolate_->counters()->heap_sample_code_space_committed()->AddSample(
+ static_cast<int>(code_space()->CommittedMemory() / KB));
+
+ isolate_->counters()->heap_sample_maximum_committed()->AddSample(
+ static_cast<int>(MaximumCommittedMemory() / KB));
}
#define UPDATE_COUNTERS_FOR_SPACE(space) \
@@ -610,6 +695,11 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
// Note: as weak callbacks can execute arbitrary code, we cannot
// hope that eventually there will be no weak callbacks invocations.
// Therefore stop recollecting after several attempts.
+ if (isolate()->concurrent_recompilation_enabled()) {
+ // The optimizing compiler may be unnecessarily holding on to memory.
+ DisallowHeapAllocation no_recursive_gc;
+ isolate()->optimizing_compiler_thread()->Flush();
+ }
mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
kReduceMemoryFootprintMask);
isolate_->compilation_cache()->Clear();
@@ -706,7 +796,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
int Heap::NotifyContextDisposed() {
- if (FLAG_concurrent_recompilation) {
+ if (isolate()->concurrent_recompilation_enabled()) {
// Flush the queued recompilation tasks.
isolate()->optimizing_compiler_thread()->Flush();
}
@@ -784,9 +874,7 @@ static bool AbortIncrementalMarkingAndCollectGarbage(
}
-void Heap::ReserveSpace(
- int *sizes,
- Address *locations_out) {
+void Heap::ReserveSpace(int *sizes, Address *locations_out) {
bool gc_performed = true;
int counter = 0;
static const int kThreshold = 20;
@@ -884,6 +972,8 @@ void Heap::ClearNormalizedMapCaches() {
void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+ if (start_new_space_size == 0) return;
+
double survival_rate =
(static_cast<double>(young_survivors_after_last_gc_) * 100) /
start_new_space_size;
@@ -1056,12 +1146,17 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
- if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) {
- global_gc_prologue_callback_();
- }
for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
if (gc_type & gc_prologue_callbacks_[i].gc_type) {
- gc_prologue_callbacks_[i].callback(gc_type, flags);
+ if (!gc_prologue_callbacks_[i].pass_isolate_) {
+ v8::GCPrologueCallback callback =
+ reinterpret_cast<v8::GCPrologueCallback>(
+ gc_prologue_callbacks_[i].callback);
+ callback(gc_type, flags);
+ } else {
+ v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
+ gc_prologue_callbacks_[i].callback(isolate, gc_type, flags);
+ }
}
}
}
@@ -1070,12 +1165,18 @@ void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
void Heap::CallGCEpilogueCallbacks(GCType gc_type) {
for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
- gc_epilogue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
+ if (!gc_epilogue_callbacks_[i].pass_isolate_) {
+ v8::GCPrologueCallback callback =
+ reinterpret_cast<v8::GCPrologueCallback>(
+ gc_epilogue_callbacks_[i].callback);
+ callback(gc_type, kNoGCCallbackFlags);
+ } else {
+ v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
+ gc_epilogue_callbacks_[i].callback(
+ isolate, gc_type, kNoGCCallbackFlags);
+ }
}
}
- if (gc_type == kGCTypeMarkSweepCompact && global_gc_epilogue_callback_) {
- global_gc_epilogue_callback_();
- }
}
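Both dispatchers now branch on pass_isolate_: legacy callbacks are cast back to the isolate-free v8::GCPrologueCallback signature, newer ones receive the v8::Isolate* first. A sketch of a callback written against the isolate-taking signature this code invokes; the function name and body are assumptions:

// Hypothetical embedder callback matching v8::Isolate::GCPrologueCallback.
static void OnGCPrologue(v8::Isolate* isolate,
                         v8::GCType type,
                         v8::GCCallbackFlags flags) {
  if (type == v8::kGCTypeMarkSweepCompact) {
    // e.g. capture per-isolate statistics right before a full GC.
  }
}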
@@ -1709,6 +1810,8 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
mark_compact_collector()->is_compacting();
ProcessArrayBuffers(retainer, record_slots);
ProcessNativeContexts(retainer, record_slots);
+ // TODO(mvstanton): AllocationSites only need to be processed during
+ // MARK_COMPACT, as they live in old space. Verify and address.
ProcessAllocationSites(retainer, record_slots);
}
@@ -1814,7 +1917,7 @@ struct WeakListVisitor<AllocationSite> {
}
static void VisitLiveObject(Heap* heap,
- AllocationSite* array_buffer,
+ AllocationSite* site,
WeakObjectRetainer* retainer,
bool record_slots) {}
@@ -1946,6 +2049,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0);
INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
@@ -2090,8 +2194,12 @@ class ScavengingVisitor : public StaticVisitorBase {
if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
// Update NewSpace stats if necessary.
RecordCopiedObject(heap, target);
- HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
Isolate* isolate = heap->isolate();
+ HeapProfiler* heap_profiler = isolate->heap_profiler();
+ if (heap_profiler->is_tracking_object_moves()) {
+ heap_profiler->ObjectMoveEvent(source->address(), target->address(),
+ size);
+ }
if (isolate->logger()->is_logging_code_events() ||
isolate->cpu_profiler()->is_profiling()) {
if (target->IsSharedFunctionInfo()) {
@@ -2128,12 +2236,10 @@ class ScavengingVisitor : public StaticVisitorBase {
MaybeObject* maybe_result;
if (object_contents == DATA_OBJECT) {
- // TODO(mstarzinger): Turn this check into a regular assert soon!
- CHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
+ ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
} else {
- // TODO(mstarzinger): Turn this check into a regular assert soon!
- CHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
+ ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
}
@@ -2164,8 +2270,7 @@ class ScavengingVisitor : public StaticVisitorBase {
return;
}
}
- // TODO(mstarzinger): Turn this check into a regular assert soon!
- CHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
+ ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
Object* result = allocation->ToObjectUnchecked();
@@ -2342,7 +2447,7 @@ void Heap::SelectScavengingVisitorsTable() {
isolate()->logger()->is_logging() ||
isolate()->cpu_profiler()->is_profiling() ||
(isolate()->heap_profiler() != NULL &&
- isolate()->heap_profiler()->is_profiling());
+ isolate()->heap_profiler()->is_tracking_object_moves());
if (!incremental_marking()->IsMarking()) {
if (!logging_and_profiling) {
@@ -2391,7 +2496,7 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
int instance_size) {
Object* result;
- MaybeObject* maybe_result = AllocateRawMap();
+ MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
// Map::cast cannot be used due to uninitialized map field.
@@ -2405,7 +2510,7 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
reinterpret_cast<Map*>(result)->set_bit_field(0);
reinterpret_cast<Map*>(result)->set_bit_field2(0);
- int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+ int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptors::encode(true);
reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
return result;
@@ -2416,7 +2521,7 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
int instance_size,
ElementsKind elements_kind) {
Object* result;
- MaybeObject* maybe_result = AllocateRawMap();
+ MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
if (!maybe_result->To(&result)) return maybe_result;
Map* map = reinterpret_cast<Map*>(result);
@@ -2437,7 +2542,7 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
map->set_instance_descriptors(empty_descriptor_array());
map->set_bit_field(0);
map->set_bit_field2(1 << Map::kIsExtensible);
- int bit_field3 = Map::EnumLengthBits::encode(Map::kInvalidEnumCache) |
+ int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptors::encode(true);
map->set_bit_field3(bit_field3);
map->set_elements_kind(elements_kind);
@@ -2649,6 +2754,12 @@ bool Heap::CreateInitialMaps() {
set_fixed_double_array_map(Map::cast(obj));
{ MaybeObject* maybe_obj =
+ AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_constant_pool_array_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
@@ -2886,12 +2997,12 @@ bool Heap::CreateInitialMaps() {
MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate heap numbers in paged
// spaces.
+ int size = HeapNumber::kSize;
STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize);
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
Object* result;
- { MaybeObject* maybe_result =
- AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
+ { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
@@ -2901,26 +3012,12 @@ MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
}
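The recurring refactoring in this patch computes the object size first and lets SelectSpace() choose between new space, the preferred tenured space and large-object space, while the third AllocateRaw() argument still names the retry space. SelectSpace() itself is not part of this diff; a presumed reconstruction of its logic from the call sites:

// Presumed behaviour only (an assumption; the real helper lives in
// heap-inl.h).
static AllocationSpace SelectSpaceSketch(int object_size,
                                         AllocationSpace preferred_old_space,
                                         PretenureFlag pretenure) {
  // Oversized objects always go to the large object space.
  if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
  // Otherwise tenured allocations use the preferred old space and
  // everything else starts out in new space.
  return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
}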
-MaybeObject* Heap::AllocateHeapNumber(double value) {
- // Use general version, if we're forced to always allocate.
- if (always_allocate()) return AllocateHeapNumber(value, TENURED);
-
- // This version of AllocateHeapNumber is optimized for
- // allocation in new space.
- STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize);
- Object* result;
- { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
- HeapNumber::cast(result)->set_value(value);
- return result;
-}
-
-
MaybeObject* Heap::AllocateCell(Object* value) {
+ int size = Cell::kSize;
+ STATIC_ASSERT(Cell::kSize <= Page::kNonCodeObjectAreaSize);
+
Object* result;
- { MaybeObject* maybe_result = AllocateRawCell();
+ { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
HeapObject::cast(result)->set_map_no_write_barrier(cell_map());
@@ -2929,9 +3026,13 @@ MaybeObject* Heap::AllocateCell(Object* value) {
}
-MaybeObject* Heap::AllocatePropertyCell(Object* value) {
+MaybeObject* Heap::AllocatePropertyCell() {
+ int size = PropertyCell::kSize;
+ STATIC_ASSERT(PropertyCell::kSize <= Page::kNonCodeObjectAreaSize);
+
Object* result;
- MaybeObject* maybe_result = AllocateRawPropertyCell();
+ MaybeObject* maybe_result =
+ AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
HeapObject::cast(result)->set_map_no_write_barrier(
@@ -2939,10 +3040,8 @@ MaybeObject* Heap::AllocatePropertyCell(Object* value) {
PropertyCell* cell = PropertyCell::cast(result);
cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
- cell->set_value(value);
+ cell->set_value(the_hole_value());
cell->set_type(Type::None());
- maybe_result = cell->SetValueInferType(value);
- if (maybe_result->IsFailure()) return maybe_result;
return result;
}
@@ -2957,17 +3056,16 @@ MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) {
MaybeObject* Heap::AllocateAllocationSite() {
- Object* result;
+ AllocationSite* site;
MaybeObject* maybe_result = Allocate(allocation_site_map(),
OLD_POINTER_SPACE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- AllocationSite* site = AllocationSite::cast(result);
+ if (!maybe_result->To(&site)) return maybe_result;
site->Initialize();
// Link the site
site->set_weak_next(allocation_sites_list());
set_allocation_sites_list(site);
- return result;
+ return site;
}
@@ -3048,6 +3146,12 @@ void Heap::CreateFixedStubs() {
}
+void Heap::CreateStubsRequiringBuiltins() {
+ HandleScope scope(isolate());
+ CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(isolate());
+}
+
+
bool Heap::CreateInitialObjects() {
Object* obj;
@@ -3244,11 +3348,13 @@ bool Heap::CreateInitialObjects() {
{ MaybeObject* maybe_obj = AllocateSymbol();
if (!maybe_obj->ToObject(&obj)) return false;
}
+ Symbol::cast(obj)->set_is_private(true);
set_frozen_symbol(Symbol::cast(obj));
{ MaybeObject* maybe_obj = AllocateSymbol();
if (!maybe_obj->ToObject(&obj)) return false;
}
+ Symbol::cast(obj)->set_is_private(true);
set_elements_transition_symbol(Symbol::cast(obj));
{ MaybeObject* maybe_obj = SeededNumberDictionary::Allocate(this, 0, TENURED);
@@ -3260,6 +3366,7 @@ bool Heap::CreateInitialObjects() {
{ MaybeObject* maybe_obj = AllocateSymbol();
if (!maybe_obj->ToObject(&obj)) return false;
}
+ Symbol::cast(obj)->set_is_private(true);
set_observed_symbol(Symbol::cast(obj));
// Handling of script id generation is in Factory::NewScript.
@@ -3887,7 +3994,12 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
int length = end - start;
if (length <= 0) {
return empty_string();
- } else if (length == 1) {
+ }
+
+ // Make an attempt to flatten the buffer to reduce access time.
+ buffer = buffer->TryFlattenGetString();
+
+ if (length == 1) {
return LookupSingleCharacterStringFromCode(buffer->Get(start));
} else if (length == 2) {
// Optimization for 2-byte strings often used as keys in a decompression
@@ -3898,9 +4010,6 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
return MakeOrFindTwoCharacterString(this, c1, c2);
}
- // Make an attempt to flatten the buffer to reduce access time.
- buffer = buffer->TryFlattenGetString();
-
if (!FLAG_string_slices ||
!buffer->IsFlat() ||
length < SlicedString::kMinLength ||
@@ -4042,13 +4151,12 @@ MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
return result;
}
- Object* result;
+ SeqTwoByteString* result;
{ MaybeObject* maybe_result = AllocateRawTwoByteString(1);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ if (!maybe_result->To<SeqTwoByteString>(&result)) return maybe_result;
}
- String* answer = String::cast(result);
- answer->Set(0, code);
- return answer;
+ result->SeqTwoByteStringSet(0, code);
+ return result;
}
@@ -4056,31 +4164,8 @@ MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
if (length < 0 || length > ByteArray::kMaxLength) {
return Failure::OutOfMemoryException(0x7);
}
- if (pretenure == NOT_TENURED) {
- return AllocateByteArray(length);
- }
int size = ByteArray::SizeFor(length);
- AllocationSpace space =
- (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE;
- Object* result;
- { MaybeObject* maybe_result = AllocateRaw(size, space, space);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
-
- reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
- byte_array_map());
- reinterpret_cast<ByteArray*>(result)->set_length(length);
- return result;
-}
-
-
-MaybeObject* Heap::AllocateByteArray(int length) {
- if (length < 0 || length > ByteArray::kMaxLength) {
- return Failure::OutOfMemoryException(0x8);
- }
- int size = ByteArray::SizeFor(length);
- AllocationSpace space =
- (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
Object* result;
{ MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -4111,11 +4196,10 @@ MaybeObject* Heap::AllocateExternalArray(int length,
ExternalArrayType array_type,
void* external_pointer,
PretenureFlag pretenure) {
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ int size = ExternalArray::kAlignedSize;
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
Object* result;
- { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
- space,
- OLD_DATA_SPACE);
+ { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
@@ -4133,7 +4217,8 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
Code::Flags flags,
Handle<Object> self_reference,
bool immovable,
- bool crankshafted) {
+ bool crankshafted,
+ int prologue_offset) {
// Allocate ByteArray before the Code object, so that we do not risk
// leaving uninitialized Code object (and breaking the heap).
ByteArray* reloc_info;
@@ -4152,7 +4237,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
if (force_lo_space) {
maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
} else {
- maybe_result = code_space_->AllocateRaw(obj_size);
+ maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
}
if (!maybe_result->To<HeapObject>(&result)) return maybe_result;
@@ -4174,19 +4259,29 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->set_instruction_size(desc.instr_size);
code->set_relocation_info(reloc_info);
code->set_flags(flags);
+ code->set_raw_kind_specific_flags1(0);
+ code->set_raw_kind_specific_flags2(0);
if (code->is_call_stub() || code->is_keyed_call_stub()) {
code->set_check_type(RECEIVER_MAP_CHECK);
}
code->set_is_crankshafted(crankshafted);
code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
- code->InitializeTypeFeedbackInfoNoWriteBarrier(undefined_value());
+ code->set_raw_type_feedback_info(undefined_value());
code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
code->set_gc_metadata(Smi::FromInt(0));
code->set_ic_age(global_ic_age_);
- code->set_prologue_offset(kPrologueOffsetNotSet);
+ code->set_prologue_offset(prologue_offset);
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
code->set_marked_for_deoptimization(false);
}
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (code->kind() == Code::FUNCTION) {
+ code->set_has_debug_break_slots(
+ isolate_->debugger()->IsDebuggerActive());
+ }
+#endif
+
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
@@ -4215,7 +4310,7 @@ MaybeObject* Heap::CopyCode(Code* code) {
if (obj_size > code_space()->AreaSize()) {
maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
} else {
- maybe_result = code_space_->AllocateRaw(obj_size);
+ maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
}
Object* result;
@@ -4258,7 +4353,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
if (new_obj_size > code_space()->AreaSize()) {
maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
} else {
- maybe_result = code_space_->AllocateRaw(new_obj_size);
+ maybe_result = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
}
Object* result;
@@ -4292,6 +4387,17 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
}
+void Heap::InitializeAllocationMemento(AllocationMemento* memento,
+ AllocationSite* allocation_site) {
+ memento->set_map_no_write_barrier(allocation_memento_map());
+ ASSERT(allocation_site->map() == allocation_site_map());
+ memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
+ if (FLAG_allocation_site_pretenuring) {
+ allocation_site->IncrementMementoCreateCount();
+ }
+}
+
+
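InitializeAllocationMemento() expects the memento slot to have been reserved directly behind the object; in this patch AllocateWithAllocationSite() and CopyJSObject() size the allocation as instance size plus AllocationMemento::kSize and place the memento at that offset. A layout sketch (illustrative only):

// One raw allocation, two logical pieces:
//
//   result                          result + map->instance_size()
//   |                               |
//   v                               v
//   +-------------------------------+---------------------------+
//   | object (map->instance_size()) | AllocationMemento::kSize  |
//   +-------------------------------+---------------------------+
//
// InitializeAllocationMemento() then writes the memento map and the
// AllocationSite pointer, and bumps the site's memento-create count
// when allocation-site pretenuring is enabled.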
MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
Handle<AllocationSite> allocation_site) {
ASSERT(gc_state_ == NOT_IN_GC);
@@ -4308,8 +4414,7 @@ MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
HeapObject::cast(result)->set_map_no_write_barrier(map);
AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
reinterpret_cast<Address>(result) + map->instance_size());
- alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
+ InitializeAllocationMemento(alloc_memento, *allocation_site);
return result;
}
@@ -4346,39 +4451,6 @@ void Heap::InitializeFunction(JSFunction* function,
}
-MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
- // Make sure to use globals from the function's context, since the function
- // can be from a different context.
- Context* native_context = function->context()->native_context();
- Map* new_map;
- if (function->shared()->is_generator()) {
- // Generator prototypes can share maps since they don't have "constructor"
- // properties.
- new_map = native_context->generator_object_prototype_map();
- } else {
- // Each function prototype gets a fresh map to avoid unwanted sharing of
- // maps between prototypes of different constructors.
- JSFunction* object_function = native_context->object_function();
- ASSERT(object_function->has_initial_map());
- MaybeObject* maybe_map = object_function->initial_map()->Copy();
- if (!maybe_map->To(&new_map)) return maybe_map;
- }
-
- Object* prototype;
- MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
- if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
-
- if (!function->shared()->is_generator()) {
- MaybeObject* maybe_failure =
- JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributesTrampoline(
- constructor_string(), function, DONT_ENUM);
- if (maybe_failure->IsFailure()) return maybe_failure;
- }
-
- return prototype;
-}
-
-
MaybeObject* Heap::AllocateFunction(Map* function_map,
SharedFunctionInfo* shared,
Object* prototype,
@@ -4413,10 +4485,6 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
arguments_object_size = kArgumentsObjectSize;
}
- // This calls Copy directly rather than using Heap::AllocateRaw so we
- // duplicate the check here.
- ASSERT(AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
-
// Check that the size of the boilerplate matches our
// expectations. The ArgumentsAccessStub::GenerateNewObject relies
// on the size being a known constant.
@@ -4454,48 +4522,6 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
}
-MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
- ASSERT(!fun->has_initial_map());
-
- // First create a new map with the size and number of in-object properties
- // suggested by the function.
- InstanceType instance_type;
- int instance_size;
- int in_object_properties;
- if (fun->shared()->is_generator()) {
- instance_type = JS_GENERATOR_OBJECT_TYPE;
- instance_size = JSGeneratorObject::kSize;
- in_object_properties = 0;
- } else {
- instance_type = JS_OBJECT_TYPE;
- instance_size = fun->shared()->CalculateInstanceSize();
- in_object_properties = fun->shared()->CalculateInObjectProperties();
- }
- Map* map;
- MaybeObject* maybe_map = AllocateMap(instance_type, instance_size);
- if (!maybe_map->To(&map)) return maybe_map;
-
- // Fetch or allocate prototype.
- Object* prototype;
- if (fun->has_instance_prototype()) {
- prototype = fun->instance_prototype();
- } else {
- MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
- if (!maybe_prototype->To(&prototype)) return maybe_prototype;
- }
- map->set_inobject_properties(in_object_properties);
- map->set_unused_property_fields(in_object_properties);
- map->set_prototype(prototype);
- ASSERT(map->has_fast_object_elements());
-
- if (!fun->shared()->is_generator()) {
- fun->shared()->StartInobjectSlackTracking(map);
- }
-
- return map;
-}
-
-
void Heap::InitializeJSObjectFromMap(JSObject* obj,
FixedArray* properties,
Map* map) {
@@ -4552,9 +4578,8 @@ MaybeObject* Heap::AllocateJSObjectFromMap(
}
// Allocate the JSObject.
- AllocationSpace space =
- (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
- if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
+ int size = map->instance_size();
+ AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
Object* obj;
MaybeObject* maybe_obj = Allocate(map, space);
if (!maybe_obj->To(&obj)) return maybe_obj;
@@ -4587,8 +4612,8 @@ MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(
}
// Allocate the JSObject.
- AllocationSpace space = NEW_SPACE;
- if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
+ int size = map->instance_size();
+ AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, NOT_TENURED);
Object* obj;
MaybeObject* maybe_obj =
AllocateWithAllocationSite(map, space, allocation_site);
@@ -4603,15 +4628,7 @@ MaybeObject* Heap::AllocateJSObjectFromMapWithAllocationSite(
MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
PretenureFlag pretenure) {
- // Allocate the initial map if absent.
- if (!constructor->has_initial_map()) {
- Object* initial_map;
- { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
- if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
- }
- constructor->set_initial_map(Map::cast(initial_map));
- Map::cast(initial_map)->set_constructor(constructor);
- }
+ ASSERT(constructor->has_initial_map());
// Allocate the object based on the constructors initial map.
MaybeObject* result = AllocateJSObjectFromMap(
constructor->initial_map(), pretenure);
@@ -4626,21 +4643,12 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
Handle<AllocationSite> allocation_site) {
- // Allocate the initial map if absent.
- if (!constructor->has_initial_map()) {
- Object* initial_map;
- { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
- if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
- }
- constructor->set_initial_map(Map::cast(initial_map));
- Map::cast(initial_map)->set_constructor(constructor);
- }
+ ASSERT(constructor->has_initial_map());
// Allocate the object based on the constructor's initial map, or the payload
// advice
Map* initial_map = constructor->initial_map();
- Smi* smi = Smi::cast(allocation_site->transition_info());
- ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+ ElementsKind to_kind = allocation_site->GetElementsKind();
AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
if (to_kind != initial_map->elements_kind()) {
MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind);
@@ -4666,23 +4674,6 @@ MaybeObject* Heap::AllocateJSObjectWithAllocationSite(JSFunction* constructor,
}
-MaybeObject* Heap::AllocateJSGeneratorObject(JSFunction *function) {
- ASSERT(function->shared()->is_generator());
- Map *map;
- if (function->has_initial_map()) {
- map = function->initial_map();
- } else {
- // Allocate the initial map if absent.
- MaybeObject* maybe_map = AllocateInitialMap(function);
- if (!maybe_map->To(&map)) return maybe_map;
- function->set_initial_map(map);
- map->set_constructor(function);
- }
- ASSERT(map->instance_type() == JS_GENERATOR_OBJECT_TYPE);
- return AllocateJSObjectFromMap(map);
-}
-
-
MaybeObject* Heap::AllocateJSModule(Context* context, ScopeInfo* scope_info) {
// Allocate a fresh map. Modules do not have a prototype.
Map* map;
@@ -4744,20 +4735,6 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
}
-MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite(
- ElementsKind elements_kind,
- int length,
- int capacity,
- Handle<AllocationSite> allocation_site,
- ArrayStorageAllocationMode mode) {
- MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind,
- allocation_site);
- JSArray* array;
- if (!maybe_array->To(&array)) return maybe_array;
- return AllocateJSArrayStorage(array, length, capacity, mode);
-}
-
-
MaybeObject* Heap::AllocateJSArrayStorage(
JSArray* array,
int length,
@@ -4860,74 +4837,7 @@ MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
}
-MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
- ASSERT(constructor->has_initial_map());
- Map* map = constructor->initial_map();
- ASSERT(map->is_dictionary_map());
-
- // Make sure no field properties are described in the initial map.
- // This guarantees us that normalizing the properties does not
- // require us to change property values to PropertyCells.
- ASSERT(map->NextFreePropertyIndex() == 0);
-
- // Make sure we don't have a ton of pre-allocated slots in the
- // global objects. They will be unused once we normalize the object.
- ASSERT(map->unused_property_fields() == 0);
- ASSERT(map->inobject_properties() == 0);
-
- // Initial size of the backing store to avoid resize of the storage during
- // bootstrapping. The size differs between the JS global object ad the
- // builtins object.
- int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
-
- // Allocate a dictionary object for backing storage.
- NameDictionary* dictionary;
- MaybeObject* maybe_dictionary =
- NameDictionary::Allocate(
- this,
- map->NumberOfOwnDescriptors() * 2 + initial_size);
- if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
-
- // The global object might be created from an object template with accessors.
- // Fill these accessors into the dictionary.
- DescriptorArray* descs = map->instance_descriptors();
- for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
- PropertyDetails details = descs->GetDetails(i);
- ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
- PropertyDetails d = PropertyDetails(details.attributes(), CALLBACKS, i + 1);
- Object* value = descs->GetCallbacksObject(i);
- MaybeObject* maybe_value = AllocatePropertyCell(value);
- if (!maybe_value->ToObject(&value)) return maybe_value;
-
- MaybeObject* maybe_added = dictionary->Add(descs->GetKey(i), value, d);
- if (!maybe_added->To(&dictionary)) return maybe_added;
- }
-
- // Allocate the global object and initialize it with the backing store.
- JSObject* global;
- MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
- if (!maybe_global->To(&global)) return maybe_global;
-
- InitializeJSObjectFromMap(global, dictionary, map);
-
- // Create a new map for the global object.
- Map* new_map;
- MaybeObject* maybe_map = map->CopyDropDescriptors();
- if (!maybe_map->To(&new_map)) return maybe_map;
- new_map->set_dictionary_map(true);
-
- // Set up the global object as a normalized object.
- global->set_map(new_map);
- global->set_properties(dictionary);
-
- // Make sure result is a global object with properties in dictionary.
- ASSERT(global->IsGlobalObject());
- ASSERT(!global->HasFastProperties());
- return global;
-}
-
-
-MaybeObject* Heap::CopyJSObject(JSObject* source) {
+MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
// Never used to copy functions. If functions need to be copied we
// have to be careful to clear the literals array.
SLOW_ASSERT(!source->IsJSFunction());
@@ -4937,6 +4847,8 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
int object_size = map->instance_size();
Object* clone;
+ ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type()));
+
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
// If we're forced to always allocate, we use the general allocation
@@ -4957,7 +4869,11 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
} else {
wb_mode = SKIP_WRITE_BARRIER;
- { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
+ { int adjusted_object_size = site != NULL
+ ? object_size + AllocationMemento::kSize
+ : object_size;
+ MaybeObject* maybe_clone =
+ AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
SLOW_ASSERT(InNewSpace(clone));
@@ -4966,115 +4882,12 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
CopyBlock(HeapObject::cast(clone)->address(),
source->address(),
object_size);
- }
- SLOW_ASSERT(
- JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
- FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
- FixedArray* properties = FixedArray::cast(source->properties());
- // Update elements if necessary.
- if (elements->length() > 0) {
- Object* elem;
- { MaybeObject* maybe_elem;
- if (elements->map() == fixed_cow_array_map()) {
- maybe_elem = FixedArray::cast(elements);
- } else if (source->HasFastDoubleElements()) {
- maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
- } else {
- maybe_elem = CopyFixedArray(FixedArray::cast(elements));
- }
- if (!maybe_elem->ToObject(&elem)) return maybe_elem;
+ if (site != NULL) {
+ AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+ reinterpret_cast<Address>(clone) + object_size);
+ InitializeAllocationMemento(alloc_memento, site);
}
- JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
- }
- // Update properties if necessary.
- if (properties->length() > 0) {
- Object* prop;
- { MaybeObject* maybe_prop = CopyFixedArray(properties);
- if (!maybe_prop->ToObject(&prop)) return maybe_prop;
- }
- JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
- }
- // Return the new clone.
- return clone;
-}
-
-
-MaybeObject* Heap::CopyJSObjectWithAllocationSite(
- JSObject* source,
- AllocationSite* site) {
- // Never used to copy functions. If functions need to be copied we
- // have to be careful to clear the literals array.
- SLOW_ASSERT(!source->IsJSFunction());
-
- // Make the clone.
- Map* map = source->map();
- int object_size = map->instance_size();
- Object* clone;
-
- ASSERT(AllocationSite::CanTrack(map->instance_type()));
- ASSERT(map->instance_type() == JS_ARRAY_TYPE);
- WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
-
- // If we're forced to always allocate, we use the general allocation
- // functions which may leave us with an object in old space.
- int adjusted_object_size = object_size;
- if (always_allocate()) {
- // We'll only track origin if we are certain to allocate in new space
- const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
- if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
- adjusted_object_size += AllocationMemento::kSize;
- }
-
- { MaybeObject* maybe_clone =
- AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
- if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- }
- Address clone_address = HeapObject::cast(clone)->address();
- CopyBlock(clone_address,
- source->address(),
- object_size);
- // Update write barrier for all fields that lie beyond the header.
- int write_barrier_offset = adjusted_object_size > object_size
- ? JSArray::kSize + AllocationMemento::kSize
- : JSObject::kHeaderSize;
- if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
- RecordWrites(clone_address,
- write_barrier_offset,
- (object_size - write_barrier_offset) / kPointerSize);
- }
-
- // Track allocation site information, if we failed to allocate it inline.
- if (InNewSpace(clone) &&
- adjusted_object_size == object_size) {
- MaybeObject* maybe_alloc_memento =
- AllocateStruct(ALLOCATION_MEMENTO_TYPE);
- AllocationMemento* alloc_memento;
- if (maybe_alloc_memento->To(&alloc_memento)) {
- alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
- }
- }
- } else {
- wb_mode = SKIP_WRITE_BARRIER;
- adjusted_object_size += AllocationMemento::kSize;
-
- { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
- if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- }
- SLOW_ASSERT(InNewSpace(clone));
- // Since we know the clone is allocated in new space, we can copy
- // the contents without worrying about updating the write barrier.
- CopyBlock(HeapObject::cast(clone)->address(),
- source->address(),
- object_size);
- }
-
- if (adjusted_object_size > object_size) {
- AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
- reinterpret_cast<Address>(clone) + object_size);
- alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
}
SLOW_ASSERT(
@@ -5197,7 +5010,7 @@ MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
MaybeObject* Heap::AllocateStringFromOneByte(Vector<const uint8_t> string,
- PretenureFlag pretenure) {
+ PretenureFlag pretenure) {
int length = string.length();
if (length == 1) {
return Heap::LookupSingleCharacterStringFromCode(string[0]);
@@ -5365,12 +5178,11 @@ MaybeObject* Heap::AllocateInternalizedStringImpl(
map = internalized_string_map();
size = SeqTwoByteString::SizeFor(chars);
}
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
// Allocate string.
Object* result;
- { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
- ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
- : old_data_space_->AllocateRaw(size);
+ { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
@@ -5409,16 +5221,10 @@ MaybeObject* Heap::AllocateRawOneByteString(int length,
}
int size = SeqOneByteString::SizeFor(length);
ASSERT(size <= SeqOneByteString::kMaxSize);
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- AllocationSpace retry_space = OLD_DATA_SPACE;
-
- if (size > Page::kMaxNonCodeHeapObjectSize) {
- // Allocate in large object space, retry space will be ignored.
- space = LO_SPACE;
- }
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
Object* result;
- { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+ { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
@@ -5439,16 +5245,10 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
}
int size = SeqTwoByteString::SizeFor(length);
ASSERT(size <= SeqTwoByteString::kMaxSize);
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- AllocationSpace retry_space = OLD_DATA_SPACE;
-
- if (size > Page::kMaxNonCodeHeapObjectSize) {
- // Allocate in large object space, retry space will be ignored.
- space = LO_SPACE;
- }
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
Object* result;
- { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+ { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
@@ -5473,24 +5273,6 @@ MaybeObject* Heap::AllocateJSArray(
}
-MaybeObject* Heap::AllocateJSArrayWithAllocationSite(
- ElementsKind elements_kind,
- Handle<AllocationSite> allocation_site) {
- Context* native_context = isolate()->context()->native_context();
- JSFunction* array_function = native_context->array_function();
- Map* map = array_function->initial_map();
- Object* maybe_map_array = native_context->js_array_maps();
- if (!maybe_map_array->IsUndefined()) {
- Object* maybe_transitioned_map =
- FixedArray::cast(maybe_map_array)->get(elements_kind);
- if (!maybe_transitioned_map->IsUndefined()) {
- map = Map::cast(maybe_transitioned_map);
- }
- }
- return AllocateJSObjectFromMapWithAllocationSite(map, allocation_site);
-}
-
-
MaybeObject* Heap::AllocateEmptyFixedArray() {
int size = FixedArray::SizeFor(0);
Object* result;
@@ -5511,25 +5293,10 @@ MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) {
}
-MaybeObject* Heap::AllocateRawFixedArray(int length) {
- if (length < 0 || length > FixedArray::kMaxLength) {
- return Failure::OutOfMemoryException(0xd);
- }
- ASSERT(length > 0);
- // Use the general function if we're forced to always allocate.
- if (always_allocate()) return AllocateFixedArray(length, TENURED);
- // Allocate the raw data for a fixed array.
- int size = FixedArray::SizeFor(length);
- return size <= Page::kMaxNonCodeHeapObjectSize
- ? new_space_.AllocateRaw(size)
- : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
-}
-
-
MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
int len = src->length();
Object* obj;
- { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
+ { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
if (InNewSpace(obj)) {
@@ -5569,21 +5336,24 @@ MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
}
-MaybeObject* Heap::AllocateFixedArray(int length) {
- ASSERT(length >= 0);
- if (length == 0) return empty_fixed_array();
- Object* result;
- { MaybeObject* maybe_result = AllocateRawFixedArray(length);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
+ Map* map) {
+ int int64_entries = src->count_of_int64_entries();
+ int ptr_entries = src->count_of_ptr_entries();
+ int int32_entries = src->count_of_int32_entries();
+ Object* obj;
+ { MaybeObject* maybe_obj =
+ AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- // Initialize header.
- FixedArray* array = reinterpret_cast<FixedArray*>(result);
- array->set_map_no_write_barrier(fixed_array_map());
- array->set_length(length);
- // Initialize body.
- ASSERT(!InNewSpace(undefined_value()));
- MemsetPointer(array->data_start(), undefined_value(), length);
- return result;
+ HeapObject* dst = HeapObject::cast(obj);
+ dst->set_map_no_write_barrier(map);
+ CopyBlock(
+ dst->address() + ConstantPoolArray::kLengthOffset,
+ src->address() + ConstantPoolArray::kLengthOffset,
+ ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries)
+ - ConstantPoolArray::kLengthOffset);
+ return obj;
}
@@ -5592,35 +5362,26 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
return Failure::OutOfMemoryException(0xe);
}
int size = FixedArray::SizeFor(length);
- AllocationSpace space =
- (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
- AllocationSpace retry_space = OLD_POINTER_SPACE;
-
- if (size > Page::kMaxNonCodeHeapObjectSize) {
- // Allocate in large object space, retry space will be ignored.
- space = LO_SPACE;
- }
+ AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
- return AllocateRaw(size, space, retry_space);
+ return AllocateRaw(size, space, OLD_POINTER_SPACE);
}
-MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
- Heap* heap,
- int length,
- PretenureFlag pretenure,
- Object* filler) {
+MaybeObject* Heap::AllocateFixedArrayWithFiller(int length,
+ PretenureFlag pretenure,
+ Object* filler) {
ASSERT(length >= 0);
- ASSERT(heap->empty_fixed_array()->IsFixedArray());
- if (length == 0) return heap->empty_fixed_array();
+ ASSERT(empty_fixed_array()->IsFixedArray());
+ if (length == 0) return empty_fixed_array();
- ASSERT(!heap->InNewSpace(filler));
+ ASSERT(!InNewSpace(filler));
Object* result;
- { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
+ { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map_no_write_barrier(heap->fixed_array_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map());
FixedArray* array = FixedArray::cast(result);
array->set_length(length);
MemsetPointer(array->data_start(), filler, length);
@@ -5629,19 +5390,13 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(this,
- length,
- pretenure,
- undefined_value());
+ return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
}
MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(this,
- length,
- pretenure,
- the_hole_value());
+ return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
}
@@ -5649,7 +5404,7 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
if (length == 0) return empty_fixed_array();
Object* obj;
- { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
+ { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
@@ -5719,24 +5474,52 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
return Failure::OutOfMemoryException(0xf);
}
int size = FixedDoubleArray::SizeFor(length);
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- AllocationSpace retry_space = OLD_DATA_SPACE;
-
#ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize;
#endif
+ AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
- if (size > Page::kMaxNonCodeHeapObjectSize) {
- // Allocate in large object space, retry space will be ignored.
- space = LO_SPACE;
+ HeapObject* object;
+ { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
+ if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
}
+ return EnsureDoubleAligned(this, object, size);
+}
+
+
+MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
+ int number_of_ptr_entries,
+ int number_of_int32_entries) {
+ ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 ||
+ number_of_int32_entries > 0);
+ int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
+ number_of_ptr_entries,
+ number_of_int32_entries);
+#ifndef V8_HOST_ARCH_64_BIT
+ size += kPointerSize;
+#endif
+ AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+
HeapObject* object;
- { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+ { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE);
if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
}
+ object = EnsureDoubleAligned(this, object, size);
+ HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map());
- return EnsureDoubleAligned(this, object, size);
+ ConstantPoolArray* constant_pool =
+ reinterpret_cast<ConstantPoolArray*>(object);
+ constant_pool->SetEntryCounts(number_of_int64_entries,
+ number_of_ptr_entries,
+ number_of_int32_entries);
+ MemsetPointer(
+ HeapObject::RawField(
+ constant_pool,
+ constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())),
+ undefined_value(),
+ number_of_ptr_entries);
+ return constant_pool;
}
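AllocateConstantPoolArray() sizes the array for its three typed sections (int64, tagged pointer and int32 entries), double-aligns it on 32-bit hosts, and pre-fills only the pointer section with undefined so the GC never scans uninitialized tagged slots. A hedged usage sketch; the entry counts are examples, not values from the patch:

// Hypothetical caller: a pool with 2 int64, 3 pointer and 1 int32 entry.
static MaybeObject* AllocateSmallConstantPool(Heap* heap) {
  return heap->AllocateConstantPoolArray(2, 3, 1);
}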
@@ -5775,12 +5558,22 @@ MaybeObject* Heap::AllocateSymbol() {
Symbol::cast(result)->set_hash_field(
Name::kIsNotArrayIndexMask | (hash << Name::kHashShift));
Symbol::cast(result)->set_name(undefined_value());
+ Symbol::cast(result)->set_flags(Smi::FromInt(0));
- ASSERT(result->IsSymbol());
+ ASSERT(!Symbol::cast(result)->is_private());
return result;
}
+MaybeObject* Heap::AllocatePrivateSymbol() {
+ MaybeObject* maybe = AllocateSymbol();
+ Symbol* symbol;
+ if (!maybe->To(&symbol)) return maybe;
+ symbol->set_is_private(true);
+ return symbol;
+}
+
+
MaybeObject* Heap::AllocateNativeContext() {
Object* result;
{ MaybeObject* maybe_result =
@@ -5936,8 +5729,7 @@ STRUCT_LIST(MAKE_CASE)
return Failure::InternalError();
}
int size = map->instance_size();
- AllocationSpace space =
- (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
+ AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
Object* result;
{ MaybeObject* maybe_result = Allocate(map, space);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -6820,7 +6612,15 @@ intptr_t Heap::PromotedSpaceSizeOfObjects() {
}
-intptr_t Heap::PromotedExternalMemorySize() {
+bool Heap::AdvanceSweepers(int step_size) {
+ ASSERT(isolate()->num_sweeper_threads() == 0);
+ bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
+ sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
+ return sweeping_complete;
+}
+
+
+int64_t Heap::PromotedExternalMemorySize() {
if (amount_of_external_allocated_memory_
<= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
return amount_of_external_allocated_memory_
@@ -6828,6 +6628,32 @@ intptr_t Heap::PromotedExternalMemorySize() {
}
+void Heap::EnableInlineAllocation() {
+ ASSERT(inline_allocation_disabled_);
+ inline_allocation_disabled_ = false;
+
+ // Update inline allocation limit for new space.
+ new_space()->UpdateInlineAllocationLimit(0);
+}
+
+
+void Heap::DisableInlineAllocation() {
+ ASSERT(!inline_allocation_disabled_);
+ inline_allocation_disabled_ = true;
+
+ // Update inline allocation limit for new space.
+ new_space()->UpdateInlineAllocationLimit(0);
+
+ // Update inline allocation limit for old spaces.
+ PagedSpaces spaces(this);
+ for (PagedSpace* space = spaces.next();
+ space != NULL;
+ space = spaces.next()) {
+ space->EmptyAllocationInfo();
+ }
+}
+
+
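EnableInlineAllocation() and DisableInlineAllocation() are strict toggles (the ASSERTs require alternating calls), and disabling also empties the allocation info of every paged space so generated code stops bump-pointer allocating there. A sketch of how a caller might pair them; the scope class is an assumption, not V8 API:

// Hypothetical RAII helper pairing the two calls.
class DisableInlineAllocationScope {
 public:
  explicit DisableInlineAllocationScope(Heap* heap) : heap_(heap) {
    heap_->DisableInlineAllocation();
  }
  ~DisableInlineAllocationScope() { heap_->EnableInlineAllocation(); }

 private:
  Heap* heap_;
};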
V8_DECLARE_ONCE(initialize_gc_once);
static void InitializeGCOnce() {
@@ -6940,9 +6766,6 @@ bool Heap::SetUp() {
store_buffer()->SetUp();
if (FLAG_concurrent_recompilation) relocation_mutex_ = new Mutex;
-#ifdef DEBUG
- relocation_mutex_locked_by_optimizer_thread_ = false;
-#endif // DEBUG
return true;
}
@@ -6959,6 +6782,7 @@ bool Heap::CreateHeapObjects() {
native_contexts_list_ = undefined_value();
array_buffers_list_ = undefined_value();
allocation_sites_list_ = undefined_value();
+ weak_object_to_code_table_ = undefined_value();
return true;
}
@@ -6987,6 +6811,8 @@ void Heap::TearDown() {
}
#endif
+ UpdateMaximumCommitted();
+
if (FLAG_print_cumulative_gc_stat) {
PrintF("\n");
PrintF("gc_count=%d ", gc_count_);
@@ -7001,6 +6827,31 @@ void Heap::TearDown() {
PrintF("\n\n");
}
+ if (FLAG_print_max_heap_committed) {
+ PrintF("\n");
+ PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ",
+ MaximumCommittedMemory());
+ PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
+ new_space_.MaximumCommittedMemory());
+ PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
+ old_data_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
+ old_pointer_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
+ old_pointer_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
+ code_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
+ map_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
+ cell_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ",
+ property_cell_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
+ lo_space_->MaximumCommittedMemory());
+ PrintF("\n\n");
+ }
+
TearDownArrayBuffers();
isolate_->global_handles()->TearDown();
@@ -7059,18 +6910,21 @@ void Heap::TearDown() {
isolate_->memory_allocator()->TearDown();
delete relocation_mutex_;
+ relocation_mutex_ = NULL;
}
-void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
+void Heap::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
+ GCType gc_type,
+ bool pass_isolate) {
ASSERT(callback != NULL);
- GCPrologueCallbackPair pair(callback, gc_type);
+ GCPrologueCallbackPair pair(callback, gc_type, pass_isolate);
ASSERT(!gc_prologue_callbacks_.Contains(pair));
return gc_prologue_callbacks_.Add(pair);
}
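The extra pass_isolate flag lets one list hold both callback generations; presumably the isolate-free public entry point registers with pass_isolate == false and the per-isolate one with true, which is what CallGCPrologueCallbacks() above undoes with its reinterpret_cast. A sketch of that wiring; the wrapper function and its parameters are assumptions:

// Assumed registration-side wiring (the api.cc side is not in this diff).
static void RegisterPrologueCallbacks(
    Heap* heap,
    v8::GCPrologueCallback legacy,                 // void(GCType, GCCallbackFlags)
    v8::Isolate::GCPrologueCallback with_isolate,  // void(Isolate*, GCType, ...)
    GCType gc_type) {
  heap->AddGCPrologueCallback(
      reinterpret_cast<v8::Isolate::GCPrologueCallback>(legacy),
      gc_type, false);
  heap->AddGCPrologueCallback(with_isolate, gc_type, true);
}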
-void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
+void Heap::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) {
ASSERT(callback != NULL);
for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
if (gc_prologue_callbacks_[i].callback == callback) {
@@ -7082,15 +6936,17 @@ void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
}
-void Heap::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
+void Heap::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
+ GCType gc_type,
+ bool pass_isolate) {
ASSERT(callback != NULL);
- GCEpilogueCallbackPair pair(callback, gc_type);
+ GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate);
ASSERT(!gc_epilogue_callbacks_.Contains(pair));
return gc_epilogue_callbacks_.Add(pair);
}
-void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
+void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) {
ASSERT(callback != NULL);
for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
if (gc_epilogue_callbacks_[i].callback == callback) {
@@ -7102,6 +6958,37 @@ void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
}
+MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj,
+ DependentCode* dep) {
+ ASSERT(!InNewSpace(obj));
+ ASSERT(!InNewSpace(dep));
+ MaybeObject* maybe_obj =
+ WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep);
+ WeakHashTable* table;
+ if (!maybe_obj->To(&table)) return maybe_obj;
+ if (ShouldZapGarbage() && weak_object_to_code_table_ != table) {
+ WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
+ }
+ set_weak_object_to_code_table(table);
+ ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj));
+ return weak_object_to_code_table_;
+}
+
+
+DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) {
+ Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
+ if (dep->IsDependentCode()) return DependentCode::cast(dep);
+ return DependentCode::cast(empty_fixed_array());
+}
+
+
+void Heap::EnsureWeakObjectToCodeTable() {
+ if (!weak_object_to_code_table()->IsHashTable()) {
+ set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16));
+ }
+}
+
+
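weak_object_to_code_table_ maps heap objects to the DependentCode that must be invalidated when they die; callers are expected to EnsureWeakObjectToCodeTable() before the first insertion, since the table starts out as undefined in CreateHeapObjects(). A minimal usage sketch built from the three functions above; the wrapper itself is hypothetical:

// Record that 'deps' depends on 'object' staying alive, then read it back.
static MaybeObject* RecordDependency(Heap* heap, Object* object,
                                     DependentCode* deps) {
  heap->EnsureWeakObjectToCodeTable();
  MaybeObject* maybe_table =
      heap->AddWeakObjectToCodeDependency(object, deps);
  if (maybe_table->IsFailure()) return maybe_table;
  ASSERT_EQ(deps, heap->LookupWeakObjectToCodeDependency(object));
  return maybe_table;
}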
#ifdef DEBUG
class PrintHandleVisitor: public ObjectVisitor {
@@ -7958,7 +7845,13 @@ void ExternalStringTable::CleanUp() {
void ExternalStringTable::TearDown() {
+ for (int i = 0; i < new_space_strings_.length(); ++i) {
+ heap_->FinalizeExternalString(ExternalString::cast(new_space_strings_[i]));
+ }
new_space_strings_.Free();
+ for (int i = 0; i < old_space_strings_.length(); ++i) {
+ heap_->FinalizeExternalString(ExternalString::cast(old_space_strings_[i]));
+ }
old_space_strings_.Free();
}
@@ -8084,21 +7977,23 @@ void Heap::CheckpointObjectStats() {
static_cast<int>(object_sizes_last_time_[index]));
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
#undef ADJUST_LAST_TIME_OBJECT_COUNT
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
+ index = \
+ FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
+ counters->count_of_CODE_AGE_##name()->Increment( \
+ static_cast<int>(object_counts_[index])); \
+ counters->count_of_CODE_AGE_##name()->Decrement( \
+ static_cast<int>(object_counts_last_time_[index])); \
+ counters->size_of_CODE_AGE_##name()->Increment( \
+ static_cast<int>(object_sizes_[index])); \
+ counters->size_of_CODE_AGE_##name()->Decrement( \
+ static_cast<int>(object_sizes_last_time_[index]));
+ CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
ClearObjectStats();
}
-
-Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) {
- if (FLAG_concurrent_recompilation) {
- heap_->relocation_mutex_->Lock();
-#ifdef DEBUG
- heap_->relocation_mutex_locked_by_optimizer_thread_ =
- heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
-#endif // DEBUG
- }
-}
-
} } // namespace v8::internal