author      Sergio Ahumada <sergio.ahumada@digia.com>    2013-03-19 09:25:14 +0100
committer   Sergio Ahumada <sergio.ahumada@digia.com>    2013-03-19 09:56:31 +0100
commit      6313e1fe4c27755adde87e62db1c2f9fac534ae4 (patch)
tree        c57bb29f65e02fbfcc07895a8cc2903fff9300ba /src/3rdparty/v8/src/mark-compact.h
parent      b5a49a260d03249c386f1b63c249089383dd81fa (diff)
parent      cac65e7a222b848a735a974b0aeb43209b0cfa18 (diff)
Merge branch 'dev' into stable
This starts Qt 5.1 release cycle

Change-Id: I892bbc73c276842894a720f761ce31ad1b015672
Diffstat (limited to 'src/3rdparty/v8/src/mark-compact.h')
-rw-r--r--  src/3rdparty/v8/src/mark-compact.h  149
1 file changed, 109 insertions(+), 40 deletions(-)
diff --git a/src/3rdparty/v8/src/mark-compact.h b/src/3rdparty/v8/src/mark-compact.h
index dbc2869..0a4c1ea 100644
--- a/src/3rdparty/v8/src/mark-compact.h
+++ b/src/3rdparty/v8/src/mark-compact.h
@@ -304,6 +304,26 @@ class SlotsBuffer {
NUMBER_OF_SLOT_TYPES
};
+ static const char* SlotTypeToString(SlotType type) {
+ switch (type) {
+ case EMBEDDED_OBJECT_SLOT:
+ return "EMBEDDED_OBJECT_SLOT";
+ case RELOCATED_CODE_OBJECT:
+ return "RELOCATED_CODE_OBJECT";
+ case CODE_TARGET_SLOT:
+ return "CODE_TARGET_SLOT";
+ case CODE_ENTRY_SLOT:
+ return "CODE_ENTRY_SLOT";
+ case DEBUG_TARGET_SLOT:
+ return "DEBUG_TARGET_SLOT";
+ case JS_RETURN_SLOT:
+ return "JS_RETURN_SLOT";
+ case NUMBER_OF_SLOT_TYPES:
+ return "NUMBER_OF_SLOT_TYPES";
+ }
+ return "UNKNOWN SlotType";
+ }
+
void UpdateSlots(Heap* heap);
void UpdateSlotsWithFilter(Heap* heap);
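The hunk above adds a SlotTypeToString helper that maps each SlotType enum value to its name, which is convenient when tracing recorded slots. A possible debug-only usage, assuming the helper is publicly accessible (the TraceRecordedSlot function below is an illustrative sketch, not part of this patch):

    // Hypothetical debug helper, not part of the patch: print the type of a
    // slot recorded in a SlotsBuffer using the new SlotTypeToString().
    static void TraceRecordedSlot(SlotsBuffer::SlotType type, Address slot) {
      PrintF("recorded slot %p of type %s\n",
             reinterpret_cast<void*>(slot),
             SlotsBuffer::SlotTypeToString(type));
    }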
@@ -383,31 +403,96 @@ class SlotsBuffer {
};
-// -------------------------------------------------------------------------
-// Marker shared between incremental and non-incremental marking
-template<class BaseMarker> class Marker {
+// CodeFlusher collects candidates for code flushing during marking and
+// processes those candidates after marking has completed in order to
+// reset those functions referencing code objects that would otherwise
+// be unreachable. Code objects can be referenced in two ways:
+// - SharedFunctionInfo references unoptimized code.
+// - JSFunction references either unoptimized or optimized code.
+// We are not allowed to flush unoptimized code for functions that got
+// optimized or inlined into optimized code, because we might bailout
+// into the unoptimized code again during deoptimization.
+class CodeFlusher {
public:
- Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
- : base_marker_(base_marker),
- mark_compact_collector_(mark_compact_collector) {}
+ explicit CodeFlusher(Isolate* isolate)
+ : isolate_(isolate),
+ jsfunction_candidates_head_(NULL),
+ shared_function_info_candidates_head_(NULL) {}
+
+ void AddCandidate(SharedFunctionInfo* shared_info) {
+ if (GetNextCandidate(shared_info) == NULL) {
+ SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+ shared_function_info_candidates_head_ = shared_info;
+ }
+ }
- // Mark pointers in a Map and its DescriptorArray together, possibly
- // treating transitions or back pointers weak.
- void MarkMapContents(Map* map);
- void MarkDescriptorArray(DescriptorArray* descriptors);
- void MarkAccessorPairSlot(AccessorPair* accessors, int offset);
+ void AddCandidate(JSFunction* function) {
+ ASSERT(function->code() == function->shared()->code());
+ if (GetNextCandidate(function)->IsUndefined()) {
+ SetNextCandidate(function, jsfunction_candidates_head_);
+ jsfunction_candidates_head_ = function;
+ }
+ }
+
+ void EvictCandidate(JSFunction* function);
+
+ void ProcessCandidates() {
+ ProcessSharedFunctionInfoCandidates();
+ ProcessJSFunctionCandidates();
+ }
+
+ void EvictAllCandidates() {
+ EvictJSFunctionCandidates();
+ EvictSharedFunctionInfoCandidates();
+ }
+
+ void IteratePointersToFromSpace(ObjectVisitor* v);
private:
- BaseMarker* base_marker() {
- return base_marker_;
+ void ProcessJSFunctionCandidates();
+ void ProcessSharedFunctionInfoCandidates();
+ void EvictJSFunctionCandidates();
+ void EvictSharedFunctionInfoCandidates();
+
+ static JSFunction** GetNextCandidateSlot(JSFunction* candidate) {
+ return reinterpret_cast<JSFunction**>(
+ HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
+ }
+
+ static JSFunction* GetNextCandidate(JSFunction* candidate) {
+ Object* next_candidate = candidate->next_function_link();
+ return reinterpret_cast<JSFunction*>(next_candidate);
+ }
+
+ static void SetNextCandidate(JSFunction* candidate,
+ JSFunction* next_candidate) {
+ candidate->set_next_function_link(next_candidate);
}
- MarkCompactCollector* mark_compact_collector() {
- return mark_compact_collector_;
+ static void ClearNextCandidate(JSFunction* candidate, Object* undefined) {
+ ASSERT(undefined->IsUndefined());
+ candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
}
- BaseMarker* base_marker_;
- MarkCompactCollector* mark_compact_collector_;
+ static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
+ Object* next_candidate = candidate->code()->gc_metadata();
+ return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
+ }
+
+ static void SetNextCandidate(SharedFunctionInfo* candidate,
+ SharedFunctionInfo* next_candidate) {
+ candidate->code()->set_gc_metadata(next_candidate);
+ }
+
+ static void ClearNextCandidate(SharedFunctionInfo* candidate) {
+ candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
+ }
+
+ Isolate* isolate_;
+ JSFunction* jsfunction_candidates_head_;
+ SharedFunctionInfo* shared_function_info_candidates_head_;
+
+ DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
};
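As the comment at the top of the class describes, CodeFlusher collects flushing candidates during marking and processes them afterwards. The GetNextCandidate/SetNextCandidate/ClearNextCandidate helpers show that the candidate lists are threaded through fields of the candidates themselves (JSFunction::next_function_link for functions, the code object's gc_metadata for SharedFunctionInfo), so building the lists during GC requires no extra allocation. A minimal standalone sketch of that intrusive-list pattern, with all names (Candidate, CandidateList) invented for illustration:

    // Standalone illustration only, not V8 code: an intrusive singly linked
    // candidate list where each element stores its own link, so enqueueing
    // during GC needs no allocation.
    #include <cstddef>

    struct Candidate {
      Candidate* next_candidate;  // plays the role of next_function_link /
                                  // code()->gc_metadata in CodeFlusher
      bool flushed;
      Candidate() : next_candidate(NULL), flushed(false) {}
    };

    class CandidateList {
     public:
      CandidateList() : head_(NULL) {}

      // Mirrors AddCandidate(): link the element only if not already linked.
      void Add(Candidate* c) {
        if (c->next_candidate == NULL && c != head_) {
          c->next_candidate = head_;
          head_ = c;
        }
      }

      // Mirrors ProcessCandidates(): walk the list once after marking,
      // clearing each link as we go (cf. ClearNextCandidate()).
      void ProcessAll() {
        Candidate* c = head_;
        while (c != NULL) {
          Candidate* next = c->next_candidate;
          c->next_candidate = NULL;  // unlink
          c->flushed = true;         // stand-in for resetting the code
          c = next;
        }
        head_ = NULL;
      }

     private:
      Candidate* head_;
    };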
@@ -505,7 +590,7 @@ class MarkCompactCollector {
PRECISE
};
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
@@ -553,6 +638,7 @@ class MarkCompactCollector {
void RecordRelocSlot(RelocInfo* rinfo, Object* target);
void RecordCodeEntrySlot(Address slot, Code* target);
+ void RecordCodeTargetPatch(Address pc, Code* target);
INLINE(void RecordSlot(Object** anchor_slot, Object** slot, Object* object));
@@ -574,6 +660,8 @@ class MarkCompactCollector {
bool is_compacting() const { return compacting_; }
+ MarkingParity marking_parity() { return marking_parity_; }
+
private:
MarkCompactCollector();
~MarkCompactCollector();
@@ -606,14 +694,14 @@ class MarkCompactCollector {
bool abort_incremental_marking_;
+ MarkingParity marking_parity_;
+
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
bool compacting_;
bool was_marked_incrementally_;
- bool flush_monomorphic_ics_;
-
// A pointer to the current stack-allocated GC tracer object during a full
// collection (NULL before and after).
GCTracer* tracer_;
@@ -636,15 +724,9 @@ class MarkCompactCollector {
friend class RootMarkingVisitor;
friend class MarkingVisitor;
- friend class StaticMarkingVisitor;
+ friend class MarkCompactMarkingVisitor;
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
- friend class Marker<IncrementalMarking>;
- friend class Marker<MarkCompactCollector>;
-
- // Mark non-optimize code for functions inlined into the given optimized
- // code. This will prevent it from being flushed.
- void MarkInlinedFunctionsCode(Code* code);
// Mark code objects that are active on the stack to prevent them
// from being flushed.
@@ -658,25 +740,13 @@ class MarkCompactCollector {
void AfterMarking();
// Marks the object black and pushes it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for non-incremental marking only.
- INLINE(bool MarkObjectAndPush(HeapObject* obj));
-
- // Marks the object black and pushes it on the marking stack.
// This is for non-incremental marking only.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
- // Marks the object black without pushing it on the marking stack.
- // Returns true if object needed marking and false otherwise.
- // This is for non-incremental marking only.
- INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
-
// Marks the object black assuming that it is not yet marked.
// This is for non-incremental marking only.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
- void ProcessNewlyMarkedObject(HeapObject* obj);
-
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
@@ -779,7 +849,6 @@ class MarkCompactCollector {
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
- Marker<MarkCompactCollector> marker_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;