Index: src/mark-compact.cc |
diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
index c455564b41a736d268db2e81a9259fec07b261bd..0aa119219adfaad70c22460973ca125b3aa21086 100644 |
--- a/src/mark-compact.cc |
+++ b/src/mark-compact.cc |
@@ -64,13 +64,13 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT |
abort_incremental_marking_(false), |
compacting_(false), |
was_marked_incrementally_(false), |
+ collect_maps_(FLAG_collect_maps), |
flush_monomorphic_ics_(false), |
tracer_(NULL), |
migration_slots_buffer_(NULL), |
heap_(NULL), |
code_flusher_(NULL), |
- encountered_weak_maps_(NULL), |
- marker_(this, this) { } |
+ encountered_weak_maps_(NULL) { } |
#ifdef DEBUG |
@@ -282,7 +282,7 @@ void MarkCompactCollector::CollectGarbage() { |
MarkLiveObjects(); |
ASSERT(heap_->incremental_marking()->IsStopped()); |
- if (FLAG_collect_maps) ClearNonLiveTransitions(); |
+ if (collect_maps_) ClearNonLiveTransitions(); |
ClearWeakMaps(); |
@@ -294,7 +294,7 @@ void MarkCompactCollector::CollectGarbage() { |
SweepSpaces(); |
- if (!FLAG_collect_maps) ReattachInitialMaps(); |
+ if (!collect_maps_) ReattachInitialMaps(); |
Finish(); |
@@ -658,6 +658,11 @@ void MarkCompactCollector::AbortCompaction() { |
void MarkCompactCollector::Prepare(GCTracer* tracer) { |
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
+ // Disable collection of maps if incremental marking is enabled. |
+ // Map collection algorithm relies on a special map transition tree traversal |
+ // order which is not implemented for incremental marking. |
+ collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_; |
+ |
// Monomorphic ICs are preserved when possible, but need to be flushed |
// when they might be keeping a Context alive, or when the heap is about |
// to be serialized. |
@@ -1793,11 +1798,11 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { |
heap_->ClearCacheOnMap(map); |
// When map collection is enabled we have to mark through map's transitions |
- // in a special way to make transition links weak. Only maps for subclasses |
- // of JSReceiver can have transitions. |
+ // in a special way to make transition links weak. |
+ // Only maps for subclasses of JSReceiver can have transitions. |
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
- if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
- marker_.MarkMapContents(map); |
+ if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
+ MarkMapContents(map); |
} else { |
marking_deque_.PushBlack(map); |
} |
@@ -1807,86 +1812,85 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { |
} |
-// Force instantiation of template instances. |
-template void Marker<IncrementalMarking>::MarkMapContents(Map* map); |
-template void Marker<MarkCompactCollector>::MarkMapContents(Map* map); |
- |
- |
-template <class T> |
-void Marker<T>::MarkMapContents(Map* map) { |
+void MarkCompactCollector::MarkMapContents(Map* map) { |
// Mark prototype transitions array but don't push it into marking stack. |
// This will make references from it weak. We will clean dead prototype |
- // transitions in ClearNonLiveTransitions. |
- Object** proto_trans_slot = |
- HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset); |
- HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot); |
- if (prototype_transitions->IsFixedArray()) { |
- mark_compact_collector()->RecordSlot(proto_trans_slot, |
- proto_trans_slot, |
- prototype_transitions); |
+ // transitions in ClearNonLiveTransitions. But make sure that back pointers |
+ // stored inside prototype transitions arrays are marked. |
+ Object* raw_proto_transitions = map->unchecked_prototype_transitions(); |
+ if (raw_proto_transitions->IsFixedArray()) { |
+ FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions); |
MarkBit mark = Marking::MarkBitFrom(prototype_transitions); |
if (!mark.Get()) { |
mark.Set(); |
MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), |
prototype_transitions->Size()); |
+ MarkObjectAndPush(HeapObject::cast( |
+ prototype_transitions->get(Map::kProtoTransitionBackPointerOffset))); |
} |
} |
- // Make sure that the back pointer stored either in the map itself or inside |
- // its prototype transitions array is marked. Treat pointers in the descriptor |
- // array as weak and also mark that array to prevent visiting it later. |
- base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer())); |
- |
- Object** descriptor_array_slot = |
+ Object** raw_descriptor_array_slot = |
HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); |
- Object* descriptor_array = *descriptor_array_slot; |
- if (!descriptor_array->IsSmi()) { |
- MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array)); |
- } |
- |
- // Mark the Object* fields of the Map. Since the descriptor array has been |
- // marked already, it is fine that one of these fields contains a pointer |
- // to it. But make sure to skip back pointer and prototype transitions. |
- STATIC_ASSERT(Map::kPointerFieldsEndOffset == |
- Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize); |
- Object** start_slot = HeapObject::RawField( |
- map, Map::kPointerFieldsBeginOffset); |
- Object** end_slot = HeapObject::RawField( |
- map, Map::kPrototypeTransitionsOrBackPointerOffset); |
- for (Object** slot = start_slot; slot < end_slot; slot++) { |
- Object* obj = *slot; |
- if (!obj->NonFailureIsHeapObject()) continue; |
- mark_compact_collector()->RecordSlot(start_slot, slot, obj); |
- base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj)); |
+ Object* raw_descriptor_array = *raw_descriptor_array_slot; |
+ if (!raw_descriptor_array->IsSmi()) { |
+ MarkDescriptorArray( |
+ reinterpret_cast<DescriptorArray*>(raw_descriptor_array)); |
} |
+ |
+ // Mark the Object* fields of the Map. |
+ // Since the descriptor array has been marked already, it is fine |
+ // that one of these fields contains a pointer to it. |
+ Object** start_slot = HeapObject::RawField(map, |
+ Map::kPointerFieldsBeginOffset); |
+ |
+ Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); |
+ |
+ StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot); |
} |
-template <class T> |
-void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) { |
+void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors, |
+ int offset) { |
+ Object** slot = HeapObject::RawField(accessors, offset); |
+ HeapObject* accessor = HeapObject::cast(*slot); |
+ if (accessor->IsMap()) return; |
+ RecordSlot(slot, slot, accessor); |
+ MarkObjectAndPush(accessor); |
+} |
+ |
+ |
+void MarkCompactCollector::MarkDescriptorArray( |
+ DescriptorArray* descriptors) { |
+ MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors); |
+ if (descriptors_mark.Get()) return; |
// Empty descriptor array is marked as a root before any maps are marked. |
- ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array()); |
+ ASSERT(descriptors != heap()->empty_descriptor_array()); |
+ SetMark(descriptors, descriptors_mark); |
- // The DescriptorArray contains a pointer to its contents array, but the |
- // contents array will be marked black and hence not be visited again. |
- if (!base_marker()->MarkObjectAndPush(descriptors)) return; |
- FixedArray* contents = FixedArray::cast( |
+ FixedArray* contents = reinterpret_cast<FixedArray*>( |
descriptors->get(DescriptorArray::kContentArrayIndex)); |
+ ASSERT(contents->IsHeapObject()); |
+ ASSERT(!IsMarked(contents)); |
+ ASSERT(contents->IsFixedArray()); |
ASSERT(contents->length() >= 2); |
- ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents))); |
- base_marker()->MarkObjectWithoutPush(contents); |
- |
- // Contents contains (value, details) pairs. If the descriptor contains a |
- // transition (value is a Map), we don't mark the value as live. It might |
- // be set to the NULL_DESCRIPTOR in ClearNonLiveTransitions later. |
+ MarkBit contents_mark = Marking::MarkBitFrom(contents); |
+ SetMark(contents, contents_mark); |
+ // Contents contains (value, details) pairs. If the details say that the type |
+ // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, |
+  // ELEMENTS_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as |
+  // live. Only for MAP_TRANSITION and CONSTANT_TRANSITION is the value an |
+  // Object* (a Map*). |
for (int i = 0; i < contents->length(); i += 2) { |
+ // If the pair (value, details) at index i, i+1 is not |
+ // a transition or null descriptor, mark the value. |
PropertyDetails details(Smi::cast(contents->get(i + 1))); |
Object** slot = contents->data_start() + i; |
if (!(*slot)->IsHeapObject()) continue; |
HeapObject* value = HeapObject::cast(*slot); |
- mark_compact_collector()->RecordSlot(slot, slot, *slot); |
+ RecordSlot(slot, slot, *slot); |
switch (details.type()) { |
case NORMAL: |
@@ -1894,22 +1898,21 @@ void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) { |
case CONSTANT_FUNCTION: |
case HANDLER: |
case INTERCEPTOR: |
- base_marker()->MarkObjectAndPush(value); |
+ MarkObjectAndPush(value); |
break; |
case CALLBACKS: |
if (!value->IsAccessorPair()) { |
- base_marker()->MarkObjectAndPush(value); |
- } else if (base_marker()->MarkObjectWithoutPush(value)) { |
- AccessorPair* accessors = AccessorPair::cast(value); |
- MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset); |
- MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset); |
+ MarkObjectAndPush(value); |
+ } else if (!MarkObjectWithoutPush(value)) { |
+ MarkAccessorPairSlot(value, AccessorPair::kGetterOffset); |
+ MarkAccessorPairSlot(value, AccessorPair::kSetterOffset); |
} |
break; |
case ELEMENTS_TRANSITION: |
// For maps with multiple elements transitions, the transition maps are |
// stored in a FixedArray. Keep the fixed array alive but not the maps |
// that it refers to. |
- if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value); |
+ if (value->IsFixedArray()) MarkObjectWithoutPush(value); |
break; |
case MAP_TRANSITION: |
case CONSTANT_TRANSITION: |
@@ -1917,16 +1920,9 @@ void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) { |
break; |
} |
} |
-} |
- |
- |
-template <class T> |
-void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) { |
- Object** slot = HeapObject::RawField(accessors, offset); |
- HeapObject* accessor = HeapObject::cast(*slot); |
- if (accessor->IsMap()) return; |
- mark_compact_collector()->RecordSlot(slot, slot, accessor); |
- base_marker()->MarkObjectAndPush(accessor); |
+  // The descriptor array |descriptors| contains a pointer to its contents |
+  // array, but the contents array has already been marked above. |
+ marking_deque_.PushBlack(descriptors); |
} |