Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 0aa119219adfaad70c22460973ca125b3aa21086..3eff6bbc56a51eb9a675d951b51c3ea71af5dbfe 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -64,7 +64,7 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
abort_incremental_marking_(false),
compacting_(false),
was_marked_incrementally_(false),
- collect_maps_(FLAG_collect_maps),
+ clear_map_transitions_(true),
flush_monomorphic_ics_(false),
tracer_(NULL),
migration_slots_buffer_(NULL),
@@ -282,7 +282,7 @@ void MarkCompactCollector::CollectGarbage() {
MarkLiveObjects();
ASSERT(heap_->incremental_marking()->IsStopped());
- if (collect_maps_) ClearNonLiveTransitions();
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
ClearWeakMaps();
@@ -294,7 +294,7 @@ void MarkCompactCollector::CollectGarbage() {
SweepSpaces();
- if (!collect_maps_) ReattachInitialMaps();
+ if (!FLAG_collect_maps) ReattachInitialMaps();
Finish();
@@ -658,10 +658,10 @@ void MarkCompactCollector::AbortCompaction() {
void MarkCompactCollector::Prepare(GCTracer* tracer) {
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
- // Disable collection of maps if incremental marking is enabled.
- // Map collection algorithm relies on a special map transition tree traversal
- // order which is not implemented for incremental marking.
- collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_;
+ // TODO(1465): Implement heuristic to switch between clearing map transitions
+ // by marking strongly "up the tree" and collecting whole transition trees
+ // which requires marking strongly "down the tree".
+ clear_map_transitions_ = true;
// Monomorphic ICs are preserved when possible, but need to be flushed
// when they might be keeping a Context alive, or when the heap is about
@@ -1798,11 +1798,22 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's transitions
- // in a special way to make transition links weak.
- // Only maps for subclasses of JSReceiver can have transitions.
+ // in a special way to make transition links weak. Only maps for subclasses
+ // of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- MarkMapContents(map);
+ if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ MarkWeakMapContents(map);
+
+ // Mark the Object* fields of the Map. Since the descriptor array has been
+ // marked already, it is fine that one of these fields contains a pointer
+ // to it. But make sure to skip back pointer and prototype transitions.
+ STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
+ Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
+ Object** start_slot = HeapObject::RawField(
+ map, Map::kPointerFieldsBeginOffset);
+ Object** end_slot = HeapObject::RawField(
+ map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
} else {
marking_deque_.PushBlack(map);
}
@@ -1812,41 +1823,37 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
}
-void MarkCompactCollector::MarkMapContents(Map* map) {
+void MarkCompactCollector::MarkWeakMapContents(Map* map) {
// Mark prototype transitions array but don't push it into marking stack.
// This will make references from it weak. We will clean dead prototype
- // transitions in ClearNonLiveTransitions. But make sure that back pointers
- // stored inside prototype transitions arrays are marked.
- Object* raw_proto_transitions = map->unchecked_prototype_transitions();
- if (raw_proto_transitions->IsFixedArray()) {
- FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions);
+ // transitions in ClearNonLiveTransitions.
+ Object** proto_trans_slot =
+ HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
+ if (prototype_transitions->IsFixedArray()) {
+ RecordSlot(proto_trans_slot, proto_trans_slot, prototype_transitions);
MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
if (!mark.Get()) {
mark.Set();
MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
prototype_transitions->Size());
- MarkObjectAndPush(HeapObject::cast(
- prototype_transitions->get(Map::kProtoTransitionBackPointerOffset)));
}
}
- Object** raw_descriptor_array_slot =
- HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
- Object* raw_descriptor_array = *raw_descriptor_array_slot;
- if (!raw_descriptor_array->IsSmi()) {
- MarkDescriptorArray(
- reinterpret_cast<DescriptorArray*>(raw_descriptor_array));
- }
-
- // Mark the Object* fields of the Map.
- // Since the descriptor array has been marked already, it is fine
- // that one of these fields contains a pointer to it.
- Object** start_slot = HeapObject::RawField(map,
- Map::kPointerFieldsBeginOffset);
-
- Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
- StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
+ // Make sure that the back pointer stored either in the map itself or inside
+ // its prototype transitions array is marked when clearing map transitions.
+ // Treat pointers in the descriptor array as weak and also mark that array to
+ // prevent visiting it later.
+ if (clear_map_transitions_) {
+ MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
+ Object** descriptor_array_slot =
+ HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
+ Object* descriptor_array = *descriptor_array_slot;
+ if (!descriptor_array->IsSmi()) {
+ MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
+ }
+ }
}
@@ -2526,6 +2533,7 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
MarkBit map_mark) {
+ if (!clear_map_transitions_) return;
Object* potential_parent = map->GetBackPointer();
if (!potential_parent->IsMap()) return;
Map* parent = Map::cast(potential_parent);