Chromium Code Reviews

Side by Side Diff: src/mark-compact.cc

Issue 10386046: Implement map collection for incremental marking. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 7 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 46 matching lines...)
57 57
58 MarkCompactCollector::MarkCompactCollector() : // NOLINT 58 MarkCompactCollector::MarkCompactCollector() : // NOLINT
59 #ifdef DEBUG 59 #ifdef DEBUG
60 state_(IDLE), 60 state_(IDLE),
61 #endif 61 #endif
62 sweep_precisely_(false), 62 sweep_precisely_(false),
63 reduce_memory_footprint_(false), 63 reduce_memory_footprint_(false),
64 abort_incremental_marking_(false), 64 abort_incremental_marking_(false),
65 compacting_(false), 65 compacting_(false),
66 was_marked_incrementally_(false), 66 was_marked_incrementally_(false),
67 collect_maps_(FLAG_collect_maps), 67 clear_map_transitions_(true),
68 flush_monomorphic_ics_(false), 68 flush_monomorphic_ics_(false),
69 tracer_(NULL), 69 tracer_(NULL),
70 migration_slots_buffer_(NULL), 70 migration_slots_buffer_(NULL),
71 heap_(NULL), 71 heap_(NULL),
72 code_flusher_(NULL), 72 code_flusher_(NULL),
73 encountered_weak_maps_(NULL) { } 73 encountered_weak_maps_(NULL) { }
74 74
75 75
76 #ifdef DEBUG 76 #ifdef DEBUG
77 class VerifyMarkingVisitor: public ObjectVisitor { 77 class VerifyMarkingVisitor: public ObjectVisitor {
(...skipping 197 matching lines...)
275 275
276 void MarkCompactCollector::CollectGarbage() { 276 void MarkCompactCollector::CollectGarbage() {
277 // Make sure that Prepare() has been called. The individual steps below will 277 // Make sure that Prepare() has been called. The individual steps below will
278 // update the state as they proceed. 278 // update the state as they proceed.
279 ASSERT(state_ == PREPARE_GC); 279 ASSERT(state_ == PREPARE_GC);
280 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); 280 ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
281 281
282 MarkLiveObjects(); 282 MarkLiveObjects();
283 ASSERT(heap_->incremental_marking()->IsStopped()); 283 ASSERT(heap_->incremental_marking()->IsStopped());
284 284
285 if (collect_maps_) ClearNonLiveTransitions(); 285 if (FLAG_collect_maps) ClearNonLiveTransitions();
286 286
287 ClearWeakMaps(); 287 ClearWeakMaps();
288 288
289 #ifdef DEBUG 289 #ifdef DEBUG
290 if (FLAG_verify_heap) { 290 if (FLAG_verify_heap) {
291 VerifyMarking(heap_); 291 VerifyMarking(heap_);
292 } 292 }
293 #endif 293 #endif
294 294
295 SweepSpaces(); 295 SweepSpaces();
296 296
297 if (!collect_maps_) ReattachInitialMaps(); 297 if (!FLAG_collect_maps) ReattachInitialMaps();
298 298
299 Finish(); 299 Finish();
300 300
301 tracer_ = NULL; 301 tracer_ = NULL;
302 } 302 }
303 303
304 304
305 #ifdef DEBUG 305 #ifdef DEBUG
306 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { 306 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
307 PageIterator it(space); 307 PageIterator it(space);
(...skipping 343 matching lines...)
651 evacuation_candidates_.Rewind(0); 651 evacuation_candidates_.Rewind(0);
652 invalidated_code_.Rewind(0); 652 invalidated_code_.Rewind(0);
653 } 653 }
654 ASSERT_EQ(0, evacuation_candidates_.length()); 654 ASSERT_EQ(0, evacuation_candidates_.length());
655 } 655 }
656 656
657 657
658 void MarkCompactCollector::Prepare(GCTracer* tracer) { 658 void MarkCompactCollector::Prepare(GCTracer* tracer) {
659 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); 659 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
660 660
661 // Disable collection of maps if incremental marking is enabled. 661 // TODO(1465): Implement heuristic to switch between clearing map transitions
662 // Map collection algorithm relies on a special map transition tree traversal 662 // by marking strongly "up the tree" and collecting whole transition trees
663 // order which is not implemented for incremental marking. 663 // which requires marking strongly "down the tree".
664 collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_; 664 clear_map_transitions_ = true;
665 665
666 // Monomorphic ICs are preserved when possible, but need to be flushed 666 // Monomorphic ICs are preserved when possible, but need to be flushed
667 // when they might be keeping a Context alive, or when the heap is about 667 // when they might be keeping a Context alive, or when the heap is about
668 // to be serialized. 668 // to be serialized.
669 flush_monomorphic_ics_ = 669 flush_monomorphic_ics_ =
670 heap()->isolate()->context_exit_happened() || Serializer::enabled(); 670 heap()->isolate()->context_exit_happened() || Serializer::enabled();
671 671
672 // Rather than passing the tracer around we stash it in a static member 672 // Rather than passing the tracer around we stash it in a static member
673 // variable. 673 // variable.
674 tracer_ = tracer; 674 tracer_ = tracer;
(...skipping 1116 matching lines...)
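A minimal sketch of the two strategies the TODO(1465) in Prepare() contrasts, using an assumed Node type rather than V8's Map. Clearing map transitions marks strongly "up the tree" via back pointers, leaving parent-to-child transition links weak so dead branches can be pruned individually; collecting whole transition trees would instead mark strongly "down the tree" through the transition links, so a tree lives or dies as a unit.

#include <vector>

struct Node {
  bool marked = false;
  Node* back_pointer = nullptr;    // child -> parent link
  std::vector<Node*> transitions;  // parent -> child links
};

// "Up the tree": mark a live map and its back-pointer chain strongly;
// transition links are never followed here, so they stay weak.
void MarkUpTheTree(Node* n) {
  for (; n != nullptr && !n->marked; n = n->back_pointer) n->marked = true;
}

// "Down the tree": mark a map and everything reachable through its
// transitions strongly, keeping the whole transition tree alive together.
void MarkDownTheTree(Node* n) {
  if (n->marked) return;
  n->marked = true;
  for (Node* child : n->transitions) MarkDownTheTree(child);
}

int main() {
  Node root, a, b;
  a.back_pointer = &root;
  b.back_pointer = &root;
  root.transitions = {&a, &b};
  MarkUpTheTree(&a);  // marks a and root; b stays unmarked and prunable
  return 0;
}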
1791 1791
1792 1792
1793 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { 1793 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
1794 ASSERT(IsMarked(object)); 1794 ASSERT(IsMarked(object));
1795 ASSERT(HEAP->Contains(object)); 1795 ASSERT(HEAP->Contains(object));
1796 if (object->IsMap()) { 1796 if (object->IsMap()) {
1797 Map* map = Map::cast(object); 1797 Map* map = Map::cast(object);
1798 heap_->ClearCacheOnMap(map); 1798 heap_->ClearCacheOnMap(map);
1799 1799
1800 // When map collection is enabled we have to mark through map's transitions 1800 // When map collection is enabled we have to mark through map's transitions
1801 // in a special way to make transition links weak. 1801 // in a special way to make transition links weak. Only maps for subclasses
1802 // Only maps for subclasses of JSReceiver can have transitions. 1802 // of JSReceiver can have transitions.
1803 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); 1803 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1804 if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { 1804 if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
1805 MarkMapContents(map); 1805 MarkWeakMapContents(map);
1806
1807 // Mark the Object* fields of the Map. Since the descriptor array has been
1808 // marked already, it is fine that one of these fields contains a pointer
1809 // to it. But make sure to skip back pointer and prototype transitions.
1810 STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
1811 Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
1812 Object** start_slot = HeapObject::RawField(
1813 map, Map::kPointerFieldsBeginOffset);
1814 Object** end_slot = HeapObject::RawField(
1815 map, Map::kPrototypeTransitionsOrBackPointerOffset);
1816 StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
1806 } else { 1817 } else {
1807 marking_deque_.PushBlack(map); 1818 marking_deque_.PushBlack(map);
1808 } 1819 }
1809 } else { 1820 } else {
1810 marking_deque_.PushBlack(object); 1821 marking_deque_.PushBlack(object);
1811 } 1822 }
1812 } 1823 }
1813 1824
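The STATIC_ASSERT added in ProcessNewlyMarkedObject pins the combined back-pointer/prototype-transitions slot as the very last pointer field, so the strong visit covers exactly [kPointerFieldsBeginOffset, kPrototypeTransitionsOrBackPointerOffset) and the weak slot is never traced by the generic visitor. A compilable sketch of that half-open range trick, with hypothetical offsets standing in for V8's real map layout:

#include <cstdio>

constexpr int kPointerFieldsBegin = 0;
constexpr int kPrototypeTransitionsOrBackPointer = 3;  // last pointer field
constexpr int kPointerFieldsEnd = kPrototypeTransitionsOrBackPointer + 1;

// Visit only [begin, end); the trailing weak slot is excluded by choosing
// end == kPrototypeTransitionsOrBackPointer, mirroring the patch.
void VisitStrongPointers(void* fields[], int begin, int end) {
  for (int i = begin; i < end; ++i) {
    std::printf("strongly visit field %d (%p)\n", i, fields[i]);
  }
}

int main() {
  void* map_fields[kPointerFieldsEnd] = {};
  VisitStrongPointers(map_fields, kPointerFieldsBegin,
                      kPrototypeTransitionsOrBackPointer);
}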
1814 1825
1815 void MarkCompactCollector::MarkMapContents(Map* map) { 1826 void MarkCompactCollector::MarkWeakMapContents(Map* map) {
1816 // Mark prototype transitions array but don't push it into marking stack. 1827 // Mark prototype transitions array but don't push it into marking stack.
1817 // This will make references from it weak. We will clean dead prototype 1828 // This will make references from it weak. We will clean dead prototype
1818 // transitions in ClearNonLiveTransitions. But make sure that back pointers 1829 // transitions in ClearNonLiveTransitions.
1819 // stored inside prototype transitions arrays are marked. 1830 Object** proto_trans_slot =
1820 Object* raw_proto_transitions = map->unchecked_prototype_transitions(); 1831 HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
1821 if (raw_proto_transitions->IsFixedArray()) { 1832 HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
1822 FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions); 1833 if (prototype_transitions->IsFixedArray()) {
1834 RecordSlot(proto_trans_slot, proto_trans_slot, prototype_transitions);
1823 MarkBit mark = Marking::MarkBitFrom(prototype_transitions); 1835 MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
1824 if (!mark.Get()) { 1836 if (!mark.Get()) {
1825 mark.Set(); 1837 mark.Set();
1826 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), 1838 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
1827 prototype_transitions->Size()); 1839 prototype_transitions->Size());
1828 MarkObjectAndPush(HeapObject::cast(
1829 prototype_transitions->get(Map::kProtoTransitionBackPointerOffset)));
1830 } 1840 }
1831 } 1841 }
1832 1842
1833 Object** raw_descriptor_array_slot = 1843 // Make sure that the back pointer stored either in the map itself or inside
1834 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); 1844 // its prototype transitions array is marked when clearing map transitions.
1835 Object* raw_descriptor_array = *raw_descriptor_array_slot; 1845 // Treat pointers in the descriptor array as weak and also mark that array to
1836 if (!raw_descriptor_array->IsSmi()) { 1846 // prevent visiting it later.
1837 MarkDescriptorArray( 1847 if (clear_map_transitions_) {
1838 reinterpret_cast<DescriptorArray*>(raw_descriptor_array)); 1848 MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
1849
1850 Object** descriptor_array_slot =
1851 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
1852 Object* descriptor_array = *descriptor_array_slot;
1853 if (!descriptor_array->IsSmi()) {
1854 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
1855 }
1839 } 1856 }
1840
1841 // Mark the Object* fields of the Map.
1842 // Since the descriptor array has been marked already, it is fine
1843 // that one of these fields contains a pointer to it.
1844 Object** start_slot = HeapObject::RawField(map,
1845 Map::kPointerFieldsBeginOffset);
1846
1847 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
1848
1849 StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
1850 } 1857 }
1851 1858
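Setting the prototype transitions array's mark bit without pushing it on the marking deque is what makes its outgoing references weak: an object that is never popped from the deque never has its fields traced, yet its mark bit keeps the sweeper from reclaiming it. A self-contained sketch of the pattern, with an assumed Obj type in place of V8's heap objects:

#include <deque>
#include <vector>

struct Obj {
  bool marked = false;
  std::vector<Obj*> fields;
};

// Survives sweeping, but its fields are never traced (weak references).
void MarkBlackWithoutPushing(Obj* o) {
  o->marked = true;
}

// Normal strong marking: fields will be traced when the object is popped.
void MarkAndPush(std::deque<Obj*>* deque, Obj* o) {
  if (!o->marked) {
    o->marked = true;
    deque->push_back(o);
  }
}

void ProcessMarkingDeque(std::deque<Obj*>* deque) {
  while (!deque->empty()) {
    Obj* o = deque->front();
    deque->pop_front();
    for (Obj* f : o->fields) MarkAndPush(deque, f);
  }
}

int main() {
  Obj weak_array, target;
  weak_array.fields.push_back(&target);
  std::deque<Obj*> deque;
  MarkBlackWithoutPushing(&weak_array);  // target is never pushed
  ProcessMarkingDeque(&deque);
  return target.marked ? 1 : 0;  // 0: the weak reference was not traced
}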
1852 1859
1853 void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors, 1860 void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors,
1854 int offset) { 1861 int offset) {
1855 Object** slot = HeapObject::RawField(accessors, offset); 1862 Object** slot = HeapObject::RawField(accessors, offset);
1856 HeapObject* accessor = HeapObject::cast(*slot); 1863 HeapObject* accessor = HeapObject::cast(*slot);
1857 if (accessor->IsMap()) return; 1864 if (accessor->IsMap()) return;
1858 RecordSlot(slot, slot, accessor); 1865 RecordSlot(slot, slot, accessor);
1859 MarkObjectAndPush(accessor); 1866 MarkObjectAndPush(accessor);
(...skipping 659 matching lines...)
2519 for (int i = new_number_of_transitions * step; 2526 for (int i = new_number_of_transitions * step;
2520 i < number_of_transitions * step; 2527 i < number_of_transitions * step;
2521 i++) { 2528 i++) {
2522 prototype_transitions->set_undefined(heap_, header + i); 2529 prototype_transitions->set_undefined(heap_, header + i);
2523 } 2530 }
2524 } 2531 }
2525 2532
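The loop above overwrites the tail of the compacted prototype transitions array with undefined, so stale entries past new_number_of_transitions cannot keep dead maps reachable. A sketch of the same prune-and-fill pattern on a plain vector (names and the filler value are illustrative, not V8's):

#include <algorithm>
#include <vector>

// Survivors are already packed at the front; clear the dead tail so no
// dangling entries remain, analogous to set_undefined in the patch.
void PruneTail(std::vector<int>* entries, int new_count, int filler) {
  std::fill(entries->begin() + new_count, entries->end(), filler);
}

int main() {
  std::vector<int> transitions = {7, 8, 9, 10};  // last two entries are dead
  PruneTail(&transitions, 2, /*filler=*/0);
}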
2526 2533
2527 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, 2534 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
2528 MarkBit map_mark) { 2535 MarkBit map_mark) {
2536 if (!clear_map_transitions_) return;
2529 Object* potential_parent = map->GetBackPointer(); 2537 Object* potential_parent = map->GetBackPointer();
2530 if (!potential_parent->IsMap()) return; 2538 if (!potential_parent->IsMap()) return;
2531 Map* parent = Map::cast(potential_parent); 2539 Map* parent = Map::cast(potential_parent);
2532 2540
2533 // Follow back pointer, check whether we are dealing with a map transition 2541 // Follow back pointer, check whether we are dealing with a map transition
2534 // from a live map to a dead path and in case clear transitions of parent. 2542 // from a live map to a dead path and in case clear transitions of parent.
2535 bool current_is_alive = map_mark.Get(); 2543 bool current_is_alive = map_mark.Get();
2536 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); 2544 bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
2537 if (!current_is_alive && parent_is_alive) { 2545 if (!current_is_alive && parent_is_alive) {
2538 parent->ClearNonLiveTransitions(heap()); 2546 parent->ClearNonLiveTransitions(heap());
(...skipping 1557 matching lines...)
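ClearNonLiveMapTransitions prunes exactly at the boundary between a live parent and a dead child; deeper dead descendants need no work because their parents are dead too. A small sketch of that boundary check, using an assumed MapNode type:

struct MapNode {
  bool alive;
  MapNode* back_pointer;  // child -> parent
};

// Prune the parent's transitions only where a dead path begins, i.e. at
// the first unmarked map directly below a marked one.
bool ShouldClearParentTransitions(const MapNode& child) {
  MapNode* parent = child.back_pointer;
  if (parent == nullptr) return false;
  return !child.alive && parent->alive;
}

int main() {
  MapNode parent{true, nullptr};
  MapNode child{false, &parent};
  return ShouldClearParentTransitions(child) ? 0 : 1;  // 0: prune parent
}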
4096 while (buffer != NULL) { 4104 while (buffer != NULL) {
4097 SlotsBuffer* next_buffer = buffer->next(); 4105 SlotsBuffer* next_buffer = buffer->next();
4098 DeallocateBuffer(buffer); 4106 DeallocateBuffer(buffer);
4099 buffer = next_buffer; 4107 buffer = next_buffer;
4100 } 4108 }
4101 *buffer_address = NULL; 4109 *buffer_address = NULL;
4102 } 4110 }
4103 4111
4104 4112
4105 } } // namespace v8::internal 4113 } } // namespace v8::internal
