Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 46 matching lines...) | |
| 57 | 57 |
| 58 MarkCompactCollector::MarkCompactCollector() : // NOLINT | 58 MarkCompactCollector::MarkCompactCollector() : // NOLINT |
| 59 #ifdef DEBUG | 59 #ifdef DEBUG |
| 60 state_(IDLE), | 60 state_(IDLE), |
| 61 #endif | 61 #endif |
| 62 sweep_precisely_(false), | 62 sweep_precisely_(false), |
| 63 reduce_memory_footprint_(false), | 63 reduce_memory_footprint_(false), |
| 64 abort_incremental_marking_(false), | 64 abort_incremental_marking_(false), |
| 65 compacting_(false), | 65 compacting_(false), |
| 66 was_marked_incrementally_(false), | 66 was_marked_incrementally_(false), |
| 67 collect_maps_(FLAG_collect_maps), | 67 clear_map_transitions_(true), |
| 68 flush_monomorphic_ics_(false), | 68 flush_monomorphic_ics_(false), |
| 69 tracer_(NULL), | 69 tracer_(NULL), |
| 70 migration_slots_buffer_(NULL), | 70 migration_slots_buffer_(NULL), |
| 71 heap_(NULL), | 71 heap_(NULL), |
| 72 code_flusher_(NULL), | 72 code_flusher_(NULL), |
| 73 encountered_weak_maps_(NULL) { } | 73 encountered_weak_maps_(NULL), |
| 74 marker_(this, this) { } | |
| 74 | 75 |
| 75 | 76 |
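
The new `marker_(this, this)` initializer above refers to a `Marker` template that this patch factors out so the same map-marking logic can run under both incremental marking and full mark-compact. Its declaration lives in the header and is not part of this file's hunks; judging from the uses below (`base_marker()`, `mark_compact_collector()`, and the explicit instantiations for `IncrementalMarking` and `MarkCompactCollector`), a plausible shape is the following sketch, an assumption rather than the actual declaration:

```cpp
// Sketch only: the real declaration is in the header, outside this diff.
// The template is parameterized over the base marker so that
// Marker<IncrementalMarking> and Marker<MarkCompactCollector> can share
// the map-marking code defined later in this file.
template <class BaseMarker>
class Marker {
 public:
  Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
      : base_marker_(base_marker),
        mark_compact_collector_(mark_compact_collector) {}

  // Marks pointers reachable from a map, treating transitions weakly.
  void MarkMapContents(Map* map);
  void MarkDescriptorArray(DescriptorArray* descriptors);
  void MarkAccessorPairSlot(AccessorPair* accessors, int offset);

 private:
  BaseMarker* base_marker() { return base_marker_; }
  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  BaseMarker* base_marker_;
  MarkCompactCollector* mark_compact_collector_;
};
```

`MarkCompactCollector` initializes `marker_(this, this)`, so it acts as both the base marker and the collector, which fits a two-argument constructor of this shape.
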
| 76 #ifdef DEBUG | 77 #ifdef DEBUG |
| 77 class VerifyMarkingVisitor: public ObjectVisitor { | 78 class VerifyMarkingVisitor: public ObjectVisitor { |
| 78 public: | 79 public: |
| 79 void VisitPointers(Object** start, Object** end) { | 80 void VisitPointers(Object** start, Object** end) { |
| 80 for (Object** current = start; current < end; current++) { | 81 for (Object** current = start; current < end; current++) { |
| 81 if ((*current)->IsHeapObject()) { | 82 if ((*current)->IsHeapObject()) { |
| 82 HeapObject* object = HeapObject::cast(*current); | 83 HeapObject* object = HeapObject::cast(*current); |
| 83 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); | 84 ASSERT(HEAP->mark_compact_collector()->IsMarked(object)); |
| (...skipping 191 matching lines...) | |
| 275 | 276 |
| 276 void MarkCompactCollector::CollectGarbage() { | 277 void MarkCompactCollector::CollectGarbage() { |
| 277 // Make sure that Prepare() has been called. The individual steps below will | 278 // Make sure that Prepare() has been called. The individual steps below will |
| 278 // update the state as they proceed. | 279 // update the state as they proceed. |
| 279 ASSERT(state_ == PREPARE_GC); | 280 ASSERT(state_ == PREPARE_GC); |
| 280 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); | 281 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); |
| 281 | 282 |
| 282 MarkLiveObjects(); | 283 MarkLiveObjects(); |
| 283 ASSERT(heap_->incremental_marking()->IsStopped()); | 284 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 284 | 285 |
| 285 if (collect_maps_) ClearNonLiveTransitions(); | 286 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
|

Vyacheslav Egorov (Chromium), 2012/05/11 12:58:35:
It seems that running with --nocollect-maps would …

Michael Starzinger, 2012/05/11 14:51:53:
Yes, now the flag treats pointers in both directions …
| 286 | 287 |
| 287 ClearWeakMaps(); | 288 ClearWeakMaps(); |
| 288 | 289 |
| 289 #ifdef DEBUG | 290 #ifdef DEBUG |
| 290 if (FLAG_verify_heap) { | 291 if (FLAG_verify_heap) { |
| 291 VerifyMarking(heap_); | 292 VerifyMarking(heap_); |
| 292 } | 293 } |
| 293 #endif | 294 #endif |
| 294 | 295 |
| 295 SweepSpaces(); | 296 SweepSpaces(); |
| 296 | 297 |
| 297 if (!collect_maps_) ReattachInitialMaps(); | 298 if (!FLAG_collect_maps) ReattachInitialMaps(); |
| 298 | 299 |
| 299 Finish(); | 300 Finish(); |
| 300 | 301 |
| 301 tracer_ = NULL; | 302 tracer_ = NULL; |
| 302 } | 303 } |
| 303 | 304 |
| 304 | 305 |
| 305 #ifdef DEBUG | 306 #ifdef DEBUG |
| 306 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 307 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
| 307 PageIterator it(space); | 308 PageIterator it(space); |
| (...skipping 343 matching lines...) | |
| 651 evacuation_candidates_.Rewind(0); | 652 evacuation_candidates_.Rewind(0); |
| 652 invalidated_code_.Rewind(0); | 653 invalidated_code_.Rewind(0); |
| 653 } | 654 } |
| 654 ASSERT_EQ(0, evacuation_candidates_.length()); | 655 ASSERT_EQ(0, evacuation_candidates_.length()); |
| 655 } | 656 } |
| 656 | 657 |
| 657 | 658 |
| 658 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 659 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| 659 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 660 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
| 660 | 661 |
| 661 // Disable collection of maps if incremental marking is enabled. | 662 // TODO(1465): Implement heuristic to switch between clearing map transitions |
| 662 // Map collection algorithm relies on a special map transition tree traversal | 663 // by marking strongly "up the tree" and collecting whole transition trees |
| 663 // order which is not implemented for incremental marking. | 664 // which requires marking strongly "down the tree". |
| 664 collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_; | 665 clear_map_transitions_ = true; |
|

Vyacheslav Egorov (Chromium), 2012/05/11 12:58:35:
Why doesn't it depend on FLAG_collect_maps? Flag …

Michael Starzinger, 2012/05/11 14:51:53:
Done, removed flag for now.
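
The TODO(1465) in this hunk contrasts two marking directions over the map transition tree. A toy sketch (standalone types, not V8 API) of what "up the tree" versus "down the tree" means:

```cpp
#include <vector>

// Toy model, not V8 code: each map has a back pointer to its parent and
// transition edges to its children.
struct ToyMap {
  ToyMap* parent = nullptr;       // back pointer: "up the tree"
  std::vector<ToyMap*> children;  // transitions: "down the tree"
  bool marked = false;
};

// Clearing map transitions marks strongly "up the tree": a live child
// keeps its parents alive, while parent->child edges stay weak so dead
// branches can be pruned later (as in ClearNonLiveTransitions).
void MarkUpTheTree(ToyMap* live_map) {
  for (ToyMap* m = live_map; m != nullptr && !m->marked; m = m->parent) {
    m->marked = true;
  }
}

// Collecting whole transition trees marks strongly "down the tree": a live
// root keeps every descendant alive, so a tree lives or dies as a unit.
void MarkDownTheTree(ToyMap* live_root) {
  if (live_root == nullptr || live_root->marked) return;
  live_root->marked = true;
  for (ToyMap* child : live_root->children) MarkDownTheTree(child);
}
```
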
| 665 | 666 |
| 666 // Monomorphic ICs are preserved when possible, but need to be flushed | 667 // Monomorphic ICs are preserved when possible, but need to be flushed |
| 667 // when they might be keeping a Context alive, or when the heap is about | 668 // when they might be keeping a Context alive, or when the heap is about |
| 668 // to be serialized. | 669 // to be serialized. |
| 669 flush_monomorphic_ics_ = | 670 flush_monomorphic_ics_ = |
| 670 heap()->isolate()->context_exit_happened() || Serializer::enabled(); | 671 heap()->isolate()->context_exit_happened() || Serializer::enabled(); |
| 671 | 672 |
| 672 // Rather than passing the tracer around we stash it in a static member | 673 // Rather than passing the tracer around we stash it in a static member |
| 673 // variable. | 674 // variable. |
| 674 tracer_ = tracer; | 675 tracer_ = tracer; |
| (...skipping 1116 matching lines...) | |
| 1791 | 1792 |
| 1792 | 1793 |
| 1793 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { | 1794 void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) { |
| 1794 ASSERT(IsMarked(object)); | 1795 ASSERT(IsMarked(object)); |
| 1795 ASSERT(HEAP->Contains(object)); | 1796 ASSERT(HEAP->Contains(object)); |
| 1796 if (object->IsMap()) { | 1797 if (object->IsMap()) { |
| 1797 Map* map = Map::cast(object); | 1798 Map* map = Map::cast(object); |
| 1798 heap_->ClearCacheOnMap(map); | 1799 heap_->ClearCacheOnMap(map); |
| 1799 | 1800 |
| 1800 // When map collection is enabled we have to mark through map's transitions | 1801 // When map collection is enabled we have to mark through map's transitions |
| 1801 // in a special way to make transition links weak. | 1802 // in a special way to make transition links weak. Only maps for subclasses |
| 1802 // Only maps for subclasses of JSReceiver can have transitions. | 1803 // of JSReceiver can have transitions. |
| 1803 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | 1804 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
| 1804 if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | 1805 if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
| 1805 MarkMapContents(map); | 1806 marker_.MarkMapContents(map); |
| 1807 | |
| 1808 // Mark the Object* fields of the Map. Since the descriptor array has been | |
|

Vyacheslav Egorov (Chromium), 2012/05/11 12:58:35:
Try to move this to MarkMapContents to further sha…

Michael Starzinger, 2012/05/11 14:51:53:
Done.
| 1809 // marked already, it is fine that one of these fields contains a pointer | |
| 1810 // to it. But make sure to skip back pointer and prototype transitions. | |
| 1811 STATIC_ASSERT(Map::kPointerFieldsEndOffset == | |
| 1812 Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize); | |
| 1813 Object** start_slot = HeapObject::RawField( | |
| 1814 map, Map::kPointerFieldsBeginOffset); | |
| 1815 Object** end_slot = HeapObject::RawField( | |
| 1816 map, Map::kPrototypeTransitionsOrBackPointerOffset); | |
| 1817 StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot); | |
| 1806 } else { | 1818 } else { |
| 1807 marking_deque_.PushBlack(map); | 1819 marking_deque_.PushBlack(map); |
| 1808 } | 1820 } |
| 1809 } else { | 1821 } else { |
| 1810 marking_deque_.PushBlack(object); | 1822 marking_deque_.PushBlack(object); |
| 1811 } | 1823 } |
| 1812 } | 1824 } |
| 1813 | 1825 |
| 1814 | 1826 |
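
The `template void Marker<...>::MarkMapContents(...)` lines in the next hunk are explicit instantiation definitions. They are needed because the template's member functions are defined in this `.cc` file rather than in the header, so the compiler must be told to emit object code for each `T` that other translation units will link against. A generic, self-contained illustration of the pattern:

```cpp
// printer.cc: generic illustration of explicit template instantiation.
template <class T>
struct Printer {
  void Print(T value);  // declared as if in a header
};

template <class T>
void Printer<T>::Print(T value) {
  (void)value;  // implementation visible only in this translation unit
}

// Without these definitions, code in another .cc file calling
// Printer<int>::Print would fail at link time, because no object code
// for that specialization was ever emitted.
template struct Printer<int>;
template struct Printer<double>;
```
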
| 1815 void MarkCompactCollector::MarkMapContents(Map* map) { | 1827 // Force instantiation of template instances. |
| 1828 template void Marker<IncrementalMarking>::MarkMapContents(Map* map); | |
| 1829 template void Marker<MarkCompactCollector>::MarkMapContents(Map* map); | |
| 1830 | |
| 1831 | |
| 1832 template <class T> | |
| 1833 void Marker<T>::MarkMapContents(Map* map) { | |
| 1816 // Mark prototype transitions array but don't push it into marking stack. | 1834 // Mark prototype transitions array but don't push it into marking stack. |
| 1817 // This will make references from it weak. We will clean dead prototype | 1835 // This will make references from it weak. We will clean dead prototype |
| 1818 // transitions in ClearNonLiveTransitions. But make sure that back pointers | 1836 // transitions in ClearNonLiveTransitions. |
| 1819 // stored inside prototype transitions arrays are marked. | 1837 Object** proto_trans_slot = |
| 1820 Object* raw_proto_transitions = map->unchecked_prototype_transitions(); | 1838 HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset); |
| 1821 if (raw_proto_transitions->IsFixedArray()) { | 1839 HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot); |
| 1822 FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions); | 1840 if (prototype_transitions->IsFixedArray()) { |
| 1841 mark_compact_collector()->RecordSlot(proto_trans_slot, | |
| 1842 proto_trans_slot, | |
| 1843 prototype_transitions); | |
| 1823 MarkBit mark = Marking::MarkBitFrom(prototype_transitions); | 1844 MarkBit mark = Marking::MarkBitFrom(prototype_transitions); |
| 1824 if (!mark.Get()) { | 1845 if (!mark.Get()) { |
| 1825 mark.Set(); | 1846 mark.Set(); |
| 1826 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), | 1847 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), |
| 1827 prototype_transitions->Size()); | 1848 prototype_transitions->Size()); |
| 1828 MarkObjectAndPush(HeapObject::cast( | |
| 1829 prototype_transitions->get(Map::kProtoTransitionBackPointerOffset))); | |
| 1830 } | 1849 } |
| 1831 } | 1850 } |
| 1832 | 1851 |
| 1833 Object** raw_descriptor_array_slot = | 1852 // Make sure that the back pointer stored either in the map itself or inside |
| 1834 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); | 1853 // its prototype transitions array is marked when clearing map transitions. |
| 1835 Object* raw_descriptor_array = *raw_descriptor_array_slot; | 1854 // Treat pointers in the descriptor array as weak and also mark that array to |
| 1836 if (!raw_descriptor_array->IsSmi()) { | 1855 // prevent visiting it later. |
| 1837 MarkDescriptorArray( | 1856 if (mark_compact_collector()->clear_map_transitions_) { |
| 1838 reinterpret_cast<DescriptorArray*>(raw_descriptor_array)); | 1857 base_marker()->MarkObject(HeapObject::cast(map->GetBackPointer())); |
| 1858 | |
| 1859 Object** descriptor_array_slot = | |
| 1860 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); | |
| 1861 Object* descriptor_array = *descriptor_array_slot; | |
| 1862 if (!descriptor_array->IsSmi()) { | |
| 1863 MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array)); | |
| 1864 } | |
| 1839 } | 1865 } |
| 1840 | |
| 1841 // Mark the Object* fields of the Map. | |
| 1842 // Since the descriptor array has been marked already, it is fine | |
| 1843 // that one of these fields contains a pointer to it. | |
| 1844 Object** start_slot = HeapObject::RawField(map, | |
| 1845 Map::kPointerFieldsBeginOffset); | |
| 1846 | |
| 1847 Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset); | |
| 1848 | |
| 1849 StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot); | |
| 1850 } | 1866 } |
| 1851 | 1867 |
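
`MarkMapContents` keeps references out of the prototype transitions array weak by setting the array's mark bit without pushing the array onto the marking deque: the array itself survives, but its elements are never scanned. A simplified sketch of that trick, with a hypothetical signature modeled on the calls above:

```cpp
// Simplified sketch, hypothetical helper. In tri-color terms the array
// goes straight from white to black without ever being grey, so the
// marker never scans its contents and every reference *out* of the
// array stays weak. (The real code also updates live byte counts.)
void MarkArrayWeakly(FixedArray* array, MarkingDeque* marking_deque) {
  MarkBit mark = Marking::MarkBitFrom(array);
  if (mark.Get()) return;  // already marked
  mark.Set();              // the array itself survives this GC
  // Deliberately no marking_deque->PushBlack(array): elements are not
  // traced, so entries reachable only through this array die, and
  // ClearNonLiveTransitions() cleans up the stale slots afterwards.
}
```
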
| 1852 | 1868 |
| 1853 void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors, | 1869 template <class T> |
| 1854 int offset) { | 1870 void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) { |
| 1855 Object** slot = HeapObject::RawField(accessors, offset); | 1871 // Empty descriptor array is marked as a root before any maps are marked. |
| 1856 HeapObject* accessor = HeapObject::cast(*slot); | 1872 ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array()); |
| 1857 if (accessor->IsMap()) return; | |
| 1858 RecordSlot(slot, slot, accessor); | |
| 1859 MarkObjectAndPush(accessor); | |
| 1860 } | |
| 1861 | 1873 |
| 1874 // The DescriptorArray contains a pointer to its contents array, but the | |
| 1875 // contents array will be marked black and hence not be visited again. | |
| 1876 if (!base_marker()->MarkObject(descriptors)) return; | |
| 1877 FixedArray* contents = FixedArray::cast( | |
| 1878 descriptors->get(DescriptorArray::kContentArrayIndex)); | |
| 1879 ASSERT(contents->length() >= 2); | |
| 1880 ASSERT(Marking::IsWhite(Marking::MarkBitFrom(contents))); | |
| 1881 base_marker()->MarkObjectWithoutPush(contents); | |
| 1862 | 1882 |
| 1863 void MarkCompactCollector::MarkDescriptorArray( | 1883 // Contents contains (value, details) pairs. If the descriptor contains a |
| 1864 DescriptorArray* descriptors) { | 1884 // transition (value is a Map), we don't mark the value as live. It might |
| 1865 MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors); | 1885 // be set to the NULL_DESCRIPTOR in ClearNonLiveTransitions later. |
| 1866 if (descriptors_mark.Get()) return; | |
| 1867 // Empty descriptor array is marked as a root before any maps are marked. | |
| 1868 ASSERT(descriptors != heap()->empty_descriptor_array()); | |
| 1869 SetMark(descriptors, descriptors_mark); | |
| 1870 | |
| 1871 FixedArray* contents = reinterpret_cast<FixedArray*>( | |
| 1872 descriptors->get(DescriptorArray::kContentArrayIndex)); | |
| 1873 ASSERT(contents->IsHeapObject()); | |
| 1874 ASSERT(!IsMarked(contents)); | |
| 1875 ASSERT(contents->IsFixedArray()); | |
| 1876 ASSERT(contents->length() >= 2); | |
| 1877 MarkBit contents_mark = Marking::MarkBitFrom(contents); | |
| 1878 SetMark(contents, contents_mark); | |
| 1879 // Contents contains (value, details) pairs. If the details say that the type | |
| 1880 // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, | |
| 1881 // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as | |
| 1882 // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and | |
| 1883 // CONSTANT_TRANSITION is the value an Object* (a Map*). | |
| 1884 for (int i = 0; i < contents->length(); i += 2) { | 1886 for (int i = 0; i < contents->length(); i += 2) { |
| 1885 // If the pair (value, details) at index i, i+1 is not | |
| 1886 // a transition or null descriptor, mark the value. | |
| 1887 PropertyDetails details(Smi::cast(contents->get(i + 1))); | 1887 PropertyDetails details(Smi::cast(contents->get(i + 1))); |
| 1888 | 1888 |
| 1889 Object** slot = contents->data_start() + i; | 1889 Object** slot = contents->data_start() + i; |
| 1890 if (!(*slot)->IsHeapObject()) continue; | 1890 if (!(*slot)->IsHeapObject()) continue; |
| 1891 HeapObject* value = HeapObject::cast(*slot); | 1891 HeapObject* value = HeapObject::cast(*slot); |
| 1892 | 1892 |
| 1893 RecordSlot(slot, slot, *slot); | 1893 mark_compact_collector()->RecordSlot(slot, slot, *slot); |
| 1894 | 1894 |
| 1895 switch (details.type()) { | 1895 switch (details.type()) { |
| 1896 case NORMAL: | 1896 case NORMAL: |
| 1897 case FIELD: | 1897 case FIELD: |
| 1898 case CONSTANT_FUNCTION: | 1898 case CONSTANT_FUNCTION: |
| 1899 case HANDLER: | 1899 case HANDLER: |
| 1900 case INTERCEPTOR: | 1900 case INTERCEPTOR: |
| 1901 MarkObjectAndPush(value); | 1901 base_marker()->MarkObject(value); |
| 1902 break; | 1902 break; |
| 1903 case CALLBACKS: | 1903 case CALLBACKS: |
| 1904 if (!value->IsAccessorPair()) { | 1904 if (!value->IsAccessorPair()) { |
| 1905 MarkObjectAndPush(value); | 1905 base_marker()->MarkObject(value); |
| 1906 } else if (!MarkObjectWithoutPush(value)) { | 1906 } else if (base_marker()->MarkObjectWithoutPush(value)) { |
| 1907 MarkAccessorPairSlot(value, AccessorPair::kGetterOffset); | 1907 AccessorPair* accessors = AccessorPair::cast(value); |
| 1908 MarkAccessorPairSlot(value, AccessorPair::kSetterOffset); | 1908 MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset); |
| 1909 MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset); | |
| 1909 } | 1910 } |
| 1910 break; | 1911 break; |
| 1911 case ELEMENTS_TRANSITION: | 1912 case ELEMENTS_TRANSITION: |
| 1912 // For maps with multiple elements transitions, the transition maps are | 1913 // For maps with multiple elements transitions, the transition maps are |
| 1913 // stored in a FixedArray. Keep the fixed array alive but not the maps | 1914 // stored in a FixedArray. Keep the fixed array alive but not the maps |
| 1914 // that it refers to. | 1915 // that it refers to. |
| 1915 if (value->IsFixedArray()) MarkObjectWithoutPush(value); | 1916 if (value->IsFixedArray()) base_marker()->MarkObjectWithoutPush(value); |
| 1916 break; | 1917 break; |
| 1917 case MAP_TRANSITION: | 1918 case MAP_TRANSITION: |
| 1918 case CONSTANT_TRANSITION: | 1919 case CONSTANT_TRANSITION: |
| 1919 case NULL_DESCRIPTOR: | 1920 case NULL_DESCRIPTOR: |
| 1920 break; | 1921 break; |
| 1921 } | 1922 } |
| 1922 } | 1923 } |
| 1923 // The DescriptorArray descriptors contains a pointer to its contents array, | |
| 1924 // but the contents array is already marked. | |
| 1925 marking_deque_.PushBlack(descriptors); | |
| 1926 } | 1924 } |
| 1927 | 1925 |
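
`MarkDescriptorArray` above walks the contents array as (value, details) pairs: even slots hold the descriptor value, odd slots hold its Smi-encoded `PropertyDetails`, and the details type decides whether the value is treated strongly or weakly. A small hypothetical helper over the same layout, for illustration only:

```cpp
// Hypothetical helper, not part of the patch: counts descriptors whose
// value is transition-related and therefore kept weak by the marker.
int CountTransitionDescriptors(FixedArray* contents) {
  int transitions = 0;
  for (int i = 0; i < contents->length(); i += 2) {
    PropertyDetails details(Smi::cast(contents->get(i + 1)));
    if (details.type() == MAP_TRANSITION ||
        details.type() == CONSTANT_TRANSITION ||
        details.type() == ELEMENTS_TRANSITION) {
      transitions++;  // value at slot i points into the transition tree
    }
  }
  return transitions;
}
```
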
| 1928 | 1926 |
| 1927 template <class T> | |
| 1928 void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) { | |
| 1929 Object** slot = HeapObject::RawField(accessors, offset); | |
| 1930 HeapObject* accessor = HeapObject::cast(*slot); | |
| 1931 if (accessor->IsMap()) return; | |
| 1932 mark_compact_collector()->RecordSlot(slot, slot, accessor); | |
| 1933 base_marker()->MarkObject(accessor); | |
| 1934 } | |
| 1935 | |
| 1936 | |
| 1929 // Fill the marking stack with overflowed objects returned by the given | 1937 // Fill the marking stack with overflowed objects returned by the given |
| 1930 // iterator. Stop when the marking stack is filled or the end of the space | 1938 // iterator. Stop when the marking stack is filled or the end of the space |
| 1931 // is reached, whichever comes first. | 1939 // is reached, whichever comes first. |
| 1932 template<class T> | 1940 template<class T> |
| 1933 static void DiscoverGreyObjectsWithIterator(Heap* heap, | 1941 static void DiscoverGreyObjectsWithIterator(Heap* heap, |
| 1934 MarkingDeque* marking_deque, | 1942 MarkingDeque* marking_deque, |
| 1935 T* it) { | 1943 T* it) { |
| 1936 // The caller should ensure that the marking stack is initially not full, | 1944 // The caller should ensure that the marking stack is initially not full, |
| 1937 // so that we don't waste effort pointlessly scanning for objects. | 1945 // so that we don't waste effort pointlessly scanning for objects. |
| 1938 ASSERT(!marking_deque->IsFull()); | 1946 ASSERT(!marking_deque->IsFull()); |
| (...skipping 580 matching lines...) | |
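
`DiscoverGreyObjectsWithIterator` exists to recover from marking-deque overflow: objects that could not be pushed stay grey (marked but unscanned) in the heap, and these discovery passes sweep each space to refill the deque. A toy sketch of the refill contract; `ToyIterator` is hypothetical, and the exact `Marking::IsGrey`/`Marking::GreyToBlack` helpers are assumptions based on this V8 version:

```cpp
// Toy sketch of the refill loop: find grey objects left behind by an
// overflow, blacken them, and push them until the deque fills again.
template <class ToyIterator>
static void RefillMarkingDeque(MarkingDeque* marking_deque, ToyIterator* it) {
  ASSERT(!marking_deque->IsFull());  // caller guarantees room, as noted above
  for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
    MarkBit mark = Marking::MarkBitFrom(object);
    if (Marking::IsGrey(mark)) {  // marked earlier but never scanned
      Marking::GreyToBlack(mark);
      marking_deque->PushBlack(object);
      if (marking_deque->IsFull()) return;  // overflowed again; resume later
    }
  }
}
```
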
| 2519 for (int i = new_number_of_transitions * step; | 2527 for (int i = new_number_of_transitions * step; |
| 2520 i < number_of_transitions * step; | 2528 i < number_of_transitions * step; |
| 2521 i++) { | 2529 i++) { |
| 2522 prototype_transitions->set_undefined(heap_, header + i); | 2530 prototype_transitions->set_undefined(heap_, header + i); |
| 2523 } | 2531 } |
| 2524 } | 2532 } |
| 2525 | 2533 |
| 2526 | 2534 |
| 2527 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, | 2535 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, |
| 2528 MarkBit map_mark) { | 2536 MarkBit map_mark) { |
| 2537 if (!clear_map_transitions_) return; | |
| 2529 Object* potential_parent = map->GetBackPointer(); | 2538 Object* potential_parent = map->GetBackPointer(); |
| 2530 if (!potential_parent->IsMap()) return; | 2539 if (!potential_parent->IsMap()) return; |
| 2531 Map* parent = Map::cast(potential_parent); | 2540 Map* parent = Map::cast(potential_parent); |
| 2532 | 2541 |
| 2533 // Follow back pointer, check whether we are dealing with a map transition | 2542 // Follow back pointer, check whether we are dealing with a map transition |
| 2534 // from a live map to a dead path and in case clear transitions of parent. | 2543 // from a live map to a dead path and in case clear transitions of parent. |
| 2535 bool current_is_alive = map_mark.Get(); | 2544 bool current_is_alive = map_mark.Get(); |
| 2536 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); | 2545 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
| 2537 if (!current_is_alive && parent_is_alive) { | 2546 if (!current_is_alive && parent_is_alive) { |
| 2538 parent->ClearNonLiveTransitions(heap()); | 2547 parent->ClearNonLiveTransitions(heap()); |
| (...skipping 1557 matching lines...) | |
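
`ClearNonLiveMapTransitions` closes the loop opened in `MarkMapContents`: back pointers were marked strongly during marking, so a live child always implies a live parent, and the only stale edge shape left afterwards is a live parent whose transition points at a dead child. A condensed restatement of that invariant as a hypothetical predicate:

```cpp
// Hypothetical predicate restating the check above. After marking with
// strong back pointers, the only inconsistent edge that can remain is
// live parent --transition--> dead child, which the parent must prune.
bool HasStaleTransition(Map* map, MarkBit map_mark) {
  Object* potential_parent = map->GetBackPointer();
  if (!potential_parent->IsMap()) return false;  // root of a transition tree
  bool child_alive = map_mark.Get();
  bool parent_alive = Marking::MarkBitFrom(Map::cast(potential_parent)).Get();
  return !child_alive && parent_alive;
}
```
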
| 4096 while (buffer != NULL) { | 4105 while (buffer != NULL) { |
| 4097 SlotsBuffer* next_buffer = buffer->next(); | 4106 SlotsBuffer* next_buffer = buffer->next(); |
| 4098 DeallocateBuffer(buffer); | 4107 DeallocateBuffer(buffer); |
| 4099 buffer = next_buffer; | 4108 buffer = next_buffer; |
| 4100 } | 4109 } |
| 4101 *buffer_address = NULL; | 4110 *buffer_address = NULL; |
| 4102 } | 4111 } |
| 4103 | 4112 |
| 4104 | 4113 |
| 4105 } } // namespace v8::internal | 4114 } } // namespace v8::internal |