OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 662 matching lines...)
673 // variable. | 673 // variable. |
674 tracer_ = tracer; | 674 tracer_ = tracer; |
675 | 675 |
676 #ifdef DEBUG | 676 #ifdef DEBUG |
677 ASSERT(state_ == IDLE); | 677 ASSERT(state_ == IDLE); |
678 state_ = PREPARE_GC; | 678 state_ = PREPARE_GC; |
679 #endif | 679 #endif |
680 | 680 |
681 ASSERT(!FLAG_never_compact || !FLAG_always_compact); | 681 ASSERT(!FLAG_never_compact || !FLAG_always_compact); |
682 | 682 |
683 if (collect_maps_) CreateBackPointers(); | |
684 #ifdef ENABLE_GDB_JIT_INTERFACE | 683 #ifdef ENABLE_GDB_JIT_INTERFACE |
685 if (FLAG_gdbjit) { | 684 if (FLAG_gdbjit) { |
686 // If the GDBJIT interface is active, disable compaction. | 685 // If the GDBJIT interface is active, disable compaction. |
687 compacting_collection_ = false; | 686 compacting_collection_ = false; |
688 } | 687 } |
689 #endif | 688 #endif |
690 | 689 |
691 // Clear marking bits if incremental marking is aborted. | 690 // Clear marking bits if incremental marking is aborted. |
692 if (was_marked_incrementally_ && abort_incremental_marking_) { | 691 if (was_marked_incrementally_ && abort_incremental_marking_) { |
693 heap()->incremental_marking()->Abort(); | 692 heap()->incremental_marking()->Abort(); |
(...skipping 1115 matching lines...)
1809 } | 1808 } |
1810 } else { | 1809 } else { |
1811 marking_deque_.PushBlack(object); | 1810 marking_deque_.PushBlack(object); |
1812 } | 1811 } |
1813 } | 1812 } |
1814 | 1813 |
1815 | 1814 |
1816 void MarkCompactCollector::MarkMapContents(Map* map) { | 1815 void MarkCompactCollector::MarkMapContents(Map* map) { |
1817 // Mark the prototype transitions array but don't push it onto the marking stack. | 1816 // Mark the prototype transitions array but don't push it onto the marking stack. |
1818 // This will make references from it weak. We will clean dead prototype | 1817 // This will make references from it weak. We will clean dead prototype |
1819 // transitions in ClearNonLiveTransitions. | 1818 // transitions in ClearNonLiveTransitions. But make sure that back pointers |
1820 FixedArray* prototype_transitions = map->prototype_transitions(); | 1819 // stored inside prototype transitions arrays are marked. |
1821 MarkBit mark = Marking::MarkBitFrom(prototype_transitions); | 1820 Object* raw_proto_transitions = map->unchecked_prototype_transitions(); |
1822 if (!mark.Get()) { | 1821 if (raw_proto_transitions->IsFixedArray()) { |
1823 mark.Set(); | 1822 FixedArray* prototype_transitions = FixedArray::cast(raw_proto_transitions); |
1824 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), | 1823 MarkBit mark = Marking::MarkBitFrom(prototype_transitions); |
1825 prototype_transitions->Size()); | 1824 if (!mark.Get()) { |
| 1825 mark.Set(); |
| 1826 MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(), |
| 1827 prototype_transitions->Size()); |
| 1828 MarkObjectAndPush(HeapObject::cast( |
| 1829 prototype_transitions->get(Map::kProtoTransitionBackPointerOffset))); |
| 1830 } |
1826 } | 1831 } |
1827 | 1832 |
1828 Object** raw_descriptor_array_slot = | 1833 Object** raw_descriptor_array_slot = |
1829 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); | 1834 HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset); |
1830 Object* raw_descriptor_array = *raw_descriptor_array_slot; | 1835 Object* raw_descriptor_array = *raw_descriptor_array_slot; |
1831 if (!raw_descriptor_array->IsSmi()) { | 1836 if (!raw_descriptor_array->IsSmi()) { |
1832 MarkDescriptorArray( | 1837 MarkDescriptorArray( |
1833 reinterpret_cast<DescriptorArray*>(raw_descriptor_array)); | 1838 reinterpret_cast<DescriptorArray*>(raw_descriptor_array)); |
1834 } | 1839 } |
1835 | 1840 |
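
The hunk above is the core of the weak-marking trick: the prototype transitions array is marked live but never pushed onto the marking deque, so the marker never traces its element slots and every reference from the array behaves as a weak reference; the back pointer stored in its header is the one slot that must stay strong. A minimal sketch of the pattern, with the collector boiled down to a standalone helper (MarkBitFrom, IncrementLiveBytesFromGC, and MarkObjectAndPush are the calls used in this file; the wrapper itself is illustrative, not part of the CL):

  // Keep a container alive without tracing its elements, so the elements
  // act as weak references that a later phase may prune.
  void MarkCompactCollector::MarkTransitionsWeakly(FixedArray* transitions) {
    MarkBit mark = Marking::MarkBitFrom(transitions);
    if (mark.Get()) return;  // Already visited on an earlier path.
    mark.Set();              // The array itself survives the sweep...
    MemoryChunk::IncrementLiveBytesFromGC(transitions->address(),
                                          transitions->Size());
    // ...but it is never PushBlack()'ed, so its element slots are not
    // visited and dead prototype transitions can be cleared later in
    // ClearNonLiveTransitions. The back pointer is the single strong slot
    // and is therefore marked and pushed explicitly.
    MarkObjectAndPush(HeapObject::cast(
        transitions->get(Map::kProtoTransitionBackPointerOffset)));
  }
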
(...skipping 78 matching lines...)
1914 case NULL_DESCRIPTOR: | 1919 case NULL_DESCRIPTOR: |
1915 break; | 1920 break; |
1916 } | 1921 } |
1917 } | 1922 } |
1918 // The DescriptorArray descriptors contains a pointer to its contents array, | 1923 // The DescriptorArray descriptors contains a pointer to its contents array, |
1919 // but the contents array is already marked. | 1924 // but the contents array is already marked. |
1920 marking_deque_.PushBlack(descriptors); | 1925 marking_deque_.PushBlack(descriptors); |
1921 } | 1926 } |
1922 | 1927 |
1923 | 1928 |
1924 void MarkCompactCollector::CreateBackPointers() { | |
1925 HeapObjectIterator iterator(heap()->map_space()); | |
1926 for (HeapObject* next_object = iterator.Next(); | |
1927 next_object != NULL; next_object = iterator.Next()) { | |
1928 if (next_object->IsMap()) { // Could also be FreeSpace object on free list. | |
1929 Map* map = Map::cast(next_object); | |
1930 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
1931 if (map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | |
1932 map->CreateBackPointers(); | |
1933 } else { | |
1934 ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array()); | |
1935 } | |
1936 } | |
1937 } | |
1938 } | |
1939 | |
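
CreateBackPointers() is deleted outright here, matching the removal of its call site in the GC prologue above: the collector no longer needs a pre-marking pass over the whole map space to synthesize back pointers, because this CL makes every map carry its back pointer explicitly. Judging from the calls the new code makes (GetBackPointer(), unchecked_prototype_transitions(), Map::kProtoTransitionBackPointerOffset), the accessor plausibly looks like the sketch below; this is inferred from the CL, not copied from the Map implementation:

  // Hypothetical shape of the explicit back pointer accessor. The field
  // does double duty: if the map owns a prototype transitions array, the
  // back pointer lives in a reserved header slot of that array.
  Object* Map::GetBackPointer() {
    Object* object = unchecked_prototype_transitions();
    if (object->IsFixedArray()) {
      return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
    }
    // Otherwise the field holds the back pointer directly: a Map for
    // transitioned maps, or undefined for roots such as initial maps.
    return object;
  }
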
1940 | |
1941 // Fill the marking stack with overflowed objects returned by the given | 1929 // Fill the marking stack with overflowed objects returned by the given |
1942 // iterator. Stop when the marking stack is filled or the end of the space | 1930 // iterator. Stop when the marking stack is filled or the end of the space |
1943 // is reached, whichever comes first. | 1931 // is reached, whichever comes first. |
1944 template<class T> | 1932 template<class T> |
1945 static void DiscoverGreyObjectsWithIterator(Heap* heap, | 1933 static void DiscoverGreyObjectsWithIterator(Heap* heap, |
1946 MarkingDeque* marking_deque, | 1934 MarkingDeque* marking_deque, |
1947 T* it) { | 1935 T* it) { |
1948 // The caller should ensure that the marking stack is initially not full, | 1936 // The caller should ensure that the marking stack is initially not full, |
1949 // so that we don't waste effort pointlessly scanning for objects. | 1937 // so that we don't waste effort pointlessly scanning for objects. |
1950 ASSERT(!marking_deque->IsFull()); | 1938 ASSERT(!marking_deque->IsFull()); |
(...skipping 503 matching lines...)
2454 if (map->attached_to_shared_function_info()) { | 2442 if (map->attached_to_shared_function_info()) { |
2455 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); | 2443 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); |
2456 } | 2444 } |
2457 } | 2445 } |
2458 } | 2446 } |
2459 | 2447 |
2460 | 2448 |
2461 void MarkCompactCollector::ClearNonLiveTransitions() { | 2449 void MarkCompactCollector::ClearNonLiveTransitions() { |
2462 HeapObjectIterator map_iterator(heap()->map_space()); | 2450 HeapObjectIterator map_iterator(heap()->map_space()); |
2463 // Iterate over the map space, setting map transitions that go from | 2451 // Iterate over the map space, setting map transitions that go from |
2464 // a marked map to an unmarked map to null transitions. At the same time, | 2452 // a marked map to an unmarked map to null transitions. This action |
2465 // set all the prototype fields of maps back to their original value, | 2453 // is carried out only on maps of JSObjects and related subtypes. |
2466 // dropping the back pointers temporarily stored in the prototype field. | |
2467 // Setting the prototype field requires following the linked list of | |
2468 // back pointers, reversing them all at once. This allows us to find | |
2469 // those maps with map transitions that need to be nulled, and only | |
2470 // scan the descriptor arrays of those maps, not all maps. | |
2471 // All of these actions are carried out only on maps of JSObjects | |
2472 // and related subtypes. | |
2473 for (HeapObject* obj = map_iterator.Next(); | 2454 for (HeapObject* obj = map_iterator.Next(); |
2474 obj != NULL; obj = map_iterator.Next()) { | 2455 obj != NULL; obj = map_iterator.Next()) { |
2475 Map* map = reinterpret_cast<Map*>(obj); | 2456 Map* map = reinterpret_cast<Map*>(obj); |
2476 MarkBit map_mark = Marking::MarkBitFrom(map); | 2457 MarkBit map_mark = Marking::MarkBitFrom(map); |
2477 if (map->IsFreeSpace()) continue; | 2458 if (map->IsFreeSpace()) continue; |
2478 | 2459 |
2479 ASSERT(map->IsMap()); | 2460 ASSERT(map->IsMap()); |
2480 // Only JSObject and subtypes have map transitions and back pointers. | 2461 // Only JSObject and subtypes have map transitions and back pointers. |
2481 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE); | 2462 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE); |
2482 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; | 2463 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; |
(...skipping 55 matching lines...)
2538 for (int i = new_number_of_transitions * step; | 2519 for (int i = new_number_of_transitions * step; |
2539 i < number_of_transitions * step; | 2520 i < number_of_transitions * step; |
2540 i++) { | 2521 i++) { |
2541 prototype_transitions->set_undefined(heap_, header + i); | 2522 prototype_transitions->set_undefined(heap_, header + i); |
2542 } | 2523 } |
2543 } | 2524 } |
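
The loop just above erases the slack left once live prototype transitions have been compacted to the front of the array: each logical entry occupies step consecutive slots after a header-slot prefix, so indices new_number_of_transitions * step through number_of_transitions * step - 1, offset by header, are exactly the stale tail. As a concrete illustration (assuming header = 2 and step = 2, i.e. one (prototype, map) pair per entry; the real constants live in Map and are not shown in this hunk), shrinking from 5 recorded transitions to 3 live ones overwrites array slots 2 + 6 through 2 + 9 with undefined, so no dangling pointers to the two dead pairs remain reachable through this weak array.
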
2544 | 2525 |
2545 | 2526 |
2546 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, | 2527 void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map, |
2547 MarkBit map_mark) { | 2528 MarkBit map_mark) { |
2548 // Follow the chain of back pointers to find the prototype. | 2529 Object* potential_parent = map->GetBackPointer(); |
2549 Object* real_prototype = map; | 2530 if (!potential_parent->IsMap()) return; |
2550 while (real_prototype->IsMap()) { | 2531 Map* parent = Map::cast(potential_parent); |
2551 real_prototype = Map::cast(real_prototype)->prototype(); | |
2552 ASSERT(real_prototype->IsHeapObject()); | |
2553 } | |
2554 | 2532 |
2555 // Follow back pointers, setting them to prototype, clearing map transitions | 2533 // Follow the back pointer to check whether this is a map transition from a |
2556 // when necessary. | 2534 // live map to a dead path; if so, clear the dead transitions of the parent. |
2557 Map* current = map; | |
2558 bool current_is_alive = map_mark.Get(); | 2535 bool current_is_alive = map_mark.Get(); |
2559 bool on_dead_path = !current_is_alive; | 2536 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
2560 while (current->IsMap()) { | 2537 if (!current_is_alive && parent_is_alive) { |
2561 Object* next = current->prototype(); | 2538 parent->ClearNonLiveTransitions(heap()); |
2562 // There should never be a dead map above a live map. | |
2563 ASSERT(on_dead_path || current_is_alive); | |
2564 | |
2565 // A live map above a dead map indicates a dead transition. This test will | |
2566 // always be false on the first iteration. | |
2567 if (on_dead_path && current_is_alive) { | |
2568 on_dead_path = false; | |
2569 current->ClearNonLiveTransitions(heap(), real_prototype); | |
2570 } | |
2571 | |
2572 Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset); | |
2573 *slot = real_prototype; | |
2574 if (current_is_alive) RecordSlot(slot, slot, real_prototype); | |
2575 | |
2576 current = reinterpret_cast<Map*>(next); | |
2577 current_is_alive = Marking::MarkBitFrom(current).Get(); | |
2578 } | 2539 } |
2579 } | 2540 } |
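
This rewrite is the payoff of explicit back pointers. The old code walked an entire chain: the prototype field temporarily did double duty as back pointer storage, so finding live-to-dead boundaries meant following the list, restoring each prototype field to the real prototype, and recording slots along the way. The new code asks a map for its single parent and tests one edge; the old ASSERT's invariant that a dead map never sits above a live one makes that one test sufficient. Distilled into a standalone helper using only the calls visible in this hunk (the helper itself is illustrative):

  void ClearDeadTransitionEdge(Heap* heap, Map* map, MarkBit map_mark) {
    Object* potential_parent = map->GetBackPointer();
    if (!potential_parent->IsMap()) return;  // Root map: no parent edge.
    Map* parent = Map::cast(potential_parent);

    // A transition dies exactly when a live parent still points at a dead
    // child; only then must the parent's transition entries be pruned.
    // A dead parent needs no work (it is swept along with its entries),
    // and a live child keeps the edge alive.
    if (!map_mark.Get() && Marking::MarkBitFrom(parent).Get()) {
      parent->ClearNonLiveTransitions(heap);
    }
  }
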
2580 | 2541 |
2581 | 2542 |
2582 void MarkCompactCollector::ProcessWeakMaps() { | 2543 void MarkCompactCollector::ProcessWeakMaps() { |
2583 Object* weak_map_obj = encountered_weak_maps(); | 2544 Object* weak_map_obj = encountered_weak_maps(); |
2584 while (weak_map_obj != Smi::FromInt(0)) { | 2545 while (weak_map_obj != Smi::FromInt(0)) { |
2585 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2546 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); |
2586 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2547 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
2587 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2548 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); |
(...skipping 1547 matching lines...)
4135 while (buffer != NULL) { | 4096 while (buffer != NULL) { |
4136 SlotsBuffer* next_buffer = buffer->next(); | 4097 SlotsBuffer* next_buffer = buffer->next(); |
4137 DeallocateBuffer(buffer); | 4098 DeallocateBuffer(buffer); |
4138 buffer = next_buffer; | 4099 buffer = next_buffer; |
4139 } | 4100 } |
4140 *buffer_address = NULL; | 4101 *buffer_address = NULL; |
4141 } | 4102 } |
4142 | 4103 |
4143 | 4104 |
4144 } } // namespace v8::internal | 4105 } } // namespace v8::internal |