Chromium Code Reviews

Issue 10386046: Implement map collection for incremental marking. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright | 
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. | 
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above | 
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following | 
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided | 
| (...skipping 639 matching lines...) | |
| 650 // TODO(gc) hurry can mark objects it encounters black as mutator | 650 // TODO(gc) hurry can mark objects it encounters black as mutator | 
| 651 // was stopped. | 651 // was stopped. | 
| 652 Map* filler_map = heap_->one_pointer_filler_map(); | 652 Map* filler_map = heap_->one_pointer_filler_map(); | 
| 653 Map* global_context_map = heap_->global_context_map(); | 653 Map* global_context_map = heap_->global_context_map(); | 
| 654 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 654 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 
| 655 while (!marking_deque_.IsEmpty()) { | 655 while (!marking_deque_.IsEmpty()) { | 
| 656 HeapObject* obj = marking_deque_.Pop(); | 656 HeapObject* obj = marking_deque_.Pop(); | 
| 657 | 657 | 
| 658 // Explicitly skip one word fillers. Incremental markbit patterns are | 658 // Explicitly skip one word fillers. Incremental markbit patterns are | 
| 659 // correct only for objects that occupy at least two words. | 659 // correct only for objects that occupy at least two words. | 
| 660 Map* map = obj->map(); | 660 Map* map = obj->map(); | 

> **Vyacheslav Egorov (Chromium)**, 2012/05/11 12:58:35: Hurry should also treat maps in a special way.
>
> **Michael Starzinger**, 2012/05/11 14:51:53: Done.

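The fix landed only in a later patch set, which Rietveld does not show in this excerpt. For context: Hurry() drains the marking deque with the same pop-and-visit loop as Step(), so without a map special case it would visit a Map's transition and back-pointer fields as strong references. A hedged sketch of how the Hurry() loop could mirror the MAP_TYPE handling that this patch adds to Step() below; all identifiers are taken from this patch, and the actual follow-up code may differ:

```cpp
// Sketch only: mirrors the MAP_TYPE case added to Step() in this patch.
// Inside the Hurry() while-loop, after the one-word-filler and
// global-context checks:
} else if (map->instance_type() == MAP_TYPE) {
  Map* map_object = Map::cast(obj);
  heap_->ClearCacheOnMap(map_object);
  if (FLAG_collect_maps &&
      map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
    // Keep transitions and back pointers weak: visit all pointer fields
    // except the last one, which holds the back pointer or the prototype
    // transitions array.
    heap_->mark_compact_collector()->MarkWeakMapContents(map_object);
    marking_visitor.VisitPointers(
        HeapObject::RawField(map_object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(
            map_object, Map::kPrototypeTransitionsOrBackPointerOffset));
  } else {
    marking_visitor.VisitPointers(
        HeapObject::RawField(map_object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(map_object, Map::kPointerFieldsEndOffset));
  }
} else {
  obj->Iterate(&marking_visitor);
}
```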
| 661 if (map == filler_map) { | 661 if (map == filler_map) { | 
| 662 continue; | 662 continue; | 
| 663 } else if (map == global_context_map) { | 663 } else if (map == global_context_map) { | 
| 664 // Global contexts have weak fields. | 664 // Global contexts have weak fields. | 
| 665 VisitGlobalContext(Context::cast(obj), &marking_visitor); | 665 VisitGlobalContext(Context::cast(obj), &marking_visitor); | 
| 666 } else { | 666 } else { | 
| 667 obj->Iterate(&marking_visitor); | 667 obj->Iterate(&marking_visitor); | 
| 668 } | 668 } | 
| 669 | 669 | 
| 670 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 670 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 
| (...skipping 129 matching lines...) | |
| 800 Map* global_context_map = heap_->global_context_map(); | 800 Map* global_context_map = heap_->global_context_map(); | 
| 801 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 801 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 
| 802 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { | 802 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { | 
| 803 HeapObject* obj = marking_deque_.Pop(); | 803 HeapObject* obj = marking_deque_.Pop(); | 
| 804 | 804 | 
| 805 // Explicitly skip one word fillers. Incremental markbit patterns are | 805 // Explicitly skip one word fillers. Incremental markbit patterns are | 
| 806 // correct only for objects that occupy at least two words. | 806 // correct only for objects that occupy at least two words. | 
| 807 Map* map = obj->map(); | 807 Map* map = obj->map(); | 
| 808 if (map == filler_map) continue; | 808 if (map == filler_map) continue; | 
| 809 | 809 | 
| 810 if (obj->IsMap()) { | |
| 811 Map* map = Map::cast(obj); | |
| 812 heap_->ClearCacheOnMap(map); | |
| 813 } | |
| 814 | |
| 815 | |
| 816 int size = obj->SizeFromMap(map); | 810 int size = obj->SizeFromMap(map); | 
| 817 bytes_to_process -= size; | 811 bytes_to_process -= size; | 
| 818 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 812 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 
| 819 if (Marking::IsWhite(map_mark_bit)) { | 813 if (Marking::IsWhite(map_mark_bit)) { | 
| 820 WhiteToGreyAndPush(map, map_mark_bit); | 814 WhiteToGreyAndPush(map, map_mark_bit); | 
| 821 } | 815 } | 
| 822 | 816 | 
| 823 // TODO(gc) switch to static visitor instead of normal visitor. | 817 // TODO(gc) switch to static visitor instead of normal visitor. | 
| 824 if (map == global_context_map) { | 818 if (map == global_context_map) { | 
| 825 // Global contexts have weak fields. | 819 // Global contexts have weak fields. | 
| 826 Context* ctx = Context::cast(obj); | 820 Context* ctx = Context::cast(obj); | 
| 827 | 821 | 
| 828 // We will mark cache black with a separate pass | 822 // We will mark cache black with a separate pass | 
| 829 // when we finish marking. | 823 // when we finish marking. | 
| 830 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); | 824 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); | 
| 831 | 825 | 
| 832 VisitGlobalContext(ctx, &marking_visitor); | 826 VisitGlobalContext(ctx, &marking_visitor); | 
| | 827 } else if (map->instance_type() == JS_MAP_TYPE) { |

> **Vyacheslav Egorov (Chromium)**, 2012/05/11 12:58:35: s/JS_MAP_TYPE/MAP_TYPE/
>
> **Michael Starzinger**, 2012/05/11 14:51:53: Already fixed in second patch set.

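For context on the rename: in V8's instance-type enum, MAP_TYPE is the instance type of `v8::internal::Map` heap objects themselves, while JS_MAP_TYPE denotes JavaScript Map collection instances, so the condition as written would never match heap maps. The corrected check is equivalent to the `obj->IsMap()` test used by the old hoisted block in the left column; a minimal contrast, assuming that reading of the enum:

```cpp
// Likely never true for heap maps: JS_MAP_TYPE matches JavaScript
// Map collection objects, not v8::internal::Map instances.
if (map->instance_type() == JS_MAP_TYPE) { /* ... */ }

// Intended check: obj is itself a Map, i.e. the same objects that
// obj->IsMap() matched in the removed hoisted check.
if (map->instance_type() == MAP_TYPE) { /* ... */ }
```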
| | 828 Map* map = Map::cast(obj); |
| | 829 heap_->ClearCacheOnMap(map); |
| | 830 |
| | 831 // When map collection is enabled we have to mark through map's |
| | 832 // transitions and back pointers in a special way to make these links |
| | 833 // weak. Only maps for subclasses of JSReceiver can have transitions. |
| | 834 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
| | 835 if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
| | 836 heap_->mark_compact_collector()->MarkWeakMapContents(map); |
| | 837 |
| | 838 // Mark the Object* fields of the Map. Since the descriptor array has |
| | 839 // been marked already, it is fine that one of these fields contains a |
| | 840 // pointer to it. But make sure to skip back pointer and prototype |
| | 841 // transitions. |
| | 842 STATIC_ASSERT(Map::kPointerFieldsEndOffset == |
| | 843 Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize); |
| | 844 marking_visitor.VisitPointers( |
| | 845 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
| | 846 HeapObject::RawField( |
| | 847 map, Map::kPrototypeTransitionsOrBackPointerOffset)); |
| | 848 } else { |
| | 849 marking_visitor.VisitPointers( |
| | 850 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
| | 851 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); |
| | 852 } |
| 833 } else if (map->instance_type() == JS_FUNCTION_TYPE) { | 853 } else if (map->instance_type() == JS_FUNCTION_TYPE) { | 
| 834 marking_visitor.VisitPointers( | 854 marking_visitor.VisitPointers( | 
| 835 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), | 855 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), | 
| 836 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); | 856 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); | 
| 837 | 857 | 
| 838 marking_visitor.VisitCodeEntry( | 858 marking_visitor.VisitCodeEntry( | 
| 839 obj->address() + JSFunction::kCodeEntryOffset); | 859 obj->address() + JSFunction::kCodeEntryOffset); | 
| 840 | 860 | 
| 841 marking_visitor.VisitPointers( | 861 marking_visitor.VisitPointers( | 
| 842 HeapObject::RawField(obj, | 862 HeapObject::RawField(obj, | 
| (...skipping 105 matching lines...) | |
| 948 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 968 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 
| 949 bytes_scanned_ = 0; | 969 bytes_scanned_ = 0; | 
| 950 } | 970 } | 
| 951 | 971 | 
| 952 | 972 | 
| 953 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 973 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 
| 954 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 974 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 
| 955 } | 975 } | 
| 956 | 976 | 
| 957 } } // namespace v8::internal | 977 } } // namespace v8::internal | 