OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
(...skipping 23 matching lines...) |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 | 39 |
40 IncrementalMarking::IncrementalMarking(Heap* heap) | 40 IncrementalMarking::IncrementalMarking(Heap* heap) |
41 : heap_(heap), | 41 : heap_(heap), |
42 state_(STOPPED), | 42 state_(STOPPED), |
43 marking_deque_memory_(NULL), | 43 marking_deque_memory_(NULL), |
44 marking_deque_memory_committed_(false), | 44 marking_deque_memory_committed_(false), |
45 marker_(this, heap->mark_compact_collector()), | |
46 steps_count_(0), | 45 steps_count_(0), |
47 steps_took_(0), | 46 steps_took_(0), |
48 longest_step_(0.0), | 47 longest_step_(0.0), |
49 old_generation_space_available_at_start_of_incremental_(0), | 48 old_generation_space_available_at_start_of_incremental_(0), |
50 old_generation_space_used_at_start_of_incremental_(0), | 49 old_generation_space_used_at_start_of_incremental_(0), |
51 steps_count_since_last_gc_(0), | 50 steps_count_since_last_gc_(0), |
52 steps_took_since_last_gc_(0), | 51 steps_took_since_last_gc_(0), |
53 should_hurry_(false), | 52 should_hurry_(false), |
54 allocation_marking_factor_(0), | 53 allocation_marking_factor_(0), |
55 allocated_(0), | 54 allocated_(0), |
(...skipping 601 matching lines...) |
657 HeapObject* obj = marking_deque_.Pop(); | 656 HeapObject* obj = marking_deque_.Pop(); |
658 | 657 |
659 // Explicitly skip one word fillers. Incremental markbit patterns are | 658 // Explicitly skip one word fillers. Incremental markbit patterns are |
660 // correct only for objects that occupy at least two words. | 659 // correct only for objects that occupy at least two words. |
661 Map* map = obj->map(); | 660 Map* map = obj->map(); |
662 if (map == filler_map) { | 661 if (map == filler_map) { |
663 continue; | 662 continue; |
664 } else if (map == global_context_map) { | 663 } else if (map == global_context_map) { |
665 // Global contexts have weak fields. | 664 // Global contexts have weak fields. |
666 VisitGlobalContext(Context::cast(obj), &marking_visitor); | 665 VisitGlobalContext(Context::cast(obj), &marking_visitor); |
667 } else if (map->instance_type() == MAP_TYPE) { | |
668 Map* map = Map::cast(obj); | |
669 heap_->ClearCacheOnMap(map); | |
670 | |
671 // When map collection is enabled we have to mark through map's | |
672 // transitions and back pointers in a special way to make these links | |
673 // weak. Only maps for subclasses of JSReceiver can have transitions. | |
674 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
675 if (FLAG_collect_maps && | |
676 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | |
677 marker_.MarkMapContents(map); | |
678 } else { | |
679 marking_visitor.VisitPointers( | |
680 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | |
681 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | |
682 } | |
683 } else { | 666 } else { |
684 obj->Iterate(&marking_visitor); | 667 obj->Iterate(&marking_visitor); |
685 } | 668 } |
686 | 669 |
687 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 670 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
688 ASSERT(!Marking::IsBlack(mark_bit)); | 671 ASSERT(!Marking::IsBlack(mark_bit)); |
689 Marking::MarkBlack(mark_bit); | 672 Marking::MarkBlack(mark_bit); |
690 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); | 673 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); |
691 } | 674 } |
692 state_ = COMPLETE; | 675 state_ = COMPLETE; |
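(Note) The hunk above is the completion path: the loop drains the marking deque in one go, skipping one-word fillers (whose incremental markbit patterns are unreliable, per the comment) and giving global contexts their weak-field treatment, then flips every scanned object to black. A minimal sketch of that tri-color discipline, using illustrative stand-in types rather than V8's bitmap-backed Marking/MarkBit API:

  #include <deque>
  #include <vector>

  // WHITE = unvisited, GREY = queued for scanning, BLACK = scanned.
  enum class Color { WHITE, GREY, BLACK };

  struct Object {
    Color color = Color::WHITE;
    std::vector<Object*> children;  // stand-in for tagged pointer fields
  };

  // Drain the deque to completion, as the loop above does: pop a grey
  // object, grey its white children, then blacken it.
  void DrainMarkingDeque(std::deque<Object*>* deque) {
    while (!deque->empty()) {
      Object* obj = deque->back();
      deque->pop_back();
      for (Object* child : obj->children) {
        if (child->color == Color::WHITE) {  // cf. WhiteToGreyAndPush
          child->color = Color::GREY;
          deque->push_back(child);
        }
      }
      obj->color = Color::BLACK;             // cf. Marking::MarkBlack
    }
  }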
(...skipping 124 matching lines...) |
817 Map* global_context_map = heap_->global_context_map(); | 800 Map* global_context_map = heap_->global_context_map(); |
818 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 801 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); |
819 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { | 802 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { |
820 HeapObject* obj = marking_deque_.Pop(); | 803 HeapObject* obj = marking_deque_.Pop(); |
821 | 804 |
822 // Explicitly skip one word fillers. Incremental markbit patterns are | 805 // Explicitly skip one word fillers. Incremental markbit patterns are |
823 // correct only for objects that occupy at least two words. | 806 // correct only for objects that occupy at least two words. |
824 Map* map = obj->map(); | 807 Map* map = obj->map(); |
825 if (map == filler_map) continue; | 808 if (map == filler_map) continue; |
826 | 809 |
| 810 if (obj->IsMap()) { |
| 811 Map* map = Map::cast(obj); |
| 812 heap_->ClearCacheOnMap(map); |
| 813 } |
| 814 |
| 815 |
827 int size = obj->SizeFromMap(map); | 816 int size = obj->SizeFromMap(map); |
828 bytes_to_process -= size; | 817 bytes_to_process -= size; |
829 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 818 MarkBit map_mark_bit = Marking::MarkBitFrom(map); |
830 if (Marking::IsWhite(map_mark_bit)) { | 819 if (Marking::IsWhite(map_mark_bit)) { |
831 WhiteToGreyAndPush(map, map_mark_bit); | 820 WhiteToGreyAndPush(map, map_mark_bit); |
832 } | 821 } |
833 | 822 |
834 // TODO(gc) switch to static visitor instead of normal visitor. | 823 // TODO(gc) switch to static visitor instead of normal visitor. |
835 if (map == global_context_map) { | 824 if (map == global_context_map) { |
836 // Global contexts have weak fields. | 825 // Global contexts have weak fields. |
837 Context* ctx = Context::cast(obj); | 826 Context* ctx = Context::cast(obj); |
838 | 827 |
839 // We will mark cache black with a separate pass | 828 // We will mark cache black with a separate pass |
840 // when we finish marking. | 829 // when we finish marking. |
841 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); | 830 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); |
842 | 831 |
843 VisitGlobalContext(ctx, &marking_visitor); | 832 VisitGlobalContext(ctx, &marking_visitor); |
844 } else if (map->instance_type() == MAP_TYPE) { | |
845 Map* map = Map::cast(obj); | |
846 heap_->ClearCacheOnMap(map); | |
847 | |
848 // When map collection is enabled we have to mark through map's | |
849 // transitions and back pointers in a special way to make these links | |
850 // weak. Only maps for subclasses of JSReceiver can have transitions. | |
851 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
852 if (FLAG_collect_maps && | |
853 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | |
854 marker_.MarkMapContents(map); | |
855 } else { | |
856 marking_visitor.VisitPointers( | |
857 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | |
858 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | |
859 } | |
860 } else if (map->instance_type() == JS_FUNCTION_TYPE) { | 833 } else if (map->instance_type() == JS_FUNCTION_TYPE) { |
861 marking_visitor.VisitPointers( | 834 marking_visitor.VisitPointers( |
862 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), | 835 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), |
863 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); | 836 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); |
864 | 837 |
865 marking_visitor.VisitCodeEntry( | 838 marking_visitor.VisitCodeEntry( |
866 obj->address() + JSFunction::kCodeEntryOffset); | 839 obj->address() + JSFunction::kCodeEntryOffset); |
867 | 840 |
868 marking_visitor.VisitPointers( | 841 marking_visitor.VisitPointers( |
869 HeapObject::RawField(obj, | 842 HeapObject::RawField(obj, |
(...skipping 105 matching lines...) |
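(Note) The Step() hunk above is the budgeted variant of the same drain: each popped object is charged against bytes_to_process via obj->SizeFromMap(map), the object's own map is greyed so maps stay live, and global contexts and JS functions get specialized scans. A minimal sketch of just the budgeting skeleton, again with illustrative types (the child-greying loop stands in for the visitor calls):

  #include <cstdint>
  #include <deque>
  #include <vector>

  struct Obj {
    int size;                        // cf. obj->SizeFromMap(map)
    bool marked = false;
    std::vector<Obj*> children;
  };

  // One incremental step: scan grey objects until the byte budget is
  // spent, then return to the mutator. Leftover grey objects stay on
  // the deque for the next step.
  void MarkingStep(std::deque<Obj*>* deque, int64_t bytes_to_process) {
    while (!deque->empty() && bytes_to_process > 0) {
      Obj* obj = deque->back();
      deque->pop_back();
      bytes_to_process -= obj->size;   // charge the budget
      for (Obj* child : obj->children) {
        if (!child->marked) {          // white -> grey
          child->marked = true;
          deque->push_back(child);
        }
      }
    }
  }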
975 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 948 allocation_marking_factor_ = kInitialAllocationMarkingFactor; |
976 bytes_scanned_ = 0; | 949 bytes_scanned_ = 0; |
977 } | 950 } |
978 | 951 |
979 | 952 |
980 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 953 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
981 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 954 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); |
982 } | 955 } |
983 | 956 |
984 } } // namespace v8::internal | 957 } } // namespace v8::internal |
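(Note) ResetStepCounters() above reseeds allocation_marking_factor_ with kInitialAllocationMarkingFactor, and SpaceLeftInOldSpace() reports the old-generation headroom that pacing decisions draw on. A hedged sketch of how such a factor can drive step size; the threshold, trigger point, and exact formula are assumptions here, since the code that actually calls Step() is outside this excerpt:

  #include <cstdint>

  class MarkingPacer {
   public:
    explicit MarkingPacer(int factor) : factor_(factor) {}

    // Called as the mutator allocates; once enough bytes accumulate,
    // mark proportionally more than was allocated so marking can
    // finish before the heap fills up.
    void AccountAllocated(int64_t bytes) {
      allocated_ += bytes;
      if (allocated_ >= kAllocatedThreshold) {
        Step(allocated_ * factor_);  // hypothetical trigger
        allocated_ = 0;
      }
    }

   private:
    static const int64_t kAllocatedThreshold = 64 * 1024;  // assumed value
    void Step(int64_t bytes_to_process) { /* budgeted drain, as above */ }
    int64_t allocated_ = 0;
    int factor_;
  };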