Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(565)

Side by Side Diff: src/incremental-marking.cc

Issue 10959011: Allow partial scanning of large arrays in order to avoid mark stack overflow (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
185 Heap* heap = map->GetHeap(); 185 Heap* heap = map->GetHeap();
186 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); 186 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
187 if (shared->ic_age() != heap->global_ic_age()) { 187 if (shared->ic_age() != heap->global_ic_age()) {
188 shared->ResetForNewContext(heap->global_ic_age()); 188 shared->ResetForNewContext(heap->global_ic_age());
189 } 189 }
190 FixedBodyVisitor<IncrementalMarkingMarkingVisitor, 190 FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
191 SharedFunctionInfo::BodyDescriptor, 191 SharedFunctionInfo::BodyDescriptor,
192 void>::Visit(map, object); 192 void>::Visit(map, object);
193 } 193 }
194 194
195 static const int kScanningChunk = 32 * 1024;
196
197 static int VisitHugeArray(FixedArray* array) {
198 Heap* heap = array->GetHeap();
199 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
200 Object** start = array->data_start();
201 int length = array->length();
202
203 if (chunk->owner()->identity() != LO_SPACE) {
204 VisitPointers(heap, start, start + length);
205 return length;
206 }
207
208 int from =
209 chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
210 int to = Min(from + kScanningChunk, length);
211
212 VisitPointers(heap, start + from, start + to);
213
214 if (to == length) {
215 // If it went from black to grey while it was waiting for the next bit to
216 // be scanned then we have to start the scan again.
217 MarkBit mark_bit = Marking::MarkBitFrom(array);
218 if (!Marking::IsBlack(mark_bit)) {
219 ASSERT(Marking::IsGrey(mark_bit));
220 chunk->SetPartiallyScannedProgress(0);
221 } else {
222 chunk->SetCompletelyScanned();
223 }
224 } else {
225 chunk->SetPartiallyScannedProgress(to);
226 }
227 return to - from;
228 }
229
195 static inline void VisitJSFunction(Map* map, HeapObject* object) { 230 static inline void VisitJSFunction(Map* map, HeapObject* object) {
196 Heap* heap = map->GetHeap(); 231 Heap* heap = map->GetHeap();
197 // Iterate over all fields in the body but take care in dealing with 232 // Iterate over all fields in the body but take care in dealing with
198 // the code entry and skip weak fields. 233 // the code entry and skip weak fields.
199 VisitPointers(heap, 234 VisitPointers(heap,
200 HeapObject::RawField(object, JSFunction::kPropertiesOffset), 235 HeapObject::RawField(object, JSFunction::kPropertiesOffset),
201 HeapObject::RawField(object, JSFunction::kCodeEntryOffset)); 236 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
202 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); 237 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
203 VisitPointers(heap, 238 VisitPointers(heap,
204 HeapObject::RawField(object, 239 HeapObject::RawField(object,
(...skipping 409 matching lines...) Expand 10 before | Expand all | Expand 10 after
614 } 649 }
615 } else if (obj->map() != filler_map) { 650 } else if (obj->map() != filler_map) {
616 // Skip one word filler objects that appear on the 651 // Skip one word filler objects that appear on the
617 // stack when we perform in place array shift. 652 // stack when we perform in place array shift.
618 array[new_top] = obj; 653 array[new_top] = obj;
619 new_top = ((new_top + 1) & mask); 654 new_top = ((new_top + 1) & mask);
620 ASSERT(new_top != marking_deque_.bottom()); 655 ASSERT(new_top != marking_deque_.bottom());
621 #ifdef DEBUG 656 #ifdef DEBUG
622 MarkBit mark_bit = Marking::MarkBitFrom(obj); 657 MarkBit mark_bit = Marking::MarkBitFrom(obj);
623 ASSERT(Marking::IsGrey(mark_bit) || 658 ASSERT(Marking::IsGrey(mark_bit) ||
624 (obj->IsFiller() && Marking::IsWhite(mark_bit))); 659 (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
660 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
625 #endif 661 #endif
626 } 662 }
627 } 663 }
628 marking_deque_.set_top(new_top); 664 marking_deque_.set_top(new_top);
629 665
630 steps_took_since_last_gc_ = 0; 666 steps_took_since_last_gc_ = 0;
631 steps_count_since_last_gc_ = 0; 667 steps_count_since_last_gc_ = 0;
632 longest_step_ = 0.0; 668 longest_step_ = 0.0;
633 } 669 }
634 670
635 671
672 void IncrementalMarking::FillMarkingDequeFromLargePostponedArrays() {
673 ASSERT(marking_deque_.IsEmpty());
674 LargeObjectIterator it(heap_->lo_space());
675 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
676 if (!obj->IsFixedArray()) continue;
677 MemoryChunk* p = MemoryChunk::FromAddress(obj->address());
678 if (p->IsPartiallyScanned()) {
679 marking_deque_.PushGrey(obj);
680 }
681 }
682 }
683
684
636 void IncrementalMarking::Hurry() { 685 void IncrementalMarking::Hurry() {
637 if (state() == MARKING) { 686 if (state() == MARKING) {
638 double start = 0.0; 687 double start = 0.0;
639 if (FLAG_trace_incremental_marking) { 688 if (FLAG_trace_incremental_marking) {
640 PrintF("[IncrementalMarking] Hurry\n"); 689 PrintF("[IncrementalMarking] Hurry\n");
641 start = OS::TimeCurrentMillis(); 690 start = OS::TimeCurrentMillis();
642 } 691 }
643 // TODO(gc) hurry can mark objects it encounters black as mutator 692 // TODO(gc) hurry can mark objects it encounters black as mutator
644 // was stopped. 693 // was stopped.
645 Map* filler_map = heap_->one_pointer_filler_map(); 694 Map* filler_map = heap_->one_pointer_filler_map();
646 Map* native_context_map = heap_->native_context_map(); 695 Map* native_context_map = heap_->native_context_map();
647 while (!marking_deque_.IsEmpty()) { 696 do {
648 HeapObject* obj = marking_deque_.Pop(); 697 while (!marking_deque_.IsEmpty()) {
698 HeapObject* obj = marking_deque_.Pop();
649 699
650 // Explicitly skip one word fillers. Incremental markbit patterns are 700 // Explicitly skip one word fillers. Incremental markbit patterns are
651 // correct only for objects that occupy at least two words. 701 // correct only for objects that occupy at least two words.
652 Map* map = obj->map(); 702 Map* map = obj->map();
653 if (map == filler_map) { 703 if (map == filler_map) {
654 continue; 704 continue;
655 } else if (map == native_context_map) { 705 } else if (map == native_context_map) {
656 // Native contexts have weak fields. 706 // Native contexts have weak fields.
657 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj); 707 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
658 } else { 708 ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
659 MarkBit map_mark_bit = Marking::MarkBitFrom(map); 709 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
660 if (Marking::IsWhite(map_mark_bit)) { 710 } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
661 WhiteToGreyAndPush(map, map_mark_bit); 711 FixedArray::cast(obj)->length() >
712 IncrementalMarkingMarkingVisitor::kScanningChunk) {
713 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
714 if (Marking::IsWhite(map_mark_bit)) {
715 WhiteToGreyAndPush(map, map_mark_bit);
716 }
717 MarkBit mark_bit = Marking::MarkBitFrom(obj);
718 if (!Marking::IsBlack(mark_bit)) {
719 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
720 } else {
721 ASSERT(
722 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
723 }
724 IncrementalMarkingMarkingVisitor::VisitHugeArray(
725 FixedArray::cast(obj));
726 } else {
727 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
728 if (Marking::IsWhite(map_mark_bit)) {
729 WhiteToGreyAndPush(map, map_mark_bit);
730 }
731 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
732 ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
733 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
662 } 734 }
663 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); 735
736 MarkBit mark_bit = Marking::MarkBitFrom(obj);
737 Marking::MarkBlack(mark_bit);
664 } 738 }
665 739 state_ = COMPLETE;
666 MarkBit mark_bit = Marking::MarkBitFrom(obj); 740 if (FLAG_trace_incremental_marking) {
667 ASSERT(!Marking::IsBlack(mark_bit)); 741 double end = OS::TimeCurrentMillis();
668 Marking::MarkBlack(mark_bit); 742 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
669 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); 743 static_cast<int>(end - start));
670 } 744 }
671 state_ = COMPLETE; 745 FillMarkingDequeFromLargePostponedArrays();
672 if (FLAG_trace_incremental_marking) { 746 } while (!marking_deque_.IsEmpty());
673 double end = OS::TimeCurrentMillis();
674 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
675 static_cast<int>(end - start));
676 }
677 } 747 }
678 748
679 if (FLAG_cleanup_code_caches_at_gc) { 749 if (FLAG_cleanup_code_caches_at_gc) {
680 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache(); 750 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
681 Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache)); 751 Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
682 MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(), 752 MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
683 PolymorphicCodeCache::kSize); 753 PolymorphicCodeCache::kSize);
684 } 754 }
685 755
686 Object* context = heap_->native_contexts_list(); 756 Object* context = heap_->native_contexts_list();
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
787 } 857 }
788 858
789 if (state_ == SWEEPING) { 859 if (state_ == SWEEPING) {
790 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) { 860 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
791 bytes_scanned_ = 0; 861 bytes_scanned_ = 0;
792 StartMarking(PREVENT_COMPACTION); 862 StartMarking(PREVENT_COMPACTION);
793 } 863 }
794 } else if (state_ == MARKING) { 864 } else if (state_ == MARKING) {
795 Map* filler_map = heap_->one_pointer_filler_map(); 865 Map* filler_map = heap_->one_pointer_filler_map();
796 Map* native_context_map = heap_->native_context_map(); 866 Map* native_context_map = heap_->native_context_map();
797 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { 867 while (true) {
798 HeapObject* obj = marking_deque_.Pop(); 868 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
869 HeapObject* obj = marking_deque_.Pop();
799 870
800 // Explicitly skip one word fillers. Incremental markbit patterns are 871 // Explicitly skip one word fillers. Incremental markbit patterns are
801 // correct only for objects that occupy at least two words. 872 // correct only for objects that occupy at least two words.
802 Map* map = obj->map(); 873 Map* map = obj->map();
803 if (map == filler_map) continue; 874 if (map == filler_map) continue;
804 875
805 int size = obj->SizeFromMap(map); 876 int size = obj->SizeFromMap(map);
806 bytes_to_process -= size; 877 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
807 MarkBit map_mark_bit = Marking::MarkBitFrom(map); 878 if (Marking::IsWhite(map_mark_bit)) {
808 if (Marking::IsWhite(map_mark_bit)) { 879 WhiteToGreyAndPush(map, map_mark_bit);
809 WhiteToGreyAndPush(map, map_mark_bit); 880 }
881
882 // TODO(gc) switch to static visitor instead of normal visitor.
883 if (map == native_context_map) {
884 // Native contexts have weak fields.
885 Context* ctx = Context::cast(obj);
886
887 // We will mark cache black with a separate pass
888 // when we finish marking.
889 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
890
891 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
892 bytes_to_process -= size;
893 SLOW_ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
894 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
895 } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
896 FixedArray::cast(obj)->length() >
897 IncrementalMarkingMarkingVisitor::kScanningChunk) {
898 SLOW_ASSERT(
899 Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
900 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
901 bytes_to_process -=
902 IncrementalMarkingMarkingVisitor::VisitHugeArray(
903 FixedArray::cast(obj));
904 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
905 if (!Marking::IsBlack(obj_mark_bit)) {
906 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
907 }
908 } else {
909 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
910 bytes_to_process -= size;
911 SLOW_ASSERT(
912 Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
913 (obj->IsFiller() && Marking::IsWhite(Marking::MarkBitFrom(obj))));
914 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
915 }
916
917 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
918 Marking::MarkBlack(obj_mark_bit);
810 } 919 }
811 920 if (marking_deque_.IsEmpty()) {
812 // TODO(gc) switch to static visitor instead of normal visitor. 921 FillMarkingDequeFromLargePostponedArrays();
813 if (map == native_context_map) { 922 if (marking_deque_.IsEmpty()) {
814 // Native contexts have weak fields. 923 MarkingComplete(action);
815 Context* ctx = Context::cast(obj); 924 break;
816 925 }
817 // We will mark cache black with a separate pass 926 } else {
818 // when we finish marking. 927 ASSERT(bytes_to_process <= 0);
819 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); 928 break;
820 929 }
821 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
822 } else {
823 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
824 }
825
826 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
827 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
828 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
829 Marking::MarkBlack(obj_mark_bit);
830 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
831 } 930 }
832 if (marking_deque_.IsEmpty()) MarkingComplete(action);
833 } 931 }
834 932
835 allocated_ = 0; 933 allocated_ = 0;
836 934
837 steps_count_++; 935 steps_count_++;
838 steps_count_since_last_gc_++; 936 steps_count_since_last_gc_++;
839 937
840 bool speed_up = false; 938 bool speed_up = false;
841 939
842 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) { 940 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
924 allocation_marking_factor_ = kInitialAllocationMarkingFactor; 1022 allocation_marking_factor_ = kInitialAllocationMarkingFactor;
925 bytes_scanned_ = 0; 1023 bytes_scanned_ = 0;
926 } 1024 }
927 1025
928 1026
929 int64_t IncrementalMarking::SpaceLeftInOldSpace() { 1027 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
930 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); 1028 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
931 } 1029 }
932 1030
933 } } // namespace v8::internal 1031 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/incremental-marking.h ('k') | src/mark-compact.h » ('j') | src/mark-compact.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698