Chromium Code Reviews

Unified diff: src/incremental-marking.cc

Issue 10996018: Allow partial scanning of large arrays in order to avoid (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 2 months ago
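
The patch lets the incremental marker scan large FixedArrays piecewise: the new VisitHugeArray (below) processes at most kScanningChunk slots per call, records its progress on the array's page, and returns how much it scanned, so a single marking step no longer traverses a multi-megabyte array in one pause. Before the diff itself, here is a minimal standalone sketch of that resumable-scan pattern; Slot, ScanState, VisitSlot and ScanHugeArrayIncrement are invented stand-ins for illustration, not V8 APIs.

#include <algorithm>
#include <cstddef>
#include <vector>

// Invented stand-ins: a slot is one pointer-sized field of an object.
typedef void* Slot;
static void VisitSlot(Slot) {}  // Placeholder for the marker's real visit.

// Progress that survives between increments (V8 keeps this on the page).
struct ScanState {
  std::size_t progress;    // First slot index not yet scanned.
  bool partially_scanned;  // True if a previous increment stopped early.
};

static const std::size_t kScanningChunk = 32 * 1024;  // Slots per increment.

// Scans at most kScanningChunk slots, records where to resume, and returns
// the amount of work done so the caller can charge it against a budget.
std::size_t ScanHugeArrayIncrement(const std::vector<Slot>& array,
                                   ScanState* state) {
  std::size_t from = state->partially_scanned ? state->progress : 0;
  std::size_t to = std::min(from + kScanningChunk, array.size());
  for (std::size_t i = from; i < to; i++) VisitSlot(array[i]);
  if (to == array.size()) {
    state->partially_scanned = false;  // Finished: no resume point needed.
    state->progress = 0;
  } else {
    state->partially_scanned = true;   // Resume here on the next increment.
    state->progress = to;
  }
  return to - from;
}

Each call does a bounded amount of work and leaves behind just enough state to pick up where it stopped, which is what keeps individual marking pauses short.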
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 158 matching lines...)
 
     table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
 
     table_.Register(kVisitJSFunction, &VisitJSFunction);
 
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
-    VisitPointers(heap,
-                  HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
-                  HeapObject::RawField(object, JSWeakMap::kSize));
+    Object** start = HeapObject::RawField(object, JSWeakMap::kPropertiesOffset);
+    VisitPointers(heap, start, start,
+                  HeapObject::RawField(object, JSWeakMap::kSize));
   }
 
   static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
     SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
     if (shared->ic_age() != heap->global_ic_age()) {
       shared->ResetForNewContext(heap->global_ic_age());
     }
     FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
                      SharedFunctionInfo::BodyDescriptor,
                      void>::Visit(map, object);
   }
 
+  static const int kScanningChunk = 32 * 1024;
+
+  static int VisitHugeArray(FixedArray* array) {
+    Heap* heap = array->GetHeap();
+    MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
+    Object** start = array->data_start();
+    int length = array->length();
+
+    if (chunk->owner()->identity() != LO_SPACE) {
+      VisitPointers(heap, start, start, start + length);
+      return length;
+    }
+
+    int from =
+        chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
+    int to = Min(from + kScanningChunk, length);
+
+    VisitPointers(heap, start, start + from, start + to);
+
+    if (to == length) {
+      // If it went from black to grey while it was waiting for the next bit
+      // to be scanned then we have to start the scan again.
+      MarkBit mark_bit = Marking::MarkBitFrom(array);
+      if (!Marking::IsBlack(mark_bit)) {
+        ASSERT(Marking::IsGrey(mark_bit));
+        chunk->SetPartiallyScannedProgress(0);
+      } else {
+        chunk->SetCompletelyScanned();
+      }
+    } else {
+      chunk->SetPartiallyScannedProgress(to);
+    }
+    return to - from;
+  }
+
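
VisitHugeArray above relies on a progress-tracking API on MemoryChunk (IsPartiallyScanned, PartiallyScannedProgress, SetPartiallyScannedProgress, SetCompletelyScanned) that is defined in other files of this CL and not visible in this diff. As a hypothetical sketch of how such an API could be encoded, assuming a single integer field with a sentinel value (the real V8 implementation may differ):

#include <stdint.h>

// Hypothetical stand-in for the MemoryChunk progress API; not V8 code.
class ChunkProgress {
 public:
  ChunkProgress() : progress_(kNotScanned) {}

  bool IsPartiallyScanned() const { return progress_ != kNotScanned; }
  int PartiallyScannedProgress() const { return progress_ - 1; }
  void SetPartiallyScannedProgress(int to) { progress_ = to + 1; }
  void SetCompletelyScanned() { progress_ = kNotScanned; }

 private:
  // 0 is the "not partially scanned" sentinel; other values store the
  // resume index biased by one so that an index of 0 is representable.
  static const int32_t kNotScanned = 0;
  int32_t progress_;
};

Note that SetPartiallyScannedProgress(0) keeps the chunk in the partially-scanned state, which is exactly what VisitHugeArray needs when a black array turns grey again and must be rescanned from the start.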
   static inline void VisitJSFunction(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
     // Iterate over all fields in the body but take care in dealing with
     // the code entry and skip weak fields.
+    Object** start =
+        HeapObject::RawField(object, JSFunction::kPropertiesOffset);
     VisitPointers(heap,
-                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  start,
+                  start,
                   HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
     VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
     VisitPointers(heap,
+                  start,
                   HeapObject::RawField(object,
                                        JSFunction::kCodeEntryOffset + kPointerSize),
                   HeapObject::RawField(object,
                                        JSFunction::kNonWeakFieldsEndOffset));
   }
 
   INLINE(static void VisitPointer(Heap* heap, Object** p)) {
     Object* obj = *p;
     if (obj->NonFailureIsHeapObject()) {
       heap->mark_compact_collector()->RecordSlot(p, p, obj);
       MarkObject(heap, obj);
     }
   }
 
-  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+  INLINE(static void VisitPointers(Heap* heap,
+                                   Object** anchor,
+                                   Object** start,
+                                   Object** end)) {
     for (Object** p = start; p < end; p++) {
       Object* obj = *p;
       if (obj->NonFailureIsHeapObject()) {
-        heap->mark_compact_collector()->RecordSlot(start, p, obj);
+        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
         MarkObject(heap, obj);
       }
     }
   }
 
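
VisitPointers now threads an explicit anchor down to RecordSlot because with partial scanning the visited range no longer necessarily begins at the object's first field: callers such as VisitHugeArray pass the object's first slot separately, so slot recording stays keyed to the object rather than to wherever the current increment happened to start. A toy illustration of the idea follows; this RecordSlot is a stand-in, not V8's implementation.

#include <cstdio>

// A "slot" is one pointer-sized field of a heap object.
typedef void* Slot;

// Stand-in for slot recording: offsets are computed from the object's
// first slot (the anchor), not from the start of the scanned sub-range.
static void RecordSlot(Slot* anchor, Slot* p) {
  std::printf("recorded slot %td of object at %p\n", p - anchor,
              static_cast<void*>(anchor));
}

static void VisitPointers(Slot* anchor, Slot* start, Slot* end) {
  for (Slot* p = start; p < end; p++) RecordSlot(anchor, p);
}

int main() {
  Slot fields[8] = {};
  // A partial scan of the second half: the range starts mid-object, but
  // the anchor still points at the object's first slot.
  VisitPointers(fields, fields + 4, fields + 8);
  return 0;
}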
   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
(...skipping 381 matching lines...)
       }
     } else if (obj->map() != filler_map) {
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       array[new_top] = obj;
       new_top = ((new_top + 1) & mask);
       ASSERT(new_top != marking_deque_.bottom());
 #ifdef DEBUG
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       ASSERT(Marking::IsGrey(mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+             (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+             MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
 #endif
     }
   }
   marking_deque_.set_top(new_top);
 
   steps_took_since_last_gc_ = 0;
   steps_count_since_last_gc_ = 0;
   longest_step_ = 0.0;
 }
 
 
 void IncrementalMarking::Hurry() {
   if (state() == MARKING) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Hurry\n");
       start = OS::TimeCurrentMillis();
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    while (!marking_deque_.IsEmpty()) {
-      HeapObject* obj = marking_deque_.Pop();
-
-      // Explicitly skip one word fillers. Incremental markbit patterns are
-      // correct only for objects that occupy at least two words.
-      Map* map = obj->map();
-      if (map == filler_map) {
-        continue;
-      } else if (map == native_context_map) {
-        // Native contexts have weak fields.
-        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-      } else {
-        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-        if (Marking::IsWhite(map_mark_bit)) {
-          WhiteToGreyAndPush(map, map_mark_bit);
-        }
-        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-      }
-
-      MarkBit mark_bit = Marking::MarkBitFrom(obj);
-      ASSERT(!Marking::IsBlack(mark_bit));
-      Marking::MarkBlack(mark_bit);
-      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-    }
-    state_ = COMPLETE;
-    if (FLAG_trace_incremental_marking) {
-      double end = OS::TimeCurrentMillis();
-      PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
-             static_cast<int>(end - start));
-    }
+    do {
+      while (!marking_deque_.IsEmpty()) {
+        HeapObject* obj = marking_deque_.Pop();
+
+        // Explicitly skip one word fillers. Incremental markbit patterns are
+        // correct only for objects that occupy at least two words.
+        Map* map = obj->map();
+        if (map == filler_map) {
+          continue;
+        } else if (map == native_context_map) {
+          // Native contexts have weak fields.
+          IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
+          ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
+          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+        } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
+                   FixedArray::cast(obj)->length() >
+                       IncrementalMarkingMarkingVisitor::kScanningChunk) {
+          MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+          if (Marking::IsWhite(map_mark_bit)) {
+            WhiteToGreyAndPush(map, map_mark_bit);
+          }
+          MarkBit mark_bit = Marking::MarkBitFrom(obj);
+          if (!Marking::IsBlack(mark_bit)) {
+            MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+          } else {
+            ASSERT(
+                MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
+          }
+          IncrementalMarkingMarkingVisitor::VisitHugeArray(
+              FixedArray::cast(obj));
+        } else {
+          MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+          if (Marking::IsWhite(map_mark_bit)) {
+            WhiteToGreyAndPush(map, map_mark_bit);
+          }
+          IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+          ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
+          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+        }
+
+        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        Marking::MarkBlack(mark_bit);
+      }
+      state_ = COMPLETE;
+      if (FLAG_trace_incremental_marking) {
+        double end = OS::TimeCurrentMillis();
+        PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
+               static_cast<int>(end - start));
+      }
+      MarkCompactCollector::ProcessLargePostponedArrays(heap_, &marking_deque_);
+    } while (!marking_deque_.IsEmpty());
   }
 
   if (FLAG_cleanup_code_caches_at_gc) {
     PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
     Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
     MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
                                           PolymorphicCodeCache::kSize);
   }
 
   Object* context = heap_->native_contexts_list();
(...skipping 100 matching lines...)
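
Hurry() must finish marking completely, but VisitHugeArray only advances a large array by one chunk and may leave it postponed. Hence the new do/while above: drain the deque, let MarkCompactCollector::ProcessLargePostponedArrays() push unfinished large arrays back, and repeat until the deque stays empty. A compilable toy of that control flow, with all names (WorkItem, ScanIncrement, FlushPostponed) invented for illustration:

#include <algorithm>
#include <cstddef>
#include <deque>
#include <vector>

static const int kIncrement = 32 * 1024;  // Work done per scan increment.

struct WorkItem { int remaining; };

// Parking lot for items that could not be finished in one increment,
// standing in for the "postponed arrays" tracked on their pages.
static std::vector<WorkItem*> postponed;

void ScanIncrement(WorkItem* item) {
  item->remaining = std::max(0, item->remaining - kIncrement);
  if (item->remaining > 0) postponed.push_back(item);  // Park the rest.
}

void FlushPostponed(std::deque<WorkItem*>* deque) {
  for (std::size_t i = 0; i < postponed.size(); i++) {
    deque->push_back(postponed[i]);
  }
  postponed.clear();
}

void Hurry(std::deque<WorkItem*>* deque) {
  do {
    while (!deque->empty()) {
      WorkItem* item = deque->front();
      deque->pop_front();
      ScanIncrement(item);
    }
    FlushPostponed(deque);    // Refill with unfinished large items.
  } while (!deque->empty());  // Repeat until nothing was postponed.
}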
   }
 
   if (state_ == SWEEPING) {
     if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
       bytes_scanned_ = 0;
       StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
-      HeapObject* obj = marking_deque_.Pop();
-
-      // Explicitly skip one word fillers. Incremental markbit patterns are
-      // correct only for objects that occupy at least two words.
-      Map* map = obj->map();
-      if (map == filler_map) continue;
-
-      int size = obj->SizeFromMap(map);
-      bytes_to_process -= size;
-      MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-      if (Marking::IsWhite(map_mark_bit)) {
-        WhiteToGreyAndPush(map, map_mark_bit);
-      }
-
-      // TODO(gc) switch to static visitor instead of normal visitor.
-      if (map == native_context_map) {
-        // Native contexts have weak fields.
-        Context* ctx = Context::cast(obj);
-
-        // We will mark cache black with a separate pass
-        // when we finish marking.
-        MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
-
-        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-      } else {
-        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-      }
-
-      MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
-                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
-      Marking::MarkBlack(obj_mark_bit);
-      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-    }
-    if (marking_deque_.IsEmpty()) MarkingComplete(action);
+    while (true) {
+      while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+        HeapObject* obj = marking_deque_.Pop();
+
+        // Explicitly skip one word fillers. Incremental markbit patterns are
+        // correct only for objects that occupy at least two words.
+        Map* map = obj->map();
+        if (map == filler_map) continue;
+
+        int size = obj->SizeFromMap(map);
+        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+        if (Marking::IsWhite(map_mark_bit)) {
+          WhiteToGreyAndPush(map, map_mark_bit);
+        }
+
+        // TODO(gc) switch to static visitor instead of normal visitor.
+        if (map == native_context_map) {
+          // Native contexts have weak fields.
+          Context* ctx = Context::cast(obj);
+
+          // We will mark cache black with a separate pass
+          // when we finish marking.
+          MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
+
+          IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
+          bytes_to_process -= size;
+          SLOW_ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
+          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+        } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
+                   FixedArray::cast(obj)->length() >
+                       IncrementalMarkingMarkingVisitor::kScanningChunk) {
+          SLOW_ASSERT(
+              Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
+              MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
+          bytes_to_process -=
+              IncrementalMarkingMarkingVisitor::VisitHugeArray(
+                  FixedArray::cast(obj));
+          MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+          if (!Marking::IsBlack(obj_mark_bit)) {
+            MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+          }
+        } else {
+          IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+          bytes_to_process -= size;
+          SLOW_ASSERT(
+              Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
+              (obj->IsFiller() && Marking::IsWhite(Marking::MarkBitFrom(obj))));
+          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+        }
+
+        MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+        Marking::MarkBlack(obj_mark_bit);
+      }
+      if (marking_deque_.IsEmpty()) {
+        MarkCompactCollector::ProcessLargePostponedArrays(heap_,
+                                                          &marking_deque_);
+        if (marking_deque_.IsEmpty()) {
+          MarkingComplete(action);
+          break;
+        }
+      } else {
+        ASSERT(bytes_to_process <= 0);
+        break;
+      }
+    }
   }
 
   allocated_ = 0;
 
   steps_count_++;
   steps_count_since_last_gc_++;
 
   bool speed_up = false;
 
   if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
(...skipping 81 matching lines...)
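
The reworked Step() above charges an ordinary object its full size against bytes_to_process, but charges a huge array only for the increment VisitHugeArray actually scanned; the outer while (true) then either flushes postponed arrays back in and completes marking, or exits once the budget is spent. A simplified toy of that budget accounting (invented names; unlike the real patch, the unfinished array is requeued directly rather than parked on its page):

#include <deque>

static const int kChunk = 32 * 1024;  // Work per increment for huge objects.

struct Obj {
  int size;      // Total work this object represents.
  bool huge;     // Scanned piecewise if true.
  int progress;  // Work already done (huge objects only).
};

// Visits one object and returns the amount of work actually performed.
int Visit(Obj* obj, std::deque<Obj*>* deque) {
  if (!obj->huge) return obj->size;
  int left = obj->size - obj->progress;
  int work = left < kChunk ? left : kChunk;
  obj->progress += work;
  if (obj->progress < obj->size) deque->push_back(obj);  // Not finished.
  return work;
}

void Step(std::deque<Obj*>* deque, int bytes_to_process) {
  while (!deque->empty() && bytes_to_process > 0) {
    Obj* obj = deque->front();
    deque->pop_front();
    // A huge object charges only its increment, so one Step never blows
    // its whole budget on a single multi-megabyte array.
    bytes_to_process -= Visit(obj, deque);
  }
}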
   allocation_marking_factor_ = kInitialAllocationMarkingFactor;
   bytes_scanned_ = 0;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }
 
 } }  // namespace v8::internal