Chromium Code Reviews
Side by Side Diff: src/incremental-marking.cc

Issue 10996018: Allow partial scanning of large arrays in order to avoid (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 8 years, 2 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 158 matching lines...)
169 169
170 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); 170 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
171 171
172 table_.Register(kVisitJSFunction, &VisitJSFunction); 172 table_.Register(kVisitJSFunction, &VisitJSFunction);
173 173
174 table_.Register(kVisitJSRegExp, &VisitJSRegExp); 174 table_.Register(kVisitJSRegExp, &VisitJSRegExp);
175 } 175 }
176 176
177 static void VisitJSWeakMap(Map* map, HeapObject* object) { 177 static void VisitJSWeakMap(Map* map, HeapObject* object) {
178 Heap* heap = map->GetHeap(); 178 Heap* heap = map->GetHeap();
179 Object** start_slot =
180 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset);
179 VisitPointers(heap, 181 VisitPointers(heap,
180 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset), 182 start_slot,
183 start_slot,
181 HeapObject::RawField(object, JSWeakMap::kSize)); 184 HeapObject::RawField(object, JSWeakMap::kSize));
182 } 185 }
183 186
184 static void VisitSharedFunctionInfo(Map* map, HeapObject* object) { 187 static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
185 Heap* heap = map->GetHeap(); 188 Heap* heap = map->GetHeap();
186 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); 189 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
187 if (shared->ic_age() != heap->global_ic_age()) { 190 if (shared->ic_age() != heap->global_ic_age()) {
188 shared->ResetForNewContext(heap->global_ic_age()); 191 shared->ResetForNewContext(heap->global_ic_age());
189 } 192 }
190 FixedBodyVisitor<IncrementalMarkingMarkingVisitor, 193 FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
191 SharedFunctionInfo::BodyDescriptor, 194 SharedFunctionInfo::BodyDescriptor,
192 void>::Visit(map, object); 195 void>::Visit(map, object);
193 } 196 }
194 197
198 static const int kScanningChunk = 32 * 1024;
199
200 static int VisitHugeArray(FixedArray* array) {
201 Heap* heap = array->GetHeap();
202 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
203 Object** start_slot = array->data_start();
204 int length = array->length();
205
206 if (chunk->owner()->identity() != LO_SPACE) {
207 VisitPointers(heap, start_slot, start_slot, start_slot + length);
208 return length;
209 }
210
211 int from =
212 chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
213 int to = Min(from + kScanningChunk, length);
214
215 VisitPointers(heap, start_slot, start_slot + from, start_slot + to);
216
217 if (to == length) {
218 // If the array went from black to grey while it was waiting for the
219 // next chunk to be scanned, then we have to restart the scan.
220 MarkBit mark_bit = Marking::MarkBitFrom(array);
221 if (!Marking::IsBlack(mark_bit)) {
222 ASSERT(Marking::IsGrey(mark_bit));
223 chunk->SetPartiallyScannedProgress(0);
224 } else {
225 chunk->SetCompletelyScanned();
226 }
227 } else {
228 chunk->SetPartiallyScannedProgress(to);
229 }
230 return to - from;
231 }
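
VisitHugeArray above is the core of this patch: an array in large-object space is visited at most kScanningChunk slots per call, and the resume index is stored on the array's MemoryChunk rather than on the marking deque. Here is a rough, self-contained sketch of the same resumable-scan idea; the Page struct and helper names are illustrative stand-ins, not V8's API:

    // Minimal sketch of resumable chunked scanning, assuming a per-page
    // progress field like MemoryChunk's PartiallyScannedProgress().
    #include <algorithm>
    #include <cstddef>
    #include <vector>

    struct Page {
      bool partially_scanned = false;
      size_t progress = 0;  // Index of the first slot not yet scanned.
    };

    const size_t kChunkSlots = 32 * 1024;

    // Scans one chunk; returns how many slots were actually visited so the
    // caller can charge only that much work against its step budget.
    size_t ScanStep(Page* page, std::vector<void*>* slots,
                    void (*visit)(void*)) {
      size_t from = page->partially_scanned ? page->progress : 0;
      size_t to = std::min(from + kChunkSlots, slots->size());
      for (size_t i = from; i < to; ++i) visit((*slots)[i]);
      if (to == slots->size()) {
        page->partially_scanned = false;  // Finished: completely scanned.
      } else {
        page->partially_scanned = true;   // Yield: remember where to resume.
        page->progress = to;
      }
      return to - from;
    }

One subtlety the real code adds on top of this sketch: when the final chunk finishes, it re-checks the array's mark bit, because a write barrier may have turned the array grey again mid-scan, in which case progress is reset to zero and the whole array is rescanned.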
232
195 static inline void VisitJSFunction(Map* map, HeapObject* object) { 233 static inline void VisitJSFunction(Map* map, HeapObject* object) {
196 Heap* heap = map->GetHeap(); 234 Heap* heap = map->GetHeap();
197 // Iterate over all fields in the body but take care in dealing with 235 // Iterate over all fields in the body but take care in dealing with
198 // the code entry and skip weak fields. 236 // the code entry and skip weak fields.
237 Object** start_slot =
238 HeapObject::RawField(object, JSFunction::kPropertiesOffset);
199 VisitPointers(heap, 239 VisitPointers(heap,
200 HeapObject::RawField(object, JSFunction::kPropertiesOffset), 240 start_slot,
241 start_slot,
201 HeapObject::RawField(object, JSFunction::kCodeEntryOffset)); 242 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
202 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); 243 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
203 VisitPointers(heap, 244 VisitPointers(heap,
245 start_slot,
204 HeapObject::RawField(object, 246 HeapObject::RawField(object,
205 JSFunction::kCodeEntryOffset + kPointerSize), 247 JSFunction::kCodeEntryOffset + kPointerSize),
206 HeapObject::RawField(object, 248 HeapObject::RawField(object,
207 JSFunction::kNonWeakFieldsEndOffset)); 249 JSFunction::kNonWeakFieldsEndOffset));
208 } 250 }
209 251
210 INLINE(static void VisitPointer(Heap* heap, Object** p)) { 252 INLINE(static void VisitPointer(Heap* heap, Object** p)) {
211 Object* obj = *p; 253 Object* obj = *p;
212 if (obj->NonFailureIsHeapObject()) { 254 if (obj->NonFailureIsHeapObject()) {
213 heap->mark_compact_collector()->RecordSlot(p, p, obj); 255 heap->mark_compact_collector()->RecordSlot(p, p, obj);
214 MarkObject(heap, obj); 256 MarkObject(heap, obj);
215 } 257 }
216 } 258 }
217 259
218 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { 260 INLINE(static void VisitPointers(Heap* heap,
261 Object** anchor,
262 Object** start,
263 Object** end)) {
219 for (Object** p = start; p < end; p++) { 264 for (Object** p = start; p < end; p++) {
220 Object* obj = *p; 265 Object* obj = *p;
221 if (obj->NonFailureIsHeapObject()) { 266 if (obj->NonFailureIsHeapObject()) {
222 heap->mark_compact_collector()->RecordSlot(start, p, obj); 267 heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
223 MarkObject(heap, obj); 268 MarkObject(heap, obj);
224 } 269 }
225 } 270 }
226 } 271 }
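
The new anchor parameter is what keeps slot recording correct under partial scans: RecordSlot now receives the first slot of the whole object rather than the start of the range currently being walked, so every recorded slot stays tied to one stable base address (VisitJSWeakMap and VisitJSFunction above hoist start_slot for exactly this reason). A self-contained toy illustrating the pattern; the names here are illustrative, not V8's:

    // Toy model: recording (anchor, slot) pairs with a stable anchor means
    // two partial visits of the same object record against the same base.
    #include <cstdio>

    typedef void** Slot;

    void RecordSlot(Slot anchor, Slot slot) {
      std::printf("record: anchor=%p slot=%p\n",
                  static_cast<void*>(anchor), static_cast<void*>(slot));
    }

    void VisitRange(Slot anchor, Slot start, Slot end) {
      for (Slot p = start; p < end; ++p) RecordSlot(anchor, p);
    }

    int main() {
      void* fields[8] = {};
      VisitRange(fields, fields, fields + 4);      // First chunk.
      VisitRange(fields, fields + 4, fields + 8);  // Later chunk, same anchor.
      return 0;
    }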
227 272
228 // Marks the object grey and pushes it on the marking stack. 273 // Marks the object grey and pushes it on the marking stack.
229 INLINE(static void MarkObject(Heap* heap, Object* obj)) { 274 INLINE(static void MarkObject(Heap* heap, Object* obj)) {
230 HeapObject* heap_object = HeapObject::cast(obj); 275 HeapObject* heap_object = HeapObject::cast(obj);
231 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); 276 MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
232 if (mark_bit.data_only()) { 277 if (mark_bit.data_only()) {
(...skipping 381 matching lines...)
614 } 659 }
615 } else if (obj->map() != filler_map) { 660 } else if (obj->map() != filler_map) {
616 // Skip one word filler objects that appear on the 661 // Skip one word filler objects that appear on the
617 // stack when we perform in place array shift. 662 // stack when we perform in place array shift.
618 array[new_top] = obj; 663 array[new_top] = obj;
619 new_top = ((new_top + 1) & mask); 664 new_top = ((new_top + 1) & mask);
620 ASSERT(new_top != marking_deque_.bottom()); 665 ASSERT(new_top != marking_deque_.bottom());
621 #ifdef DEBUG 666 #ifdef DEBUG
622 MarkBit mark_bit = Marking::MarkBitFrom(obj); 667 MarkBit mark_bit = Marking::MarkBitFrom(obj);
623 ASSERT(Marking::IsGrey(mark_bit) || 668 ASSERT(Marking::IsGrey(mark_bit) ||
624 (obj->IsFiller() && Marking::IsWhite(mark_bit))); 669 (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
670 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
625 #endif 671 #endif
626 } 672 }
627 } 673 }
628 marking_deque_.set_top(new_top); 674 marking_deque_.set_top(new_top);
629 675
630 steps_took_since_last_gc_ = 0; 676 steps_took_since_last_gc_ = 0;
631 steps_count_since_last_gc_ = 0; 677 steps_count_since_last_gc_ = 0;
632 longest_step_ = 0.0; 678 longest_step_ = 0.0;
633 } 679 }
634 680
635 681
636 void IncrementalMarking::Hurry() { 682 void IncrementalMarking::Hurry() {
637 if (state() == MARKING) { 683 if (state() == MARKING) {
638 double start = 0.0; 684 double start = 0.0;
639 if (FLAG_trace_incremental_marking) { 685 if (FLAG_trace_incremental_marking) {
640 PrintF("[IncrementalMarking] Hurry\n"); 686 PrintF("[IncrementalMarking] Hurry\n");
641 start = OS::TimeCurrentMillis(); 687 start = OS::TimeCurrentMillis();
642 } 688 }
643 // TODO(gc) hurry can mark objects it encounters black as mutator 689 // TODO(gc) hurry can mark objects it encounters black as mutator
644 // was stopped. 690 // was stopped.
645 Map* filler_map = heap_->one_pointer_filler_map(); 691 Map* filler_map = heap_->one_pointer_filler_map();
646 Map* native_context_map = heap_->native_context_map(); 692 Map* native_context_map = heap_->native_context_map();
647 while (!marking_deque_.IsEmpty()) { 693 do {
648 HeapObject* obj = marking_deque_.Pop(); 694 while (!marking_deque_.IsEmpty()) {
695 HeapObject* obj = marking_deque_.Pop();
649 696
650 // Explicitly skip one word fillers. Incremental markbit patterns are 697 // Explicitly skip one word fillers. Incremental markbit patterns are
651 // correct only for objects that occupy at least two words. 698 // correct only for objects that occupy at least two words.
652 Map* map = obj->map(); 699 Map* map = obj->map();
653 if (map == filler_map) { 700 if (map == filler_map) {
654 continue; 701 continue;
655 } else if (map == native_context_map) { 702 } else if (map == native_context_map) {
656 // Native contexts have weak fields. 703 // Native contexts have weak fields.
657 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj); 704 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
658 } else { 705 ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
659 MarkBit map_mark_bit = Marking::MarkBitFrom(map); 706 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
660 if (Marking::IsWhite(map_mark_bit)) { 707 } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
661 WhiteToGreyAndPush(map, map_mark_bit); 708 FixedArray::cast(obj)->length() >
709 IncrementalMarkingMarkingVisitor::kScanningChunk) {
710 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
711 if (Marking::IsWhite(map_mark_bit)) {
712 WhiteToGreyAndPush(map, map_mark_bit);
713 }
714 MarkBit mark_bit = Marking::MarkBitFrom(obj);
715 if (!Marking::IsBlack(mark_bit)) {
716 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
717 } else {
718 ASSERT(
719 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
720 }
721 IncrementalMarkingMarkingVisitor::VisitHugeArray(
722 FixedArray::cast(obj));
723 } else {
724 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
725 if (Marking::IsWhite(map_mark_bit)) {
726 WhiteToGreyAndPush(map, map_mark_bit);
727 }
728 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
729 ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
730 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
662 } 731 }
663 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); 732
733 MarkBit mark_bit = Marking::MarkBitFrom(obj);
734 Marking::MarkBlack(mark_bit);
664 } 735 }
665 736 state_ = COMPLETE;
666 MarkBit mark_bit = Marking::MarkBitFrom(obj); 737 if (FLAG_trace_incremental_marking) {
667 ASSERT(!Marking::IsBlack(mark_bit)); 738 double end = OS::TimeCurrentMillis();
668 Marking::MarkBlack(mark_bit); 739 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
669 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); 740 static_cast<int>(end - start));
670 } 741 }
671 state_ = COMPLETE; 742 MarkCompactCollector::ProcessLargePostponedArrays(heap_, &marking_deque_);
672 if (FLAG_trace_incremental_marking) { 743 } while (!marking_deque_.IsEmpty());
673 double end = OS::TimeCurrentMillis();
674 PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
675 static_cast<int>(end - start));
676 }
677 } 744 }
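
Hurry() accordingly becomes a fixpoint loop: draining the deque can leave behind large arrays whose remaining chunks were postponed, so after each drain ProcessLargePostponedArrays re-queues them and the do/while repeats until nothing is left. A generic sketch of that shape, assuming only that flushing postponed work may enqueue new items:

    // Drain-until-fixpoint: the shape of the new Hurry() loop (illustrative).
    #include <deque>

    template <typename T, typename Process, typename Flush>
    void DrainToFixpoint(std::deque<T>* work, Process process, Flush flush) {
      do {
        while (!work->empty()) {
          T item = work->front();
          work->pop_front();
          process(item);  // Processing may postpone parts of large items.
        }
        flush(work);  // Re-queue postponed parts; may leave the deque empty.
      } while (!work->empty());
    }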
678 745
679 if (FLAG_cleanup_code_caches_at_gc) { 746 if (FLAG_cleanup_code_caches_at_gc) {
680 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache(); 747 PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
681 Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache)); 748 Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
682 MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(), 749 MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
683 PolymorphicCodeCache::kSize); 750 PolymorphicCodeCache::kSize);
684 } 751 }
685 752
686 Object* context = heap_->native_contexts_list(); 753 Object* context = heap_->native_contexts_list();
(...skipping 100 matching lines...)
787 } 854 }
788 855
789 if (state_ == SWEEPING) { 856 if (state_ == SWEEPING) {
790 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) { 857 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
791 bytes_scanned_ = 0; 858 bytes_scanned_ = 0;
792 StartMarking(PREVENT_COMPACTION); 859 StartMarking(PREVENT_COMPACTION);
793 } 860 }
794 } else if (state_ == MARKING) { 861 } else if (state_ == MARKING) {
795 Map* filler_map = heap_->one_pointer_filler_map(); 862 Map* filler_map = heap_->one_pointer_filler_map();
796 Map* native_context_map = heap_->native_context_map(); 863 Map* native_context_map = heap_->native_context_map();
797 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { 864 while (true) {
798 HeapObject* obj = marking_deque_.Pop(); 865 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
866 HeapObject* obj = marking_deque_.Pop();
799 867
800 // Explicitly skip one word fillers. Incremental markbit patterns are 868 // Explicitly skip one word fillers. Incremental markbit patterns are
801 // correct only for objects that occupy at least two words. 869 // correct only for objects that occupy at least two words.
802 Map* map = obj->map(); 870 Map* map = obj->map();
803 if (map == filler_map) continue; 871 if (map == filler_map) continue;
804 872
805 int size = obj->SizeFromMap(map); 873 int size = obj->SizeFromMap(map);
806 bytes_to_process -= size; 874 MarkBit map_mark_bit = Marking::MarkBitFrom(map);
807 MarkBit map_mark_bit = Marking::MarkBitFrom(map); 875 if (Marking::IsWhite(map_mark_bit)) {
808 if (Marking::IsWhite(map_mark_bit)) { 876 WhiteToGreyAndPush(map, map_mark_bit);
809 WhiteToGreyAndPush(map, map_mark_bit); 877 }
878
879 // TODO(gc) switch to static visitor instead of normal visitor.
880 if (map == native_context_map) {
881 // Native contexts have weak fields.
882 Context* ctx = Context::cast(obj);
883
884 // We will mark cache black with a separate pass
885 // when we finish marking.
886 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
887
888 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
889 bytes_to_process -= size;
890 SLOW_ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
891 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
892 } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
893 FixedArray::cast(obj)->length() >
894 IncrementalMarkingMarkingVisitor::kScanningChunk) {
895 SLOW_ASSERT(
896 Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
897 MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
898 bytes_to_process -=
899 IncrementalMarkingMarkingVisitor::VisitHugeArray(
900 FixedArray::cast(obj));
901 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
902 if (!Marking::IsBlack(obj_mark_bit)) {
903 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
904 }
905 } else {
906 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
907 bytes_to_process -= size;
908 SLOW_ASSERT(
909 Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
910 (obj->IsFiller() && Marking::IsWhite(Marking::MarkBitFrom(obj))));
911 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
912 }
913
914 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
915 Marking::MarkBlack(obj_mark_bit);
810 } 916 }
811 917 if (marking_deque_.IsEmpty()) {
812 // TODO(gc) switch to static visitor instead of normal visitor. 918 MarkCompactCollector::ProcessLargePostponedArrays(heap_,
813 if (map == native_context_map) { 919 &marking_deque_);
814 // Native contexts have weak fields. 920 if (marking_deque_.IsEmpty()) {
815 Context* ctx = Context::cast(obj); 921 MarkingComplete(action);
816 922 break;
817 // We will mark cache black with a separate pass 923 }
818 // when we finish marking. 924 } else {
819 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); 925 ASSERT(bytes_to_process <= 0);
820 926 break;
821 IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx); 927 }
822 } else {
823 IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
824 }
825
826 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
827 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
828 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
829 Marking::MarkBlack(obj_mark_bit);
830 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
831 } 928 }
832 if (marking_deque_.IsEmpty()) MarkingComplete(action);
833 } 929 }
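
Step() gets the same two-level structure, plus budget accounting: an ordinary object charges its full size against bytes_to_process, while a huge array charges only the slots VisitHugeArray actually walked this step, and live bytes are incremented only on the transition to black so a partially scanned array is not counted twice. A condensed, self-contained sketch of the control flow, with hypothetical names:

    // Sketch of Step()'s loop: spend the byte budget, and when the deque
    // runs dry, flush postponed large-array work before declaring completion.
    #include <deque>
    #include <functional>

    void MarkingStep(std::deque<int>* deque, long bytes_to_process,
                     const std::function<long(int)>& visit,  // Work done.
                     const std::function<void(std::deque<int>*)>& flush,
                     const std::function<void()>& marking_complete) {
      while (true) {
        while (!deque->empty() && bytes_to_process > 0) {
          int obj = deque->front();
          deque->pop_front();
          // A huge array returns only the work scanned this step.
          bytes_to_process -= visit(obj);
        }
        if (deque->empty()) {
          flush(deque);  // May re-queue postponed large-array chunks.
          if (deque->empty()) {
            marking_complete();
            return;
          }
        } else {
          return;  // Budget exhausted; the next step resumes here.
        }
      }
    }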
834 930
835 allocated_ = 0; 931 allocated_ = 0;
836 932
837 steps_count_++; 933 steps_count_++;
838 steps_count_since_last_gc_++; 934 steps_count_since_last_gc_++;
839 935
840 bool speed_up = false; 936 bool speed_up = false;
841 937
842 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) { 938 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
(...skipping 81 matching lines...)
924 allocation_marking_factor_ = kInitialAllocationMarkingFactor; 1020 allocation_marking_factor_ = kInitialAllocationMarkingFactor;
925 bytes_scanned_ = 0; 1021 bytes_scanned_ = 0;
926 } 1022 }
927 1023
928 1024
929 int64_t IncrementalMarking::SpaceLeftInOldSpace() { 1025 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
930 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); 1026 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
931 } 1027 }
932 1028
933 } } // namespace v8::internal 1029 } } // namespace v8::internal