Chromium Code Reviews

Unified Diff: src/incremental-marking.cc

Issue 11029023: Revert "Allow partial scanning of large arrays in order to avoid" (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 2 months ago
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 172 matching lines...)
 
   table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
 
   table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   table_.Register(kVisitJSRegExp, &VisitJSRegExp);
 }
 
 static void VisitJSWeakMap(Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();
-  Object** start_slot =
-      HeapObject::RawField(object, JSWeakMap::kPropertiesOffset);
   VisitPointers(heap,
-                start_slot,
-                start_slot,
+                HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
                 HeapObject::RawField(object, JSWeakMap::kSize));
 }
 
 static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();
   SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
   if (shared->ic_age() != heap->global_ic_age()) {
     shared->ResetForNewContext(heap->global_ic_age());
   }
   FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
                    SharedFunctionInfo::BodyDescriptor,
                    void>::Visit(map, object);
 }
 
-static const int kScanningChunk = 32 * 1024;
-
-static int VisitHugeArray(FixedArray* array) {
-  Heap* heap = array->GetHeap();
-  MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
-  Object** start_slot = array->data_start();
-  int length = array->length();
-
-  if (chunk->owner()->identity() != LO_SPACE) {
-    VisitPointers(heap, start_slot, start_slot, start_slot + length);
-    return length;
-  }
-
-  int from =
-      chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0;
-  int to = Min(from + kScanningChunk, length);
-
-  VisitPointers(heap, start_slot, start_slot + from, start_slot + to);
-
-  if (to == length) {
-    // If it went from black to grey while it was waiting for the next bit to
-    // be scanned then we have to start the scan again.
-    MarkBit mark_bit = Marking::MarkBitFrom(array);
-    if (!Marking::IsBlack(mark_bit)) {
-      ASSERT(Marking::IsGrey(mark_bit));
-      chunk->SetPartiallyScannedProgress(0);
-    } else {
-      chunk->SetCompletelyScanned();
-    }
-  } else {
-    chunk->SetPartiallyScannedProgress(to);
-  }
-  return to - from;
-}
-
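Note: the deleted VisitHugeArray above is the heart of what this CL reverts. Instead of scanning a multi-megabyte FixedArray in one go, it visits at most kScanningChunk (32K) slots per incremental step and stores the resume index on the array's MemoryChunk. Below is a minimal standalone sketch of that resumable-scan idea; ChunkProgress and the other types are simplified stand-ins, not the real V8 classes.

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

constexpr int kScanningChunk = 32 * 1024;  // slots visited per step, as in the CL

// Stand-in for the per-page progress the patch kept on MemoryChunk.
struct ChunkProgress {
  int next_slot = 0;      // where the next step should resume
  bool completed = false;
};

// Stand-in for the marking visitor: visit slots [from, to).
static void VisitPointers(const std::vector<void*>& slots, int from, int to) {
  for (int i = from; i < to; i++) {
    // ... mark *slots[i] grey and push it on the marking deque ...
    (void)slots[i];
  }
}

// Scan at most kScanningChunk slots and record where to resume; returns the
// number of slots processed, mirroring the deleted VisitHugeArray's contract.
static int VisitHugeArray(ChunkProgress* progress,
                          const std::vector<void*>& slots) {
  int length = static_cast<int>(slots.size());
  int from = progress->next_slot;
  int to = std::min(from + kScanningChunk, length);
  VisitPointers(slots, from, to);
  if (to == length) {
    progress->completed = true;  // the real code re-checks the mark bit here
  } else {
    progress->next_slot = to;
  }
  return to - from;
}

int main() {
  std::vector<void*> slots(100 * 1024, nullptr);  // a "huge" 100K-slot array
  ChunkProgress progress;
  int steps = 0;
  while (!progress.completed) {
    VisitHugeArray(&progress, slots);
    steps++;
  }
  std::printf("scanned %zu slots in %d steps\n", slots.size(), steps);  // 4
  return 0;
}
```

The real code has one extra wrinkle the sketch omits: when the final chunk finishes, it re-checks the array's mark bit, because a write barrier may have turned the array grey again mid-scan, in which case the saved progress is reset to zero and the scan restarts.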
 static inline void VisitJSFunction(Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();
   // Iterate over all fields in the body but take care in dealing with
   // the code entry and skip weak fields.
-  Object** start_slot =
-      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
   VisitPointers(heap,
-                start_slot,
-                start_slot,
+                HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
   VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
   VisitPointers(heap,
-                start_slot,
                 HeapObject::RawField(object,
                                      JSFunction::kCodeEntryOffset + kPointerSize),
                 HeapObject::RawField(object,
                                      JSFunction::kNonWeakFieldsEndOffset));
 }
 
 INLINE(static void VisitPointer(Heap* heap, Object** p)) {
   Object* obj = *p;
   if (obj->NonFailureIsHeapObject()) {
     heap->mark_compact_collector()->RecordSlot(p, p, obj);
     MarkObject(heap, obj);
   }
 }
 
-INLINE(static void VisitPointers(Heap* heap,
-                                 Object** anchor,
-                                 Object** start,
-                                 Object** end)) {
+INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
   for (Object** p = start; p < end; p++) {
     Object* obj = *p;
     if (obj->NonFailureIsHeapObject()) {
-      heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
+      heap->mark_compact_collector()->RecordSlot(start, p, obj);
       MarkObject(heap, obj);
     }
   }
 }
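Note: the four-argument VisitPointers being reverted threaded an extra anchor slot through to RecordSlot. With partial scanning, [start, end) can be an interior sub-range of a large array, so start no longer identifies the object's first slot; the anchor did. A toy sketch of the distinction follows, with hypothetical types and a made-up RecordSlot, not the real V8 signature.

```cpp
#include <cstdio>

struct Object {};

// Toy stand-in for slot recording (hypothetical signature): the first
// argument says which object region the recorded slot belongs to.
static void RecordSlot(Object** anchor, Object** slot, Object* value) {
  std::printf("slot %p (object at %p) -> %p\n", static_cast<void*>(slot),
              static_cast<void*>(anchor), static_cast<void*>(value));
}

// With partial scanning, [start, end) may be an interior sub-range of a big
// array, so the true first slot is threaded through separately as 'anchor'.
static void VisitPointers(Object** anchor, Object** start, Object** end) {
  for (Object** p = start; p < end; p++) {
    if (*p != nullptr) RecordSlot(anchor, p, *p);
  }
}

int main() {
  Object a, b;
  Object* slots[4] = {&a, nullptr, &b, nullptr};
  // Scan only the second half of the array; 'anchor' still names slot 0.
  VisitPointers(&slots[0], &slots[2], &slots[4]);
  return 0;
}
```

Once whole objects are always scanned in one piece, start is always the first slot again, which is why the revert can collapse anchor back into start.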
 
 // Marks the object grey and pushes it on the marking stack.
 INLINE(static void MarkObject(Heap* heap, Object* obj)) {
   HeapObject* heap_object = HeapObject::cast(obj);
   MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
   if (mark_bit.data_only()) {
(...skipping 381 matching lines...)
       }
     } else if (obj->map() != filler_map) {
       // Skip one word filler objects that appear on the
       // stack when we perform in place array shift.
       array[new_top] = obj;
       new_top = ((new_top + 1) & mask);
       ASSERT(new_top != marking_deque_.bottom());
 #ifdef DEBUG
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       ASSERT(Marking::IsGrey(mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
-             MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
+             (obj->IsFiller() && Marking::IsWhite(mark_bit)));
 #endif
     }
   }
   marking_deque_.set_top(new_top);
 
   steps_took_since_last_gc_ = 0;
   steps_count_since_last_gc_ = 0;
   longest_step_ = 0.0;
 }
 
 
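Note on the deque arithmetic in the hunk above: new_top = ((new_top + 1) & mask) works because the marking deque is a power-of-two-sized ring buffer, so masking with capacity - 1 wraps the index without a modulo or a branch. A tiny self-contained illustration with made-up sizes:

```cpp
#include <cassert>
#include <cstdio>

int main() {
  const int kCapacity = 8;         // must be a power of two
  const int mask = kCapacity - 1;  // 0b0111
  const int bottom = 4;            // oldest entry; must not be overtaken
  int top = 5;
  for (int i = 0; i < 6; i++) {
    top = (top + 1) & mask;        // 6, 7, 0, 1, 2, 3 -- wraps without '%'
    assert(top != bottom);         // a full deque would clobber 'bottom'
    std::printf("top = %d\n", top);
  }
  return 0;
}
```

The ASSERT in the real code checks the same invariant: advancing top must never collide with bottom, or the compaction would overwrite live entries.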
 void IncrementalMarking::Hurry() {
   if (state() == MARKING) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Hurry\n");
       start = OS::TimeCurrentMillis();
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    do {
-      while (!marking_deque_.IsEmpty()) {
-        HeapObject* obj = marking_deque_.Pop();
+    while (!marking_deque_.IsEmpty()) {
+      HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) {
         continue;
       } else if (map == native_context_map) {
         // Native contexts have weak fields.
         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
-        ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-      } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
-                 FixedArray::cast(obj)->length() >
-                     IncrementalMarkingMarkingVisitor::kScanningChunk) {
-        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-        if (Marking::IsWhite(map_mark_bit)) {
-          WhiteToGreyAndPush(map, map_mark_bit);
-        }
-        MarkBit mark_bit = Marking::MarkBitFrom(obj);
-        if (!Marking::IsBlack(mark_bit)) {
-          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
-        } else {
-          ASSERT(
-              MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
-        }
-        IncrementalMarkingMarkingVisitor::VisitHugeArray(
-            FixedArray::cast(obj));
       } else {
         MarkBit map_mark_bit = Marking::MarkBitFrom(map);
         if (Marking::IsWhite(map_mark_bit)) {
           WhiteToGreyAndPush(map, map_mark_bit);
         }
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-        ASSERT(!Marking::IsBlack(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
       }
 
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
+      ASSERT(!Marking::IsBlack(mark_bit));
       Marking::MarkBlack(mark_bit);
-      }
-      state_ = COMPLETE;
-      if (FLAG_trace_incremental_marking) {
-        double end = OS::TimeCurrentMillis();
-        PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
-               static_cast<int>(end - start));
-      }
-      MarkCompactCollector::ProcessLargePostponedArrays(heap_, &marking_deque_);
-    } while (!marking_deque_.IsEmpty());
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+    }
+    state_ = COMPLETE;
+    if (FLAG_trace_incremental_marking) {
+      double end = OS::TimeCurrentMillis();
+      PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
+             static_cast<int>(end - start));
+    }
   }
 
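Note: the old Hurry() needed the outer do-while because huge arrays could be parked on a postponed list while the deque drained; ProcessLargePostponedArrays re-queues them, so draining has to repeat until both are empty. The revert restores the single while loop. A simplified sketch of the old drain pattern, using illustrative containers and names rather than the real V8 API:

```cpp
#include <deque>
#include <vector>

struct HeapObject {};

static std::deque<HeapObject*> marking_deque;
static std::vector<HeapObject*> postponed_arrays;

// Placeholder for scanning one object; a huge array with unscanned chunks
// left would be parked on 'postponed_arrays' instead of finishing here.
static void ScanAndMaybePostpone(HeapObject* obj) {
  (void)obj;
}

// Push every postponed array back onto the deque so scanning can resume.
static void ProcessLargePostponedArrays() {
  for (HeapObject* obj : postponed_arrays) marking_deque.push_back(obj);
  postponed_arrays.clear();
}

// Shape of the old Hurry(): drain the deque, re-queue postponed arrays,
// and repeat until both are empty.
static void Hurry() {
  do {
    while (!marking_deque.empty()) {
      HeapObject* obj = marking_deque.front();
      marking_deque.pop_front();
      ScanAndMaybePostpone(obj);
    }
    ProcessLargePostponedArrays();
  } while (!marking_deque.empty());
}

int main() {
  Hurry();  // terminates immediately here: nothing was ever queued
  return 0;
}
```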
   if (FLAG_cleanup_code_caches_at_gc) {
     PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
     Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
     MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
                                           PolymorphicCodeCache::kSize);
   }
 
   Object* context = heap_->native_contexts_list();
(...skipping 114 matching lines...)
   }
 
   if (state_ == SWEEPING) {
     if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
       bytes_scanned_ = 0;
       StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* native_context_map = heap_->native_context_map();
-    while (true) {
-      while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
-        HeapObject* obj = marking_deque_.Pop();
+    while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+      HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) continue;
 
       int size = obj->SizeFromMap(map);
+      bytes_to_process -= size;
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
       if (Marking::IsWhite(map_mark_bit)) {
         WhiteToGreyAndPush(map, map_mark_bit);
       }
 
       // TODO(gc) switch to static visitor instead of normal visitor.
       if (map == native_context_map) {
         // Native contexts have weak fields.
         Context* ctx = Context::cast(obj);
 
         // We will mark cache black with a separate pass
         // when we finish marking.
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
         IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
-        bytes_to_process -= size;
-        SLOW_ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-      } else if (map->instance_type() == FIXED_ARRAY_TYPE &&
-                 FixedArray::cast(obj)->length() >
-                     IncrementalMarkingMarkingVisitor::kScanningChunk) {
-        SLOW_ASSERT(
-            Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
-            MemoryChunk::FromAddress(obj->address())->IsPartiallyScanned());
-        bytes_to_process -=
-            IncrementalMarkingMarkingVisitor::VisitHugeArray(
-                FixedArray::cast(obj));
-        MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-        if (!Marking::IsBlack(obj_mark_bit)) {
-          MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-        }
       } else {
         IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-        bytes_to_process -= size;
-        SLOW_ASSERT(
-            Marking::IsGrey(Marking::MarkBitFrom(obj)) ||
-            (obj->IsFiller() && Marking::IsWhite(Marking::MarkBitFrom(obj))));
-        MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
       }
 
       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
-      }
-      if (marking_deque_.IsEmpty()) {
-        MarkCompactCollector::ProcessLargePostponedArrays(heap_,
-                                                          &marking_deque_);
-        if (marking_deque_.IsEmpty()) {
-          MarkingComplete(action);
-          break;
-        }
-      } else {
-        ASSERT(bytes_to_process <= 0);
-        break;
-      }
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
     }
+    if (marking_deque_.IsEmpty()) MarkingComplete(action);
   }
 
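Note: Step() meters marking with a byte budget. The reverted code charges each popped object's full size up front (bytes_to_process -= size), while the partial-scanning version charged only the slots actually visited, so a huge array could be spread across many steps. A standalone sketch of the budgeted loop with simplified types, showing why a single large object can overshoot the budget in one step — which is the pause problem the reverted CL was trying to solve:

```cpp
#include <cstdio>
#include <deque>

struct Obj { int size; };

// One incremental step: pop objects until the deque is empty or the byte
// budget is spent. Charging the whole object up front means a single large
// object can overshoot the budget and end the step by itself.
static void Step(std::deque<Obj>* deque, int bytes_to_process) {
  while (!deque->empty() && bytes_to_process > 0) {
    Obj obj = deque->front();
    deque->pop_front();
    bytes_to_process -= obj.size;  // whole object charged in one step
    // ... visit the object's body and mark it black ...
  }
  if (deque->empty()) std::printf("marking complete\n");
}

int main() {
  std::deque<Obj> work = {{4096}, {64 * 1024 * 1024}, {4096}};
  Step(&work, 1 << 20);  // a 64 MB array blows through the 1 MB budget
  std::printf("%zu object(s) left for later steps\n", work.size());
  return 0;
}
```

The overshoot ends the step early but the step itself still pays for scanning the whole object, which is why reverting partial scanning trades shorter code for potentially longer individual pauses on huge arrays.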
   steps_count_++;
   steps_count_since_last_gc_++;
 
   bool speed_up = false;
 
   if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) {
     if (FLAG_trace_gc) {
       PrintPID("Speed up marking after %d steps\n",
(...skipping 80 matching lines...)
   bytes_scanned_ = 0;
   write_barriers_invoked_since_last_step_ = 0;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }
 
 } }  // namespace v8::internal