OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 725 matching lines...)
736 IncrementalMarking::set_should_hurry(false); | 736 IncrementalMarking::set_should_hurry(false); |
737 ResetStepCounters(); | 737 ResetStepCounters(); |
738 PatchIncrementalMarkingRecordWriteStubs(heap_, | 738 PatchIncrementalMarkingRecordWriteStubs(heap_, |
739 RecordWriteStub::STORE_BUFFER_ONLY); | 739 RecordWriteStub::STORE_BUFFER_ONLY); |
740 DeactivateIncrementalWriteBarrier(); | 740 DeactivateIncrementalWriteBarrier(); |
741 ASSERT(marking_deque_.IsEmpty()); | 741 ASSERT(marking_deque_.IsEmpty()); |
742 heap_->isolate()->stack_guard()->Continue(GC_REQUEST); | 742 heap_->isolate()->stack_guard()->Continue(GC_REQUEST); |
743 } | 743 } |
744 | 744 |
745 | 745 |
746 void IncrementalMarking::MarkingComplete() { | 746 void IncrementalMarking::MarkingComplete(Finalizer finalizer) { |
747 state_ = COMPLETE; | 747 state_ = COMPLETE; |
748 // We will set the stack guard to request a GC now. This will mean the rest | 748 // We will set the stack guard to request a GC now. This will mean the rest |
749 // of the GC gets performed as soon as possible (we can't do a GC here in a | 749 // of the GC gets performed as soon as possible (we can't do a GC here in a |
750 // record-write context). If a few things get allocated between now and then | 750 // record-write context). If a few things get allocated between now and then |
751 // that shouldn't make us do a scavenge and keep being incremental, so we set | 751 // that shouldn't make us do a scavenge and keep being incremental, so we set |
752 // the should-hurry flag to indicate that there can't be much work left to do. | 752 // the should-hurry flag to indicate that there can't be much work left to do. |
753 set_should_hurry(true); | 753 set_should_hurry(true); |
754 if (FLAG_trace_incremental_marking) { | 754 if (FLAG_trace_incremental_marking) { |
755 PrintF("[IncrementalMarking] Complete (normal).\n"); | 755 PrintF("[IncrementalMarking] Complete (normal).\n"); |
756 } | 756 } |
757 if (!heap_->idle_notification_will_schedule_next_gc()) { | 757 if (finalizer == GC_VIA_STACK_GUARD) { |
758 heap_->isolate()->stack_guard()->RequestGC(); | 758 heap_->isolate()->stack_guard()->RequestGC(); |
759 } | 759 } |
760 } | 760 } |
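
Note on the change above: the explicit Finalizer argument replaces the old heap_->idle_notification_will_schedule_next_gc() check, so only callers that pass GC_VIA_STACK_GUARD make completion escalate to a stack-guard GC request. A minimal, self-contained sketch of that gate, assuming a two-value enum (the second enumerator name, FakeStackGuard, and MarkingCompleteSketch are stand-ins, not taken from this patch):

  #include <cstdio>

  // Hypothetical stand-ins: only "Finalizer" and "GC_VIA_STACK_GUARD" appear
  // in this patch; the second enumerator and FakeStackGuard are assumptions.
  enum Finalizer { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };

  struct FakeStackGuard {
    bool gc_requested;
    FakeStackGuard() : gc_requested(false) {}
    void RequestGC() { gc_requested = true; }
  };

  // Mirrors the tail of the new MarkingComplete(): after setting should-hurry,
  // a stack-guard GC is only requested for GC_VIA_STACK_GUARD callers.
  void MarkingCompleteSketch(FakeStackGuard* stack_guard, Finalizer finalizer) {
    if (finalizer == GC_VIA_STACK_GUARD) {
      stack_guard->RequestGC();
    }
  }

  int main() {
    FakeStackGuard guard;
    MarkingCompleteSketch(&guard, NO_GC_VIA_STACK_GUARD);
    std::printf("after NO_GC_VIA_STACK_GUARD: %d\n", guard.gc_requested);  // 0
    MarkingCompleteSketch(&guard, GC_VIA_STACK_GUARD);
    std::printf("after GC_VIA_STACK_GUARD:    %d\n", guard.gc_requested);  // 1
    return 0;
  }

Under this split, a caller that is already going to schedule the next GC itself (e.g. the idle-notification path) can presumably pass the non-stack-guard value, though that call site is outside the lines shown here.
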
761 | 761 |
762 | 762 |
763 void IncrementalMarking::Step(intptr_t allocated_bytes) { | 763 void IncrementalMarking::Step(intptr_t allocated_bytes, Finalizer finalizer) { |
764 if (heap_->gc_state() != Heap::NOT_IN_GC || | 764 if (heap_->gc_state() != Heap::NOT_IN_GC || |
765 !FLAG_incremental_marking || | 765 !FLAG_incremental_marking || |
766 !FLAG_incremental_marking_steps || | 766 !FLAG_incremental_marking_steps || |
767 (state_ != SWEEPING && state_ != MARKING)) { | 767 (state_ != SWEEPING && state_ != MARKING)) { |
768 return; | 768 return; |
769 } | 769 } |
770 | 770 |
771 allocated_ += allocated_bytes; | 771 allocated_ += allocated_bytes; |
772 | 772 |
773 if (allocated_ < kAllocatedThreshold) return; | 773 if (allocated_ < kAllocatedThreshold) return; |
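
The guards above make a step a no-op during a GC, when incremental marking or marking steps are disabled by flags, or outside the SWEEPING/MARKING states; otherwise work is deferred until at least kAllocatedThreshold bytes have been allocated since the last step. A self-contained sketch of that accumulate-and-threshold accounting, with an assumed 64 KB threshold and stand-in names (the real constant lives elsewhere in the source):

  #include <cstdint>
  #include <cstdio>

  // Illustrative value only; the real kAllocatedThreshold is defined in V8.
  static const intptr_t kAllocatedThresholdSketch = 64 * 1024;

  struct StepAccountingSketch {
    intptr_t allocated_;
    int steps_done_;
    StepAccountingSketch() : allocated_(0), steps_done_(0) {}

    // Mirrors the accounting at the top of IncrementalMarking::Step():
    // accumulate allocations, and only do marking work past the threshold.
    void Step(intptr_t allocated_bytes) {
      allocated_ += allocated_bytes;
      if (allocated_ < kAllocatedThresholdSketch) return;  // cheap early exit
      // ... a bounded chunk of marking work would happen here ...
      allocated_ = 0;   // reset, as the real Step() does after doing work
      steps_done_++;
    }
  };

  int main() {
    StepAccountingSketch s;
    for (int i = 0; i < 100; i++) s.Step(1024);  // 100 KB in 1 KB allocations
    std::printf("steps taken: %d\n", s.steps_done_);  // 1 with a 64 KB threshold
    return 0;
  }
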
(...skipping 52 matching lines...)
826 } else { | 826 } else { |
827 obj->IterateBody(map->instance_type(), size, &marking_visitor); | 827 obj->IterateBody(map->instance_type(), size, &marking_visitor); |
828 } | 828 } |
829 | 829 |
830 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj); | 830 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj); |
831 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) || | 831 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) || |
832 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); | 832 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); |
833 Marking::MarkBlack(obj_mark_bit); | 833 Marking::MarkBlack(obj_mark_bit); |
834 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size); | 834 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size); |
835 } | 835 } |
836 if (marking_deque_.IsEmpty()) MarkingComplete(); | 836 if (marking_deque_.IsEmpty()) MarkingComplete(finalizer); |
837 } | 837 } |
838 | 838 |
839 allocated_ = 0; | 839 allocated_ = 0; |
840 | 840 |
841 steps_count_++; | 841 steps_count_++; |
842 steps_count_since_last_gc_++; | 842 steps_count_since_last_gc_++; |
843 | 843 |
844 bool speed_up = false; | 844 bool speed_up = false; |
845 | 845 |
846 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) { | 846 if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) { |
(...skipping 81 matching lines...)
928 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 928 allocation_marking_factor_ = kInitialAllocationMarkingFactor; |
929 bytes_scanned_ = 0; | 929 bytes_scanned_ = 0; |
930 } | 930 } |
931 | 931 |
932 | 932 |
933 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 933 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
934 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); | 934 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); |
935 } | 935 } |
936 | 936 |
937 } } // namespace v8::internal | 937 } } // namespace v8::internal |
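
As a closing note, the marking loop around lines 826-836 pops grey objects off the deque, visits their bodies (which greys any white children), then marks each visited object black and credits its size to the page's live-byte count; once the deque is empty, MarkingComplete(finalizer) runs. A rough, self-contained sketch of that grey-to-black transition, with stand-in types and an unbounded drain (the real Step() only processes a bounded number of bytes per call):

  #include <cstdio>
  #include <deque>
  #include <vector>

  // Minimal stand-ins for the tri-colour invariant maintained by the loop:
  // grey objects wait in the deque; visiting one greys its white children,
  // after which the object itself is marked black.
  enum Colour { WHITE, GREY, BLACK };

  struct Obj {
    Colour colour;
    std::vector<Obj*> children;
    Obj() : colour(WHITE) {}
  };

  static void DrainMarkingDeque(std::deque<Obj*>* marking_deque) {
    while (!marking_deque->empty()) {
      Obj* obj = marking_deque->front();
      marking_deque->pop_front();
      for (size_t i = 0; i < obj->children.size(); i++) {
        Obj* child = obj->children[i];
        if (child->colour == WHITE) {     // analogous to the marking visitor
          child->colour = GREY;
          marking_deque->push_back(child);
        }
      }
      obj->colour = BLACK;                // cf. Marking::MarkBlack(obj_mark_bit)
    }
    // With an empty deque the real code calls MarkingComplete(finalizer).
  }

  int main() {
    Obj root, a, b;
    root.children.push_back(&a);
    root.children.push_back(&b);
    root.colour = GREY;                   // deque only ever holds grey objects
    std::deque<Obj*> marking_deque;
    marking_deque.push_back(&root);
    DrainMarkingDeque(&marking_deque);
    std::printf("black? %d %d %d\n", root.colour == BLACK,
                a.colour == BLACK, b.colour == BLACK);  // prints 1 1 1
    return 0;
  }
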