OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 13 matching lines...) Expand all Loading... | |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "v8.h" | 28 #include "v8.h" |
29 | 29 |
30 #include "incremental-marking.h" | 30 #include "incremental-marking.h" |
31 | 31 |
32 #include "code-stubs.h" | 32 #include "code-stubs.h" |
33 #include "compilation-cache.h" | 33 #include "compilation-cache.h" |
34 #include "objects-visiting.h" | |
35 #include "objects-visiting-inl.h" | |
34 #include "v8conversions.h" | 36 #include "v8conversions.h" |
35 | 37 |
36 namespace v8 { | 38 namespace v8 { |
37 namespace internal { | 39 namespace internal { |
38 | 40 |
39 | 41 |
40 IncrementalMarking::IncrementalMarking(Heap* heap) | 42 IncrementalMarking::IncrementalMarking(Heap* heap) |
41 : heap_(heap), | 43 : heap_(heap), |
42 state_(STOPPED), | 44 state_(STOPPED), |
43 marking_deque_memory_(NULL), | 45 marking_deque_memory_(NULL), |
(...skipping 109 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
153 MarkBit obj_bit = Marking::MarkBitFrom(obj); | 155 MarkBit obj_bit = Marking::MarkBitFrom(obj); |
154 if (Marking::IsBlack(obj_bit)) { | 156 if (Marking::IsBlack(obj_bit)) { |
155 // Object is not going to be rescanned. We need to record the slot. | 157 // Object is not going to be rescanned. We need to record the slot. |
156 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, | 158 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, |
157 Code::cast(value)); | 159 Code::cast(value)); |
158 } | 160 } |
159 } | 161 } |
160 } | 162 } |
161 | 163 |
162 | 164 |
163 class IncrementalMarkingMarkingVisitor : public ObjectVisitor { | 165 class IncrementalMarkingMarkingVisitor |
166 : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> { | |
164 public: | 167 public: |
165 IncrementalMarkingMarkingVisitor(Heap* heap, | 168 static void Initialize() { |
166 IncrementalMarking* incremental_marking) | 169 StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize(); |
167 : heap_(heap), | 170 |
168 incremental_marking_(incremental_marking) { | 171 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo); |
172 | |
173 table_.Register(kVisitJSFunction, &VisitJSFunction); | |
174 | |
175 table_.Register(kVisitJSRegExp, &VisitJSRegExp); | |
169 } | 176 } |
170 | 177 |
171 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 178 static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) { |
172 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 179 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
173 Object* target = rinfo->target_object(); | 180 Object* target = rinfo->target_object(); |
174 if (target->NonFailureIsHeapObject()) { | 181 if (target->NonFailureIsHeapObject()) { |
175 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 182 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
176 MarkObject(target); | 183 MarkObject(heap, target); |
177 } | 184 } |
178 } | 185 } |
179 | 186 |
180 void VisitCodeTarget(RelocInfo* rinfo) { | 187 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) { |
181 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 188 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
182 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 189 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
183 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() | 190 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() |
184 && (target->ic_age() != heap_->global_ic_age())) { | 191 && (target->ic_age() != heap->global_ic_age())) { |
185 IC::Clear(rinfo->pc()); | 192 IC::Clear(rinfo->pc()); |
186 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 193 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
187 } | 194 } |
188 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target)); | 195 heap->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target)); |
189 MarkObject(target); | 196 MarkObject(heap, target); |
190 } | 197 } |
191 | 198 |
192 void VisitDebugTarget(RelocInfo* rinfo) { | 199 static void VisitCode(Map* map, HeapObject* object) { |
193 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 200 Heap* heap = map->GetHeap(); |
194 rinfo->IsPatchedReturnSequence()) || | 201 Code* code = reinterpret_cast<Code*>(object); |
195 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 202 code->CodeIterateBody<IncrementalMarkingMarkingVisitor>(heap); |
196 rinfo->IsPatchedDebugBreakSlotSequence())); | |
197 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | |
198 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target)); | |
199 MarkObject(target); | |
200 } | 203 } |
201 | 204 |
202 void VisitCodeEntry(Address entry_address) { | 205 static void VisitJSWeakMap(Map* map, HeapObject* object) { |
203 Object* target = Code::GetObjectFromEntryAddress(entry_address); | 206 Heap* heap = map->GetHeap(); |
204 heap_->mark_compact_collector()-> | 207 VisitPointers(heap, |
205 RecordCodeEntrySlot(entry_address, Code::cast(target)); | 208 HeapObject::RawField(object, JSWeakMap::kPropertiesOffset), |
206 MarkObject(target); | 209 HeapObject::RawField(object, JSWeakMap::kSize)); |
207 } | 210 } |
208 | 211 |
209 void VisitSharedFunctionInfo(SharedFunctionInfo* shared) { | 212 static void VisitSharedFunctionInfo(Map* map, HeapObject* object) { |
210 if (shared->ic_age() != heap_->global_ic_age()) { | 213 Heap* heap = map->GetHeap(); |
211 shared->ResetForNewContext(heap_->global_ic_age()); | 214 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
215 if (shared->ic_age() != heap->global_ic_age()) { | |
216 shared->ResetForNewContext(heap->global_ic_age()); | |
217 } | |
218 FixedBodyVisitor<IncrementalMarkingMarkingVisitor, | |
219 SharedFunctionInfo::BodyDescriptor, | |
220 void>::Visit(map, object); | |
221 } | |
222 | |
223 static inline void VisitJSFunction(Map* map, HeapObject* object) { | |
224 Heap* heap = map->GetHeap(); | |
225 // Iterate over all fields in the body but take care in dealing with | |
226 // the code entry. | |
227 VisitPointers(heap, | |
228 HeapObject::RawField(object, JSFunction::kPropertiesOffset), | |
229 HeapObject::RawField(object, JSFunction::kCodeEntryOffset)); | |
230 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); | |
231 VisitPointers(heap, | |
232 HeapObject::RawField(object, | |
233 JSFunction::kCodeEntryOffset + kPointerSize), | |
234 HeapObject::RawField(object, JSFunction::kSize)); | |
235 } | |
236 | |
237 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | |
238 Object* obj = *p; | |
239 if (obj->NonFailureIsHeapObject()) { | |
240 heap->mark_compact_collector()->RecordSlot(p, p, obj); | |
241 MarkObject(heap, obj); | |
212 } | 242 } |
213 } | 243 } |
214 | 244 |
215 void VisitPointer(Object** p) { | 245 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
216 Object* obj = *p; | |
217 if (obj->NonFailureIsHeapObject()) { | |
218 heap_->mark_compact_collector()->RecordSlot(p, p, obj); | |
219 MarkObject(obj); | |
220 } | |
221 } | |
222 | |
223 void VisitPointers(Object** start, Object** end) { | |
224 for (Object** p = start; p < end; p++) { | 246 for (Object** p = start; p < end; p++) { |
225 Object* obj = *p; | 247 Object* obj = *p; |
226 if (obj->NonFailureIsHeapObject()) { | 248 if (obj->NonFailureIsHeapObject()) { |
227 heap_->mark_compact_collector()->RecordSlot(start, p, obj); | 249 heap->mark_compact_collector()->RecordSlot(start, p, obj); |
228 MarkObject(obj); | 250 MarkObject(heap, obj); |
229 } | 251 } |
230 } | 252 } |
231 } | 253 } |
232 | 254 |
233 private: | 255 INLINE(static void MarkObject(Heap* heap, Object* obj)) { |
234 // Mark object pointed to by p. | |
235 INLINE(void MarkObject(Object* obj)) { | |
236 HeapObject* heap_object = HeapObject::cast(obj); | 256 HeapObject* heap_object = HeapObject::cast(obj); |
237 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); | 257 MarkBit mark_bit = Marking::MarkBitFrom(heap_object); |
238 if (mark_bit.data_only()) { | 258 if (mark_bit.data_only()) { |
239 if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) { | 259 if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) { |
240 MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), | 260 MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), |
241 heap_object->Size()); | 261 heap_object->Size()); |
242 } | 262 } |
243 } else if (Marking::IsWhite(mark_bit)) { | 263 } else if (Marking::IsWhite(mark_bit)) { |
244 incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit); | 264 heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit); |
245 } | 265 } |
246 } | 266 } |
247 | |
248 Heap* heap_; | |
249 IncrementalMarking* incremental_marking_; | |
250 }; | 267 }; |
251 | 268 |
252 | 269 |
253 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor { | 270 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor { |
254 public: | 271 public: |
255 IncrementalMarkingRootMarkingVisitor(Heap* heap, | 272 IncrementalMarkingRootMarkingVisitor(Heap* heap, |
256 IncrementalMarking* incremental_marking) | 273 IncrementalMarking* incremental_marking) |
257 : heap_(heap), | 274 : heap_(heap), |
258 incremental_marking_(incremental_marking) { | 275 incremental_marking_(incremental_marking) { |
259 } | 276 } |
(...skipping 23 matching lines...) Expand all Loading... | |
283 incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit); | 300 incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit); |
284 } | 301 } |
285 } | 302 } |
286 } | 303 } |
287 | 304 |
288 Heap* heap_; | 305 Heap* heap_; |
289 IncrementalMarking* incremental_marking_; | 306 IncrementalMarking* incremental_marking_; |
290 }; | 307 }; |
291 | 308 |
292 | 309 |
310 void IncrementalMarking::Initialize() { | |
311 IncrementalMarkingMarkingVisitor::Initialize(); | |
312 } | |
313 | |
314 | |
293 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk, | 315 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk, |
294 bool is_marking, | 316 bool is_marking, |
295 bool is_compacting) { | 317 bool is_compacting) { |
296 if (is_marking) { | 318 if (is_marking) { |
297 chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); | 319 chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING); |
298 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); | 320 chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING); |
299 | 321 |
300 // It's difficult to filter out slots recorded for large objects. | 322 // It's difficult to filter out slots recorded for large objects. |
301 if (chunk->owner()->identity() == LO_SPACE && | 323 if (chunk->owner()->identity() == LO_SPACE && |
302 chunk->size() > static_cast<size_t>(Page::kPageSize) && | 324 chunk->size() > static_cast<size_t>(Page::kPageSize) && |
(...skipping 313 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
616 } | 638 } |
617 } | 639 } |
618 marking_deque_.set_top(new_top); | 640 marking_deque_.set_top(new_top); |
619 | 641 |
620 steps_took_since_last_gc_ = 0; | 642 steps_took_since_last_gc_ = 0; |
621 steps_count_since_last_gc_ = 0; | 643 steps_count_since_last_gc_ = 0; |
622 longest_step_ = 0.0; | 644 longest_step_ = 0.0; |
623 } | 645 } |
624 | 646 |
625 | 647 |
626 void IncrementalMarking::VisitGlobalContext(Context* ctx, ObjectVisitor* v) { | |
627 v->VisitPointers( | |
628 HeapObject::RawField( | |
629 ctx, Context::MarkCompactBodyDescriptor::kStartOffset), | |
630 HeapObject::RawField( | |
631 ctx, Context::MarkCompactBodyDescriptor::kEndOffset)); | |
632 | |
633 MarkCompactCollector* collector = heap_->mark_compact_collector(); | |
634 for (int idx = Context::FIRST_WEAK_SLOT; | |
635 idx < Context::GLOBAL_CONTEXT_SLOTS; | |
636 ++idx) { | |
637 Object** slot = | |
638 HeapObject::RawField(ctx, FixedArray::OffsetOfElementAt(idx)); | |
639 collector->RecordSlot(slot, slot, *slot); | |
640 } | |
641 } | |
642 | |
643 | |
644 void IncrementalMarking::Hurry() { | 648 void IncrementalMarking::Hurry() { |
645 if (state() == MARKING) { | 649 if (state() == MARKING) { |
646 double start = 0.0; | 650 double start = 0.0; |
647 if (FLAG_trace_incremental_marking) { | 651 if (FLAG_trace_incremental_marking) { |
648 PrintF("[IncrementalMarking] Hurry\n"); | 652 PrintF("[IncrementalMarking] Hurry\n"); |
649 start = OS::TimeCurrentMillis(); | 653 start = OS::TimeCurrentMillis(); |
650 } | 654 } |
651 // TODO(gc) hurry can mark objects it encounters black as mutator | 655 // TODO(gc) hurry can mark objects it encounters black as mutator |
652 // was stopped. | 656 // was stopped. |
653 Map* filler_map = heap_->one_pointer_filler_map(); | 657 Map* filler_map = heap_->one_pointer_filler_map(); |
654 Map* global_context_map = heap_->global_context_map(); | 658 Map* global_context_map = heap_->global_context_map(); |
655 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | |
656 while (!marking_deque_.IsEmpty()) { | 659 while (!marking_deque_.IsEmpty()) { |
657 HeapObject* obj = marking_deque_.Pop(); | 660 HeapObject* obj = marking_deque_.Pop(); |
658 | 661 |
659 // Explicitly skip one word fillers. Incremental markbit patterns are | 662 // Explicitly skip one word fillers. Incremental markbit patterns are |
660 // correct only for objects that occupy at least two words. | 663 // correct only for objects that occupy at least two words. |
661 Map* map = obj->map(); | 664 Map* map = obj->map(); |
662 if (map == filler_map) { | 665 if (map == filler_map) { |
663 continue; | 666 continue; |
664 } else if (map == global_context_map) { | 667 } else if (map == global_context_map) { |
665 // Global contexts have weak fields. | 668 // Global contexts have weak fields. |
666 VisitGlobalContext(Context::cast(obj), &marking_visitor); | 669 IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, obj); |
667 } else if (map->instance_type() == MAP_TYPE) { | 670 } else if (map->instance_type() == MAP_TYPE) { |
668 Map* map = Map::cast(obj); | 671 Map* map = Map::cast(obj); |
669 heap_->ClearCacheOnMap(map); | 672 heap_->ClearCacheOnMap(map); |
670 | 673 |
671 // When map collection is enabled we have to mark through map's | 674 // When map collection is enabled we have to mark through map's |
672 // transitions and back pointers in a special way to make these links | 675 // transitions and back pointers in a special way to make these links |
673 // weak. Only maps for subclasses of JSReceiver can have transitions. | 676 // weak. Only maps for subclasses of JSReceiver can have transitions. |
674 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | 677 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
675 if (FLAG_collect_maps && | 678 if (FLAG_collect_maps && |
676 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | 679 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
677 marker_.MarkMapContents(map); | 680 marker_.MarkMapContents(map); |
678 } else { | 681 } else { |
679 marking_visitor.VisitPointers( | 682 IncrementalMarkingMarkingVisitor::VisitPointers( |
683 heap_, | |
680 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | 684 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
681 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | 685 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); |
682 } | 686 } |
683 } else { | 687 } else { |
684 obj->Iterate(&marking_visitor); | 688 MarkBit map_mark_bit = Marking::MarkBitFrom(map); |
689 if (Marking::IsWhite(map_mark_bit)) { | |
690 WhiteToGreyAndPush(map, map_mark_bit); | |
691 } | |
692 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); | |
685 } | 693 } |
686 | 694 |
687 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 695 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
688 ASSERT(!Marking::IsBlack(mark_bit)); | 696 ASSERT(!Marking::IsBlack(mark_bit)); |
689 Marking::MarkBlack(mark_bit); | 697 Marking::MarkBlack(mark_bit); |
690 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); | 698 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); |
691 } | 699 } |
692 state_ = COMPLETE; | 700 state_ = COMPLETE; |
693 if (FLAG_trace_incremental_marking) { | 701 if (FLAG_trace_incremental_marking) { |
694 double end = OS::TimeCurrentMillis(); | 702 double end = OS::TimeCurrentMillis(); |
(...skipping 113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
808 } | 816 } |
809 | 817 |
810 if (state_ == SWEEPING) { | 818 if (state_ == SWEEPING) { |
811 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) { | 819 if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) { |
812 bytes_scanned_ = 0; | 820 bytes_scanned_ = 0; |
813 StartMarking(PREVENT_COMPACTION); | 821 StartMarking(PREVENT_COMPACTION); |
814 } | 822 } |
815 } else if (state_ == MARKING) { | 823 } else if (state_ == MARKING) { |
816 Map* filler_map = heap_->one_pointer_filler_map(); | 824 Map* filler_map = heap_->one_pointer_filler_map(); |
817 Map* global_context_map = heap_->global_context_map(); | 825 Map* global_context_map = heap_->global_context_map(); |
818 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | |
819 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { | 826 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { |
820 HeapObject* obj = marking_deque_.Pop(); | 827 HeapObject* obj = marking_deque_.Pop(); |
821 | 828 |
822 // Explicitly skip one word fillers. Incremental markbit patterns are | 829 // Explicitly skip one word fillers. Incremental markbit patterns are |
823 // correct only for objects that occupy at least two words. | 830 // correct only for objects that occupy at least two words. |
824 Map* map = obj->map(); | 831 Map* map = obj->map(); |
825 if (map == filler_map) continue; | 832 if (map == filler_map) continue; |
826 | 833 |
827 int size = obj->SizeFromMap(map); | 834 int size = obj->SizeFromMap(map); |
828 bytes_to_process -= size; | 835 bytes_to_process -= size; |
829 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 836 MarkBit map_mark_bit = Marking::MarkBitFrom(map); |
830 if (Marking::IsWhite(map_mark_bit)) { | 837 if (Marking::IsWhite(map_mark_bit)) { |
831 WhiteToGreyAndPush(map, map_mark_bit); | 838 WhiteToGreyAndPush(map, map_mark_bit); |
832 } | 839 } |
833 | 840 |
834 // TODO(gc) switch to static visitor instead of normal visitor. | 841 // TODO(gc) switch to static visitor instead of normal visitor. |
835 if (map == global_context_map) { | 842 if (map == global_context_map) { |
836 // Global contexts have weak fields. | 843 // Global contexts have weak fields. |
837 Context* ctx = Context::cast(obj); | 844 Context* ctx = Context::cast(obj); |
838 | 845 |
839 // We will mark cache black with a separate pass | 846 // We will mark cache black with a separate pass |
840 // when we finish marking. | 847 // when we finish marking. |
841 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); | 848 MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache()); |
842 | 849 |
843 VisitGlobalContext(ctx, &marking_visitor); | 850 IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, ctx); |
844 } else if (map->instance_type() == MAP_TYPE) { | 851 } else if (map->instance_type() == MAP_TYPE) { |
845 Map* map = Map::cast(obj); | 852 Map* map = Map::cast(obj); |
846 heap_->ClearCacheOnMap(map); | 853 heap_->ClearCacheOnMap(map); |
847 | 854 |
848 // When map collection is enabled we have to mark through map's | 855 // When map collection is enabled we have to mark through map's |
849 // transitions and back pointers in a special way to make these links | 856 // transitions and back pointers in a special way to make these links |
850 // weak. Only maps for subclasses of JSReceiver can have transitions. | 857 // weak. Only maps for subclasses of JSReceiver can have transitions. |
851 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | 858 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
852 if (FLAG_collect_maps && | 859 if (FLAG_collect_maps && |
853 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { | 860 map->instance_type() >= FIRST_JS_RECEIVER_TYPE) { |
854 marker_.MarkMapContents(map); | 861 marker_.MarkMapContents(map); |
855 } else { | 862 } else { |
856 marking_visitor.VisitPointers( | 863 IncrementalMarkingMarkingVisitor::VisitPointers( |
864 heap_, | |
857 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), | 865 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset), |
858 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); | 866 HeapObject::RawField(map, Map::kPointerFieldsEndOffset)); |
859 } | 867 } |
860 } else if (map->instance_type() == JS_FUNCTION_TYPE) { | 868 } else if (map->instance_type() == JS_FUNCTION_TYPE) { |
861 marking_visitor.VisitPointers( | 869 IncrementalMarkingMarkingVisitor::VisitPointers( |
870 heap_, | |
862 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), | 871 HeapObject::RawField(obj, JSFunction::kPropertiesOffset), |
863 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); | 872 HeapObject::RawField(obj, JSFunction::kCodeEntryOffset)); |
864 | 873 |
865 marking_visitor.VisitCodeEntry( | 874 IncrementalMarkingMarkingVisitor::VisitCodeEntry( |
866 obj->address() + JSFunction::kCodeEntryOffset); | 875 heap_, obj->address() + JSFunction::kCodeEntryOffset); |
867 | 876 |
868 marking_visitor.VisitPointers( | 877 IncrementalMarkingMarkingVisitor::VisitPointers( |
878 heap_, | |
869 HeapObject::RawField(obj, | 879 HeapObject::RawField(obj, |
870 JSFunction::kCodeEntryOffset + kPointerSize), | 880 JSFunction::kCodeEntryOffset + kPointerSize), |
871 HeapObject::RawField(obj, | 881 HeapObject::RawField(obj, |
872 JSFunction::kNonWeakFieldsEndOffset)); | 882 JSFunction::kNonWeakFieldsEndOffset)); |
Toon Verwaest
2012/07/23 13:10:21
Is this the same as IncrementalMarkingMarkingVisit
Michael Starzinger
2012/07/25 08:21:33
Done. Yes, it should be the same. Changed so tha
| |
873 } else { | 883 } else { |
874 obj->IterateBody(map->instance_type(), size, &marking_visitor); | 884 IncrementalMarkingMarkingVisitor::IterateBody(map, obj); |
875 } | 885 } |
876 | 886 |
877 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj); | 887 MarkBit obj_mark_bit = Marking::MarkBitFrom(obj); |
878 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) || | 888 SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) || |
879 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); | 889 (obj->IsFiller() && Marking::IsWhite(obj_mark_bit))); |
880 Marking::MarkBlack(obj_mark_bit); | 890 Marking::MarkBlack(obj_mark_bit); |
881 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size); | 891 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size); |
882 } | 892 } |
883 if (marking_deque_.IsEmpty()) MarkingComplete(action); | 893 if (marking_deque_.IsEmpty()) MarkingComplete(action); |
884 } | 894 } |
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
975 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 985 allocation_marking_factor_ = kInitialAllocationMarkingFactor; |
976 bytes_scanned_ = 0; | 986 bytes_scanned_ = 0; |
977 } | 987 } |
978 | 988 |
979 | 989 |
980 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 990 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
981 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); | 991 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects(); |
982 } | 992 } |
983 | 993 |
984 } } // namespace v8::internal | 994 } } // namespace v8::internal |
OLD | NEW |