Chromium Code Reviews

Unified Diff: src/incremental-marking.cc

Issue 10816007: Refactor incremental marking to use static visitor. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed comments by Toon Verwaest. Created 8 years, 4 months ago
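This patch replaces a virtual ObjectVisitor with a "static visitor": every visit function is static, dispatch goes through a function table indexed by the object's visitor id instead of a vtable, and the Heap* is threaded through as an argument rather than living in visitor state. The following is a minimal, self-contained sketch of that pattern; all type names below are stand-ins for illustration, not V8's real declarations.

#include <cstdio>

enum VisitorId { kVisitDefault, kVisitSharedFunctionInfo, kVisitorIdCount };

struct Heap {};                       // stand-in for v8::internal::Heap
struct HeapObject { VisitorId id; };  // stand-in; V8 derives the id from the map

template <typename StaticVisitor>
class StaticVisitorBase {
 public:
  typedef void (*Callback)(Heap* heap, HeapObject* object);

  // Fill the table with defaults; derived visitors override entries.
  static void Initialize() {
    for (int i = 0; i < kVisitorIdCount; i++) {
      table_[i] = &StaticVisitor::VisitDefault;
    }
  }

  // No virtual call: one indexed load and an indirect call.
  static void IterateBody(Heap* heap, HeapObject* object) {
    table_[object->id](heap, object);
  }

 protected:
  static void Register(VisitorId id, Callback cb) { table_[id] = cb; }

 private:
  static Callback table_[kVisitorIdCount];
};

template <typename StaticVisitor>
typename StaticVisitorBase<StaticVisitor>::Callback
    StaticVisitorBase<StaticVisitor>::table_[kVisitorIdCount];

class MarkingVisitor : public StaticVisitorBase<MarkingVisitor> {
 public:
  static void Initialize() {
    StaticVisitorBase<MarkingVisitor>::Initialize();               // defaults first
    Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);  // then overrides
  }
  static void VisitDefault(Heap*, HeapObject*) { std::printf("default body\n"); }
  static void VisitSharedFunctionInfo(Heap*, HeapObject*) {
    std::printf("SharedFunctionInfo body\n");
  }
};

int main() {
  MarkingVisitor::Initialize();
  Heap heap;
  HeapObject sfi = { kVisitSharedFunctionInfo };
  MarkingVisitor::IterateBody(&heap, &sfi);  // prints "SharedFunctionInfo body"
}

The diff below shows the same structure in the real code: IncrementalMarkingMarkingVisitor derives from StaticMarkingVisitor via CRTP and registers its overrides in Initialize().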
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 13 matching lines...)
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "v8.h"
 
 #include "incremental-marking.h"
 
 #include "code-stubs.h"
 #include "compilation-cache.h"
+#include "objects-visiting.h"
+#include "objects-visiting-inl.h"
 #include "v8conversions.h"
 
 namespace v8 {
 namespace internal {
 
 
 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       state_(STOPPED),
       marking_deque_memory_(NULL),
(...skipping 109 matching lines...)
   MarkBit obj_bit = Marking::MarkBitFrom(obj);
   if (Marking::IsBlack(obj_bit)) {
     // Object is not going to be rescanned. We need to record the slot.
     heap_->mark_compact_collector()->RecordRelocSlot(rinfo,
                                                      Code::cast(value));
   }
 }
 
 
-class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
+class IncrementalMarkingMarkingVisitor
+    : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
-  IncrementalMarkingMarkingVisitor(Heap* heap,
-                                   IncrementalMarking* incremental_marking)
-      : heap_(heap),
-        incremental_marking_(incremental_marking) {
+  static void Initialize() {
+    StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
+
+    table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
+
+    table_.Register(kVisitJSFunction, &VisitJSFunction);
+
+    table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
-  void VisitEmbeddedPointer(RelocInfo* rinfo) {
+  static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) {
     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     Object* target = rinfo->target_object();
     if (target->NonFailureIsHeapObject()) {
-      heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target);
-      MarkObject(target);
+      heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+      MarkObject(heap, target);
     }
   }
 
-  void VisitCodeTarget(RelocInfo* rinfo) {
+  static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
     ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
-        && (target->ic_age() != heap_->global_ic_age())) {
+        && (target->ic_age() != heap->global_ic_age())) {
       IC::Clear(rinfo->pc());
       target = Code::GetCodeFromTargetAddress(rinfo->target_address());
     }
-    heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
-    MarkObject(target);
+    heap->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
+    MarkObject(heap, target);
   }
 
-  void VisitDebugTarget(RelocInfo* rinfo) {
-    ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
-            rinfo->IsPatchedReturnSequence()) ||
-           (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
-            rinfo->IsPatchedDebugBreakSlotSequence()));
-    Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
-    heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
-    MarkObject(target);
+  static void VisitCode(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    Code* code = reinterpret_cast<Code*>(object);
+    code->CodeIterateBody<IncrementalMarkingMarkingVisitor>(heap);
   }
 
-  void VisitCodeEntry(Address entry_address) {
-    Object* target = Code::GetObjectFromEntryAddress(entry_address);
-    heap_->mark_compact_collector()->
-        RecordCodeEntrySlot(entry_address, Code::cast(target));
-    MarkObject(target);
+  static void VisitJSWeakMap(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSWeakMap::kPropertiesOffset),
+                  HeapObject::RawField(object, JSWeakMap::kSize));
   }
 
-  void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {
-    if (shared->ic_age() != heap_->global_ic_age()) {
-      shared->ResetForNewContext(heap_->global_ic_age());
-    }
-  }
+  static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+    if (shared->ic_age() != heap->global_ic_age()) {
+      shared->ResetForNewContext(heap->global_ic_age());
+    }
+    FixedBodyVisitor<IncrementalMarkingMarkingVisitor,
+                     SharedFunctionInfo::BodyDescriptor,
+                     void>::Visit(map, object);
+  }
+
+  static inline void VisitJSFunction(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    // Iterate over all fields in the body but take care in dealing with
+    // the code entry and skip weak fields.
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+    VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+    VisitPointers(heap,
+                  HeapObject::RawField(object,
+                                       JSFunction::kCodeEntryOffset + kPointerSize),
+                  HeapObject::RawField(object,
+                                       JSFunction::kNonWeakFieldsEndOffset));
+  }
+
+  INLINE(static void VisitPointer(Heap* heap, Object** p)) {
+    Object* obj = *p;
+    if (obj->NonFailureIsHeapObject()) {
+      heap->mark_compact_collector()->RecordSlot(p, p, obj);
+      MarkObject(heap, obj);
+    }
+  }
 
-  void VisitPointer(Object** p) {
-    Object* obj = *p;
-    if (obj->NonFailureIsHeapObject()) {
-      heap_->mark_compact_collector()->RecordSlot(p, p, obj);
-      MarkObject(obj);
-    }
-  }
-
-  void VisitPointers(Object** start, Object** end) {
+  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
     for (Object** p = start; p < end; p++) {
       Object* obj = *p;
       if (obj->NonFailureIsHeapObject()) {
-        heap_->mark_compact_collector()->RecordSlot(start, p, obj);
-        MarkObject(obj);
+        heap->mark_compact_collector()->RecordSlot(start, p, obj);
+        MarkObject(heap, obj);
       }
     }
   }
 
- private:
-  // Mark object pointed to by p.
-  INLINE(void MarkObject(Object* obj)) {
+  INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
-      if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
+      if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
         MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
                                               heap_object->Size());
       }
     } else if (Marking::IsWhite(mark_bit)) {
-      incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
+      heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
     }
   }
-
-  Heap* heap_;
-  IncrementalMarking* incremental_marking_;
 };
 
 
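The MarkObject function above encodes the tri-color discipline that drives incremental marking: data-only objects (which contain no pointers) can be blackened on first sight, while everything else goes white to grey and is pushed on the marking deque for later body scanning by Hurry() or Step(). A simplified, runnable sketch of that logic, with stand-in types (V8's MarkBit machinery and the MarkBlackOrKeepGrey subtlety are elided):

#include <cstdio>
#include <deque>

enum Color { WHITE, GREY, BLACK };

struct Obj {
  Color color = WHITE;
  bool data_only = false;  // e.g. strings and heap numbers: no pointer fields
};

// Mirrors the shape of MarkObject(heap, obj) above: data-only objects are
// blackened immediately (nothing in them to scan); everything else goes
// white -> grey and is queued for later scanning.
void MarkObject(Obj* obj, std::deque<Obj*>* marking_deque) {
  if (obj->data_only) {
    if (obj->color == WHITE) obj->color = BLACK;
  } else if (obj->color == WHITE) {
    obj->color = GREY;
    marking_deque->push_back(obj);
  }
}

int main() {
  std::deque<Obj*> deque;
  Obj str;  str.data_only = true;   // blackened on first sight
  Obj fun;                          // has pointers: queued as grey
  MarkObject(&str, &deque);
  MarkObject(&fun, &deque);
  std::printf("str=%d fun=%d queued=%zu\n", str.color, fun.color, deque.size());
}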
 class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
  public:
   IncrementalMarkingRootMarkingVisitor(Heap* heap,
                                        IncrementalMarking* incremental_marking)
       : heap_(heap),
         incremental_marking_(incremental_marking) {
   }
(...skipping 23 matching lines...)
         incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
       }
     }
   }
 
   Heap* heap_;
   IncrementalMarking* incremental_marking_;
 };
 
 
+void IncrementalMarking::Initialize() {
+  IncrementalMarkingMarkingVisitor::Initialize();
+}
+
+
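Because the dispatch table is static state, it must be populated before the first marking step can dispatch through it; the new IncrementalMarking::Initialize() is the hook this patch adds for that. The call site is outside this file; a hypothetical bootstrap sequence, for illustration only:

// Hypothetical setup helper (not part of this patch): register the static
// visitor tables once, before any incremental marking step runs.
void SetUpHeapInternals() {
  IncrementalMarking::Initialize();  // -> IncrementalMarkingMarkingVisitor::Initialize()
}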
 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
                                               bool is_marking,
                                               bool is_compacting) {
   if (is_marking) {
     chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
 
     // It's difficult to filter out slots recorded for large objects.
     if (chunk->owner()->identity() == LO_SPACE &&
         chunk->size() > static_cast<size_t>(Page::kPageSize) &&
(...skipping 313 matching lines...)
     }
   }
   marking_deque_.set_top(new_top);
 
   steps_took_since_last_gc_ = 0;
   steps_count_since_last_gc_ = 0;
   longest_step_ = 0.0;
 }
 
 
-void IncrementalMarking::VisitGlobalContext(Context* ctx, ObjectVisitor* v) {
-  v->VisitPointers(
-      HeapObject::RawField(
-          ctx, Context::MarkCompactBodyDescriptor::kStartOffset),
-      HeapObject::RawField(
-          ctx, Context::MarkCompactBodyDescriptor::kEndOffset));
-
-  MarkCompactCollector* collector = heap_->mark_compact_collector();
-  for (int idx = Context::FIRST_WEAK_SLOT;
-       idx < Context::GLOBAL_CONTEXT_SLOTS;
-       ++idx) {
-    Object** slot =
-        HeapObject::RawField(ctx, FixedArray::OffsetOfElementAt(idx));
-    collector->RecordSlot(slot, slot, *slot);
-  }
-}
-
-
 void IncrementalMarking::Hurry() {
   if (state() == MARKING) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Hurry\n");
       start = OS::TimeCurrentMillis();
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* global_context_map = heap_->global_context_map();
-    IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
     while (!marking_deque_.IsEmpty()) {
       HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) {
         continue;
       } else if (map == global_context_map) {
         // Global contexts have weak fields.
-        VisitGlobalContext(Context::cast(obj), &marking_visitor);
+        IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, obj);
       } else if (map->instance_type() == MAP_TYPE) {
         Map* map = Map::cast(obj);
         heap_->ClearCacheOnMap(map);
 
         // When map collection is enabled we have to mark through map's
         // transitions and back pointers in a special way to make these links
         // weak. Only maps for subclasses of JSReceiver can have transitions.
         STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
         if (FLAG_collect_maps &&
             map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
           marker_.MarkMapContents(map);
         } else {
-          marking_visitor.VisitPointers(
+          IncrementalMarkingMarkingVisitor::VisitPointers(
+              heap_,
               HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
               HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
         }
       } else {
-        obj->Iterate(&marking_visitor);
+        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+        if (Marking::IsWhite(map_mark_bit)) {
+          WhiteToGreyAndPush(map, map_mark_bit);
+        }
+        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
       }
 
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       ASSERT(!Marking::IsBlack(mark_bit));
       Marking::MarkBlack(mark_bit);
       MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
     }
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
       double end = OS::TimeCurrentMillis();
(...skipping 113 matching lines...)
   }
 
   if (state_ == SWEEPING) {
     if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
       bytes_scanned_ = 0;
       StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
     Map* filler_map = heap_->one_pointer_filler_map();
     Map* global_context_map = heap_->global_context_map();
-    IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
     while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
       HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) continue;
 
       int size = obj->SizeFromMap(map);
       bytes_to_process -= size;
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
       if (Marking::IsWhite(map_mark_bit)) {
         WhiteToGreyAndPush(map, map_mark_bit);
       }
 
       // TODO(gc) switch to static visitor instead of normal visitor.
       if (map == global_context_map) {
         // Global contexts have weak fields.
         Context* ctx = Context::cast(obj);
 
         // We will mark cache black with a separate pass
         // when we finish marking.
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
-        VisitGlobalContext(ctx, &marking_visitor);
+        IncrementalMarkingMarkingVisitor::VisitGlobalContext(map, ctx);
       } else if (map->instance_type() == MAP_TYPE) {
         Map* map = Map::cast(obj);
         heap_->ClearCacheOnMap(map);
 
         // When map collection is enabled we have to mark through map's
         // transitions and back pointers in a special way to make these links
         // weak. Only maps for subclasses of JSReceiver can have transitions.
         STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
         if (FLAG_collect_maps &&
             map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
           marker_.MarkMapContents(map);
         } else {
-          marking_visitor.VisitPointers(
+          IncrementalMarkingMarkingVisitor::VisitPointers(
+              heap_,
               HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
               HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
         }
-      } else if (map->instance_type() == JS_FUNCTION_TYPE) {
-        marking_visitor.VisitPointers(
-            HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
-            HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
-
-        marking_visitor.VisitCodeEntry(
-            obj->address() + JSFunction::kCodeEntryOffset);
-
-        marking_visitor.VisitPointers(
-            HeapObject::RawField(obj,
-                                 JSFunction::kCodeEntryOffset + kPointerSize),
-            HeapObject::RawField(obj,
-                                 JSFunction::kNonWeakFieldsEndOffset));
       } else {
-        obj->IterateBody(map->instance_type(), size, &marking_visitor);
+        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
       }
 
       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
       SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
                   (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
       MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
     }
     if (marking_deque_.IsEmpty()) MarkingComplete(action);
   }
(...skipping 90 matching lines...)
   allocation_marking_factor_ = kInitialAllocationMarkingFactor;
   bytes_scanned_ = 0;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }
 
 } }  // namespace v8::internal