Chromium Code Reviews

Diff: src/incremental-marking.cc

Issue 10386046: Implement map collection for incremental marking. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Minor fix in live bytes counting. Created 8 years, 7 months ago
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
(...skipping 23 matching lines...)
 
 namespace v8 {
 namespace internal {
 
 
 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       state_(STOPPED),
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(false),
+      marker_(this, heap->mark_compact_collector()),
       steps_count_(0),
       steps_took_(0),
       longest_step_(0.0),
       old_generation_space_available_at_start_of_incremental_(0),
       old_generation_space_used_at_start_of_incremental_(0),
       steps_count_since_last_gc_(0),
       steps_took_since_last_gc_(0),
       should_hurry_(false),
       allocation_marking_factor_(0),
       allocated_(0),
(...skipping 601 matching lines...)
       HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) {
         continue;
       } else if (map == global_context_map) {
         // Global contexts have weak fields.
         VisitGlobalContext(Context::cast(obj), &marking_visitor);
+      } else if (map->instance_type() == MAP_TYPE) {
+        Map* map = Map::cast(obj);
+        heap_->ClearCacheOnMap(map);
+
+        // When map collection is enabled we have to mark through map's
+        // transitions and back pointers in a special way to make these links
+        // weak. Only maps for subclasses of JSReceiver can have transitions.
+        STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+        if (FLAG_collect_maps &&
+            map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+          marker_.MarkMapContents(map);
+        } else {
+          marking_visitor.VisitPointers(
+              HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+              HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+        }
       } else {
         obj->Iterate(&marking_visitor);
       }
 
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
       ASSERT(!Marking::IsBlack(mark_bit));
       Marking::MarkBlack(mark_bit);
       MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
     }
     state_ = COMPLETE;
(...skipping 124 matching lines...)
     Map* global_context_map = heap_->global_context_map();
     IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
     while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
       HeapObject* obj = marking_deque_.Pop();
 
       // Explicitly skip one word fillers. Incremental markbit patterns are
       // correct only for objects that occupy at least two words.
       Map* map = obj->map();
       if (map == filler_map) continue;
 
-      if (obj->IsMap()) {
-        Map* map = Map::cast(obj);
-        heap_->ClearCacheOnMap(map);
-      }
-
-
       int size = obj->SizeFromMap(map);
       bytes_to_process -= size;
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
       if (Marking::IsWhite(map_mark_bit)) {
         WhiteToGreyAndPush(map, map_mark_bit);
       }
 
       // TODO(gc) switch to static visitor instead of normal visitor.
       if (map == global_context_map) {
         // Global contexts have weak fields.
         Context* ctx = Context::cast(obj);
 
         // We will mark cache black with a separate pass
         // when we finish marking.
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
         VisitGlobalContext(ctx, &marking_visitor);
+      } else if (map->instance_type() == MAP_TYPE) {
+        Map* map = Map::cast(obj);
+        heap_->ClearCacheOnMap(map);
+
+        // When map collection is enabled we have to mark through map's
+        // transitions and back pointers in a special way to make these links
+        // weak. Only maps for subclasses of JSReceiver can have transitions.
+        STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+        if (FLAG_collect_maps &&
+            map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+          marker_.MarkMapContents(map);
+        } else {
+          marking_visitor.VisitPointers(
+              HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+              HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+        }
       } else if (map->instance_type() == JS_FUNCTION_TYPE) {
         marking_visitor.VisitPointers(
             HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
             HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
 
         marking_visitor.VisitCodeEntry(
             obj->address() + JSFunction::kCodeEntryOffset);
 
         marking_visitor.VisitPointers(
             HeapObject::RawField(obj,
(...skipping 105 matching lines...)
   allocation_marking_factor_ = kInitialAllocationMarkingFactor;
   bytes_scanned_ = 0;
 }
 
 
 int64_t IncrementalMarking::SpaceLeftInOldSpace() {
   return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
 }
 
 } }  // namespace v8::internal
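Note on the pattern above: both hunks add the same MAP_TYPE branch. When a popped object is a Map, its code cache is cleared, and then either marker_.MarkMapContents() marks the map so that its transitions and back pointers stay weak (only when FLAG_collect_maps is set and the map describes a JSReceiver subclass, which the STATIC_ASSERT guards), or the map's pointer fields are visited strongly as before. The stand-alone sketch below models only that control flow; every type and constant in it (FakeMap, FakeMarker, FakeVisitor, the instance-type values) is a simplified stand-in for illustration, not the real V8 API.

// Minimal, self-contained model of the branch this patch adds.
// All types here are simplified stand-ins, not the real V8 classes.
#include <cstdio>

enum InstanceType { MAP_TYPE = 0, FIRST_JS_RECEIVER_TYPE = 100, JS_OBJECT_TYPE = 101 };

static bool FLAG_collect_maps = true;  // stand-in for the real V8 flag

struct FakeMap {
  InstanceType instance_type;  // type of objects this map describes
};

struct FakeMarker {
  // Stand-in for Marker::MarkMapContents: marks the map's contents while
  // treating transitions and back pointers as weak, so unreachable maps
  // can be collected later.
  void MarkMapContents(FakeMap* map) {
    std::printf("weak-marking map contents (instance_type %d)\n", map->instance_type);
  }
};

struct FakeVisitor {
  // Stand-in for visiting Map::kPointerFieldsBeginOffset..EndOffset strongly.
  void VisitPointerFields(FakeMap* map) {
    std::printf("strong-visiting pointer fields (instance_type %d)\n", map->instance_type);
  }
};

// Mirrors the new branch: weak marking only when map collection is enabled
// and the map describes a JSReceiver subclass; otherwise strong marking.
void ProcessMap(FakeMap* map, FakeMarker* marker, FakeVisitor* visitor) {
  if (FLAG_collect_maps && map->instance_type >= FIRST_JS_RECEIVER_TYPE) {
    marker->MarkMapContents(map);
  } else {
    visitor->VisitPointerFields(map);
  }
}

int main() {
  FakeMarker marker;
  FakeVisitor visitor;
  FakeMap js_object_map{JS_OBJECT_TYPE};  // map for a JSReceiver subclass: weak path
  FakeMap meta_map{MAP_TYPE};             // not a JSReceiver map: strong path
  ProcessMap(&js_object_map, &marker, &visitor);
  ProcessMap(&meta_map, &marker, &visitor);
  return 0;
}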