
Diff: src/mark-compact.h

Issue 10386046: Implement map collection for incremental marking. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Minor fix in live bytes counting. Created 8 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 24 matching lines...)
 namespace internal {

 // Callback function, returns whether an object is alive. The heap size
 // of the object is returned in size. It optionally updates the offset
 // to the first live object in the page (only used for old and map objects).
 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);

 // Forward declarations.
 class CodeFlusher;
 class GCTracer;
+class MarkCompactCollector;
 class MarkingVisitor;
 class RootMarkingVisitor;


 class Marking {
  public:
   explicit Marking(Heap* heap)
       : heap_(heap) {
   }

(...skipping 104 matching lines...)
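
The IsAliveFunction typedef above is a plain function pointer, so the sweeping code can accept different liveness predicates without templates or virtual dispatch. A self-contained sketch of the same callback pattern (the types and names below are illustrative stand-ins, not V8 code):

#include <cstdio>

struct Object { bool marked; int size; };

// Analog of IsAliveFunction: report liveness and return the size via out-param.
typedef bool (*IsAliveFn)(Object* obj, int* size);

static bool IsMarkedAlive(Object* obj, int* size) {
  *size = obj->size;   // the caller needs the size to step past the object
  return obj->marked;  // liveness is just the mark bit in this sketch
}

// A sweeper-style caller parameterized by the callback.
static void Sweep(IsAliveFn is_alive, Object* obj) {
  int size = 0;
  std::printf("%s (%d bytes)\n", is_alive(obj, &size) ? "keep" : "free", size);
}

int main() {
  Object live = {true, 16};
  Object dead = {false, 32};
  Sweep(IsMarkedAlive, &live);
  Sweep(IsMarkedAlive, &dead);
  return 0;
}
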
     }
     return is_black;
   }

  private:
   Heap* heap_;
 };

 // ----------------------------------------------------------------------------
 // Marking deque for tracing live objects.
-
 class MarkingDeque {
  public:
   MarkingDeque()
       : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) { }

   void Initialize(Address low, Address high) {
     HeapObject** obj_low = reinterpret_cast<HeapObject**>(low);
     HeapObject** obj_high = reinterpret_cast<HeapObject**>(high);
     array_ = obj_low;
     mask_ = RoundDownToPowerOf2(static_cast<int>(obj_high - obj_low)) - 1;
(...skipping 196 matching lines...)
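
Initialize() above rounds the buffer capacity down to a power of two and stores capacity - 1 in mask_, so later pushes and pops can wrap top_ and bottom_ with a bitwise AND instead of a modulo. A minimal standalone illustration of that masking trick (not V8 code):

#include <cassert>

int main() {
  const int capacity = 8;         // RoundDownToPowerOf2() guarantees 2^k
  const int mask = capacity - 1;  // 7 == 0b0111
  int top = 0;
  // (top + 1) & mask equals (top + 1) % capacity for power-of-two capacities,
  // but avoids an integer division on every push.
  for (int i = 0; i < 20; ++i) top = (top + 1) & mask;
  assert(top == 20 % capacity);
  return 0;
}
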
  private:
   static const int kChainLengthThreshold = 15;

   intptr_t idx_;
   intptr_t chain_length_;
   SlotsBuffer* next_;
   ObjectSlot slots_[kNumberOfElements];
 };


+// -------------------------------------------------------------------------
+// Marker shared between incremental and non-incremental marking
+template<class BaseMarker> class Marker {
+ public:
+  Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
+      : base_marker_(base_marker),
+        mark_compact_collector_(mark_compact_collector) {}
+
+  // Mark pointers in a Map and its DescriptorArray together, possibly
+  // treating transitions or back pointers weak.
+  void MarkMapContents(Map* map);
+  void MarkDescriptorArray(DescriptorArray* descriptors);
+  void MarkAccessorPairSlot(AccessorPair* accessors, int offset);
+
+ private:
+  BaseMarker* base_marker() {
+    return base_marker_;
+  }
+
+  MarkCompactCollector* mark_compact_collector() {
+    return mark_compact_collector_;
+  }
+
+  BaseMarker* base_marker_;
+  MarkCompactCollector* mark_compact_collector_;
+};
+
+
 // Defined in isolate.h.
 class ThreadLocalTop;


 // -------------------------------------------------------------------------
 // Mark-Compact collector
 class MarkCompactCollector {
  public:
   // Type of functions to compute forwarding addresses of objects in
   // compacted spaces. Given an object and its size, return a (non-failure)
(...skipping 181 matching lines...)
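
The Marker<BaseMarker> template added above is the heart of this change: the map and descriptor-array marking code is written once and parameterized over whichever marker drives it, and the friend declarations further down instantiate it for both IncrementalMarking and MarkCompactCollector. A simplified, self-contained analog of that pattern (all types below are illustrative stand-ins, not V8 code):

#include <cstdio>

struct FullMarker {         // stands in for MarkCompactCollector
  void MarkObject(const char* what) { std::printf("full: mark %s\n", what); }
};

struct IncrementalMarker {  // stands in for IncrementalMarking
  void MarkObject(const char* what) { std::printf("incr: mark %s\n", what); }
};

// Shared marking logic, statically dispatched to either base marker, mirroring
// how Marker<BaseMarker> shares the map-marking routines between the
// incremental and non-incremental paths.
template <class BaseMarker>
class SharedMarker {
 public:
  explicit SharedMarker(BaseMarker* base) : base_(base) {}
  void MarkMapContents() {
    base_->MarkObject("descriptor array");
    base_->MarkObject("back pointer");
  }
 private:
  BaseMarker* base_;
};

int main() {
  FullMarker full;
  IncrementalMarker incremental;
  SharedMarker<FullMarker>(&full).MarkMapContents();
  SharedMarker<IncrementalMarker>(&incremental).MarkMapContents();
  return 0;
}
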
   bool reduce_memory_footprint_;

   bool abort_incremental_marking_;

   // True if we are collecting slots to perform evacuation from evacuation
   // candidates.
   bool compacting_;

   bool was_marked_incrementally_;

-  bool collect_maps_;
-
   bool flush_monomorphic_ics_;

   // A pointer to the current stack-allocated GC tracer object during a full
   // collection (NULL before and after).
   GCTracer* tracer_;

   SlotsBufferAllocator slots_buffer_allocator_;

   SlotsBuffer* migration_slots_buffer_;

   // Finishes GC, performs heap verification if enabled.
   void Finish();

   // -----------------------------------------------------------------------
   // Phase 1: Marking live objects.
   //
   // Before: The heap has been prepared for garbage collection by
   //         MarkCompactCollector::Prepare() and is otherwise in its
   //         normal state.
   //
   // After: Live objects are marked and non-live objects are unmarked.

-
   friend class RootMarkingVisitor;
   friend class MarkingVisitor;
   friend class StaticMarkingVisitor;
   friend class CodeMarkingVisitor;
   friend class SharedFunctionInfoMarkingVisitor;
+  friend class Marker<IncrementalMarking>;
+  friend class Marker<MarkCompactCollector>;

   // Mark non-optimize code for functions inlined into the given optimized
   // code. This will prevent it from being flushed.
   void MarkInlinedFunctionsCode(Code* code);

   // Mark code objects that are active on the stack to prevent them
   // from being flushed.
   void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);

   void PrepareForCodeFlushing();

   // Marking operations for objects reachable from roots.
   void MarkLiveObjects();

   void AfterMarking();

   // Marks the object black and pushes it on the marking stack.
-  // This is for non-incremental marking.
+  // Returns true if object needed marking and false otherwise.
+  // This is for non-incremental marking only.
+  INLINE(bool MarkObjectAndPush(HeapObject* obj));
+
+  // Marks the object black and pushes it on the marking stack.
+  // This is for non-incremental marking only.
   INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));

-  INLINE(bool MarkObjectWithoutPush(HeapObject* object));
-  INLINE(void MarkObjectAndPush(HeapObject* value));
+  // Marks the object black without pushing it on the marking stack.
+  // Returns true if object needed marking and false otherwise.
+  // This is for non-incremental marking only.
+  INLINE(bool MarkObjectWithoutPush(HeapObject* obj));

-  // Marks the object black. This is for non-incremental marking.
+  // Marks the object black assuming that it is not yet marked.
+  // This is for non-incremental marking only.
   INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));

   void ProcessNewlyMarkedObject(HeapObject* obj);

-  // Mark a Map and its DescriptorArray together, skipping transitions.
-  void MarkMapContents(Map* map);
-  void MarkAccessorPairSlot(HeapObject* accessors, int offset);
-  void MarkDescriptorArray(DescriptorArray* descriptors);
-
   // Mark the heap roots and all objects reachable from them.
   void MarkRoots(RootMarkingVisitor* visitor);

   // Mark the symbol table specially. References to symbols from the
   // symbol table are weak.
   void MarkSymbolTable();

   // Mark objects in object groups that have at least one object in the
   // group marked.
   void MarkObjectGroups();
(...skipping 82 matching lines...)
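
The MarkObjectAndPush / MarkObjectWithoutPush / SetMark family declared above follows the usual worklist discipline: an object is blackened at most once, and objects whose fields still need scanning are pushed onto the marking deque. A self-contained sketch of that loop (illustrative types only, not V8 code):

#include <deque>
#include <unordered_set>
#include <vector>

struct Node { std::vector<Node*> fields; };

// Blackens every node reachable from root. mark_and_push() mirrors
// MarkObjectAndPush: it returns true only the first time a node is marked,
// and queues the node so its outgoing slots get traced later.
void Mark(Node* root, std::unordered_set<Node*>* black) {
  std::deque<Node*> deque;  // stands in for MarkingDeque
  auto mark_and_push = [&](Node* n) -> bool {
    if (n == nullptr || !black->insert(n).second) return false;  // already black
    deque.push_back(n);
    return true;
  };
  mark_and_push(root);
  while (!deque.empty()) {
    Node* n = deque.back();
    deque.pop_back();
    for (size_t i = 0; i < n->fields.size(); ++i) mark_and_push(n->fields[i]);
  }
}

int main() {
  Node a, b, c;
  a.fields.push_back(&b);
  b.fields.push_back(&c);
  c.fields.push_back(&a);  // cycle: each node is marked once, no infinite loop
  std::unordered_set<Node*> black;
  Mark(&a, &black);
  return black.size() == 3 ? 0 : 1;
}
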
   static void VisitObject(HeapObject* obj);

   friend class UnmarkObjectVisitor;
   static void UnmarkObject(HeapObject* obj);
 #endif

   Heap* heap_;
   MarkingDeque marking_deque_;
   CodeFlusher* code_flusher_;
   Object* encountered_weak_maps_;
+  Marker<MarkCompactCollector> marker_;

   List<Page*> evacuation_candidates_;
   List<Code*> invalidated_code_;

   friend class Heap;
 };


 const char* AllocationSpaceName(AllocationSpace space);

 } }  // namespace v8::internal

 #endif  // V8_MARK_COMPACT_H_