OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 24 matching lines...) Expand all Loading... |
35 namespace internal { | 35 namespace internal { |
36 | 36 |
37 // Callback function, returns whether an object is alive. The heap size | 37 // Callback function, returns whether an object is alive. The heap size |
38 // of the object is returned in size. It optionally updates the offset | 38 // of the object is returned in size. It optionally updates the offset |
39 // to the first live object in the page (only used for old and map objects). | 39 // to the first live object in the page (only used for old and map objects). |
40 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); | 40 typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset); |
41 | 41 |
42 // Forward declarations. | 42 // Forward declarations. |
43 class CodeFlusher; | 43 class CodeFlusher; |
44 class GCTracer; | 44 class GCTracer; |
45 class MarkCompactCollector; | |
46 class MarkingVisitor; | 45 class MarkingVisitor; |
47 class RootMarkingVisitor; | 46 class RootMarkingVisitor; |
48 | 47 |
49 | 48 |
50 class Marking { | 49 class Marking { |
51 public: | 50 public: |
52 explicit Marking(Heap* heap) | 51 explicit Marking(Heap* heap) |
53 : heap_(heap) { | 52 : heap_(heap) { |
54 } | 53 } |
55 | 54 |
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
160 } | 159 } |
161 return is_black; | 160 return is_black; |
162 } | 161 } |
163 | 162 |
164 private: | 163 private: |
165 Heap* heap_; | 164 Heap* heap_; |
166 }; | 165 }; |
167 | 166 |
168 // ---------------------------------------------------------------------------- | 167 // ---------------------------------------------------------------------------- |
169 // Marking deque for tracing live objects. | 168 // Marking deque for tracing live objects. |
| 169 |
170 class MarkingDeque { | 170 class MarkingDeque { |
171 public: | 171 public: |
172 MarkingDeque() | 172 MarkingDeque() |
173 : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) { } | 173 : array_(NULL), top_(0), bottom_(0), mask_(0), overflowed_(false) { } |
174 | 174 |
175 void Initialize(Address low, Address high) { | 175 void Initialize(Address low, Address high) { |
176 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); | 176 HeapObject** obj_low = reinterpret_cast<HeapObject**>(low); |
177 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); | 177 HeapObject** obj_high = reinterpret_cast<HeapObject**>(high); |
178 array_ = obj_low; | 178 array_ = obj_low; |
179 mask_ = RoundDownToPowerOf2(static_cast<int>(obj_high - obj_low)) - 1; | 179 mask_ = RoundDownToPowerOf2(static_cast<int>(obj_high - obj_low)) - 1; |
(...skipping 196 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
376 private: | 376 private: |
377 static const int kChainLengthThreshold = 15; | 377 static const int kChainLengthThreshold = 15; |
378 | 378 |
379 intptr_t idx_; | 379 intptr_t idx_; |
380 intptr_t chain_length_; | 380 intptr_t chain_length_; |
381 SlotsBuffer* next_; | 381 SlotsBuffer* next_; |
382 ObjectSlot slots_[kNumberOfElements]; | 382 ObjectSlot slots_[kNumberOfElements]; |
383 }; | 383 }; |
384 | 384 |
385 | 385 |
386 // ------------------------------------------------------------------------- | |
387 // Marker shared between incremental and non-incremental marking | |
388 template<class BaseMarker> class Marker { | |
389 public: | |
390 Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector) | |
391 : base_marker_(base_marker), | |
392 mark_compact_collector_(mark_compact_collector) {} | |
393 | |
394 // Mark pointers in a Map and its DescriptorArray together, possibly | |
395 // treating transitions or back pointers as weak. | |
396 void MarkMapContents(Map* map); | |
397 void MarkDescriptorArray(DescriptorArray* descriptors); | |
398 void MarkAccessorPairSlot(AccessorPair* accessors, int offset); | |
399 | |
400 private: | |
401 BaseMarker* base_marker() { | |
402 return base_marker_; | |
403 } | |
404 | |
405 MarkCompactCollector* mark_compact_collector() { | |
406 return mark_compact_collector_; | |
407 } | |
408 | |
409 BaseMarker* base_marker_; | |
410 MarkCompactCollector* mark_compact_collector_; | |
411 }; | |
412 | |
413 | |
414 // Defined in isolate.h. | 386 // Defined in isolate.h. |
415 class ThreadLocalTop; | 387 class ThreadLocalTop; |
416 | 388 |
417 | 389 |
418 // ------------------------------------------------------------------------- | 390 // ------------------------------------------------------------------------- |
419 // Mark-Compact collector | 391 // Mark-Compact collector |
420 class MarkCompactCollector { | 392 class MarkCompactCollector { |
421 public: | 393 public: |
422 // Type of functions to compute forwarding addresses of objects in | 394 // Type of functions to compute forwarding addresses of objects in |
423 // compacted spaces. Given an object and its size, return a (non-failure) | 395 // compacted spaces. Given an object and its size, return a (non-failure) |
(...skipping 181 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
605 bool reduce_memory_footprint_; | 577 bool reduce_memory_footprint_; |
606 | 578 |
607 bool abort_incremental_marking_; | 579 bool abort_incremental_marking_; |
608 | 580 |
609 // True if we are collecting slots to perform evacuation from evacuation | 581 // True if we are collecting slots to perform evacuation from evacuation |
610 // candidates. | 582 // candidates. |
611 bool compacting_; | 583 bool compacting_; |
612 | 584 |
613 bool was_marked_incrementally_; | 585 bool was_marked_incrementally_; |
614 | 586 |
| 587 bool collect_maps_; |
| 588 |
615 bool flush_monomorphic_ics_; | 589 bool flush_monomorphic_ics_; |
616 | 590 |
617 // A pointer to the current stack-allocated GC tracer object during a full | 591 // A pointer to the current stack-allocated GC tracer object during a full |
618 // collection (NULL before and after). | 592 // collection (NULL before and after). |
619 GCTracer* tracer_; | 593 GCTracer* tracer_; |
620 | 594 |
621 SlotsBufferAllocator slots_buffer_allocator_; | 595 SlotsBufferAllocator slots_buffer_allocator_; |
622 | 596 |
623 SlotsBuffer* migration_slots_buffer_; | 597 SlotsBuffer* migration_slots_buffer_; |
624 | 598 |
625 // Finishes GC, performs heap verification if enabled. | 599 // Finishes GC, performs heap verification if enabled. |
626 void Finish(); | 600 void Finish(); |
627 | 601 |
628 // ----------------------------------------------------------------------- | 602 // ----------------------------------------------------------------------- |
629 // Phase 1: Marking live objects. | 603 // Phase 1: Marking live objects. |
630 // | 604 // |
631 // Before: The heap has been prepared for garbage collection by | 605 // Before: The heap has been prepared for garbage collection by |
632 // MarkCompactCollector::Prepare() and is otherwise in its | 606 // MarkCompactCollector::Prepare() and is otherwise in its |
633 // normal state. | 607 // normal state. |
634 // | 608 // |
635 // After: Live objects are marked and non-live objects are unmarked. | 609 // After: Live objects are marked and non-live objects are unmarked. |
636 | 610 |
| 611 |
637 friend class RootMarkingVisitor; | 612 friend class RootMarkingVisitor; |
638 friend class MarkingVisitor; | 613 friend class MarkingVisitor; |
639 friend class StaticMarkingVisitor; | 614 friend class StaticMarkingVisitor; |
640 friend class CodeMarkingVisitor; | 615 friend class CodeMarkingVisitor; |
641 friend class SharedFunctionInfoMarkingVisitor; | 616 friend class SharedFunctionInfoMarkingVisitor; |
642 friend class Marker<IncrementalMarking>; | |
643 friend class Marker<MarkCompactCollector>; | |
644 | 617 |
645 // Mark non-optimized code for functions inlined into the given optimized | 618 // Mark non-optimized code for functions inlined into the given optimized |
646 // code. This will prevent it from being flushed. | 619 // code. This will prevent it from being flushed. |
647 void MarkInlinedFunctionsCode(Code* code); | 620 void MarkInlinedFunctionsCode(Code* code); |
648 | 621 |
649 // Mark code objects that are active on the stack to prevent them | 622 // Mark code objects that are active on the stack to prevent them |
650 // from being flushed. | 623 // from being flushed. |
651 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); | 624 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); |
652 | 625 |
653 void PrepareForCodeFlushing(); | 626 void PrepareForCodeFlushing(); |
654 | 627 |
655 // Marking operations for objects reachable from roots. | 628 // Marking operations for objects reachable from roots. |
656 void MarkLiveObjects(); | 629 void MarkLiveObjects(); |
657 | 630 |
658 void AfterMarking(); | 631 void AfterMarking(); |
659 | 632 |
660 // Marks the object black and pushes it on the marking stack. | 633 // Marks the object black and pushes it on the marking stack. |
661 // Returns true if object needed marking and false otherwise. | 634 // This is for non-incremental marking. |
662 // This is for non-incremental marking only. | |
663 INLINE(bool MarkObjectAndPush(HeapObject* obj)); | |
664 | |
665 // Marks the object black and pushes it on the marking stack. | |
666 // This is for non-incremental marking only. | |
667 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); | 635 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); |
668 | 636 |
669 // Marks the object black without pushing it on the marking stack. | 637 INLINE(bool MarkObjectWithoutPush(HeapObject* object)); |
670 // Returns true if object needed marking and false otherwise. | 638 INLINE(void MarkObjectAndPush(HeapObject* value)); |
671 // This is for non-incremental marking only. | |
672 INLINE(bool MarkObjectWithoutPush(HeapObject* obj)); | |
673 | 639 |
674 // Marks the object black assuming that it is not yet marked. | 640 // Marks the object black. This is for non-incremental marking. |
675 // This is for non-incremental marking only. | |
676 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); | 641 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); |
677 | 642 |
678 void ProcessNewlyMarkedObject(HeapObject* obj); | 643 void ProcessNewlyMarkedObject(HeapObject* obj); |
679 | 644 |
| 645 // Mark a Map and its DescriptorArray together, skipping transitions. |
| 646 void MarkMapContents(Map* map); |
| 647 void MarkAccessorPairSlot(HeapObject* accessors, int offset); |
| 648 void MarkDescriptorArray(DescriptorArray* descriptors); |
| 649 |
680 // Mark the heap roots and all objects reachable from them. | 650 // Mark the heap roots and all objects reachable from them. |
681 void MarkRoots(RootMarkingVisitor* visitor); | 651 void MarkRoots(RootMarkingVisitor* visitor); |
682 | 652 |
683 // Mark the symbol table specially. References to symbols from the | 653 // Mark the symbol table specially. References to symbols from the |
684 // symbol table are weak. | 654 // symbol table are weak. |
685 void MarkSymbolTable(); | 655 void MarkSymbolTable(); |
686 | 656 |
687 // Mark objects in object groups that have at least one object in the | 657 // Mark objects in object groups that have at least one object in the |
688 // group marked. | 658 // group marked. |
689 void MarkObjectGroups(); | 659 void MarkObjectGroups(); |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
772 static void VisitObject(HeapObject* obj); | 742 static void VisitObject(HeapObject* obj); |
773 | 743 |
774 friend class UnmarkObjectVisitor; | 744 friend class UnmarkObjectVisitor; |
775 static void UnmarkObject(HeapObject* obj); | 745 static void UnmarkObject(HeapObject* obj); |
776 #endif | 746 #endif |
777 | 747 |
778 Heap* heap_; | 748 Heap* heap_; |
779 MarkingDeque marking_deque_; | 749 MarkingDeque marking_deque_; |
780 CodeFlusher* code_flusher_; | 750 CodeFlusher* code_flusher_; |
781 Object* encountered_weak_maps_; | 751 Object* encountered_weak_maps_; |
782 Marker<MarkCompactCollector> marker_; | |
783 | 752 |
784 List<Page*> evacuation_candidates_; | 753 List<Page*> evacuation_candidates_; |
785 List<Code*> invalidated_code_; | 754 List<Code*> invalidated_code_; |
786 | 755 |
787 friend class Heap; | 756 friend class Heap; |
788 }; | 757 }; |
789 | 758 |
790 | 759 |
791 const char* AllocationSpaceName(AllocationSpace space); | 760 const char* AllocationSpaceName(AllocationSpace space); |
792 | 761 |
793 } } // namespace v8::internal | 762 } } // namespace v8::internal |
794 | 763 |
795 #endif // V8_MARK_COMPACT_H_ | 764 #endif // V8_MARK_COMPACT_H_ |
OLD | NEW |