| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 566 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 577 bool reduce_memory_footprint_; | 577 bool reduce_memory_footprint_; |
| 578 | 578 |
| 579 bool abort_incremental_marking_; | 579 bool abort_incremental_marking_; |
| 580 | 580 |
| 581 // True if we are collecting slots to perform evacuation from evacuation | 581 // True if we are collecting slots to perform evacuation from evacuation |
| 582 // candidates. | 582 // candidates. |
| 583 bool compacting_; | 583 bool compacting_; |
| 584 | 584 |
| 585 bool was_marked_incrementally_; | 585 bool was_marked_incrementally_; |
| 586 | 586 |
| 587 bool collect_maps_; | 587 bool clear_map_transitions_; |
| 588 | 588 |
| 589 bool flush_monomorphic_ics_; | 589 bool flush_monomorphic_ics_; |
| 590 | 590 |
| 591 // A pointer to the current stack-allocated GC tracer object during a full | 591 // A pointer to the current stack-allocated GC tracer object during a full |
| 592 // collection (NULL before and after). | 592 // collection (NULL before and after). |
| 593 GCTracer* tracer_; | 593 GCTracer* tracer_; |
| 594 | 594 |
| 595 SlotsBufferAllocator slots_buffer_allocator_; | 595 SlotsBufferAllocator slots_buffer_allocator_; |
| 596 | 596 |
| 597 SlotsBuffer* migration_slots_buffer_; | 597 SlotsBuffer* migration_slots_buffer_; |
| 598 | 598 |
| 599 // Finishes GC, performs heap verification if enabled. | 599 // Finishes GC, performs heap verification if enabled. |
| 600 void Finish(); | 600 void Finish(); |
| 601 | 601 |
| 602 // ----------------------------------------------------------------------- | 602 // ----------------------------------------------------------------------- |
| 603 // Phase 1: Marking live objects. | 603 // Phase 1: Marking live objects. |
| 604 // | 604 // |
| 605 // Before: The heap has been prepared for garbage collection by | 605 // Before: The heap has been prepared for garbage collection by |
| 606 // MarkCompactCollector::Prepare() and is otherwise in its | 606 // MarkCompactCollector::Prepare() and is otherwise in its |
| 607 // normal state. | 607 // normal state. |
| 608 // | 608 // |
| 609 // After: Live objects are marked and non-live objects are unmarked. | 609 // After: Live objects are marked and non-live objects are unmarked. |
| 610 | 610 |
| 611 | |
| 612 friend class RootMarkingVisitor; | 611 friend class RootMarkingVisitor; |
| 613 friend class MarkingVisitor; | 612 friend class MarkingVisitor; |
| 614 friend class StaticMarkingVisitor; | 613 friend class StaticMarkingVisitor; |
| 615 friend class CodeMarkingVisitor; | 614 friend class CodeMarkingVisitor; |
| 616 friend class SharedFunctionInfoMarkingVisitor; | 615 friend class SharedFunctionInfoMarkingVisitor; |
| 616 friend class IncrementalMarking; |
| 617 | 617 |
| 618 // Mark non-optimized code for functions inlined into the given optimized | 618 // Mark non-optimized code for functions inlined into the given optimized |
| 619 // code. This will prevent it from being flushed. | 619 // code. This will prevent it from being flushed. |
| 620 void MarkInlinedFunctionsCode(Code* code); | 620 void MarkInlinedFunctionsCode(Code* code); |
| 621 | 621 |
| 622 // Mark code objects that are active on the stack to prevent them | 622 // Mark code objects that are active on the stack to prevent them |
| 623 // from being flushed. | 623 // from being flushed. |
| 624 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); | 624 void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top); |
| 625 | 625 |
| 626 void PrepareForCodeFlushing(); | 626 void PrepareForCodeFlushing(); |
| 627 | 627 |
| 628 // Marking operations for objects reachable from roots. | 628 // Marking operations for objects reachable from roots. |
| 629 void MarkLiveObjects(); | 629 void MarkLiveObjects(); |
| 630 | 630 |
| 631 void AfterMarking(); | 631 void AfterMarking(); |
| 632 | 632 |
| 633 // Marks the object black and pushes it on the marking stack. | 633 // Marks the object black and pushes it on the marking stack. |
| 634 // This is for non-incremental marking. | 634 // This is for non-incremental marking. |
| 635 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); | 635 INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit)); |
| 636 | 636 |
| 637 INLINE(bool MarkObjectWithoutPush(HeapObject* object)); | 637 INLINE(bool MarkObjectWithoutPush(HeapObject* object)); |
| 638 INLINE(void MarkObjectAndPush(HeapObject* value)); | 638 INLINE(void MarkObjectAndPush(HeapObject* value)); |
| 639 | 639 |
| 640 // Marks the object black. This is for non-incremental marking. | 640 // Marks the object black. This is for non-incremental marking. |
| 641 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); | 641 INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit)); |
| 642 | 642 |
| 643 void ProcessNewlyMarkedObject(HeapObject* obj); | 643 void ProcessNewlyMarkedObject(HeapObject* obj); |
| 644 | 644 |
| 645 // Mark a Map and its DescriptorArray together, skipping transitions. | 645 // Mark weak pointers in a Map and its DescriptorArray together, possibly |
| 646 void MarkMapContents(Map* map); | 646 // skipping transitions or back pointers. |
| 647 void MarkWeakMapContents(Map* map); |
| 647 void MarkAccessorPairSlot(HeapObject* accessors, int offset); | 648 void MarkAccessorPairSlot(HeapObject* accessors, int offset); |
| 648 void MarkDescriptorArray(DescriptorArray* descriptors); | 649 void MarkDescriptorArray(DescriptorArray* descriptors); |
| 649 | 650 |
| 650 // Mark the heap roots and all objects reachable from them. | 651 // Mark the heap roots and all objects reachable from them. |
| 651 void MarkRoots(RootMarkingVisitor* visitor); | 652 void MarkRoots(RootMarkingVisitor* visitor); |
| 652 | 653 |
| 653 // Mark the symbol table specially. References to symbols from the | 654 // Mark the symbol table specially. References to symbols from the |
| 654 // symbol table are weak. | 655 // symbol table are weak. |
| 655 void MarkSymbolTable(); | 656 void MarkSymbolTable(); |
| 656 | 657 |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 755 | 756 |
| 756 friend class Heap; | 757 friend class Heap; |
| 757 }; | 758 }; |
| 758 | 759 |
| 759 | 760 |
| 760 const char* AllocationSpaceName(AllocationSpace space); | 761 const char* AllocationSpaceName(AllocationSpace space); |
| 761 | 762 |
| 762 } } // namespace v8::internal | 763 } } // namespace v8::internal |
| 763 | 764 |
| 764 #endif // V8_MARK_COMPACT_H_ | 765 #endif // V8_MARK_COMPACT_H_ |
| OLD | NEW |