OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 616 matching lines...) |
627 // Process weak global handle callbacks. This must be done after the | 627 // Process weak global handle callbacks. This must be done after the |
628 // GC is completely done, because the callbacks may invoke arbitrary | 628 // GC is completely done, because the callbacks may invoke arbitrary |
629 // API functions. | 629 // API functions. |
630 ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); | 630 ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); |
631 const int initial_post_gc_processing_count = ++post_gc_processing_count_; | 631 const int initial_post_gc_processing_count = ++post_gc_processing_count_; |
632 bool next_gc_likely_to_collect_more = false; | 632 bool next_gc_likely_to_collect_more = false; |
633 if (collector == SCAVENGER) { | 633 if (collector == SCAVENGER) { |
634 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 634 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
635 Node* node = new_space_nodes_[i]; | 635 Node* node = new_space_nodes_[i]; |
636 ASSERT(node->is_in_new_space_list()); | 636 ASSERT(node->is_in_new_space_list()); |
| 637 if (!node->IsRetainer()) { |
| 638 // Free nodes do not have weak callbacks. Do not use them to compute |
| 639 // the next_gc_likely_to_collect_more. |
| 640 continue; |
| 641 } |
637 // Skip dependent handles. Their weak callbacks might expect to be | 642 // Skip dependent handles. Their weak callbacks might expect to be |
638 // called between two global garbage collection callbacks which | 643 // called between two global garbage collection callbacks which |
639 // are not called for minor collections. | 644 // are not called for minor collections. |
640 if (!node->is_independent() && !node->is_partially_dependent()) { | 645 if (!node->is_independent() && !node->is_partially_dependent()) { |
641 continue; | 646 continue; |
642 } | 647 } |
643 node->clear_partially_dependent(); | 648 node->clear_partially_dependent(); |
644 if (node->PostGarbageCollectionProcessing(isolate_)) { | 649 if (node->PostGarbageCollectionProcessing(isolate_)) { |
645 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 650 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
646 // Weak callback triggered another GC and another round of | 651 // Weak callback triggered another GC and another round of |
647 // PostGarbageCollection processing. The current node might | 652 // PostGarbageCollection processing. The current node might |
648 // have been deleted in that round, so we need to bail out (or | 653 // have been deleted in that round, so we need to bail out (or |
649 // restart the processing). | 654 // restart the processing). |
650 return next_gc_likely_to_collect_more; | 655 return next_gc_likely_to_collect_more; |
651 } | 656 } |
652 } | 657 } |
653 if (!node->IsRetainer()) { | 658 if (!node->IsRetainer()) { |
654 next_gc_likely_to_collect_more = true; | 659 next_gc_likely_to_collect_more = true; |
655 } | 660 } |
656 } | 661 } |
657 } else { | 662 } else { |
658 for (NodeIterator it(this); !it.done(); it.Advance()) { | 663 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 664 if (!it.node()->IsRetainer()) { |
| 665 // Free nodes do not have weak callbacks. Do not use them to compute |
| 666 // the next_gc_likely_to_collect_more. |
| 667 continue; |
| 668 } |
659 it.node()->clear_partially_dependent(); | 669 it.node()->clear_partially_dependent(); |
660 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | 670 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
661 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 671 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
662 // See the comment above. | 672 // See the comment above. |
663 return next_gc_likely_to_collect_more; | 673 return next_gc_likely_to_collect_more; |
664 } | 674 } |
665 } | 675 } |
666 if (!it.node()->IsRetainer()) { | 676 if (!it.node()->IsRetainer()) { |
667 next_gc_likely_to_collect_more = true; | 677 next_gc_likely_to_collect_more = true; |
668 } | 678 } |
(...skipping 332 matching lines...) |
1001 } | 1011 } |
1002 } | 1012 } |
1003 object_group_connections_.Clear(); | 1013 object_group_connections_.Clear(); |
1004 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity); | 1014 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity); |
1005 retainer_infos_.Clear(); | 1015 retainer_infos_.Clear(); |
1006 implicit_ref_connections_.Clear(); | 1016 implicit_ref_connections_.Clear(); |
1007 } | 1017 } |
1008 | 1018 |
1009 | 1019 |
1010 } } // namespace v8::internal | 1020 } } // namespace v8::internal |