| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1453 matching lines...) |
| 1464 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), | 1464 MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), |
| 1465 frame->LookupCode()); | 1465 frame->LookupCode()); |
| 1466 } | 1466 } |
| 1467 } | 1467 } |
| 1468 } | 1468 } |
| 1469 | 1469 |
| 1470 | 1470 |
| 1471 void MarkCompactCollector::PrepareForCodeFlushing() { | 1471 void MarkCompactCollector::PrepareForCodeFlushing() { |
| 1472 ASSERT(heap() == Isolate::Current()->heap()); | 1472 ASSERT(heap() == Isolate::Current()->heap()); |
| 1473 | 1473 |
| 1474 // Enable code flushing for non-incremental cycles. |
| 1475 if (FLAG_flush_code && !FLAG_flush_code_incrementally) { |
| 1476 EnableCodeFlushing(!was_marked_incrementally_); |
| 1477 } |
| 1478 |
| 1474 // If code flushing is disabled, there is no need to prepare for it. | 1479 // If code flushing is disabled, there is no need to prepare for it. |
| 1475 if (!is_code_flushing_enabled()) return; | 1480 if (!is_code_flushing_enabled()) return; |
| 1476 | 1481 |
| 1477 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray | 1482 // Ensure that empty descriptor array is marked. Method MarkDescriptorArray |
| 1478 // relies on it being marked before any other descriptor array. | 1483 // relies on it being marked before any other descriptor array. |
| 1479 HeapObject* descriptor_array = heap()->empty_descriptor_array(); | 1484 HeapObject* descriptor_array = heap()->empty_descriptor_array(); |
| 1480 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array); | 1485 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array); |
| 1481 MarkObject(descriptor_array, descriptor_array_mark); | 1486 MarkObject(descriptor_array, descriptor_array_mark); |
| 1482 | 1487 |
| 1483 // Make sure we are not referencing the code from the stack. | 1488 // Make sure we are not referencing the code from the stack. |
| (...skipping 542 matching lines...) |
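
Note for reviewers: the interplay of FLAG_flush_code, FLAG_flush_code_incrementally and was_marked_incrementally_ in the hunk above is easy to misread. The standalone sketch below (illustrative names only; it is not part of the patch and not V8 API) spells out what PrepareForCodeFlushing() now effectively passes to EnableCodeFlushing():

enum class FlushingAction { kLeaveAsIs, kEnable, kDisable };

// Sketch of the decision added to PrepareForCodeFlushing(); the flag and
// field names mirror the patch, but this function itself is hypothetical.
static FlushingAction DecideCodeFlushing(bool flush_code,
                                         bool flush_code_incrementally,
                                         bool was_marked_incrementally) {
  // The new block only runs in the non-incremental flushing mode.
  if (!flush_code || flush_code_incrementally) {
    return FlushingAction::kLeaveAsIs;
  }
  // EnableCodeFlushing(!was_marked_incrementally_): a cycle that did not
  // start as incremental marking turns flushing on; a cycle that was marked
  // incrementally explicitly turns it off.
  return was_marked_incrementally ? FlushingAction::kDisable
                                  : FlushingAction::kEnable;
}

In particular, an incrementally-marked cycle does not merely skip enabling: it actively disables flushing, which is consistent with the hunk's comment "Enable code flushing for non-incremental cycles."
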
| 2026 MarkCompactWeakObjectRetainer mark_compact_object_retainer; | 2031 MarkCompactWeakObjectRetainer mark_compact_object_retainer; |
| 2027 heap()->ProcessWeakReferences(&mark_compact_object_retainer); | 2032 heap()->ProcessWeakReferences(&mark_compact_object_retainer); |
| 2028 | 2033 |
| 2029 // Remove object groups after marking phase. | 2034 // Remove object groups after marking phase. |
| 2030 heap()->isolate()->global_handles()->RemoveObjectGroups(); | 2035 heap()->isolate()->global_handles()->RemoveObjectGroups(); |
| 2031 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); | 2036 heap()->isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 2032 | 2037 |
| 2033 // Flush code from collected candidates. | 2038 // Flush code from collected candidates. |
| 2034 if (is_code_flushing_enabled()) { | 2039 if (is_code_flushing_enabled()) { |
| 2035 code_flusher_->ProcessCandidates(); | 2040 code_flusher_->ProcessCandidates(); |
| 2041 // If the incremental marker does not support code flushing, we need to |
| 2042 // disable it before the incremental marking steps of the next cycle. |
| 2043 if (FLAG_flush_code && !FLAG_flush_code_incrementally) { |
| 2044 EnableCodeFlushing(false); |
| 2045 } |
| 2036 } | 2046 } |
| 2037 | 2047 |
| 2038 if (!FLAG_watch_ic_patching) { | 2048 if (!FLAG_watch_ic_patching) { |
| 2039 // Clean up dead objects from the runtime profiler. | 2049 // Clean up dead objects from the runtime profiler. |
| 2040 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); | 2050 heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); |
| 2041 } | 2051 } |
| 2042 | 2052 |
| 2043 if (FLAG_track_gc_object_stats) { | 2053 if (FLAG_track_gc_object_stats) { |
| 2044 heap()->CheckpointObjectStats(); | 2054 heap()->CheckpointObjectStats(); |
| 2045 } | 2055 } |
| (...skipping 1554 matching lines...) |
| 3600 // detect whether unmarked map became dead in this collection or in one | 3610 // detect whether unmarked map became dead in this collection or in one |
| 3601 // of the previous ones. | 3611 // of the previous ones. |
| 3602 SweepSpace(heap()->map_space(), PRECISE); | 3612 SweepSpace(heap()->map_space(), PRECISE); |
| 3603 | 3613 |
| 3604 // Deallocate unmarked objects and clear marked bits for marked objects. | 3614 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 3605 heap_->lo_space()->FreeUnmarkedObjects(); | 3615 heap_->lo_space()->FreeUnmarkedObjects(); |
| 3606 } | 3616 } |
| 3607 | 3617 |
| 3608 | 3618 |
| 3609 void MarkCompactCollector::EnableCodeFlushing(bool enable) { | 3619 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 3620 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 3621 if (heap()->isolate()->debug()->IsLoaded() || |
| 3622 heap()->isolate()->debug()->has_break_points()) { |
| 3623 enable = false; |
| 3624 } |
| 3625 #endif |
| 3626 |
| 3610 if (enable) { | 3627 if (enable) { |
| 3611 if (code_flusher_ != NULL) return; | 3628 if (code_flusher_ != NULL) return; |
| 3612 code_flusher_ = new CodeFlusher(heap()->isolate()); | 3629 code_flusher_ = new CodeFlusher(heap()->isolate()); |
| 3613 } else { | 3630 } else { |
| 3614 if (code_flusher_ == NULL) return; | 3631 if (code_flusher_ == NULL) return; |
| 3615 delete code_flusher_; | 3632 delete code_flusher_; |
| 3616 code_flusher_ = NULL; | 3633 code_flusher_ = NULL; |
| 3617 } | 3634 } |
| 3618 } | 3635 } |
| 3619 | 3636 |
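
The patched EnableCodeFlushing() is the single switch that both earlier hunks flip. Below is a condensed model of its behavior, a sketch with stand-in types that assumes the ENABLE_DEBUGGER_SUPPORT check reduces to a single "debugger active" flag; these are not the real V8 classes:

#include <memory>

struct CodeFlusher { /* gathers and flushes candidate code objects */ };

class CollectorModel {
 public:
  // Mirrors the patched EnableCodeFlushing(): any request is forced to
  // "disable" while the debugger is loaded or has break points (presumably
  // so flushed code cannot conflict with debugging); the flusher is created
  // lazily on enable and destroyed on disable.
  void EnableCodeFlushing(bool enable, bool debugger_active) {
    if (debugger_active) enable = false;
    if (enable) {
      if (code_flusher_ != nullptr) return;  // already enabled, keep it
      code_flusher_ = std::make_unique<CodeFlusher>();
    } else {
      code_flusher_.reset();                 // no-op if already disabled
    }
  }

  bool is_code_flushing_enabled() const { return code_flusher_ != nullptr; }

 private:
  std::unique_ptr<CodeFlusher> code_flusher_;
};
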
| (...skipping 168 matching lines...) |
| 3788 while (buffer != NULL) { | 3805 while (buffer != NULL) { |
| 3789 SlotsBuffer* next_buffer = buffer->next(); | 3806 SlotsBuffer* next_buffer = buffer->next(); |
| 3790 DeallocateBuffer(buffer); | 3807 DeallocateBuffer(buffer); |
| 3791 buffer = next_buffer; | 3808 buffer = next_buffer; |
| 3792 } | 3809 } |
| 3793 *buffer_address = NULL; | 3810 *buffer_address = NULL; |
| 3794 } | 3811 } |
| 3795 | 3812 |
| 3796 | 3813 |
| 3797 } } // namespace v8::internal | 3814 } } // namespace v8::internal |