Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 289 #ifdef DEBUG | 289 #ifdef DEBUG |
| 290 if (FLAG_verify_heap) { | 290 if (FLAG_verify_heap) { |
| 291 VerifyMarking(heap_); | 291 VerifyMarking(heap_); |
| 292 } | 292 } |
| 293 #endif | 293 #endif |
| 294 | 294 |
| 295 SweepSpaces(); | 295 SweepSpaces(); |
| 296 | 296 |
| 297 if (!FLAG_collect_maps) ReattachInitialMaps(); | 297 if (!FLAG_collect_maps) ReattachInitialMaps(); |
| 298 | 298 |
| 299 #ifdef DEBUG | |
| 300 if (FLAG_verify_context_separation) { | |
| 301 bool was_code_flushing_enabled = is_code_flushing_enabled(); | |
| 302 EnableCodeFlushing(false); | |
| 303 VerifyMarkbitsAreClean(); | |
| 304 VerifyContextSeparation(); | |
| 305 EnableCodeFlushing(was_code_flushing_enabled); | |
|
Toon Verwaest
2012/07/13 09:02:04
Please move the surrounding code into VerifyContextSeparation.
| |
| 306 } | |
| 307 #endif | |
| 308 | |
| 299 Finish(); | 309 Finish(); |
| 300 | 310 |
| 301 tracer_ = NULL; | 311 tracer_ = NULL; |
| 302 } | 312 } |
| 303 | 313 |
| 304 | 314 |
| 305 #ifdef DEBUG | 315 #ifdef DEBUG |
| 306 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { | 316 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
| 307 PageIterator it(space); | 317 PageIterator it(space); |
| 308 | 318 |
| 309 while (it.has_next()) { | 319 while (it.has_next()) { |
| 310 Page* p = it.next(); | 320 Page* p = it.next(); |
| 311 CHECK(p->markbits()->IsClean()); | 321 CHECK(p->markbits()->IsClean()); |
| 312 CHECK_EQ(0, p->LiveBytes()); | 322 CHECK_EQ(0, p->LiveBytes()); |
| 313 } | 323 } |
| 314 } | 324 } |
| 315 | 325 |
| 326 | |
| 316 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { | 327 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { |
| 317 NewSpacePageIterator it(space->bottom(), space->top()); | 328 NewSpacePageIterator it(space->bottom(), space->top()); |
| 318 | 329 |
| 319 while (it.has_next()) { | 330 while (it.has_next()) { |
| 320 NewSpacePage* p = it.next(); | 331 NewSpacePage* p = it.next(); |
| 321 CHECK(p->markbits()->IsClean()); | 332 CHECK(p->markbits()->IsClean()); |
| 322 CHECK_EQ(0, p->LiveBytes()); | 333 CHECK_EQ(0, p->LiveBytes()); |
| 323 } | 334 } |
| 324 } | 335 } |
| 325 | 336 |
| 337 | |
| 326 void MarkCompactCollector::VerifyMarkbitsAreClean() { | 338 void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| 327 VerifyMarkbitsAreClean(heap_->old_pointer_space()); | 339 VerifyMarkbitsAreClean(heap_->old_pointer_space()); |
| 328 VerifyMarkbitsAreClean(heap_->old_data_space()); | 340 VerifyMarkbitsAreClean(heap_->old_data_space()); |
| 329 VerifyMarkbitsAreClean(heap_->code_space()); | 341 VerifyMarkbitsAreClean(heap_->code_space()); |
| 330 VerifyMarkbitsAreClean(heap_->cell_space()); | 342 VerifyMarkbitsAreClean(heap_->cell_space()); |
| 331 VerifyMarkbitsAreClean(heap_->map_space()); | 343 VerifyMarkbitsAreClean(heap_->map_space()); |
| 332 VerifyMarkbitsAreClean(heap_->new_space()); | 344 VerifyMarkbitsAreClean(heap_->new_space()); |
| 333 | 345 |
| 334 LargeObjectIterator it(heap_->lo_space()); | 346 LargeObjectIterator it(heap_->lo_space()); |
| 335 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 347 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
| (...skipping 1277 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1613 collector_->MarkObject(shared->code(), code_mark); | 1625 collector_->MarkObject(shared->code(), code_mark); |
| 1614 collector_->MarkObject(shared, shared_mark); | 1626 collector_->MarkObject(shared, shared_mark); |
| 1615 } | 1627 } |
| 1616 } | 1628 } |
| 1617 | 1629 |
| 1618 private: | 1630 private: |
| 1619 MarkCompactCollector* collector_; | 1631 MarkCompactCollector* collector_; |
| 1620 }; | 1632 }; |
| 1621 | 1633 |
| 1622 | 1634 |
| 1635 #ifdef DEBUG | |
| 1636 static bool IsKnownInternalContext(Heap* heap, Object* context) { | |
| 1637 Handle<Context> debug_context = heap->isolate()->debug()->debug_context(); | |
| 1638 if (!debug_context.is_null() && context == *debug_context) return true; | |
| 1639 return false; | |
| 1640 } | |
| 1641 | |
| 1642 | |
| 1643 void MarkCompactCollector::VerifyContextSeparation() { | |
| 1644 // Disabling of code cache flushing in maps is known to introduce cross | |
| 1645 // context leaks and verification will most certainly fail. | |
| 1646 if (!FLAG_cleanup_code_caches_at_gc) { | |
| 1647 PrintF("[VerifyContextSeparation: Incompatible flags. Skipped.]\n"); | |
| 1648 return; | |
| 1649 } | |
| 1650 | |
| 1651 HeapObjectIterator it(heap_->code_space()); | |
| 1652 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | |
| 1653 Code* code = Code::cast(object); | |
| 1654 | |
| 1655 // TODO(mstarzinger): We currently limit this verification to optimized | |
| 1656 // code, but should extend it to cover all kinds of code objects. | |
| 1657 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; | |
| 1658 | |
| 1659 // Mark the code object and process transitive closure. | |
| 1660 MarkingVisitor marker(heap_); | |
| 1661 code->Iterate(&marker); | |
| 1662 ProcessMarkingDeque(); | |
| 1663 | |
| 1664 // Count the number of global contexts that are kept alive by this | |
| 1665 // code object. | |
| 1666 int number_of_live_global_contexts = 0; | |
| 1667 Object* context = heap_->global_contexts_list(); | |
| 1668 while (!context->IsUndefined()) { | |
| 1669 ASSERT(context->IsGlobalContext()); | |
|
Toon Verwaest
2012/07/13 09:02:04
Can this happen?
| |
| 1670 if (!IsKnownInternalContext(heap_, context) && | |
| 1671 Marking::MarkBitFrom(HeapObject::cast(context)).Get()) { | |
| 1672 number_of_live_global_contexts++; | |
| 1673 } | |
| 1674 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
| 1675 } | |
| 1676 if (number_of_live_global_contexts > 1) { | |
| 1677 PrintF("[VerifyContextSeparation: %p keeps %d contexts alive.]\n", | |
| 1678 reinterpret_cast<void*>(code), number_of_live_global_contexts); | |
| 1679 #ifdef OBJECT_PRINT | |
| 1680 code->PrintLn(); | |
| 1681 context = heap_->global_contexts_list(); | |
| 1682 while (!context->IsUndefined()) { | |
| 1683 if (Marking::MarkBitFrom(HeapObject::cast(context)).Get()) { | |
| 1684 context->PrintLn(); | |
| 1685 } | |
| 1686 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | |
| 1687 } | |
| 1688 #endif | |
| 1689 } | |
| 1690 CHECK_LE(number_of_live_global_contexts, 1); | |
| 1691 ClearMarkbits(); | |
| 1692 ReattachInitialMaps(); | |
| 1693 } | |
| 1694 } | |
| 1695 #endif | |
| 1696 | |
| 1697 | |
| 1623 void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) { | 1698 void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) { |
| 1624 // For optimized functions we should retain both non-optimized version | 1699 // For optimized functions we should retain both non-optimized version |
| 1625 // of its code and non-optimized version of all inlined functions. | 1700 // of its code and non-optimized version of all inlined functions. |
| 1626 // This is required to support bailing out from inlined code. | 1701 // This is required to support bailing out from inlined code. |
| 1627 DeoptimizationInputData* data = | 1702 DeoptimizationInputData* data = |
| 1628 DeoptimizationInputData::cast(code->deoptimization_data()); | 1703 DeoptimizationInputData::cast(code->deoptimization_data()); |
| 1629 | 1704 |
| 1630 FixedArray* literals = data->LiteralArray(); | 1705 FixedArray* literals = data->LiteralArray(); |
| 1631 | 1706 |
| 1632 for (int i = 0, count = data->InlinedFunctionCount()->value(); | 1707 for (int i = 0, count = data->InlinedFunctionCount()->value(); |
| (...skipping 2320 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3953 // Give pages that are queued to be freed back to the OS. | 4028 // Give pages that are queued to be freed back to the OS. |
| 3954 heap()->FreeQueuedChunks(); | 4029 heap()->FreeQueuedChunks(); |
| 3955 } | 4030 } |
| 3956 | 4031 |
| 3957 | 4032 |
| 3958 void MarkCompactCollector::SweepSpaces() { | 4033 void MarkCompactCollector::SweepSpaces() { |
| 3959 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 4034 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
| 3960 #ifdef DEBUG | 4035 #ifdef DEBUG |
| 3961 state_ = SWEEP_SPACES; | 4036 state_ = SWEEP_SPACES; |
| 3962 #endif | 4037 #endif |
| 3963 SweeperType how_to_sweep = | 4038 bool lazy_sweeping = |
| 3964 FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; | 4039 FLAG_lazy_sweeping && !FLAG_expose_gc && !FLAG_verify_context_separation; |
| 3965 if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE; | 4040 SweeperType how_to_sweep = lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
| 3966 if (sweep_precisely_) how_to_sweep = PRECISE; | 4041 if (sweep_precisely_) how_to_sweep = PRECISE; |
| 3967 // Noncompacting collections simply sweep the spaces to clear the mark | 4042 // Noncompacting collections simply sweep the spaces to clear the mark |
| 3968 // bits and free the nonlive blocks (for old and map spaces). We sweep | 4043 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 3969 // the map space last because freeing non-live maps overwrites them and | 4044 // the map space last because freeing non-live maps overwrites them and |
| 3970 // the other spaces rely on possibly non-live maps to get the sizes for | 4045 // the other spaces rely on possibly non-live maps to get the sizes for |
| 3971 // non-live objects. | 4046 // non-live objects. |
| 3972 SweepSpace(heap()->old_pointer_space(), how_to_sweep); | 4047 SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
| 3973 SweepSpace(heap()->old_data_space(), how_to_sweep); | 4048 SweepSpace(heap()->old_data_space(), how_to_sweep); |
| 3974 | 4049 |
| 3975 RemoveDeadInvalidatedCode(); | 4050 RemoveDeadInvalidatedCode(); |
| (...skipping 180 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4156 while (buffer != NULL) { | 4231 while (buffer != NULL) { |
| 4157 SlotsBuffer* next_buffer = buffer->next(); | 4232 SlotsBuffer* next_buffer = buffer->next(); |
| 4158 DeallocateBuffer(buffer); | 4233 DeallocateBuffer(buffer); |
| 4159 buffer = next_buffer; | 4234 buffer = next_buffer; |
| 4160 } | 4235 } |
| 4161 *buffer_address = NULL; | 4236 *buffer_address = NULL; |
| 4162 } | 4237 } |
| 4163 | 4238 |
| 4164 | 4239 |
| 4165 } } // namespace v8::internal | 4240 } } // namespace v8::internal |
| OLD | NEW |