Chromium Code Reviews — | Index: src/mark-compact.cc |
| diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
| index e61457991e1769c6a589d1c40c7550de5f9854b2..28a9466648ea60adf0c0e1a394d8d8fc9aa3462e 100644 |
| --- a/src/mark-compact.cc |
| +++ b/src/mark-compact.cc |
| @@ -296,6 +296,16 @@ void MarkCompactCollector::CollectGarbage() { |
| if (!FLAG_collect_maps) ReattachInitialMaps(); |
| +#ifdef DEBUG |
| + if (FLAG_verify_context_separation) { |
| + bool was_code_flushing_enabled = is_code_flushing_enabled(); |
| + EnableCodeFlushing(false); |
| + VerifyMarkbitsAreClean(); |
| + VerifyContextSeparation(); |
| + EnableCodeFlushing(was_code_flushing_enabled); |
|
Toon Verwaest
2012/07/13 09:02:04
Please move the surrounding code into VerifyContextSeparation.
|
| + } |
| +#endif |
| + |
| Finish(); |
| tracer_ = NULL; |
| @@ -313,6 +323,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { |
| } |
| } |
| + |
| void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { |
| NewSpacePageIterator it(space->bottom(), space->top()); |
| @@ -323,6 +334,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { |
| } |
| } |
| + |
| void MarkCompactCollector::VerifyMarkbitsAreClean() { |
| VerifyMarkbitsAreClean(heap_->old_pointer_space()); |
| VerifyMarkbitsAreClean(heap_->old_data_space()); |
| @@ -1620,6 +1632,69 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { |
| }; |
| +#ifdef DEBUG |
| +static bool IsKnownInternalContext(Heap* heap, Object* context) { |
| + Handle<Context> debug_context = heap->isolate()->debug()->debug_context(); |
| + if (!debug_context.is_null() && context == *debug_context) return true; |
| + return false; |
| +} |
| + |
| + |
| +void MarkCompactCollector::VerifyContextSeparation() { |
| + // Disabling of code cache flushing in maps is known to introduce cross |
| + // context leaks and verification will most certainly fail. |
| + if (!FLAG_cleanup_code_caches_at_gc) { |
| + PrintF("[VerifyContextSeparation: Incompatible flags. Skipped.]\n"); |
| + return; |
| + } |
| + |
| + HeapObjectIterator it(heap_->code_space()); |
| + for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
| + Code* code = Code::cast(object); |
| + |
| + // TODO(mstarzinger): We currently limit this verification to optimized |
| + // code, but should extend it to cover all kinds of code objects. |
| + if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; |
| + |
| + // Mark the code object and process transitive closure. |
| + MarkingVisitor marker(heap_); |
| + code->Iterate(&marker); |
| + ProcessMarkingDeque(); |
| + |
| + // Count the number of global contexts that are kept alive by this |
| + // code object. |
| + int number_of_live_global_contexts = 0; |
| + Object* context = heap_->global_contexts_list(); |
| + while (!context->IsUndefined()) { |
| + ASSERT(context->IsGlobalContext()); |
|
Toon Verwaest
2012/07/13 09:02:04
Can this happen?
|
| + if (!IsKnownInternalContext(heap_, context) && |
| + Marking::MarkBitFrom(HeapObject::cast(context)).Get()) { |
| + number_of_live_global_contexts++; |
| + } |
| + context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| + } |
| + if (number_of_live_global_contexts > 1) { |
| + PrintF("[VerifyContextSeparation: %p keeps %d contexts alive.]\n", |
| + reinterpret_cast<void*>(code), number_of_live_global_contexts); |
| +#ifdef OBJECT_PRINT |
| + code->PrintLn(); |
| + context = heap_->global_contexts_list(); |
| + while (!context->IsUndefined()) { |
| + if (Marking::MarkBitFrom(HeapObject::cast(context)).Get()) { |
| + context->PrintLn(); |
| + } |
| + context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| + } |
| +#endif |
| + } |
| + CHECK_LE(number_of_live_global_contexts, 1); |
| + ClearMarkbits(); |
| + ReattachInitialMaps(); |
| + } |
| +} |
| +#endif |
| + |
| + |
| void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) { |
| // For optimized functions we should retain both non-optimized version |
| // of it's code and non-optimized version of all inlined functions. |
| @@ -3960,9 +4035,9 @@ void MarkCompactCollector::SweepSpaces() { |
| #ifdef DEBUG |
| state_ = SWEEP_SPACES; |
| #endif |
| - SweeperType how_to_sweep = |
| - FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
| - if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE; |
| + bool lazy_sweeping = |
| + FLAG_lazy_sweeping && !FLAG_expose_gc && !FLAG_verify_context_separation; |
| + SweeperType how_to_sweep = lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE; |
| if (sweep_precisely_) how_to_sweep = PRECISE; |
| // Noncompacting collections simply sweep the spaces to clear the mark |
| // bits and free the nonlive blocks (for old and map spaces). We sweep |