Chromium Code Reviews| Index: src/mark-compact.cc |
| diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
| index 8ca14db5063ea5122c25e7414d96f0700b4b3b4c..b79cd7fa97a1a163b518c8dd71fb990389ba6b17 100644 |
| --- a/src/mark-compact.cc |
| +++ b/src/mark-compact.cc |
| @@ -380,6 +380,11 @@ void MarkCompactCollector::CollectGarbage() { |
| ASSERT(state_ == PREPARE_GC); |
| ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); |
| + if (heap_->IsConcurrentSweepingActivated() && |
| + heap_->IsConcurrentSweepingPending()) { |
| + heap_->WaitUntilParallelSweepingCompleted(); |
|
Michael Starzinger — 2013/01/24 17:28:57:
"This should be moved into MarkCompact::Prepare, be…" [comment truncated in this capture; presumably continues "because …" — the surviving text suggests the concurrent-sweeping wait should happen in MarkCompactCollector::Prepare rather than in CollectGarbage]

Hannes Payer (out of office) — 2013/01/25 10:46:49:
"Done."
| + } |
| + |
| MarkLiveObjects(); |
| ASSERT(heap_->incremental_marking()->IsStopped()); |
| @@ -2728,6 +2733,7 @@ enum SkipListRebuildingMode { |
| // if requested. |
| template<SweepingMode sweeping_mode, SkipListRebuildingMode skip_list_mode> |
| static void SweepPrecisely(PagedSpace* space, |
| + FreeList* free_list, |
| Page* p, |
| ObjectVisitor* v) { |
| ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept()); |
| @@ -2771,7 +2777,8 @@ static void SweepPrecisely(PagedSpace* space, |
| for ( ; live_objects != 0; live_objects--) { |
| Address free_end = object_address + offsets[live_index++] * kPointerSize; |
| if (free_end != free_start) { |
| - space->Free(free_start, static_cast<int>(free_end - free_start)); |
| + MarkCompactCollector::Free(space, free_list, free_start, |
| + static_cast<int>(free_end - free_start)); |
| } |
| HeapObject* live_object = HeapObject::FromAddress(free_end); |
| ASSERT(Marking::IsBlack(Marking::MarkBitFrom(live_object))); |
| @@ -2797,7 +2804,8 @@ static void SweepPrecisely(PagedSpace* space, |
| cells[cell_index] = 0; |
| } |
| if (free_start != p->area_end()) { |
| - space->Free(free_start, static_cast<int>(p->area_end() - free_start)); |
| + MarkCompactCollector::Free(space, free_list, free_start, |
| + static_cast<int>(p->area_end() - free_start)); |
| } |
| p->ResetLiveBytes(); |
| } |
| @@ -3027,15 +3035,15 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| switch (space->identity()) { |
| case OLD_DATA_SPACE: |
| - SweepConservatively(space, p); |
| + SweepConservatively(space, space->free_list(), p); |
| break; |
| case OLD_POINTER_SPACE: |
| SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>( |
| - space, p, &updating_visitor); |
| + space, space->free_list(), p, &updating_visitor); |
| break; |
| case CODE_SPACE: |
| SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, REBUILD_SKIP_LIST>( |
| - space, p, &updating_visitor); |
| + space, space->free_list(), p, &updating_visitor); |
| break; |
| default: |
| UNREACHABLE(); |
| @@ -3383,6 +3391,19 @@ static inline Address StartOfLiveObject(Address block_address, uint32_t cell) { |
| } |
| +intptr_t MarkCompactCollector::Free(PagedSpace* space, |
| + FreeList* free_list, |
| + Address start, |
| + int size) { |
| + if (space->heap()->AreSweepingThreadsActivated()) { |
| + intptr_t wasted = free_list->Free(start, size); |
| + return size - wasted; |
| + } else { |
| + return space->Free(start, size); |
| + } |
| +} |
| + |
| + |
| // Sweeps a space conservatively. After this has been done the larger free |
| // spaces have been put on the free list and the smaller ones have been |
| // ignored and left untouched. A free space is always either ignored or put |
| @@ -3390,7 +3411,9 @@ static inline Address StartOfLiveObject(Address block_address, uint32_t cell) { |
| // because it means that any FreeSpace maps left actually describe a region of |
| // memory that can be ignored when scanning. Dead objects other than free |
| // spaces will not contain the free space map. |
| -intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| +intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, |
| + FreeList* free_list, |
| + Page* p) { |
| ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept()); |
| MarkBit::CellType* cells = p->markbits()->cells(); |
| p->MarkSweptConservatively(); |
| @@ -3418,8 +3441,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| } |
| size_t size = block_address - p->area_start(); |
| if (cell_index == last_cell_index) { |
| - freed_bytes += static_cast<int>(space->Free(p->area_start(), |
| - static_cast<int>(size))); |
| + freed_bytes += Free(space, free_list, p->area_start(), |
| + static_cast<int>(size)); |
| ASSERT_EQ(0, p->LiveBytes()); |
| return freed_bytes; |
| } |
| @@ -3428,8 +3451,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| Address free_end = StartOfLiveObject(block_address, cells[cell_index]); |
| // Free the first free space. |
| size = free_end - p->area_start(); |
| - freed_bytes += space->Free(p->area_start(), |
| - static_cast<int>(size)); |
| + freed_bytes += Free(space, free_list, p->area_start(), |
| + static_cast<int>(size)); |
| // The start of the current free area is represented in undigested form by |
| // the address of the last 32-word section that contained a live object and |
| // the marking bitmap for that cell, which describes where the live object |
| @@ -3458,8 +3481,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| // so now we need to find the start of the first live object at the |
| // end of the free space. |
| free_end = StartOfLiveObject(block_address, cell); |
| - freed_bytes += space->Free(free_start, |
| - static_cast<int>(free_end - free_start)); |
| + freed_bytes += Free(space, free_list, free_start, |
| + static_cast<int>(free_end - free_start)); |
| } |
| } |
| // Update our undigested record of where the current free area started. |
| @@ -3473,8 +3496,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| // Handle the free space at the end of the page. |
| if (block_address - free_start > 32 * kPointerSize) { |
| free_start = DigestFreeStart(free_start, free_start_cell); |
| - freed_bytes += space->Free(free_start, |
| - static_cast<int>(block_address - free_start)); |
| + freed_bytes += Free(space, free_list, free_start, |
| + static_cast<int>(block_address - free_start)); |
| } |
| p->ResetLiveBytes(); |
| @@ -3482,10 +3505,91 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| } |
| +void MarkCompactCollector::PrepareParallelSweeping(PagedSpace* space) { |
| + bool unused_page_present = false; |
| + |
| + space->set_was_swept_conservatively(true); |
| + |
| + space->ClearStats(); |
| + |
| + PageIterator it(space); |
| + while (it.has_next()) { |
| + Page* p = it.next(); |
| + |
| + // Clear sweeping flags indicating that marking bits are still intact. |
| + p->ClearSweptPrecisely(); |
| + p->ClearSweptConservatively(); |
| + p->set_parallel_sweeping(0); |
| + |
| + if (p->IsEvacuationCandidate()) { |
| + ASSERT(evacuation_candidates_.length() > 0); |
| + continue; |
| + } |
| + |
| + if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| + // Will be processed in EvacuateNewSpaceAndCandidates. |
| + continue; |
| + } |
| + |
| + // One unused page is kept, all further are released before sweeping them. |
| + if (p->LiveBytes() == 0) { |
| + if (unused_page_present) { |
| + if (FLAG_gc_verbose) { |
| + PrintF("Sweeping 0x%" V8PRIxPTR " released page.\n", |
| + reinterpret_cast<intptr_t>(p)); |
| + } |
| + // Adjust unswept free bytes because releasing a page expects said |
| + // counter to be accurate for unswept pages. |
| + space->IncreaseUnsweptFreeBytes(p); |
| + space->ReleasePage(p); |
| + continue; |
| + } |
| + unused_page_present = true; |
| + } |
| + } |
| +} |
| + |
| + |
| +void MarkCompactCollector::SweepInParallel(PagedSpace* space, |
| + SweeperType sweeper_type, |
| + FreeList* private_free_list, |
| + FreeList* free_list) { |
| + PageIterator it(space); |
| + while (it.has_next()) { |
| + Page* p = it.next(); |
| + |
| + if (p->IsEvacuationCandidate()) { |
| + ASSERT(evacuation_candidates_.length() > 0); |
| + continue; |
| + } |
| + |
| + if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| + // Will be processed in EvacuateNewSpaceAndCandidates. |
| + continue; |
| + } |
| + |
| + if (p->TryParallelSweeping()) { |
| + if (sweeper_type == CONSERVATIVE || sweeper_type == LAZY_CONSERVATIVE) { |
| + SweepConservatively(space, private_free_list, p); |
| + } else { |
| + SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>( |
| + space, private_free_list, p, NULL); |
| + } |
| + free_list->Concatenate(private_free_list); |
| + } |
| + } |
| +} |
| + |
| + |
| void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| space->set_was_swept_conservatively(sweeper == CONSERVATIVE || |
| sweeper == LAZY_CONSERVATIVE); |
| - |
| + ASSERT(!(space->identity() == OLD_DATA_SPACE && |
| + FLAG_parallel_sweeping && |
| + FLAG_concurrent_sweeping)); |
| + ASSERT(!(space->identity() == OLD_POINTER_SPACE && |
| + FLAG_parallel_sweeping && |
| + FLAG_concurrent_sweeping)); |
| space->ClearStats(); |
| PageIterator it(space); |
| @@ -3543,7 +3647,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| PrintF("Sweeping 0x%" V8PRIxPTR " conservatively.\n", |
| reinterpret_cast<intptr_t>(p)); |
| } |
| - SweepConservatively(space, p); |
| + SweepConservatively(space, space->free_list(), p); |
| pages_swept++; |
| break; |
| } |
| @@ -3552,7 +3656,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| PrintF("Sweeping 0x%" V8PRIxPTR " conservatively as needed.\n", |
| reinterpret_cast<intptr_t>(p)); |
| } |
| - freed_bytes += SweepConservatively(space, p); |
| + freed_bytes += SweepConservatively(space, space->free_list(), p); |
| pages_swept++; |
| space->SetPagesToSweep(p->next_page()); |
| lazy_sweeping_active = true; |
| @@ -3564,9 +3668,11 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { |
| reinterpret_cast<intptr_t>(p)); |
| } |
| if (space->identity() == CODE_SPACE) { |
| - SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL); |
| + SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>( |
| + space, space->free_list(), p, NULL); |
| } else { |
| - SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL); |
| + SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>( |
| + space, space->free_list(), p, NULL); |
| } |
| pages_swept++; |
| break; |
| @@ -3602,8 +3708,18 @@ void MarkCompactCollector::SweepSpaces() { |
| // the map space last because freeing non-live maps overwrites them and |
| // the other spaces rely on possibly non-live maps to get the sizes for |
| // non-live objects. |
| - SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
| - SweepSpace(heap()->old_data_space(), how_to_sweep); |
| + |
| + if (heap()->IsConcurrentSweepingActivated()) { |
| + PrepareParallelSweeping(heap()->old_pointer_space()); |
| + PrepareParallelSweeping(heap()->old_data_space()); |
| + heap_->StartParallelSweeping(how_to_sweep); |
| + if (FLAG_parallel_sweeping) { |
| + heap_->WaitUntilParallelSweepingCompleted(); |
| + } |
| + } else { |
| + SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
| + SweepSpace(heap()->old_data_space(), how_to_sweep); |
| + } |
| RemoveDeadInvalidatedCode(); |
| SweepSpace(heap()->code_space(), PRECISE); |