Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 89 matching lines...) | |
| 100 next_object_must_be_here_or_later = current + object->Size(); | 100 next_object_must_be_here_or_later = current + object->Size(); |
| 101 } | 101 } |
| 102 } | 102 } |
| 103 } | 103 } |
| 104 | 104 |
| 105 | 105 |
| 106 static void VerifyMarking(NewSpace* space) { | 106 static void VerifyMarking(NewSpace* space) { |
| 107 Address end = space->top(); | 107 Address end = space->top(); |
| 108 NewSpacePageIterator it(space->bottom(), end); | 108 NewSpacePageIterator it(space->bottom(), end); |
| 109 // The bottom position is at the start of its page. Allows us to use | 109 // The bottom position is at the start of its page. Allows us to use |
| 110 // page->body() as start of range on all pages. | 110 // page->area_start() as start of range on all pages. |
| 111 ASSERT_EQ(space->bottom(), | 111 ASSERT_EQ(space->bottom(), |
| 112 NewSpacePage::FromAddress(space->bottom())->body()); | 112 NewSpacePage::FromAddress(space->bottom())->area_start()); |
| 113 while (it.has_next()) { | 113 while (it.has_next()) { |
| 114 NewSpacePage* page = it.next(); | 114 NewSpacePage* page = it.next(); |
| 115 Address limit = it.has_next() ? page->body_limit() : end; | 115 Address limit = it.has_next() ? page->area_end() : end; |
| 116 ASSERT(limit == end || !page->Contains(end)); | 116 ASSERT(limit == end || !page->Contains(end)); |
| 117 VerifyMarking(page->body(), limit); | 117 VerifyMarking(page->area_start(), limit); |
| 118 } | 118 } |
| 119 } | 119 } |
| 120 | 120 |
| 121 | 121 |
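The hunks above are part of a mechanical rename: NewSpacePage::body()/body_limit() and Page::ObjectAreaStart()/ObjectAreaEnd() become area_start()/area_end(), so the usable object area is queried from the page rather than assumed to sit at a fixed offset. The shape of the verification walk itself is unchanged; below is a minimal self-contained sketch of that shape, using hypothetical stand-in types rather than the real NewSpacePage/HeapObject API.

```cpp
#include <cstdint>
#include <vector>

// Hypothetical stand-ins for V8's NewSpacePage/HeapObject, not the real API:
// each page exposes its usable object area as [area_start, area_end).
struct PageSketch {
  uintptr_t area_start;
  uintptr_t area_end;
};

// Placeholder for HeapObject::FromAddress(addr)->Size(); a fixed two-word
// size keeps the sketch self-contained.
uintptr_t ObjectSizeAt(uintptr_t /*addr*/) { return 2 * sizeof(void*); }

// Walk every object on every page; on the last page stop at `top` (the
// current allocation pointer) instead of the page's area_end, mirroring the
// `it.has_next() ? page->area_end() : end` limit selection in the diff.
void VerifyAllPages(const std::vector<PageSketch>& pages, uintptr_t top) {
  for (size_t i = 0; i < pages.size(); i++) {
    uintptr_t limit = (i + 1 < pages.size()) ? pages[i].area_end : top;
    for (uintptr_t current = pages[i].area_start; current < limit;
         current += ObjectSizeAt(current)) {
      // A real verifier would check the object's mark bits or pointers here.
    }
  }
}
```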
| 122 static void VerifyMarking(PagedSpace* space) { | 122 static void VerifyMarking(PagedSpace* space) { |
| 123 PageIterator it(space); | 123 PageIterator it(space); |
| 124 | 124 |
| 125 while (it.has_next()) { | 125 while (it.has_next()) { |
| 126 Page* p = it.next(); | 126 Page* p = it.next(); |
| 127 VerifyMarking(p->ObjectAreaStart(), p->ObjectAreaEnd()); | 127 VerifyMarking(p->area_start(), p->area_end()); |
| 128 } | 128 } |
| 129 } | 129 } |
| 130 | 130 |
| 131 | 131 |
| 132 static void VerifyMarking(Heap* heap) { | 132 static void VerifyMarking(Heap* heap) { |
| 133 VerifyMarking(heap->old_pointer_space()); | 133 VerifyMarking(heap->old_pointer_space()); |
| 134 VerifyMarking(heap->old_data_space()); | 134 VerifyMarking(heap->old_data_space()); |
| 135 VerifyMarking(heap->code_space()); | 135 VerifyMarking(heap->code_space()); |
| 136 VerifyMarking(heap->cell_space()); | 136 VerifyMarking(heap->cell_space()); |
| 137 VerifyMarking(heap->map_space()); | 137 VerifyMarking(heap->map_space()); |
| (...skipping 42 matching lines...) | |
| 180 } | 180 } |
| 181 } | 181 } |
| 182 | 182 |
| 183 | 183 |
| 184 static void VerifyEvacuation(NewSpace* space) { | 184 static void VerifyEvacuation(NewSpace* space) { |
| 185 NewSpacePageIterator it(space->bottom(), space->top()); | 185 NewSpacePageIterator it(space->bottom(), space->top()); |
| 186 VerifyEvacuationVisitor visitor; | 186 VerifyEvacuationVisitor visitor; |
| 187 | 187 |
| 188 while (it.has_next()) { | 188 while (it.has_next()) { |
| 189 NewSpacePage* page = it.next(); | 189 NewSpacePage* page = it.next(); |
| 190 Address current = page->body(); | 190 Address current = page->area_start(); |
| 191 Address limit = it.has_next() ? page->body_limit() : space->top(); | 191 Address limit = it.has_next() ? page->area_end() : space->top(); |
| 192 ASSERT(limit == space->top() || !page->Contains(space->top())); | 192 ASSERT(limit == space->top() || !page->Contains(space->top())); |
| 193 while (current < limit) { | 193 while (current < limit) { |
| 194 HeapObject* object = HeapObject::FromAddress(current); | 194 HeapObject* object = HeapObject::FromAddress(current); |
| 195 object->Iterate(&visitor); | 195 object->Iterate(&visitor); |
| 196 current += object->Size(); | 196 current += object->Size(); |
| 197 } | 197 } |
| 198 } | 198 } |
| 199 } | 199 } |
| 200 | 200 |
| 201 | 201 |
| 202 static void VerifyEvacuation(PagedSpace* space) { | 202 static void VerifyEvacuation(PagedSpace* space) { |
| 203 PageIterator it(space); | 203 PageIterator it(space); |
| 204 | 204 |
| 205 while (it.has_next()) { | 205 while (it.has_next()) { |
| 206 Page* p = it.next(); | 206 Page* p = it.next(); |
| 207 if (p->IsEvacuationCandidate()) continue; | 207 if (p->IsEvacuationCandidate()) continue; |
| 208 VerifyEvacuation(p->ObjectAreaStart(), p->ObjectAreaEnd()); | 208 VerifyEvacuation(p->area_start(), p->area_end()); |
| 209 } | 209 } |
| 210 } | 210 } |
| 211 | 211 |
| 212 | 212 |
| 213 static void VerifyEvacuation(Heap* heap) { | 213 static void VerifyEvacuation(Heap* heap) { |
| 214 VerifyEvacuation(heap->old_pointer_space()); | 214 VerifyEvacuation(heap->old_pointer_space()); |
| 215 VerifyEvacuation(heap->old_data_space()); | 215 VerifyEvacuation(heap->old_data_space()); |
| 216 VerifyEvacuation(heap->code_space()); | 216 VerifyEvacuation(heap->code_space()); |
| 217 VerifyEvacuation(heap->cell_space()); | 217 VerifyEvacuation(heap->cell_space()); |
| 218 VerifyEvacuation(heap->map_space()); | 218 VerifyEvacuation(heap->map_space()); |
| 219 VerifyEvacuation(heap->new_space()); | 219 VerifyEvacuation(heap->new_space()); |
| 220 | 220 |
| 221 VerifyEvacuationVisitor visitor; | 221 VerifyEvacuationVisitor visitor; |
| 222 heap->IterateStrongRoots(&visitor, VISIT_ALL); | 222 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
| 223 } | 223 } |
| 224 #endif | 224 #endif |
| 225 | 225 |
| 226 | 226 |
| 227 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { | 227 void MarkCompactCollector::AddEvacuationCandidate(Page* p) { |
| 228 p->MarkEvacuationCandidate(); | 228 p->MarkEvacuationCandidate(); |
| 229 evacuation_candidates_.Add(p); | 229 evacuation_candidates_.Add(p); |
| 230 } | 230 } |
| 231 | 231 |
| 232 | 232 |
| 233 static void TraceFragmentation(PagedSpace* space) { | 233 static void TraceFragmentation(PagedSpace* space) { |
| 234 int number_of_pages = space->CountTotalPages(); | 234 int number_of_pages = space->CountTotalPages(); |
| 235 intptr_t reserved = (number_of_pages * Page::kObjectAreaSize); | 235 intptr_t reserved = (number_of_pages * space->AreaSize()); |
| 236 intptr_t free = reserved - space->SizeOfObjects(); | 236 intptr_t free = reserved - space->SizeOfObjects(); |
| 237 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", | 237 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", |
| 238 AllocationSpaceName(space->identity()), | 238 AllocationSpaceName(space->identity()), |
| 239 number_of_pages, | 239 number_of_pages, |
| 240 static_cast<int>(free), | 240 static_cast<int>(free), |
| 241 static_cast<double>(free) * 100 / reserved); | 241 static_cast<double>(free) * 100 / reserved); |
| 242 } | 242 } |
| 243 | 243 |
| 244 | 244 |
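TraceFragmentation is plain arithmetic: the reserved object area is the page count times the per-page area (now space->AreaSize() instead of the fixed Page::kObjectAreaSize), and whatever is not covered by live objects counts as free. A hedged sketch of the same computation with the V8 types stripped away (names are illustrative):

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative only: the same arithmetic as TraceFragmentation, with plain
// values instead of a PagedSpace. `area_size` corresponds to the new
// space->AreaSize() call that replaces the fixed Page::kObjectAreaSize.
void TraceFragmentationSketch(const char* name, int number_of_pages,
                              int64_t area_size, int64_t size_of_objects) {
  int64_t reserved = number_of_pages * area_size;
  int64_t free_bytes = reserved - size_of_objects;
  double free_pct = reserved > 0 ? 100.0 * free_bytes / reserved : 0.0;
  std::printf("[%s]: %d pages, %lld (%.1f%%) free\n", name, number_of_pages,
              static_cast<long long>(free_bytes), free_pct);
}
```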
| 245 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { | 245 bool MarkCompactCollector::StartCompaction(CompactionMode mode) { |
| (...skipping 200 matching lines...) | |
| 446 p->LiveBytes()); | 446 p->LiveBytes()); |
| 447 } | 447 } |
| 448 return 0; | 448 return 0; |
| 449 } | 449 } |
| 450 | 450 |
| 451 FreeList::SizeStats sizes; | 451 FreeList::SizeStats sizes; |
| 452 space->CountFreeListItems(p, &sizes); | 452 space->CountFreeListItems(p, &sizes); |
| 453 | 453 |
| 454 intptr_t ratio; | 454 intptr_t ratio; |
| 455 intptr_t ratio_threshold; | 455 intptr_t ratio_threshold; |
| 456 intptr_t area_size = space->AreaSize(); | |
| 456 if (space->identity() == CODE_SPACE) { | 457 if (space->identity() == CODE_SPACE) { |
| 457 ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / | 458 ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / |
| 458 Page::kObjectAreaSize; | 459 area_size; |
| 459 ratio_threshold = 10; | 460 ratio_threshold = 10; |
| 460 } else { | 461 } else { |
| 461 ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / | 462 ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / |
| 462 Page::kObjectAreaSize; | 463 area_size; |
| 463 ratio_threshold = 15; | 464 ratio_threshold = 15; |
| 464 } | 465 } |
| 465 | 466 |
| 466 if (FLAG_trace_fragmentation) { | 467 if (FLAG_trace_fragmentation) { |
| 467 PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n", | 468 PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n", |
| 468 reinterpret_cast<void*>(p), | 469 reinterpret_cast<void*>(p), |
| 469 AllocationSpaceName(space->identity()), | 470 AllocationSpaceName(space->identity()), |
| 470 static_cast<int>(sizes.small_size_), | 471 static_cast<int>(sizes.small_size_), |
| 471 static_cast<double>(sizes.small_size_ * 100) / | 472 static_cast<double>(sizes.small_size_ * 100) / |
| 472 Page::kObjectAreaSize, | 473 area_size, |
| 473 static_cast<int>(sizes.medium_size_), | 474 static_cast<int>(sizes.medium_size_), |
| 474 static_cast<double>(sizes.medium_size_ * 100) / | 475 static_cast<double>(sizes.medium_size_ * 100) / |
| 475 Page::kObjectAreaSize, | 476 area_size, |
| 476 static_cast<int>(sizes.large_size_), | 477 static_cast<int>(sizes.large_size_), |
| 477 static_cast<double>(sizes.large_size_ * 100) / | 478 static_cast<double>(sizes.large_size_ * 100) / |
| 478 Page::kObjectAreaSize, | 479 area_size, |
| 479 static_cast<int>(sizes.huge_size_), | 480 static_cast<int>(sizes.huge_size_), |
| 480 static_cast<double>(sizes.huge_size_ * 100) / | 481 static_cast<double>(sizes.huge_size_ * 100) / |
| 481 Page::kObjectAreaSize, | 482 area_size, |
| 482 (ratio > ratio_threshold) ? "[fragmented]" : ""); | 483 (ratio > ratio_threshold) ? "[fragmented]" : ""); |
| 483 } | 484 } |
| 484 | 485 |
| 485 if (FLAG_always_compact && sizes.Total() != Page::kObjectAreaSize) { | 486 if (FLAG_always_compact && sizes.Total() != area_size) { |
| 486 return 1; | 487 return 1; |
| 487 } | 488 } |
| 488 | 489 |
| 489 if (ratio <= ratio_threshold) return 0; // Not fragmented. | 490 if (ratio <= ratio_threshold) return 0; // Not fragmented. |
| 490 | 491 |
| 491 return static_cast<int>(ratio - ratio_threshold); | 492 return static_cast<int>(ratio - ratio_threshold); |
| 492 } | 493 } |
| 493 | 494 |
| 494 | 495 |
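FreeListFragmentation weights the free-list buckets differently for code space (medium times 10 plus large times 2) than for other spaces (small times 5 plus medium), expresses the weighted total as a percentage of the page's usable area, and reports how far that percentage exceeds a space-specific threshold. A small sketch of that decision, with a made-up struct standing in for FreeList::SizeStats:

```cpp
#include <cstdint>

// Stand-in for FreeList::SizeStats (field names follow the diff; in V8 the
// values come from space->CountFreeListItems(p, &sizes)).
struct SizeStatsSketch {
  int64_t small_size_, medium_size_, large_size_, huge_size_;
};

// Returns the fragmentation score the patch computes: 0 if the page is not
// considered fragmented, otherwise how far the weighted ratio exceeds the
// threshold. area_size is the page's usable area (space->AreaSize()).
int64_t FragmentationScore(const SizeStatsSketch& sizes, int64_t area_size,
                           bool is_code_space) {
  int64_t ratio, ratio_threshold;
  if (is_code_space) {
    ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 / area_size;
    ratio_threshold = 10;
  } else {
    ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 / area_size;
    ratio_threshold = 15;
  }
  return (ratio <= ratio_threshold) ? 0 : ratio - ratio_threshold;
}
```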
| 495 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { | 496 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { |
| (...skipping 25 matching lines...) | |
| 521 Page* page_; | 522 Page* page_; |
| 522 }; | 523 }; |
| 523 | 524 |
| 524 enum CompactionMode { | 525 enum CompactionMode { |
| 525 COMPACT_FREE_LISTS, | 526 COMPACT_FREE_LISTS, |
| 526 REDUCE_MEMORY_FOOTPRINT | 527 REDUCE_MEMORY_FOOTPRINT |
| 527 }; | 528 }; |
| 528 | 529 |
| 529 CompactionMode mode = COMPACT_FREE_LISTS; | 530 CompactionMode mode = COMPACT_FREE_LISTS; |
| 530 | 531 |
| 531 intptr_t reserved = number_of_pages * Page::kObjectAreaSize; | 532 intptr_t reserved = number_of_pages * space->AreaSize(); |
| 532 intptr_t over_reserved = reserved - space->SizeOfObjects(); | 533 intptr_t over_reserved = reserved - space->SizeOfObjects(); |
| 533 static const intptr_t kFreenessThreshold = 50; | 534 static const intptr_t kFreenessThreshold = 50; |
| 534 | 535 |
| 535 if (over_reserved >= 2 * Page::kObjectAreaSize && | 536 if (over_reserved >= 2 * space->AreaSize() && |
| 536 reduce_memory_footprint_) { | 537 reduce_memory_footprint_) { |
| 537 mode = REDUCE_MEMORY_FOOTPRINT; | 538 mode = REDUCE_MEMORY_FOOTPRINT; |
| 538 | 539 |
| 539 // We expect that empty pages are easier to compact so slightly bump the | 540 // We expect that empty pages are easier to compact so slightly bump the |
| 540 // limit. | 541 // limit. |
| 541 max_evacuation_candidates += 2; | 542 max_evacuation_candidates += 2; |
| 542 | 543 |
| 543 if (FLAG_trace_fragmentation) { | 544 if (FLAG_trace_fragmentation) { |
| 544 PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n", | 545 PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n", |
| 545 static_cast<double>(over_reserved) / MB, | 546 static_cast<double>(over_reserved) / MB, |
| (...skipping 22 matching lines...) Expand all Loading... | |
| 568 if ((counter & 1) == (page_number & 1)) fragmentation = 1; | 569 if ((counter & 1) == (page_number & 1)) fragmentation = 1; |
| 569 } else if (mode == REDUCE_MEMORY_FOOTPRINT) { | 570 } else if (mode == REDUCE_MEMORY_FOOTPRINT) { |
| 570 // Don't try to release too many pages. | 571 // Don't try to release too many pages. |
| 571 if (estimated_release >= ((over_reserved * 3) / 4)) { | 572 if (estimated_release >= ((over_reserved * 3) / 4)) { |
| 572 continue; | 573 continue; |
| 573 } | 574 } |
| 574 | 575 |
| 575 intptr_t free_bytes = 0; | 576 intptr_t free_bytes = 0; |
| 576 | 577 |
| 577 if (!p->WasSwept()) { | 578 if (!p->WasSwept()) { |
| 578 free_bytes = (Page::kObjectAreaSize - p->LiveBytes()); | 579 free_bytes = (p->area_size() - p->LiveBytes()); |
| 579 } else { | 580 } else { |
| 580 FreeList::SizeStats sizes; | 581 FreeList::SizeStats sizes; |
| 581 space->CountFreeListItems(p, &sizes); | 582 space->CountFreeListItems(p, &sizes); |
| 582 free_bytes = sizes.Total(); | 583 free_bytes = sizes.Total(); |
| 583 } | 584 } |
| 584 | 585 |
| 585 int free_pct = static_cast<int>(free_bytes * 100 / Page::kObjectAreaSize); | 586 int free_pct = static_cast<int>(free_bytes * 100) / p->area_size(); |
| 586 | 587 |
| 587 if (free_pct >= kFreenessThreshold) { | 588 if (free_pct >= kFreenessThreshold) { |
| 588 estimated_release += Page::kObjectAreaSize + | 589 estimated_release += 2 * p->area_size() - free_bytes; |
| 589 (Page::kObjectAreaSize - free_bytes); | |
| 590 fragmentation = free_pct; | 590 fragmentation = free_pct; |
| 591 } else { | 591 } else { |
| 592 fragmentation = 0; | 592 fragmentation = 0; |
| 593 } | 593 } |
| 594 | 594 |
| 595 if (FLAG_trace_fragmentation) { | 595 if (FLAG_trace_fragmentation) { |
| 596 PrintF("%p [%s]: %d (%.2f%%) free %s\n", | 596 PrintF("%p [%s]: %d (%.2f%%) free %s\n", |
| 597 reinterpret_cast<void*>(p), | 597 reinterpret_cast<void*>(p), |
| 598 AllocationSpaceName(space->identity()), | 598 AllocationSpaceName(space->identity()), |
| 599 static_cast<int>(free_bytes), | 599 static_cast<int>(free_bytes), |
| 600 static_cast<double>(free_bytes * 100) / Page::kObjectAreaSize, | 600 static_cast<double>(free_bytes * 100) / p->area_size(), |
| 601 (fragmentation > 0) ? "[fragmented]" : ""); | 601 (fragmentation > 0) ? "[fragmented]" : ""); |
| 602 } | 602 } |
| 603 } else { | 603 } else { |
| 604 fragmentation = FreeListFragmentation(space, p); | 604 fragmentation = FreeListFragmentation(space, p); |
| 605 } | 605 } |
| 606 | 606 |
| 607 if (fragmentation != 0) { | 607 if (fragmentation != 0) { |
| 608 if (count < max_evacuation_candidates) { | 608 if (count < max_evacuation_candidates) { |
| 609 candidates[count++] = Candidate(fragmentation, p); | 609 candidates[count++] = Candidate(fragmentation, p); |
| 610 } else { | 610 } else { |
| (...skipping 1359 matching lines...) | |
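In the REDUCE_MEMORY_FOOTPRINT branch above, free_bytes for an unswept page is its area minus its live bytes, and for a swept page the free-list total; a page whose free percentage reaches kFreenessThreshold (50) becomes a candidate and bumps the running release estimate. Note that the new 2 * p->area_size() - free_bytes is the old Page::kObjectAreaSize + (Page::kObjectAreaSize - free_bytes) rewritten against the per-page area, so the estimate is unchanged whenever area_size() equals the old constant. A sketch of the per-page decision with illustrative names:

```cpp
#include <cstdint>

// Illustrative sketch of the REDUCE_MEMORY_FOOTPRINT branch; names are made
// up. free_bytes is (area_size - live_bytes) for an unswept page or the
// free-list total for a swept one, and kFreenessThreshold is 50 as in the diff.
struct CandidateDecision {
  int fragmentation;          // 0 means "do not evacuate this page".
  int64_t estimated_release;  // Added to the running release estimate.
};

CandidateDecision ConsiderPage(int64_t free_bytes, int64_t area_size) {
  const int kFreenessThreshold = 50;
  CandidateDecision decision = {0, 0};
  int free_pct = static_cast<int>(free_bytes * 100 / area_size);
  if (free_pct >= kFreenessThreshold) {
    // Same value as the old Page::kObjectAreaSize + (Page::kObjectAreaSize -
    // free_bytes) expression, rewritten against the per-page area size.
    decision.estimated_release = 2 * area_size - free_bytes;
    decision.fragmentation = free_pct;
  }
  return decision;
}
```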
| 1970 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 1970 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); |
| 1971 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); | 1971 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); |
| 1972 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); | 1972 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); |
| 1973 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 1973 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); |
| 1974 | 1974 |
| 1975 MarkBit::CellType* cells = p->markbits()->cells(); | 1975 MarkBit::CellType* cells = p->markbits()->cells(); |
| 1976 | 1976 |
| 1977 int last_cell_index = | 1977 int last_cell_index = |
| 1978 Bitmap::IndexToCell( | 1978 Bitmap::IndexToCell( |
| 1979 Bitmap::CellAlignIndex( | 1979 Bitmap::CellAlignIndex( |
| 1980 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 1980 p->AddressToMarkbitIndex(p->area_end()))); |
| 1981 | 1981 |
| 1982 int cell_index = Page::kFirstUsedCell; | 1982 Address cell_base = p->area_start(); |
| 1983 Address cell_base = p->ObjectAreaStart(); | 1983 int cell_index = Bitmap::IndexToCell( |
| 1984 Bitmap::CellAlignIndex( | |
| 1985 p->AddressToMarkbitIndex(cell_base))); | |
| 1984 | 1986 |
| 1985 for (cell_index = Page::kFirstUsedCell; | 1987 |
| 1988 for (; | |
| 1986 cell_index < last_cell_index; | 1989 cell_index < last_cell_index; |
| 1987 cell_index++, cell_base += 32 * kPointerSize) { | 1990 cell_index++, cell_base += 32 * kPointerSize) { |
| 1988 ASSERT((unsigned)cell_index == | 1991 ASSERT((unsigned)cell_index == |
| 1989 Bitmap::IndexToCell( | 1992 Bitmap::IndexToCell( |
| 1990 Bitmap::CellAlignIndex( | 1993 Bitmap::CellAlignIndex( |
| 1991 p->AddressToMarkbitIndex(cell_base)))); | 1994 p->AddressToMarkbitIndex(cell_base)))); |
| 1992 | 1995 |
| 1993 const MarkBit::CellType current_cell = cells[cell_index]; | 1996 const MarkBit::CellType current_cell = cells[cell_index]; |
| 1994 if (current_cell == 0) continue; | 1997 if (current_cell == 0) continue; |
| 1995 | 1998 |
| (...skipping 783 matching lines...) | |
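The pattern repeated throughout this patch replaces the removed Page::kFirstUsedCell constant with a cell index computed from area_start(): translate the address to a mark-bit index, align it to a cell boundary, and convert it to a cell index. Assuming V8's layout of one mark bit per pointer-sized word and 32 bits per bitmap cell, the arithmetic reduces to roughly the following (simplified, not the real Bitmap/Page classes):

```cpp
#include <cstdint>

// Simplified model of the mark-bitmap index arithmetic: one mark bit per
// pointer-sized word, 32 bits per cell. Not the real Bitmap/Page API.
const uintptr_t kPointerSizeSketch = sizeof(void*);
const uint32_t kBitsPerCellSketch = 32;

// Mark-bit index of an address, counted from the start of its page.
uint32_t AddressToMarkbitIndexSketch(uintptr_t page_start, uintptr_t addr) {
  return static_cast<uint32_t>((addr - page_start) / kPointerSizeSketch);
}

// Align an index to a cell boundary; for a cell-aligned area_start this is a
// no-op, which is why the ASSERT inside the loop keeps holding as cell_base
// advances by 32 * kPointerSize per cell.
uint32_t CellAlignIndexSketch(uint32_t index) {
  return index & ~(kBitsPerCellSketch - 1);
}

uint32_t IndexToCellSketch(uint32_t index) { return index / kBitsPerCellSketch; }

// The patch's replacement for the removed Page::kFirstUsedCell constant: the
// first cell is derived from area_start() rather than a fixed page offset.
uint32_t FirstUsedCellSketch(uintptr_t page_start, uintptr_t area_start) {
  return IndexToCellSketch(
      CellAlignIndexSketch(AddressToMarkbitIndexSketch(page_start, area_start)));
}
```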
| 2779 } | 2782 } |
| 2780 | 2783 |
| 2781 return String::cast(*p); | 2784 return String::cast(*p); |
| 2782 } | 2785 } |
| 2783 | 2786 |
| 2784 | 2787 |
| 2785 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, | 2788 bool MarkCompactCollector::TryPromoteObject(HeapObject* object, |
| 2786 int object_size) { | 2789 int object_size) { |
| 2787 Object* result; | 2790 Object* result; |
| 2788 | 2791 |
| 2789 if (object_size > heap()->MaxObjectSizeInPagedSpace()) { | 2792 if (object_size > Page::kMaxNonCodeHeapObjectSize) { |
| 2790 MaybeObject* maybe_result = | 2793 MaybeObject* maybe_result = |
| 2791 heap()->lo_space()->AllocateRaw(object_size, NOT_EXECUTABLE); | 2794 heap()->lo_space()->AllocateRaw(object_size, NOT_EXECUTABLE); |
| 2792 if (maybe_result->ToObject(&result)) { | 2795 if (maybe_result->ToObject(&result)) { |
| 2793 HeapObject* target = HeapObject::cast(result); | 2796 HeapObject* target = HeapObject::cast(result); |
| 2794 MigrateObject(target->address(), | 2797 MigrateObject(target->address(), |
| 2795 object->address(), | 2798 object->address(), |
| 2796 object_size, | 2799 object_size, |
| 2797 LO_SPACE); | 2800 LO_SPACE); |
| 2798 heap()->mark_compact_collector()->tracer()-> | 2801 heap()->mark_compact_collector()->tracer()-> |
| 2799 increment_promoted_objects_size(object_size); | 2802 increment_promoted_objects_size(object_size); |
| (...skipping 97 matching lines...) | |
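TryPromoteObject now compares the object size against Page::kMaxNonCodeHeapObjectSize directly: anything too large for an ordinary page's usable area is allocated in the large object space, while the remainder of the function (outside this hunk) promotes into old pointer or old data space. A rough, hedged sketch of that routing with made-up names:

```cpp
#include <cstdint>

// Illustrative routing only; the real code allocates via heap()->lo_space()
// for oversized objects and, outside this hunk, picks old pointer or old data
// space for the rest. max_regular_size stands in for
// Page::kMaxNonCodeHeapObjectSize, the largest object that fits in the usable
// area of an ordinary (non-code) page.
enum PromotionTargetSketch {
  kLargeObjectSpace,
  kOldPointerSpace,
  kOldDataSpace
};

PromotionTargetSketch ChoosePromotionTarget(int object_size,
                                            int max_regular_size,
                                            bool contains_pointers) {
  if (object_size > max_regular_size) return kLargeObjectSpace;
  return contains_pointers ? kOldPointerSpace : kOldDataSpace;
}
```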
| 2897 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { | 2900 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { |
| 2898 AlwaysAllocateScope always_allocate; | 2901 AlwaysAllocateScope always_allocate; |
| 2899 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 2902 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
| 2900 ASSERT(p->IsEvacuationCandidate() && !p->WasSwept()); | 2903 ASSERT(p->IsEvacuationCandidate() && !p->WasSwept()); |
| 2901 MarkBit::CellType* cells = p->markbits()->cells(); | 2904 MarkBit::CellType* cells = p->markbits()->cells(); |
| 2902 p->MarkSweptPrecisely(); | 2905 p->MarkSweptPrecisely(); |
| 2903 | 2906 |
| 2904 int last_cell_index = | 2907 int last_cell_index = |
| 2905 Bitmap::IndexToCell( | 2908 Bitmap::IndexToCell( |
| 2906 Bitmap::CellAlignIndex( | 2909 Bitmap::CellAlignIndex( |
| 2907 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 2910 p->AddressToMarkbitIndex(p->area_end()))); |
| 2908 | 2911 |
| 2909 int cell_index = Page::kFirstUsedCell; | 2912 Address cell_base = p->area_start(); |
| 2910 Address cell_base = p->ObjectAreaStart(); | 2913 int cell_index = Bitmap::IndexToCell( |
| 2914 Bitmap::CellAlignIndex( | |
| 2915 p->AddressToMarkbitIndex(cell_base))); | |
| 2916 | |
| 2911 int offsets[16]; | 2917 int offsets[16]; |
| 2912 | 2918 |
| 2913 for (cell_index = Page::kFirstUsedCell; | 2919 for (; |
| 2914 cell_index < last_cell_index; | 2920 cell_index < last_cell_index; |
| 2915 cell_index++, cell_base += 32 * kPointerSize) { | 2921 cell_index++, cell_base += 32 * kPointerSize) { |
| 2916 ASSERT((unsigned)cell_index == | 2922 ASSERT((unsigned)cell_index == |
| 2917 Bitmap::IndexToCell( | 2923 Bitmap::IndexToCell( |
| 2918 Bitmap::CellAlignIndex( | 2924 Bitmap::CellAlignIndex( |
| 2919 p->AddressToMarkbitIndex(cell_base)))); | 2925 p->AddressToMarkbitIndex(cell_base)))); |
| 2920 if (cells[cell_index] == 0) continue; | 2926 if (cells[cell_index] == 0) continue; |
| 2921 | 2927 |
| 2922 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets); | 2928 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets); |
| 2923 for (int i = 0; i < live_objects; i++) { | 2929 for (int i = 0; i < live_objects; i++) { |
| (...skipping 134 matching lines...) | |
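EvacuateLiveObjectsFromPage walks the mark bitmap one 32-bit cell at a time and asks MarkWordToObjectStarts for the word offsets of the live objects that start in that cell. A simplified stand-alone version of that expansion is below; the real routine decodes the two-bit black/grey marking, so treat this strictly as an illustration.

```cpp
#include <cstdint>

// Simplified take on MarkWordToObjectStarts: expand the set bits of a 32-bit
// mark cell into word offsets within the cell's 32-word block. The real V8
// routine decodes the two-bit black/grey marking; here every set bit is
// treated as an object start. At most 16 starts fit in one cell (objects are
// at least two words), which is presumably why offsets[16] suffices above.
int MarkWordToObjectStartsSketch(uint32_t cell, int offsets[16]) {
  int count = 0;
  for (int bit = 0; bit < 32 && count < 16; bit++) {
    if (cell & (1u << bit)) offsets[count++] = bit;
  }
  return count;
}
```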
| 3058 ASSERT_EQ(skip_list_mode == REBUILD_SKIP_LIST, | 3064 ASSERT_EQ(skip_list_mode == REBUILD_SKIP_LIST, |
| 3059 space->identity() == CODE_SPACE); | 3065 space->identity() == CODE_SPACE); |
| 3060 ASSERT((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); | 3066 ASSERT((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST)); |
| 3061 | 3067 |
| 3062 MarkBit::CellType* cells = p->markbits()->cells(); | 3068 MarkBit::CellType* cells = p->markbits()->cells(); |
| 3063 p->MarkSweptPrecisely(); | 3069 p->MarkSweptPrecisely(); |
| 3064 | 3070 |
| 3065 int last_cell_index = | 3071 int last_cell_index = |
| 3066 Bitmap::IndexToCell( | 3072 Bitmap::IndexToCell( |
| 3067 Bitmap::CellAlignIndex( | 3073 Bitmap::CellAlignIndex( |
| 3068 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 3074 p->AddressToMarkbitIndex(p->area_end()))); |
| 3069 | 3075 |
| 3070 int cell_index = Page::kFirstUsedCell; | 3076 Address free_start = p->area_start(); |
| 3071 Address free_start = p->ObjectAreaStart(); | 3077 int cell_index = |
|
Erik Corry
2012/02/23 12:00:40
I think it is clearer to move this down to immedia
| |
| 3078 Bitmap::IndexToCell( | |
| 3079 Bitmap::CellAlignIndex( | |
| 3080 p->AddressToMarkbitIndex(free_start))); | |
| 3081 | |
| 3072 ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); | 3082 ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); |
| 3073 Address object_address = p->ObjectAreaStart(); | 3083 Address object_address = free_start; |
| 3074 int offsets[16]; | 3084 int offsets[16]; |
| 3075 | 3085 |
| 3076 SkipList* skip_list = p->skip_list(); | 3086 SkipList* skip_list = p->skip_list(); |
| 3077 int curr_region = -1; | 3087 int curr_region = -1; |
| 3078 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { | 3088 if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) { |
| 3079 skip_list->Clear(); | 3089 skip_list->Clear(); |
| 3080 } | 3090 } |
| 3081 | 3091 |
| 3082 for (cell_index = Page::kFirstUsedCell; | 3092 for (; |
| 3083 cell_index < last_cell_index; | 3093 cell_index < last_cell_index; |
| 3084 cell_index++, object_address += 32 * kPointerSize) { | 3094 cell_index++, object_address += 32 * kPointerSize) { |
| 3085 ASSERT((unsigned)cell_index == | 3095 ASSERT((unsigned)cell_index == |
| 3086 Bitmap::IndexToCell( | 3096 Bitmap::IndexToCell( |
| 3087 Bitmap::CellAlignIndex( | 3097 Bitmap::CellAlignIndex( |
| 3088 p->AddressToMarkbitIndex(object_address)))); | 3098 p->AddressToMarkbitIndex(object_address)))); |
| 3089 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets); | 3099 int live_objects = MarkWordToObjectStarts(cells[cell_index], offsets); |
| 3090 int live_index = 0; | 3100 int live_index = 0; |
| 3091 for ( ; live_objects != 0; live_objects--) { | 3101 for ( ; live_objects != 0; live_objects--) { |
| 3092 Address free_end = object_address + offsets[live_index++] * kPointerSize; | 3102 Address free_end = object_address + offsets[live_index++] * kPointerSize; |
| (...skipping 16 matching lines...) | |
| 3109 new_region_end != curr_region) { | 3119 new_region_end != curr_region) { |
| 3110 skip_list->AddObject(free_end, size); | 3120 skip_list->AddObject(free_end, size); |
| 3111 curr_region = new_region_end; | 3121 curr_region = new_region_end; |
| 3112 } | 3122 } |
| 3113 } | 3123 } |
| 3114 free_start = free_end + size; | 3124 free_start = free_end + size; |
| 3115 } | 3125 } |
| 3116 // Clear marking bits for current cell. | 3126 // Clear marking bits for current cell. |
| 3117 cells[cell_index] = 0; | 3127 cells[cell_index] = 0; |
| 3118 } | 3128 } |
| 3119 if (free_start != p->ObjectAreaEnd()) { | 3129 if (free_start != p->area_end()) { |
| 3120 space->Free(free_start, static_cast<int>(p->ObjectAreaEnd() - free_start)); | 3130 space->Free(free_start, static_cast<int>(p->area_end() - free_start)); |
| 3121 } | 3131 } |
| 3122 p->ResetLiveBytes(); | 3132 p->ResetLiveBytes(); |
| 3123 } | 3133 } |
| 3124 | 3134 |
| 3125 | 3135 |
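The precise sweep above walks the cells in address order, frees the gap in front of every live object, optionally feeds code objects into the skip list, and finally frees the tail up to area_end() (previously ObjectAreaEnd()). The gap-coalescing core looks roughly like this, with plain offsets standing in for mark-bit cells and a callback standing in for space->Free():

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch of the precise-sweep gap logic: given the live objects on a page in
// address order, hand every gap between them, plus the tail up to area_end,
// back to the free list. FreeFn stands in for space->Free(start, size).
struct LiveObjectSketch {
  uintptr_t start;
  size_t size;
};

template <typename FreeFn>
void SweepGapsSketch(uintptr_t area_start, uintptr_t area_end,
                     const std::vector<LiveObjectSketch>& live,
                     FreeFn free_fn) {
  uintptr_t free_start = area_start;
  for (size_t i = 0; i < live.size(); i++) {
    uintptr_t free_end = live[i].start;
    if (free_end > free_start) free_fn(free_start, free_end - free_start);
    free_start = free_end + live[i].size;  // Next gap begins after this object.
  }
  if (free_start != area_end) free_fn(free_start, area_end - free_start);
}
```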
| 3126 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { | 3136 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { |
| 3127 Page* p = Page::FromAddress(code->address()); | 3137 Page* p = Page::FromAddress(code->address()); |
| 3128 | 3138 |
| 3129 if (p->IsEvacuationCandidate() || | 3139 if (p->IsEvacuationCandidate() || |
| 3130 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | 3140 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| (...skipping 274 matching lines...) | |
| 3405 VerifyEvacuation(heap_); | 3415 VerifyEvacuation(heap_); |
| 3406 } | 3416 } |
| 3407 #endif | 3417 #endif |
| 3408 | 3418 |
| 3409 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); | 3419 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); |
| 3410 ASSERT(migration_slots_buffer_ == NULL); | 3420 ASSERT(migration_slots_buffer_ == NULL); |
| 3411 for (int i = 0; i < npages; i++) { | 3421 for (int i = 0; i < npages; i++) { |
| 3412 Page* p = evacuation_candidates_[i]; | 3422 Page* p = evacuation_candidates_[i]; |
| 3413 if (!p->IsEvacuationCandidate()) continue; | 3423 if (!p->IsEvacuationCandidate()) continue; |
| 3414 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3424 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
| 3415 space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize); | 3425 space->Free(p->area_start(), p->area_size()); |
| 3416 p->set_scan_on_scavenge(false); | 3426 p->set_scan_on_scavenge(false); |
| 3417 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); | 3427 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
| 3418 p->ClearEvacuationCandidate(); | 3428 p->ClearEvacuationCandidate(); |
| 3419 p->ResetLiveBytes(); | 3429 p->ResetLiveBytes(); |
| 3420 space->ReleasePage(p); | 3430 space->ReleasePage(p); |
| 3421 } | 3431 } |
| 3422 evacuation_candidates_.Rewind(0); | 3432 evacuation_candidates_.Rewind(0); |
| 3423 compacting_ = false; | 3433 compacting_ = false; |
| 3424 } | 3434 } |
| 3425 | 3435 |
| (...skipping 282 matching lines...) | |
| 3708 // memory that can be ignored when scanning. Dead objects other than free | 3718 // memory that can be ignored when scanning. Dead objects other than free |
| 3709 // spaces will not contain the free space map. | 3719 // spaces will not contain the free space map. |
| 3710 intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { | 3720 intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) { |
| 3711 ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept()); | 3721 ASSERT(!p->IsEvacuationCandidate() && !p->WasSwept()); |
| 3712 MarkBit::CellType* cells = p->markbits()->cells(); | 3722 MarkBit::CellType* cells = p->markbits()->cells(); |
| 3713 p->MarkSweptConservatively(); | 3723 p->MarkSweptConservatively(); |
| 3714 | 3724 |
| 3715 int last_cell_index = | 3725 int last_cell_index = |
| 3716 Bitmap::IndexToCell( | 3726 Bitmap::IndexToCell( |
| 3717 Bitmap::CellAlignIndex( | 3727 Bitmap::CellAlignIndex( |
| 3718 p->AddressToMarkbitIndex(p->ObjectAreaEnd()))); | 3728 p->AddressToMarkbitIndex(p->area_end()))); |
| 3719 | 3729 |
| 3720 int cell_index = Page::kFirstUsedCell; | 3730 int cell_index = |
| 3731 Bitmap::IndexToCell( | |
| 3732 Bitmap::CellAlignIndex( | |
| 3733 p->AddressToMarkbitIndex(p->area_start()))); | |
| 3734 | |
| 3721 intptr_t freed_bytes = 0; | 3735 intptr_t freed_bytes = 0; |
| 3722 | 3736 |
| 3723 // This is the start of the 32 word block that we are currently looking at. | 3737 // This is the start of the 32 word block that we are currently looking at. |
| 3724 Address block_address = p->ObjectAreaStart(); | 3738 Address block_address = p->area_start(); |
| 3725 | 3739 |
| 3726 // Skip over all the dead objects at the start of the page and mark them free. | 3740 // Skip over all the dead objects at the start of the page and mark them free. |
| 3727 for (cell_index = Page::kFirstUsedCell; | 3741 for (; |
| 3728 cell_index < last_cell_index; | 3742 cell_index < last_cell_index; |
| 3729 cell_index++, block_address += 32 * kPointerSize) { | 3743 cell_index++, block_address += 32 * kPointerSize) { |
| 3730 if (cells[cell_index] != 0) break; | 3744 if (cells[cell_index] != 0) break; |
| 3731 } | 3745 } |
| 3732 size_t size = block_address - p->ObjectAreaStart(); | 3746 size_t size = block_address - p->area_start(); |
| 3733 if (cell_index == last_cell_index) { | 3747 if (cell_index == last_cell_index) { |
| 3734 freed_bytes += static_cast<int>(space->Free(p->ObjectAreaStart(), | 3748 freed_bytes += static_cast<int>(space->Free(p->area_start(), |
| 3735 static_cast<int>(size))); | 3749 static_cast<int>(size))); |
| 3736 ASSERT_EQ(0, p->LiveBytes()); | 3750 ASSERT_EQ(0, p->LiveBytes()); |
| 3737 return freed_bytes; | 3751 return freed_bytes; |
| 3738 } | 3752 } |
| 3739 // Grow the size of the start-of-page free space a little to get up to the | 3753 // Grow the size of the start-of-page free space a little to get up to the |
| 3740 // first live object. | 3754 // first live object. |
| 3741 Address free_end = StartOfLiveObject(block_address, cells[cell_index]); | 3755 Address free_end = StartOfLiveObject(block_address, cells[cell_index]); |
| 3742 // Free the first free space. | 3756 // Free the first free space. |
| 3743 size = free_end - p->ObjectAreaStart(); | 3757 size = free_end - p->area_start(); |
| 3744 freed_bytes += space->Free(p->ObjectAreaStart(), | 3758 freed_bytes += space->Free(p->area_start(), |
| 3745 static_cast<int>(size)); | 3759 static_cast<int>(size)); |
| 3746 // The start of the current free area is represented in undigested form by | 3760 // The start of the current free area is represented in undigested form by |
| 3747 // the address of the last 32-word section that contained a live object and | 3761 // the address of the last 32-word section that contained a live object and |
| 3748 // the marking bitmap for that cell, which describes where the live object | 3762 // the marking bitmap for that cell, which describes where the live object |
| 3749 // started. Unless we find a large free space in the bitmap we will not | 3763 // started. Unless we find a large free space in the bitmap we will not |
| 3750 // digest this pair into a real address. We start the iteration here at the | 3764 // digest this pair into a real address. We start the iteration here at the |
| 3751 // first word in the marking bit map that indicates a live object. | 3765 // first word in the marking bit map that indicates a live object. |
| 3752 Address free_start = block_address; | 3766 Address free_start = block_address; |
| 3753 uint32_t free_start_cell = cells[cell_index]; | 3767 uint32_t free_start_cell = cells[cell_index]; |
| 3754 | 3768 |
| (...skipping 360 matching lines...) | |
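SweepConservatively, by contrast, only frees whole 32-word blocks whose mark cells are completely empty, plus a small extension up to the first live object; it never computes exact object boundaries for the rest of the page. A sketch of the leading-block part shown in this hunk (the StartOfLiveObject extension is omitted, and the callback stands in for space->Free()):

```cpp
#include <cstddef>
#include <cstdint>

// Sketch of SweepConservatively's leading free block: advance one 32-word
// block per empty mark cell, then free everything from area_start up to the
// first non-empty block. Returns the number of bytes freed; FreeFn stands in
// for space->Free(start, size).
template <typename FreeFn>
size_t FreeLeadingDeadBlocks(uintptr_t area_start, const uint32_t* cells,
                             int cell_count, FreeFn free_fn) {
  const size_t kBlockBytes = 32 * sizeof(void*);  // One bitmap cell per block.
  uintptr_t block_address = area_start;
  int cell_index = 0;
  while (cell_index < cell_count && cells[cell_index] == 0) {
    cell_index++;
    block_address += kBlockBytes;
  }
  size_t freed = block_address - area_start;  // Whole dead blocks at the start.
  if (freed != 0) free_fn(area_start, freed);
  return freed;
}
```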
| 4115 while (buffer != NULL) { | 4129 while (buffer != NULL) { |
| 4116 SlotsBuffer* next_buffer = buffer->next(); | 4130 SlotsBuffer* next_buffer = buffer->next(); |
| 4117 DeallocateBuffer(buffer); | 4131 DeallocateBuffer(buffer); |
| 4118 buffer = next_buffer; | 4132 buffer = next_buffer; |
| 4119 } | 4133 } |
| 4120 *buffer_address = NULL; | 4134 *buffer_address = NULL; |
| 4121 } | 4135 } |
| 4122 | 4136 |
| 4123 | 4137 |
| 4124 } } // namespace v8::internal | 4138 } } // namespace v8::internal |