Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(54)

Side by Side Diff: src/heap/mark-compact.cc

Issue 2440683002: [heap] Move typed slot filtering logic into sweeper. (Closed)
Patch Set: fix test Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/remembered-set.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/mark-compact.h" 5 #include "src/heap/mark-compact.h"
6 6
7 #include "src/base/atomicops.h" 7 #include "src/base/atomicops.h"
8 #include "src/base/bits.h" 8 #include "src/base/bits.h"
9 #include "src/base/sys-info.h" 9 #include "src/base/sys-info.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 2854 matching lines...) Expand 10 before | Expand all | Expand 10 after
2865 Object** p) { 2865 Object** p) {
2866 MapWord map_word = HeapObject::cast(*p)->map_word(); 2866 MapWord map_word = HeapObject::cast(*p)->map_word();
2867 2867
2868 if (map_word.IsForwardingAddress()) { 2868 if (map_word.IsForwardingAddress()) {
2869 return String::cast(map_word.ToForwardingAddress()); 2869 return String::cast(map_word.ToForwardingAddress());
2870 } 2870 }
2871 2871
2872 return String::cast(*p); 2872 return String::cast(*p);
2873 } 2873 }
2874 2874
// Returns true if |slot| lies inside a black (marked-live) object on page
// |p|. The slot's own mark bit is checked first; otherwise the mark bitmap
// is scanned backwards from the slot to the closest preceding mark bit, and
// the object starting there is tested for spanning the slot.
// Must not be called for large-object-space pages (see DCHECK below);
// FindBlackObjectBySlotSlow handles those.
bool MarkCompactCollector::IsSlotInBlackObject(MemoryChunk* p, Address slot) {
  Space* owner = p->owner();
  DCHECK(owner != heap_->lo_space() && owner != nullptr);
  USE(owner);

  // We may be part of a black area.
  if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(slot))) {
    return true;
  }

  // Locate the bitmap cell and the bit within that cell for |slot|.
  uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
  unsigned int cell_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
  MarkBit::CellType index_mask = 1u << Bitmap::IndexInCell(mark_bit_index);
  MarkBit::CellType* cells = p->markbits()->cells();
  Address base_address = p->area_start();
  unsigned int base_address_cell_index = Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(base_address)));

  // Check if the slot points to the start of an object. This can happen e.g.
  // when we left trim a fixed array. Such slots are invalid and we can remove
  // them. An object start is detected as a set mark bit whose immediately
  // preceding bit is clear (the preceding bit may live in the previous cell).
  if (index_mask > 1) {
    if ((cells[cell_index] & index_mask) != 0 &&
        (cells[cell_index] & (index_mask >> 1)) == 0) {
      return false;
    }
  } else {
    // Left trimming moves the mark bits so we cannot be in the very first cell.
    DCHECK(cell_index != base_address_cell_index);
    if ((cells[cell_index] & index_mask) != 0 &&
        (cells[cell_index - 1] & (1u << Bitmap::kBitIndexMask)) == 0) {
      return false;
    }
  }

  // Check if the object is in the current cell. The trailing-zeros comparison
  // tests whether any mark bit at or below the slot's bit is set in this cell.
  MarkBit::CellType slot_mask;
  if ((cells[cell_index] == 0) ||
      (base::bits::CountTrailingZeros32(cells[cell_index]) >
       base::bits::CountTrailingZeros32(cells[cell_index] | index_mask))) {
    // If we are already in the first cell, there is no live object.
    if (cell_index == base_address_cell_index) return false;

    // If not, find a cell in a preceding cell slot that has a mark bit set.
    do {
      cell_index--;
    } while (cell_index > base_address_cell_index && cells[cell_index] == 0);

    // The slot must be in a dead object if there are no preceding cells that
    // have mark bits set.
    if (cells[cell_index] == 0) {
      return false;
    }

    // The object is in a preceding cell. Set the mask to find any object.
    slot_mask = ~0u;
  } else {
    // We are interested in object mark bits right before the slot.
    slot_mask = index_mask + (index_mask - 1);
  }

  MarkBit::CellType current_cell = cells[cell_index];
  CHECK(current_cell != 0);

  // Find the last live object in the cell: the highest set bit at or below
  // the slot position determines the candidate object's start address.
  unsigned int leading_zeros =
      base::bits::CountLeadingZeros32(current_cell & slot_mask);
  CHECK(leading_zeros != Bitmap::kBitsPerCell);
  int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1;

  // Translate (cell index, bit offset) back into a heap address.
  base_address += (cell_index - base_address_cell_index) *
                  Bitmap::kBitsPerCell * kPointerSize;
  Address address = base_address + offset * kPointerSize;

  // If the found mark bit is part of a black area, the slot cannot be part
  // of a live object since it is not marked.
  if (p->IsBlackAreaEndMarker(address + kPointerSize)) return false;

  HeapObject* object = HeapObject::FromAddress(address);
  CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
  CHECK(object->address() < reinterpret_cast<Address>(slot));
  if ((object->address() + kPointerSize) <= slot &&
      (object->address() + object->Size()) > slot) {
    // If the slot is within the last found object in the cell, the slot is
    // in a live object.
    // Slots pointing to the first word of an object are invalid and removed.
    // This can happen when we move the object header while left trimming.
    return true;
  }
  return false;
}
2966
2967 HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
2968 Page* p = Page::FromAddress(slot);
2969 Space* owner = p->owner();
2970 if (owner == heap_->lo_space() || owner == nullptr) {
2971 Object* large_object = heap_->lo_space()->FindObject(slot);
2972 // This object has to exist, otherwise we would not have recorded a slot
2973 // for it.
2974 CHECK(large_object->IsHeapObject());
2975 HeapObject* large_heap_object = HeapObject::cast(large_object);
2976
2977 if (IsMarked(large_heap_object)) {
2978 return large_heap_object;
2979 }
2980 return nullptr;
2981 }
2982
2983 LiveObjectIterator<kBlackObjects> it(p);
2984 HeapObject* object = nullptr;
2985 while ((object = it.Next()) != nullptr) {
2986 int size = object->Size();
2987 if (object->address() > slot) return nullptr;
2988 if (object->address() <= slot && slot < (object->address() + size)) {
2989 return object;
2990 }
2991 }
2992
2993 return nullptr;
2994 }
2995
2996
2997 void MarkCompactCollector::EvacuateNewSpacePrologue() { 2875 void MarkCompactCollector::EvacuateNewSpacePrologue() {
2998 NewSpace* new_space = heap()->new_space(); 2876 NewSpace* new_space = heap()->new_space();
2999 // Append the list of new space pages to be processed. 2877 // Append the list of new space pages to be processed.
3000 for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) { 2878 for (Page* p : NewSpacePageRange(new_space->bottom(), new_space->top())) {
3001 newspace_evacuation_candidates_.Add(p); 2879 newspace_evacuation_candidates_.Add(p);
3002 } 2880 }
3003 new_space->Flip(); 2881 new_space->Flip();
3004 new_space->ResetAllocationInfo(); 2882 new_space->ResetAllocationInfo();
3005 } 2883 }
3006 2884
(...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after
3309 HeapObject* heap_object = HeapObject::cast(object); 3187 HeapObject* heap_object = HeapObject::cast(object);
3310 MapWord map_word = heap_object->map_word(); 3188 MapWord map_word = heap_object->map_word();
3311 if (map_word.IsForwardingAddress()) { 3189 if (map_word.IsForwardingAddress()) {
3312 return map_word.ToForwardingAddress(); 3190 return map_word.ToForwardingAddress();
3313 } 3191 }
3314 } 3192 }
3315 return object; 3193 return object;
3316 } 3194 }
3317 }; 3195 };
3318 3196
3197 MarkCompactCollector::Sweeper::ClearOldToNewSlotsMode
3198 MarkCompactCollector::Sweeper::GetClearOldToNewSlotsMode(Page* p) {
3199 AllocationSpace identity = p->owner()->identity();
3200 if (p->old_to_new_slots() &&
3201 (identity == OLD_SPACE || identity == MAP_SPACE)) {
3202 return MarkCompactCollector::Sweeper::CLEAR_REGULAR_SLOTS;
3203 } else if (p->typed_old_to_new_slots() && identity == CODE_SPACE) {
3204 return MarkCompactCollector::Sweeper::CLEAR_TYPED_SLOTS;
3205 }
3206 return MarkCompactCollector::Sweeper::DO_NOT_CLEAR;
3207 }
3208
3319 int MarkCompactCollector::Sweeper::RawSweep( 3209 int MarkCompactCollector::Sweeper::RawSweep(
3320 Page* p, FreeListRebuildingMode free_list_mode, 3210 Page* p, FreeListRebuildingMode free_list_mode,
3321 FreeSpaceTreatmentMode free_space_mode) { 3211 FreeSpaceTreatmentMode free_space_mode) {
3322 Space* space = p->owner(); 3212 Space* space = p->owner();
3323 AllocationSpace identity = space->identity();
3324 DCHECK_NOT_NULL(space); 3213 DCHECK_NOT_NULL(space);
3325 DCHECK(free_list_mode == IGNORE_FREE_LIST || identity == OLD_SPACE || 3214 DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
3326 identity == CODE_SPACE || identity == MAP_SPACE); 3215 space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
3327 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); 3216 DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
3328 3217
3218 // If there are old-to-new slots in that page, we have to filter out slots
3219 // that are in dead memory which is freed by the sweeper.
3220 ClearOldToNewSlotsMode slots_clearing_mode = GetClearOldToNewSlotsMode(p);
3221
3222 // The free ranges map is used for filtering typed slots.
3223 std::map<uint32_t, uint32_t> free_ranges;
3224
3329 // Before we sweep objects on the page, we free dead array buffers which 3225 // Before we sweep objects on the page, we free dead array buffers which
3330 // requires valid mark bits. 3226 // requires valid mark bits.
3331 ArrayBufferTracker::FreeDead(p); 3227 ArrayBufferTracker::FreeDead(p);
3332 3228
3333 // We also release the black area markers here.
3334 p->ReleaseBlackAreaEndMarkerMap();
3335
3336 Address free_start = p->area_start(); 3229 Address free_start = p->area_start();
3337 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0); 3230 DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
3338 3231
3339 // If we use the skip list for code space pages, we have to lock the skip 3232 // If we use the skip list for code space pages, we have to lock the skip
3340 // list because it could be accessed concurrently by the runtime or the 3233 // list because it could be accessed concurrently by the runtime or the
3341 // deoptimizer. 3234 // deoptimizer.
3342 const bool rebuild_skip_list = 3235 const bool rebuild_skip_list =
3343 space->identity() == CODE_SPACE && p->skip_list() != nullptr; 3236 space->identity() == CODE_SPACE && p->skip_list() != nullptr;
3344 SkipList* skip_list = p->skip_list(); 3237 SkipList* skip_list = p->skip_list();
3345 if (rebuild_skip_list) { 3238 if (rebuild_skip_list) {
3346 skip_list->Clear(); 3239 skip_list->Clear();
3347 } 3240 }
3348 3241
3349 intptr_t freed_bytes = 0; 3242 intptr_t freed_bytes = 0;
3350 intptr_t max_freed_bytes = 0; 3243 intptr_t max_freed_bytes = 0;
3351 int curr_region = -1; 3244 int curr_region = -1;
3352 3245
3353 LiveObjectIterator<kBlackObjects> it(p); 3246 LiveObjectIterator<kBlackObjects> it(p);
3354 HeapObject* object = NULL; 3247 HeapObject* object = NULL;
3355 bool clear_slots = 3248
3356 p->old_to_new_slots() && (identity == OLD_SPACE || identity == MAP_SPACE);
3357 while ((object = it.Next()) != NULL) { 3249 while ((object = it.Next()) != NULL) {
3358 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object))); 3250 DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
3359 Address free_end = object->address(); 3251 Address free_end = object->address();
3360 if (free_end != free_start) { 3252 if (free_end != free_start) {
3361 CHECK_GT(free_end, free_start); 3253 CHECK_GT(free_end, free_start);
3362 size_t size = static_cast<size_t>(free_end - free_start); 3254 size_t size = static_cast<size_t>(free_end - free_start);
3363 if (free_space_mode == ZAP_FREE_SPACE) { 3255 if (free_space_mode == ZAP_FREE_SPACE) {
3364 memset(free_start, 0xcc, size); 3256 memset(free_start, 0xcc, size);
3365 } 3257 }
3366 if (free_list_mode == REBUILD_FREE_LIST) { 3258 if (free_list_mode == REBUILD_FREE_LIST) {
3367 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( 3259 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(
3368 free_start, size); 3260 free_start, size);
3369 max_freed_bytes = Max(freed_bytes, max_freed_bytes); 3261 max_freed_bytes = Max(freed_bytes, max_freed_bytes);
3370 } else { 3262 } else {
3371 p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size), 3263 p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
3372 ClearRecordedSlots::kNo); 3264 ClearRecordedSlots::kNo);
3373 } 3265 }
3374 3266
3375 if (clear_slots) { 3267 if (slots_clearing_mode == CLEAR_REGULAR_SLOTS) {
3376 RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, free_end, 3268 RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, free_end,
3377 SlotSet::KEEP_EMPTY_BUCKETS); 3269 SlotSet::KEEP_EMPTY_BUCKETS);
3270 } else if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
3271 free_ranges.insert(std::pair<uint32_t, uint32_t>(
3272 static_cast<uint32_t>(free_start - p->address()),
3273 static_cast<uint32_t>(free_end - p->address())));
3378 } 3274 }
3379 } 3275 }
3380 Map* map = object->synchronized_map(); 3276 Map* map = object->synchronized_map();
3381 int size = object->SizeFromMap(map); 3277 int size = object->SizeFromMap(map);
3382 if (rebuild_skip_list) { 3278 if (rebuild_skip_list) {
3383 int new_region_start = SkipList::RegionNumber(free_end); 3279 int new_region_start = SkipList::RegionNumber(free_end);
3384 int new_region_end = 3280 int new_region_end =
3385 SkipList::RegionNumber(free_end + size - kPointerSize); 3281 SkipList::RegionNumber(free_end + size - kPointerSize);
3386 if (new_region_start != curr_region || new_region_end != curr_region) { 3282 if (new_region_start != curr_region || new_region_end != curr_region) {
3387 skip_list->AddObject(free_end, size); 3283 skip_list->AddObject(free_end, size);
(...skipping 11 matching lines...) Expand all
3399 } 3295 }
3400 if (free_list_mode == REBUILD_FREE_LIST) { 3296 if (free_list_mode == REBUILD_FREE_LIST) {
3401 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree( 3297 freed_bytes = reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(
3402 free_start, size); 3298 free_start, size);
3403 max_freed_bytes = Max(freed_bytes, max_freed_bytes); 3299 max_freed_bytes = Max(freed_bytes, max_freed_bytes);
3404 } else { 3300 } else {
3405 p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size), 3301 p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
3406 ClearRecordedSlots::kNo); 3302 ClearRecordedSlots::kNo);
3407 } 3303 }
3408 3304
3409 if (clear_slots) { 3305 if (slots_clearing_mode == CLEAR_REGULAR_SLOTS) {
3410 RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, p->area_end(), 3306 RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, p->area_end(),
3411 SlotSet::KEEP_EMPTY_BUCKETS); 3307 SlotSet::KEEP_EMPTY_BUCKETS);
3308 } else if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
3309 free_ranges.insert(std::pair<uint32_t, uint32_t>(
3310 static_cast<uint32_t>(free_start - p->address()),
3311 static_cast<uint32_t>(p->area_end() - p->address())));
3412 } 3312 }
3413 } 3313 }
3414 3314
3315 // Clear invalid typed slots after collecting all free ranges.
3316 if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
3317 p->typed_old_to_new_slots()->RemoveInvaldSlots(free_ranges);
3318 }
3319
3415 // Clear the mark bits of that page and reset live bytes count. 3320 // Clear the mark bits of that page and reset live bytes count.
3416 p->ClearLiveness(); 3321 p->ClearLiveness();
3417 3322
3418 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); 3323 p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
3419 if (free_list_mode == IGNORE_FREE_LIST) return 0; 3324 if (free_list_mode == IGNORE_FREE_LIST) return 0;
3420 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); 3325 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
3421 } 3326 }
3422 3327
3423 void MarkCompactCollector::InvalidateCode(Code* code) { 3328 void MarkCompactCollector::InvalidateCode(Code* code) {
3424 Page* page = Page::FromAddress(code->address()); 3329 Page* page = Page::FromAddress(code->address());
(...skipping 405 matching lines...) Expand 10 before | Expand all | Expand 10 after
3830 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) { 3735 if (page->concurrent_sweeping_state().Value() != Page::kSweepingPending) {
3831 page->mutex()->Unlock(); 3736 page->mutex()->Unlock();
3832 return 0; 3737 return 0;
3833 } 3738 }
3834 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); 3739 page->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress);
3835 const Sweeper::FreeSpaceTreatmentMode free_space_mode = 3740 const Sweeper::FreeSpaceTreatmentMode free_space_mode =
3836 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE; 3741 Heap::ShouldZapGarbage() ? ZAP_FREE_SPACE : IGNORE_FREE_SPACE;
3837 if (identity == NEW_SPACE) { 3742 if (identity == NEW_SPACE) {
3838 RawSweep(page, IGNORE_FREE_LIST, free_space_mode); 3743 RawSweep(page, IGNORE_FREE_LIST, free_space_mode);
3839 } else { 3744 } else {
3840 if (identity == CODE_SPACE) {
3841 RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(heap_, page);
3842 }
3843 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode); 3745 max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
3844 } 3746 }
3845 3747
3846 // After finishing sweeping of a page we clean up its remembered set. 3748 // After finishing sweeping of a page we clean up its remembered set.
3847 if (page->typed_old_to_new_slots()) { 3749 if (page->typed_old_to_new_slots()) {
3848 page->typed_old_to_new_slots()->FreeToBeFreedChunks(); 3750 page->typed_old_to_new_slots()->FreeToBeFreedChunks();
3849 } 3751 }
3850 if (page->old_to_new_slots()) { 3752 if (page->old_to_new_slots()) {
3851 page->old_to_new_slots()->FreeToBeFreedBuckets(); 3753 page->old_to_new_slots()->FreeToBeFreedBuckets();
3852 } 3754 }
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
3900 bool unused_page_present = false; 3802 bool unused_page_present = false;
3901 3803
3902 // Loop needs to support deletion if live bytes == 0 for a page. 3804 // Loop needs to support deletion if live bytes == 0 for a page.
3903 for (auto it = space->begin(); it != space->end();) { 3805 for (auto it = space->begin(); it != space->end();) {
3904 Page* p = *(it++); 3806 Page* p = *(it++);
3905 DCHECK(p->SweepingDone()); 3807 DCHECK(p->SweepingDone());
3906 3808
3907 if (p->IsEvacuationCandidate()) { 3809 if (p->IsEvacuationCandidate()) {
3908 // Will be processed in EvacuateNewSpaceAndCandidates. 3810 // Will be processed in EvacuateNewSpaceAndCandidates.
3909 DCHECK(evacuation_candidates_.length() > 0); 3811 DCHECK(evacuation_candidates_.length() > 0);
3910 DCHECK(!p->HasBlackAreas());
3911 continue; 3812 continue;
3912 } 3813 }
3913 3814
3914 if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) { 3815 if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
3915 // We need to sweep the page to get it into an iterable state again. Note 3816 // We need to sweep the page to get it into an iterable state again. Note
3916 // that this adds unusable memory into the free list that is later on 3817 // that this adds unusable memory into the free list that is later on
3917 // (in the free list) dropped again. Since we only use the flag for 3818 // (in the free list) dropped again. Since we only use the flag for
3918 // testing this is fine. 3819 // testing this is fine.
3919 p->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress); 3820 p->concurrent_sweeping_state().SetValue(Page::kSweepingInProgress);
3920 Sweeper::RawSweep(p, Sweeper::IGNORE_FREE_LIST, 3821 Sweeper::RawSweep(p, Sweeper::IGNORE_FREE_LIST,
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
4010 // The target is always in old space, we don't have to record the slot in 3911 // The target is always in old space, we don't have to record the slot in
4011 // the old-to-new remembered set. 3912 // the old-to-new remembered set.
4012 DCHECK(!heap()->InNewSpace(target)); 3913 DCHECK(!heap()->InNewSpace(target));
4013 RecordRelocSlot(host, &rinfo, target); 3914 RecordRelocSlot(host, &rinfo, target);
4014 } 3915 }
4015 } 3916 }
4016 } 3917 }
4017 3918
4018 } // namespace internal 3919 } // namespace internal
4019 } // namespace v8 3920 } // namespace v8
OLDNEW
« no previous file with comments | « src/heap/mark-compact.h ('k') | src/heap/remembered-set.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698