OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 73 matching lines...)
84 for (Object** current = start; current < end; current++) { | 84 for (Object** current = start; current < end; current++) { |
85 if ((*current)->IsHeapObject()) { | 85 if ((*current)->IsHeapObject()) { |
86 HeapObject* object = HeapObject::cast(*current); | 86 HeapObject* object = HeapObject::cast(*current); |
87 CHECK(heap_->mark_compact_collector()->IsMarked(object)); | 87 CHECK(heap_->mark_compact_collector()->IsMarked(object)); |
88 } | 88 } |
89 } | 89 } |
90 } | 90 } |
91 | 91 |
92 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 92 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
93 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 93 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
94 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || | 94 if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(), |
95 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || | 95 rinfo->target_object())) { |
96 !rinfo->target_object()->IsMap() || | |
97 !Map::cast(rinfo->target_object())->CanTransition()) { | |
98 VisitPointer(rinfo->target_object_address()); | 96 VisitPointer(rinfo->target_object_address()); |
99 } | 97 } |
100 } | 98 } |
101 | 99 |
102 private: | 100 private: |
103 Heap* heap_; | 101 Heap* heap_; |
104 }; | 102 }; |
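Note on the VisitEmbeddedPointer change above: the four inline tests on the old side (two flags, the code kind, and the map's ability to transition) are folded into a single Code::IsWeakEmbeddedObject predicate. A minimal sketch of what such a helper would consolidate, assuming it merely re-packages the old conditions (the real implementation may also admit non-map objects, which is presumably the point of the rename):

    // Sketch only; mirrors the conditions deleted from the old side.
    static bool IsWeakEmbeddedObject(Code::Kind kind, Object* object) {
      // Only optimized code embeds objects weakly, and among maps only
      // those that can still transition are treated weakly.
      return FLAG_weak_embedded_maps_in_optimized_code && FLAG_collect_maps &&
             kind == Code::OPTIMIZED_FUNCTION &&
             object->IsMap() && Map::cast(object)->CanTransition();
    }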
105 | 103 |
106 | 104 |
107 static void VerifyMarking(Heap* heap, Address bottom, Address top) { | 105 static void VerifyMarking(Heap* heap, Address bottom, Address top) { |
(...skipping 318 matching lines...)
426 | 424 |
427 if (!FLAG_collect_maps) ReattachInitialMaps(); | 425 if (!FLAG_collect_maps) ReattachInitialMaps(); |
428 | 426 |
429 #ifdef DEBUG | 427 #ifdef DEBUG |
430 if (FLAG_verify_native_context_separation) { | 428 if (FLAG_verify_native_context_separation) { |
431 VerifyNativeContextSeparation(heap_); | 429 VerifyNativeContextSeparation(heap_); |
432 } | 430 } |
433 #endif | 431 #endif |
434 | 432 |
435 #ifdef VERIFY_HEAP | 433 #ifdef VERIFY_HEAP |
436 if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code && | 434 if (heap()->weak_embedded_objects_verification_enabled()) { |
437 heap()->weak_embedded_maps_verification_enabled()) { | 435 VerifyWeakEmbeddedObjectsInOptimizedCode(); |
438 VerifyWeakEmbeddedMapsInOptimizedCode(); | |
439 } | 436 } |
440 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { | 437 if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) { |
441 VerifyOmittedMapChecks(); | 438 VerifyOmittedMapChecks(); |
442 } | 439 } |
443 #endif | 440 #endif |
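The flag tests dropped from the VERIFY_HEAP guard above (FLAG_collect_maps, FLAG_weak_embedded_maps_in_optimized_code) do not reappear on the new side, so they presumably moved behind the renamed heap accessor. One plausible shape, purely illustrative (the actual definition is not visible in this hunk):

    // Hypothetical heap.h accessor; assumed, not shown in this patch.
    bool weak_embedded_objects_verification_enabled() {
      return FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code;
    }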
444 | 441 |
445 Finish(); | 442 Finish(); |
446 | 443 |
447 if (marking_parity_ == EVEN_MARKING_PARITY) { | 444 if (marking_parity_ == EVEN_MARKING_PARITY) { |
448 marking_parity_ = ODD_MARKING_PARITY; | 445 marking_parity_ = ODD_MARKING_PARITY; |
(...skipping 45 matching lines...)
494 | 491 |
495 LargeObjectIterator it(heap_->lo_space()); | 492 LargeObjectIterator it(heap_->lo_space()); |
496 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 493 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
497 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 494 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
498 CHECK(Marking::IsWhite(mark_bit)); | 495 CHECK(Marking::IsWhite(mark_bit)); |
499 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 496 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
500 } | 497 } |
501 } | 498 } |
502 | 499 |
503 | 500 |
504 void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() { | 501 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() { |
505 HeapObjectIterator code_iterator(heap()->code_space()); | 502 HeapObjectIterator code_iterator(heap()->code_space()); |
506 for (HeapObject* obj = code_iterator.Next(); | 503 for (HeapObject* obj = code_iterator.Next(); |
507 obj != NULL; | 504 obj != NULL; |
508 obj = code_iterator.Next()) { | 505 obj = code_iterator.Next()) { |
509 Code* code = Code::cast(obj); | 506 Code* code = Code::cast(obj); |
510 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; | 507 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; |
511 if (WillBeDeoptimized(code)) continue; | 508 if (WillBeDeoptimized(code)) continue; |
512 code->VerifyEmbeddedMapsDependency(); | 509 code->VerifyEmbeddedObjectsDependency(); |
513 } | 510 } |
514 } | 511 } |
515 | 512 |
516 | 513 |
517 void MarkCompactCollector::VerifyOmittedMapChecks() { | 514 void MarkCompactCollector::VerifyOmittedMapChecks() { |
518 HeapObjectIterator iterator(heap()->map_space()); | 515 HeapObjectIterator iterator(heap()->map_space()); |
519 for (HeapObject* obj = iterator.Next(); | 516 for (HeapObject* obj = iterator.Next(); |
520 obj != NULL; | 517 obj != NULL; |
521 obj = iterator.Next()) { | 518 obj = iterator.Next()) { |
522 Map* map = Map::cast(obj); | 519 Map* map = Map::cast(obj); |
(...skipping 943 matching lines...)
1466 JSWeakCollection::kTableOffset); | 1463 JSWeakCollection::kTableOffset); |
1467 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( | 1464 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( |
1468 map->GetHeap(), | 1465 map->GetHeap(), |
1469 object, | 1466 object, |
1470 JSWeakCollection::kTableOffset + kPointerSize, | 1467 JSWeakCollection::kTableOffset + kPointerSize, |
1471 object_size); | 1468 object_size); |
1472 | 1469 |
1473 // Mark the backing hash table without pushing it on the marking stack. | 1470 // Mark the backing hash table without pushing it on the marking stack. |
1474 Object* table_object = weak_collection->table(); | 1471 Object* table_object = weak_collection->table(); |
1475 if (!table_object->IsHashTable()) return; | 1472 if (!table_object->IsHashTable()) return; |
1476 ObjectHashTable* table = ObjectHashTable::cast(table_object); | 1473 WeakHashTable* table = WeakHashTable::cast(table_object); |
1477 Object** table_slot = | 1474 Object** table_slot = |
1478 HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset); | 1475 HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset); |
1479 MarkBit table_mark = Marking::MarkBitFrom(table); | 1476 MarkBit table_mark = Marking::MarkBitFrom(table); |
1480 collector->RecordSlot(table_slot, table_slot, table); | 1477 collector->RecordSlot(table_slot, table_slot, table); |
1481 if (!table_mark.Get()) collector->SetMark(table, table_mark); | 1478 if (!table_mark.Get()) collector->SetMark(table, table_mark); |
1482 // Recording the map slot can be skipped, because maps are not compacted. | 1479 // Recording the map slot can be skipped, because maps are not compacted. |
1483 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); | 1480 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); |
1484 ASSERT(MarkCompactCollector::IsMarked(table->map())); | 1481 ASSERT(MarkCompactCollector::IsMarked(table->map())); |
1485 } | 1482 } |
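For context on the weak-collection hunk above: the body is visited in two strong ranges that deliberately straddle the table slot, and the table is then marked by hand rather than pushed on the marking deque, so the marker never transitively walks the weakly held entries. Schematically (offsets as in the code above):

    // [ fields before kTableOffset | table | fields after kTableOffset ]
    //   strong ranges: [start, kTableOffset) and
    //                  [kTableOffset + kPointerSize, object_size)
    //   table slot:    RecordSlot + SetMark only; entries stay weak.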
1486 | 1483 |
(...skipping 621 matching lines...)
2108 | 2105 |
2109 | 2106 |
2110 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 2107 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
2111 // Mark the heap roots including global variables, stack variables, | 2108 // Mark the heap roots including global variables, stack variables, |
2112 // etc., and all objects reachable from them. | 2109 // etc., and all objects reachable from them. |
2113 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 2110 heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
2114 | 2111 |
2115 // Handle the string table specially. | 2112 // Handle the string table specially. |
2116 MarkStringTable(visitor); | 2113 MarkStringTable(visitor); |
2117 | 2114 |
| 2115 MarkWeakObjectToCodeTable(); |
| 2116 |
2118 // There may be overflowed objects in the heap. Visit them now. | 2117 // There may be overflowed objects in the heap. Visit them now. |
2119 while (marking_deque_.overflowed()) { | 2118 while (marking_deque_.overflowed()) { |
2120 RefillMarkingDeque(); | 2119 RefillMarkingDeque(); |
2121 EmptyMarkingDeque(); | 2120 EmptyMarkingDeque(); |
2122 } | 2121 } |
2123 } | 2122 } |
2124 | 2123 |
2125 | 2124 |
2126 void MarkCompactCollector::MarkImplicitRefGroups() { | 2125 void MarkCompactCollector::MarkImplicitRefGroups() { |
2127 List<ImplicitRefGroup*>* ref_groups = | 2126 List<ImplicitRefGroup*>* ref_groups = |
(...skipping 20 matching lines...)
2148 } | 2147 } |
2149 | 2148 |
2150 // Once the entire group has been marked, dispose it because it's | 2149 // Once the entire group has been marked, dispose it because it's |
2151 // not needed anymore. | 2150 // not needed anymore. |
2152 delete entry; | 2151 delete entry; |
2153 } | 2152 } |
2154 ref_groups->Rewind(last); | 2153 ref_groups->Rewind(last); |
2155 } | 2154 } |
2156 | 2155 |
2157 | 2156 |
| 2157 void MarkCompactCollector::MarkWeakObjectToCodeTable() { |
| 2158 HeapObject* weak_object_to_code_table = |
| 2159 HeapObject::cast(heap()->weak_object_to_code_table()); |
| 2160 if (!IsMarked(weak_object_to_code_table)) { |
| 2161 MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table); |
| 2162 SetMark(weak_object_to_code_table, mark); |
| 2163 } |
| 2164 } |
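MarkWeakObjectToCodeTable's only call site added by this patch is in MarkRoots (earlier hunk): the table object itself must be marked strongly so it survives the cycle, while its entries are left to the clearing pass further down. Rough marking order:

    // IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
    // MarkStringTable(visitor);
    // MarkWeakObjectToCodeTable();  // keep the table; entries remain weak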
| 2165 |
| 2166 |
2158 // Mark all objects reachable from the objects on the marking stack. | 2167 // Mark all objects reachable from the objects on the marking stack. |
2159 // Before: the marking stack contains zero or more heap object pointers. | 2168 // Before: the marking stack contains zero or more heap object pointers. |
2160 // After: the marking stack is empty, and all objects reachable from the | 2169 // After: the marking stack is empty, and all objects reachable from the |
2161 // marking stack have been marked, or are overflowed in the heap. | 2170 // marking stack have been marked, or are overflowed in the heap. |
2162 void MarkCompactCollector::EmptyMarkingDeque() { | 2171 void MarkCompactCollector::EmptyMarkingDeque() { |
2163 while (!marking_deque_.IsEmpty()) { | 2172 while (!marking_deque_.IsEmpty()) { |
2164 HeapObject* object = marking_deque_.Pop(); | 2173 HeapObject* object = marking_deque_.Pop(); |
2165 ASSERT(object->IsHeapObject()); | 2174 ASSERT(object->IsHeapObject()); |
2166 ASSERT(heap()->Contains(object)); | 2175 ASSERT(heap()->Contains(object)); |
2167 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); | 2176 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); |
(...skipping 347 matching lines...)
2515 // Since it survived the GC, reattach it now. | 2524 // Since it survived the GC, reattach it now. |
2516 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); | 2525 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); |
2517 } | 2526 } |
2518 | 2527 |
2519 ClearNonLivePrototypeTransitions(map); | 2528 ClearNonLivePrototypeTransitions(map); |
2520 ClearNonLiveMapTransitions(map, map_mark); | 2529 ClearNonLiveMapTransitions(map, map_mark); |
2521 | 2530 |
2522 if (map_mark.Get()) { | 2531 if (map_mark.Get()) { |
2523 ClearNonLiveDependentCode(map->dependent_code()); | 2532 ClearNonLiveDependentCode(map->dependent_code()); |
2524 } else { | 2533 } else { |
2525 ClearAndDeoptimizeDependentCode(map); | 2534 ClearAndDeoptimizeDependentCode(map->dependent_code()); |
| 2535 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); |
2526 } | 2536 } |
2527 } | 2537 } |
2528 | 2538 |
2529 // Iterate over property cell space, removing dependent code that is not | 2539 // Iterate over property cell space, removing dependent code that is not |
2530 // otherwise kept alive by strong references. | 2540 // otherwise kept alive by strong references. |
2531 HeapObjectIterator cell_iterator(heap_->property_cell_space()); | 2541 HeapObjectIterator cell_iterator(heap_->property_cell_space()); |
2532 for (HeapObject* cell = cell_iterator.Next(); | 2542 for (HeapObject* cell = cell_iterator.Next(); |
2533 cell != NULL; | 2543 cell != NULL; |
2534 cell = cell_iterator.Next()) { | 2544 cell = cell_iterator.Next()) { |
2535 if (IsMarked(cell)) { | 2545 if (IsMarked(cell)) { |
2536 ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code()); | 2546 ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code()); |
2537 } | 2547 } |
2538 } | 2548 } |
| 2549 |
| 2550 if (heap_->weak_object_to_code_table()->IsHashTable()) { |
| 2551 WeakHashTable* table = |
| 2552 WeakHashTable::cast(heap_->weak_object_to_code_table()); |
| 2553 uint32_t capacity = table->Capacity(); |
| 2554 for (uint32_t i = 0; i < capacity; i++) { |
| 2555 uint32_t key_index = table->EntryToIndex(i); |
| 2556 Object* key = table->get(key_index); |
| 2557 if (!table->IsKey(key)) continue; |
| 2558 uint32_t value_index = table->EntryToValueIndex(i); |
| 2559 Object* value = table->get(value_index); |
| 2560 if (IsMarked(key)) { |
| 2561 if (!IsMarked(value)) { |
| 2562 HeapObject* obj = HeapObject::cast(value); |
| 2563 MarkBit mark = Marking::MarkBitFrom(obj); |
| 2564 SetMark(obj, mark); |
| 2565 } |
| 2566 ClearNonLiveDependentCode(DependentCode::cast(value)); |
| 2567 } else { |
| 2568 ClearAndDeoptimizeDependentCode(DependentCode::cast(value)); |
| 2569 table->set(key_index, heap_->the_hole_value()); |
| 2570 table->set(value_index, heap_->the_hole_value()); |
| 2571 } |
| 2572 } |
| 2573 } |
2539 } | 2574 } |
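The new weak_object_to_code_table loop above indexes the table's raw backing store directly. It relies on the usual flat (key, value) pair layout of V8 hash tables; as a sketch (the exact constants are not part of this hunk):

    // key_index   = EntryToIndex(i)        // e.g. start_index + 2 * i
    // value_index = EntryToValueIndex(i)   // key_index + 1
    // live key  -> mark the value if needed, prune dead dependent code
    // dead key  -> deoptimize dependents, hole out both slots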
2540 | 2575 |
2541 | 2576 |
2542 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { | 2577 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { |
2543 int number_of_transitions = map->NumberOfProtoTransitions(); | 2578 int number_of_transitions = map->NumberOfProtoTransitions(); |
2544 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); | 2579 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); |
2545 | 2580 |
2546 int new_number_of_transitions = 0; | 2581 int new_number_of_transitions = 0; |
2547 const int header = Map::kProtoTransitionHeaderSize; | 2582 const int header = Map::kProtoTransitionHeaderSize; |
2548 const int proto_offset = header + Map::kProtoTransitionPrototypeOffset; | 2583 const int proto_offset = header + Map::kProtoTransitionPrototypeOffset; |
(...skipping 45 matching lines...)
2594   // Follow the back pointer to check whether this is a map transition  | 2629   // Follow the back pointer to check whether this is a map transition
2595   // from a live map to a dead path; if so, clear the parent's transitions.  | 2630   // from a live map to a dead path; if so, clear the parent's transitions.
2596 bool current_is_alive = map_mark.Get(); | 2631 bool current_is_alive = map_mark.Get(); |
2597 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); | 2632 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
2598 if (!current_is_alive && parent_is_alive) { | 2633 if (!current_is_alive && parent_is_alive) { |
2599 parent->ClearNonLiveTransitions(heap()); | 2634 parent->ClearNonLiveTransitions(heap()); |
2600 } | 2635 } |
2601 } | 2636 } |
2602 | 2637 |
2603 | 2638 |
2604 void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) { | 2639 void MarkCompactCollector::ClearAndDeoptimizeDependentCode( |
| 2640 DependentCode* entries) { |
2605 DisallowHeapAllocation no_allocation; | 2641 DisallowHeapAllocation no_allocation; |
2606 DependentCode* entries = map->dependent_code(); | |
2607 DependentCode::GroupStartIndexes starts(entries); | 2642 DependentCode::GroupStartIndexes starts(entries); |
2608 int number_of_entries = starts.number_of_entries(); | 2643 int number_of_entries = starts.number_of_entries(); |
2609 if (number_of_entries == 0) return; | 2644 if (number_of_entries == 0) return; |
2610 for (int i = 0; i < number_of_entries; i++) { | 2645 for (int i = 0; i < number_of_entries; i++) { |
2611 // If the entry is compilation info then the map must be alive, | 2646 // If the entry is compilation info then the map must be alive, |
2612 // and ClearAndDeoptimizeDependentCode shouldn't be called. | 2647 // and ClearAndDeoptimizeDependentCode shouldn't be called. |
2613 ASSERT(entries->is_code_at(i)); | 2648 ASSERT(entries->is_code_at(i)); |
2614 Code* code = entries->code_at(i); | 2649 Code* code = entries->code_at(i); |
2615 | 2650 |
2616 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2651 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
2617 code->set_marked_for_deoptimization(true); | 2652 code->set_marked_for_deoptimization(true); |
2618 have_code_to_deoptimize_ = true; | 2653 have_code_to_deoptimize_ = true; |
2619 } | 2654 } |
2620 entries->clear_at(i); | 2655 entries->clear_at(i); |
2621 } | 2656 } |
2622 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); | |
2623 } | 2657 } |
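Changing the parameter from Map* to DependentCode* lets the helper serve both owners of dependent code, with each caller now responsible for resetting its own slot. The two call sites from this patch, side by side:

    // Dead map (map hunk above):
    ClearAndDeoptimizeDependentCode(map->dependent_code());
    map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));

    // Dead key in the weak object-to-code table:
    ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
    table->set(key_index, heap_->the_hole_value());
    table->set(value_index, heap_->the_hole_value());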
2624 | 2658 |
2625 | 2659 |
2626 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { | 2660 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { |
2627 DisallowHeapAllocation no_allocation; | 2661 DisallowHeapAllocation no_allocation; |
2628 DependentCode::GroupStartIndexes starts(entries); | 2662 DependentCode::GroupStartIndexes starts(entries); |
2629 int number_of_entries = starts.number_of_entries(); | 2663 int number_of_entries = starts.number_of_entries(); |
2630 if (number_of_entries == 0) return; | 2664 if (number_of_entries == 0) return; |
2631 int new_number_of_entries = 0; | 2665 int new_number_of_entries = 0; |
2632 // Go through all groups, remove dead codes and compact. | 2666 // Go through all groups, remove dead codes and compact. |
(...skipping 817 matching lines...)
3450 cell = js_global_property_cell_iterator.Next()) { | 3484 cell = js_global_property_cell_iterator.Next()) { |
3451 if (cell->IsPropertyCell()) { | 3485 if (cell->IsPropertyCell()) { |
3452 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); | 3486 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); |
3453 } | 3487 } |
3454 } | 3488 } |
3455 | 3489 |
3456 // Update the head of the native contexts list in the heap. | 3490 // Update the head of the native contexts list in the heap. |
3457 updating_visitor.VisitPointer(heap_->native_contexts_list_address()); | 3491 updating_visitor.VisitPointer(heap_->native_contexts_list_address()); |
3458 | 3492 |
3459 heap_->string_table()->Iterate(&updating_visitor); | 3493 heap_->string_table()->Iterate(&updating_visitor); |
| 3494 updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address()); |
| 3495 if (heap_->weak_object_to_code_table()->IsHashTable()) { |
| 3496 WeakHashTable* table = |
| 3497 WeakHashTable::cast(heap_->weak_object_to_code_table()); |
| 3498 table->Iterate(&updating_visitor); |
| 3499 table->Rehash(heap_->undefined_value()); |
| 3500 } |
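On the Rehash call above: assuming the weak table hashes its keys by object address (the usual scheme for identity tables of this kind), compaction invalidates every bucket assignment, so updating the slots alone is not enough. The ordering that matters:

    // 1. VisitPointer(table address slot)  -> the table itself may move
    // 2. table->Iterate(&updating_visitor) -> fix key/value slots in place
    // 3. table->Rehash(undefined)          -> re-bucket under new addresses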
3460 | 3501 |
3461 // Update pointers from external string table. | 3502 // Update pointers from external string table. |
3462 heap_->UpdateReferencesInExternalStringTable( | 3503 heap_->UpdateReferencesInExternalStringTable( |
3463 &UpdateReferenceInExternalStringTableEntry); | 3504 &UpdateReferenceInExternalStringTableEntry); |
3464 | 3505 |
3465 if (!FLAG_watch_ic_patching) { | 3506 if (!FLAG_watch_ic_patching) { |
3466 // Update JSFunction pointers from the runtime profiler. | 3507 // Update JSFunction pointers from the runtime profiler. |
3467 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( | 3508 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( |
3468 &updating_visitor); | 3509 &updating_visitor); |
3469 } | 3510 } |
(...skipping 843 matching lines...)
4313 while (buffer != NULL) { | 4354 while (buffer != NULL) { |
4314 SlotsBuffer* next_buffer = buffer->next(); | 4355 SlotsBuffer* next_buffer = buffer->next(); |
4315 DeallocateBuffer(buffer); | 4356 DeallocateBuffer(buffer); |
4316 buffer = next_buffer; | 4357 buffer = next_buffer; |
4317 } | 4358 } |
4318 *buffer_address = NULL; | 4359 *buffer_address = NULL; |
4319 } | 4360 } |
4320 | 4361 |
4321 | 4362 |
4322 } } // namespace v8::internal | 4363 } } // namespace v8::internal |