| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 79 matching lines...) | |
| 90 // Variables set based on semispace_size_ and old_generation_size_ in | 90 // Variables set based on semispace_size_ and old_generation_size_ in |
| 91 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) | 91 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_) |
| 92 // Will be 4 * reserved_semispace_size_ to ensure that the young | 92 // Will be 4 * reserved_semispace_size_ to ensure that the young |
| 93 // generation can be aligned to its size. | 93 // generation can be aligned to its size. |
| 94 survived_since_last_expansion_(0), | 94 survived_since_last_expansion_(0), |
| 95 sweep_generation_(0), | 95 sweep_generation_(0), |
| 96 always_allocate_scope_depth_(0), | 96 always_allocate_scope_depth_(0), |
| 97 linear_allocation_scope_depth_(0), | 97 linear_allocation_scope_depth_(0), |
| 98 contexts_disposed_(0), | 98 contexts_disposed_(0), |
| 99 global_ic_age_(0), | 99 global_ic_age_(0), |
| 100 flush_monomorphic_ics_(false), | |
| 101 scan_on_scavenge_pages_(0), | 100 scan_on_scavenge_pages_(0), |
| 102 new_space_(this), | 101 new_space_(this), |
| 103 old_pointer_space_(NULL), | 102 old_pointer_space_(NULL), |
| 104 old_data_space_(NULL), | 103 old_data_space_(NULL), |
| 105 code_space_(NULL), | 104 code_space_(NULL), |
| 106 map_space_(NULL), | 105 map_space_(NULL), |
| 107 cell_space_(NULL), | 106 cell_space_(NULL), |
| 108 lo_space_(NULL), | 107 lo_space_(NULL), |
| 109 gc_state_(NOT_IN_GC), | 108 gc_state_(NOT_IN_GC), |
| 110 gc_post_processing_depth_(0), | 109 gc_post_processing_depth_(0), |
| (...skipping 58 matching lines...) | |
| 169 intptr_t max_virtual = OS::MaxVirtualMemory(); | 168 intptr_t max_virtual = OS::MaxVirtualMemory(); |
| 170 | 169 |
| 171 if (max_virtual > 0) { | 170 if (max_virtual > 0) { |
| 172 if (code_range_size_ > 0) { | 171 if (code_range_size_ > 0) { |
| 173 // Reserve no more than 1/8 of the memory for the code range. | 172 // Reserve no more than 1/8 of the memory for the code range. |
| 174 code_range_size_ = Min(code_range_size_, max_virtual >> 3); | 173 code_range_size_ = Min(code_range_size_, max_virtual >> 3); |
| 175 } | 174 } |
| 176 } | 175 } |
| 177 | 176 |
| 178 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); | 177 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); |
| 179 native_contexts_list_ = NULL; | 178 global_contexts_list_ = NULL; |
| 180 mark_compact_collector_.heap_ = this; | 179 mark_compact_collector_.heap_ = this; |
| 181 external_string_table_.heap_ = this; | 180 external_string_table_.heap_ = this; |
| 182 // Put a dummy entry in the remembered pages so we can find the list | 181 // Put a dummy entry in the remembered pages so we can find the list |
| 183 // in the minidump even if there are no real unmapped pages. | 182 // in the minidump even if there are no real unmapped pages. |
| 184 RememberUnmappedPage(NULL, false); | 183 RememberUnmappedPage(NULL, false); |
| 185 | 184 |
| 186 ClearObjectStats(true); | 185 ClearObjectStats(true); |
| 187 } | 186 } |
| 188 | 187 |
| 189 | 188 |
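Note on the code-range clamp in the hunk above: `max_virtual >> 3` is simply a shift-based division by eight. A minimal standalone sketch, with a hypothetical `CapCodeRange` helper standing in for the in-place update of `code_range_size_`:

```cpp
#include <algorithm>
#include <cstdint>

// Hypothetical sketch; the real code mutates code_range_size_ directly.
int64_t CapCodeRange(int64_t requested, int64_t max_virtual) {
  if (max_virtual <= 0 || requested <= 0) return requested;
  // Reserve no more than 1/8 of the addressable memory: >> 3 divides by 8.
  return std::min(requested, max_virtual >> 3);
}

// Example: with 4 GB of virtual memory, a 1 GB request is capped at 512 MB:
// CapCodeRange(1LL << 30, 4LL << 30) == 1LL << 29.
```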
| (...skipping 559 matching lines...) | |
| 749 | 748 |
| 750 // Committing memory to from space failed again. | 749 // Committing memory to from space failed again. |
| 751 // Memory is exhausted and we will die. | 750 // Memory is exhausted and we will die. |
| 752 V8::FatalProcessOutOfMemory("Committing semi space failed."); | 751 V8::FatalProcessOutOfMemory("Committing semi space failed."); |
| 753 } | 752 } |
| 754 | 753 |
| 755 | 754 |
| 756 void Heap::ClearJSFunctionResultCaches() { | 755 void Heap::ClearJSFunctionResultCaches() { |
| 757 if (isolate_->bootstrapper()->IsActive()) return; | 756 if (isolate_->bootstrapper()->IsActive()) return; |
| 758 | 757 |
| 759 Object* context = native_contexts_list_; | 758 Object* context = global_contexts_list_; |
| 760 while (!context->IsUndefined()) { | 759 while (!context->IsUndefined()) { |
| 761 // Get the caches for this context. GC can happen when the context | 760 // Get the caches for this context. GC can happen when the context |
| 762 // is not fully initialized, so the caches can be undefined. | 761 // is not fully initialized, so the caches can be undefined. |
| 763 Object* caches_or_undefined = | 762 Object* caches_or_undefined = |
| 764 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); | 763 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); |
| 765 if (!caches_or_undefined->IsUndefined()) { | 764 if (!caches_or_undefined->IsUndefined()) { |
| 766 FixedArray* caches = FixedArray::cast(caches_or_undefined); | 765 FixedArray* caches = FixedArray::cast(caches_or_undefined); |
| 767 // Clear the caches: | 766 // Clear the caches: |
| 768 int length = caches->length(); | 767 int length = caches->length(); |
| 769 for (int i = 0; i < length; i++) { | 768 for (int i = 0; i < length; i++) { |
| 770 JSFunctionResultCache::cast(caches->get(i))->Clear(); | 769 JSFunctionResultCache::cast(caches->get(i))->Clear(); |
| 771 } | 770 } |
| 772 } | 771 } |
| 773 // Get the next context: | 772 // Get the next context: |
| 774 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 773 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 775 } | 774 } |
| 776 } | 775 } |
| 777 | 776 |
| 778 | 777 |
| 779 | 778 |
| 780 void Heap::ClearNormalizedMapCaches() { | 779 void Heap::ClearNormalizedMapCaches() { |
| 781 if (isolate_->bootstrapper()->IsActive() && | 780 if (isolate_->bootstrapper()->IsActive() && |
| 782 !incremental_marking()->IsMarking()) { | 781 !incremental_marking()->IsMarking()) { |
| 783 return; | 782 return; |
| 784 } | 783 } |
| 785 | 784 |
| 786 Object* context = native_contexts_list_; | 785 Object* context = global_contexts_list_; |
| 787 while (!context->IsUndefined()) { | 786 while (!context->IsUndefined()) { |
| 788 // GC can happen when the context is not fully initialized, | 787 // GC can happen when the context is not fully initialized, |
| 789 // so the cache can be undefined. | 788 // so the cache can be undefined. |
| 790 Object* cache = | 789 Object* cache = |
| 791 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 790 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 792 if (!cache->IsUndefined()) { | 791 if (!cache->IsUndefined()) { |
| 793 NormalizedMapCache::cast(cache)->Clear(); | 792 NormalizedMapCache::cast(cache)->Clear(); |
| 794 } | 793 } |
| 795 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 794 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 796 } | 795 } |
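Both `ClearJSFunctionResultCaches` and `ClearNormalizedMapCaches` above walk the same intrusive list: contexts are chained through the `Context::NEXT_CONTEXT_LINK` slot, `global_contexts_list_` holds the head, and `undefined` terminates the chain. A hedged sketch of the idiom, with plain pointers standing in for V8's tagged values and `nullptr` playing the role of `undefined_value()` (all names simplified):

```cpp
// Illustrative stand-in types; real contexts are FixedArrays of tagged slots.
struct Context {
  Context* next_context_link;  // stands in for get(Context::NEXT_CONTEXT_LINK)
  bool caches_present;         // caches may be missing mid-bootstrap
  void ClearCaches() { /* drop this context's per-context caches */ }
};

void ClearAllContextCaches(Context* global_contexts_list) {
  for (Context* c = global_contexts_list; c != nullptr;
       c = c->next_context_link) {
    // GC can run before a context is fully initialized, hence the guard,
    // mirroring the IsUndefined() checks in both loops above.
    if (c->caches_present) c->ClearCaches();
  }
}
```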
| (...skipping 189 matching lines...) | |
| 986 mark_compact_collector_.CollectGarbage(); | 985 mark_compact_collector_.CollectGarbage(); |
| 987 | 986 |
| 988 LOG(isolate_, ResourceEvent("markcompact", "end")); | 987 LOG(isolate_, ResourceEvent("markcompact", "end")); |
| 989 | 988 |
| 990 gc_state_ = NOT_IN_GC; | 989 gc_state_ = NOT_IN_GC; |
| 991 | 990 |
| 992 isolate_->counters()->objs_since_last_full()->Set(0); | 991 isolate_->counters()->objs_since_last_full()->Set(0); |
| 993 | 992 |
| 994 contexts_disposed_ = 0; | 993 contexts_disposed_ = 0; |
| 995 | 994 |
| 996 flush_monomorphic_ics_ = false; | 995 isolate_->set_context_exit_happened(false); |
| 997 } | 996 } |
| 998 | 997 |
| 999 | 998 |
| 1000 void Heap::MarkCompactPrologue() { | 999 void Heap::MarkCompactPrologue() { |
| 1001 // At any old GC clear the keyed lookup cache to enable collection of unused | 1000 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 1002 // maps. | 1001 // maps. |
| 1003 isolate_->keyed_lookup_cache()->Clear(); | 1002 isolate_->keyed_lookup_cache()->Clear(); |
| 1004 isolate_->context_slot_cache()->Clear(); | 1003 isolate_->context_slot_cache()->Clear(); |
| 1005 isolate_->descriptor_lookup_cache()->Clear(); | 1004 isolate_->descriptor_lookup_cache()->Clear(); |
| 1006 StringSplitCache::Clear(string_split_cache()); | 1005 StringSplitCache::Clear(string_split_cache()); |
| (...skipping 282 matching lines...) | |
| 1289 for (HeapObject* heap_object = cell_iterator.Next(); | 1288 for (HeapObject* heap_object = cell_iterator.Next(); |
| 1290 heap_object != NULL; | 1289 heap_object != NULL; |
| 1291 heap_object = cell_iterator.Next()) { | 1290 heap_object = cell_iterator.Next()) { |
| 1292 if (heap_object->IsJSGlobalPropertyCell()) { | 1291 if (heap_object->IsJSGlobalPropertyCell()) { |
| 1293 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object); | 1292 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object); |
| 1294 Address value_address = cell->ValueAddress(); | 1293 Address value_address = cell->ValueAddress(); |
| 1295 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 1294 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 1296 } | 1295 } |
| 1297 } | 1296 } |
| 1298 | 1297 |
| 1299 // Scavenge objects reachable from the native contexts list directly. | 1298 // Scavenge objects reachable from the global contexts list directly. |
| 1300 scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_)); | 1299 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); |
| 1301 | 1300 |
| 1302 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1301 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1303 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | 1302 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
| 1304 &IsUnscavengedHeapObject); | 1303 &IsUnscavengedHeapObject); |
| 1305 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots( | 1304 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots( |
| 1306 &scavenge_visitor); | 1305 &scavenge_visitor); |
| 1307 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1306 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1308 | 1307 |
| 1309 UpdateNewSpaceReferencesInExternalStringTable( | 1308 UpdateNewSpaceReferencesInExternalStringTable( |
| 1310 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1309 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| (...skipping 139 matching lines...) | |
| 1450 } | 1449 } |
| 1451 | 1450 |
| 1452 return head; | 1451 return head; |
| 1453 } | 1452 } |
| 1454 | 1453 |
| 1455 | 1454 |
| 1456 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { | 1455 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { |
| 1457 Object* undefined = undefined_value(); | 1456 Object* undefined = undefined_value(); |
| 1458 Object* head = undefined; | 1457 Object* head = undefined; |
| 1459 Context* tail = NULL; | 1458 Context* tail = NULL; |
| 1460 Object* candidate = native_contexts_list_; | 1459 Object* candidate = global_contexts_list_; |
| 1461 | 1460 |
| 1462 // We don't record weak slots during marking or scavenges. | 1461 // We don't record weak slots during marking or scavenges. |
| 1463 // Instead we do it once when we complete the mark-compact cycle. | 1462 // Instead we do it once when we complete the mark-compact cycle. |
| 1464 // Note that the write barrier has no effect if we are already in the middle of | 1463 // Note that the write barrier has no effect if we are already in the middle of |
| 1465 // a compacting mark-sweep cycle and we have to record slots manually. | 1464 // a compacting mark-sweep cycle and we have to record slots manually. |
| 1466 bool record_slots = | 1465 bool record_slots = |
| 1467 gc_state() == MARK_COMPACT && | 1466 gc_state() == MARK_COMPACT && |
| 1468 mark_compact_collector()->is_compacting(); | 1467 mark_compact_collector()->is_compacting(); |
| 1469 | 1468 |
| 1470 while (candidate != undefined) { | 1469 while (candidate != undefined) { |
| (...skipping 52 matching lines...) | |
| 1523 | 1522 |
| 1524 // Terminate the list if there are one or more elements. | 1523 // Terminate the list if there are one or more elements. |
| 1525 if (tail != NULL) { | 1524 if (tail != NULL) { |
| 1526 tail->set_unchecked(this, | 1525 tail->set_unchecked(this, |
| 1527 Context::NEXT_CONTEXT_LINK, | 1526 Context::NEXT_CONTEXT_LINK, |
| 1528 Heap::undefined_value(), | 1527 Heap::undefined_value(), |
| 1529 UPDATE_WRITE_BARRIER); | 1528 UPDATE_WRITE_BARRIER); |
| 1530 } | 1529 } |
| 1531 | 1530 |
| 1532 // Update the head of the list of contexts. | 1531 // Update the head of the list of contexts. |
| 1533 native_contexts_list_ = head; | 1532 global_contexts_list_ = head; |
| 1534 } | 1533 } |
| 1535 | 1534 |
| 1536 | 1535 |
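`ProcessWeakReferences` above rebuilds that same context list, keeping only the contexts the retainer still reports as live. A hypothetical reduction of the loop (raw pointers instead of tagged values, a `bool` instead of `WeakObjectRetainer`; the real code additionally records slots manually while compacting, per the comment above):

```cpp
// Illustrative stand-in types, not V8's real Context layout.
struct Context {
  Context* next_context_link;
  bool retained;  // models retainer->RetainAs(candidate) != NULL
};

Context* ProcessWeakContextList(Context* candidate) {
  Context* head = nullptr;  // nullptr models undefined_value()
  Context* tail = nullptr;
  while (candidate != nullptr) {
    Context* next = candidate->next_context_link;
    if (candidate->retained) {
      if (tail == nullptr) {
        head = candidate;  // first survivor becomes the new head
      } else {
        tail->next_context_link = candidate;
      }
      tail = candidate;
    }
    candidate = next;  // dropped contexts are simply unlinked
  }
  if (tail != nullptr) tail->next_context_link = nullptr;  // terminate list
  return head;  // stored back into global_contexts_list_
}
```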
| 1537 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { | 1536 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { |
| 1538 AssertNoAllocation no_allocation; | 1537 AssertNoAllocation no_allocation; |
| 1539 | 1538 |
| 1540 class VisitorAdapter : public ObjectVisitor { | 1539 class VisitorAdapter : public ObjectVisitor { |
| 1541 public: | 1540 public: |
| 1542 explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor) | 1541 explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor) |
| 1543 : visitor_(visitor) {} | 1542 : visitor_(visitor) {} |
| (...skipping 105 matching lines...) | |
| 1649 class ScavengingVisitor : public StaticVisitorBase { | 1648 class ScavengingVisitor : public StaticVisitorBase { |
| 1650 public: | 1649 public: |
| 1651 static void Initialize() { | 1650 static void Initialize() { |
| 1652 table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString); | 1651 table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString); |
| 1653 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); | 1652 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); |
| 1654 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); | 1653 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); |
| 1655 table_.Register(kVisitByteArray, &EvacuateByteArray); | 1654 table_.Register(kVisitByteArray, &EvacuateByteArray); |
| 1656 table_.Register(kVisitFixedArray, &EvacuateFixedArray); | 1655 table_.Register(kVisitFixedArray, &EvacuateFixedArray); |
| 1657 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); | 1656 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); |
| 1658 | 1657 |
| 1659 table_.Register(kVisitNativeContext, | 1658 table_.Register(kVisitGlobalContext, |
| 1660 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1659 &ObjectEvacuationStrategy<POINTER_OBJECT>:: |
| 1661 template VisitSpecialized<Context::kSize>); | 1660 template VisitSpecialized<Context::kSize>); |
| 1662 | 1661 |
| 1663 table_.Register(kVisitConsString, | 1662 table_.Register(kVisitConsString, |
| 1664 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1663 &ObjectEvacuationStrategy<POINTER_OBJECT>:: |
| 1665 template VisitSpecialized<ConsString::kSize>); | 1664 template VisitSpecialized<ConsString::kSize>); |
| 1666 | 1665 |
| 1667 table_.Register(kVisitSlicedString, | 1666 table_.Register(kVisitSlicedString, |
| 1668 &ObjectEvacuationStrategy<POINTER_OBJECT>:: | 1667 &ObjectEvacuationStrategy<POINTER_OBJECT>:: |
| 1669 template VisitSpecialized<SlicedString::kSize>); | 1668 template VisitSpecialized<SlicedString::kSize>); |
| (...skipping 765 matching lines...) | |
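`ScavengingVisitor::Initialize` above fills a static dispatch table: one evacuation callback per visitor id, and global contexts get a fixed-size specialized visitor (`VisitSpecialized<Context::kSize>`) because their layout is statically known. An illustrative reduction with hypothetical enumerators and callbacks:

```cpp
// Hypothetical stand-ins for StaticVisitorBase's ids and callbacks.
enum VisitorId {
  kVisitByteArray,
  kVisitFixedArray,
  kVisitGlobalContext,
  kVisitorIdCount
};

struct HeapObject {
  VisitorId visitor_id;  // in V8 this is derived from the object's map
};

using EvacuateFn = void (*)(HeapObject*);
static EvacuateFn table[kVisitorIdCount];

static void EvacuateByteArray(HeapObject*) { /* copy raw bytes */ }
static void EvacuateFixedArray(HeapObject*) { /* copy, then visit pointers */ }
static void EvacuateGlobalContext(HeapObject*) { /* fixed-size pointer obj */ }

void InitializeTable() {
  table[kVisitByteArray] = &EvacuateByteArray;
  table[kVisitFixedArray] = &EvacuateFixedArray;
  table[kVisitGlobalContext] = &EvacuateGlobalContext;
}

// Dispatch: the object's visitor id selects the registered callback.
void Evacuate(HeapObject* obj) { table[obj->visitor_id](obj); }
```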
| 2435 { MaybeObject* maybe_obj = | 2434 { MaybeObject* maybe_obj = |
| 2436 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); | 2435 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); |
| 2437 if (!maybe_obj->ToObject(&obj)) return false; | 2436 if (!maybe_obj->ToObject(&obj)) return false; |
| 2438 } | 2437 } |
| 2439 set_module_context_map(Map::cast(obj)); | 2438 set_module_context_map(Map::cast(obj)); |
| 2440 | 2439 |
| 2441 { MaybeObject* maybe_obj = | 2440 { MaybeObject* maybe_obj = |
| 2442 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); | 2441 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); |
| 2443 if (!maybe_obj->ToObject(&obj)) return false; | 2442 if (!maybe_obj->ToObject(&obj)) return false; |
| 2444 } | 2443 } |
| 2445 Map* native_context_map = Map::cast(obj); | 2444 Map* global_context_map = Map::cast(obj); |
| 2446 native_context_map->set_dictionary_map(true); | 2445 global_context_map->set_dictionary_map(true); |
| 2447 native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext); | 2446 global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext); |
| 2448 set_native_context_map(native_context_map); | 2447 set_global_context_map(global_context_map); |
| 2449 | 2448 |
| 2450 { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, | 2449 { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, |
| 2451 SharedFunctionInfo::kAlignedSize); | 2450 SharedFunctionInfo::kAlignedSize); |
| 2452 if (!maybe_obj->ToObject(&obj)) return false; | 2451 if (!maybe_obj->ToObject(&obj)) return false; |
| 2453 } | 2452 } |
| 2454 set_shared_function_info_map(Map::cast(obj)); | 2453 set_shared_function_info_map(Map::cast(obj)); |
| 2455 | 2454 |
| 2456 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE, | 2455 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE, |
| 2457 JSMessageObject::kSize); | 2456 JSMessageObject::kSize); |
| 2458 if (!maybe_obj->ToObject(&obj)) return false; | 2457 if (!maybe_obj->ToObject(&obj)) return false; |
| (...skipping 1258 matching lines...) | |
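Every allocation in `CreateInitialMaps` above follows the same pre-exception protocol: the braced `{ MaybeObject* ...; if (!maybe_obj->ToObject(&obj)) return false; }` block scopes the temporary and bails out of the whole routine on failure so the caller can run a GC and retry. A condensed sketch (the real `MaybeObject` is a tagged pointer encoding retry-after-GC failures, not a struct; names here are hypothetical):

```cpp
struct Object {};

struct MaybeObject {
  Object* value;  // nullptr models an allocation failure
  bool ToObject(Object** out) {
    if (value == nullptr) return false;
    *out = value;
    return true;
  }
};

// Hypothetical allocator; the real AllocateMap takes a type and size.
MaybeObject AllocateMapSketch() { return MaybeObject{new Object}; }

bool CreateOneMap(Object** result) {
  Object* obj = nullptr;
  { MaybeObject maybe_obj = AllocateMapSketch();
    if (!maybe_obj.ToObject(&obj)) return false;  // bail out; caller retries
  }
  *result = obj;
  return true;
}
```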
| 3717 function->set_literals_or_bindings(empty_fixed_array()); | 3716 function->set_literals_or_bindings(empty_fixed_array()); |
| 3718 function->set_next_function_link(undefined_value()); | 3717 function->set_next_function_link(undefined_value()); |
| 3719 } | 3718 } |
| 3720 | 3719 |
| 3721 | 3720 |
| 3722 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) { | 3721 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) { |
| 3723 // Allocate the prototype. Make sure to use the object function | 3722 // Allocate the prototype. Make sure to use the object function |
| 3724 // from the function's context, since the function can be from a | 3723 // from the function's context, since the function can be from a |
| 3725 // different context. | 3724 // different context. |
| 3726 JSFunction* object_function = | 3725 JSFunction* object_function = |
| 3727 function->context()->native_context()->object_function(); | 3726 function->context()->global_context()->object_function(); |
| 3728 | 3727 |
| 3729 // Each function prototype gets a copy of the object function map. | 3728 // Each function prototype gets a copy of the object function map. |
| 3730 // This avoids unwanted sharing of maps between prototypes of different | 3729 // This avoids unwanted sharing of maps between prototypes of different |
| 3731 // constructors. | 3730 // constructors. |
| 3732 Map* new_map; | 3731 Map* new_map; |
| 3733 ASSERT(object_function->has_initial_map()); | 3732 ASSERT(object_function->has_initial_map()); |
| 3734 MaybeObject* maybe_map = object_function->initial_map()->Copy(); | 3733 MaybeObject* maybe_map = object_function->initial_map()->Copy(); |
| 3735 if (!maybe_map->To(&new_map)) return maybe_map; | 3734 if (!maybe_map->To(&new_map)) return maybe_map; |
| 3736 | 3735 |
| 3737 Object* prototype; | 3736 Object* prototype; |
| (...skipping 29 matching lines...) | |
| 3767 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { | 3766 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { |
| 3768 // To get fast allocation and map sharing for arguments objects we | 3767 // To get fast allocation and map sharing for arguments objects we |
| 3769 // allocate them based on an arguments boilerplate. | 3768 // allocate them based on an arguments boilerplate. |
| 3770 | 3769 |
| 3771 JSObject* boilerplate; | 3770 JSObject* boilerplate; |
| 3772 int arguments_object_size; | 3771 int arguments_object_size; |
| 3773 bool strict_mode_callee = callee->IsJSFunction() && | 3772 bool strict_mode_callee = callee->IsJSFunction() && |
| 3774 !JSFunction::cast(callee)->shared()->is_classic_mode(); | 3773 !JSFunction::cast(callee)->shared()->is_classic_mode(); |
| 3775 if (strict_mode_callee) { | 3774 if (strict_mode_callee) { |
| 3776 boilerplate = | 3775 boilerplate = |
| 3777 isolate()->context()->native_context()-> | 3776 isolate()->context()->global_context()-> |
| 3778 strict_mode_arguments_boilerplate(); | 3777 strict_mode_arguments_boilerplate(); |
| 3779 arguments_object_size = kArgumentsObjectSizeStrict; | 3778 arguments_object_size = kArgumentsObjectSizeStrict; |
| 3780 } else { | 3779 } else { |
| 3781 boilerplate = | 3780 boilerplate = |
| 3782 isolate()->context()->native_context()->arguments_boilerplate(); | 3781 isolate()->context()->global_context()->arguments_boilerplate(); |
| 3783 arguments_object_size = kArgumentsObjectSize; | 3782 arguments_object_size = kArgumentsObjectSize; |
| 3784 } | 3783 } |
| 3785 | 3784 |
| 3786 // This calls Copy directly rather than using Heap::AllocateRaw so we | 3785 // This calls Copy directly rather than using Heap::AllocateRaw so we |
| 3787 // duplicate the check here. | 3786 // duplicate the check here. |
| 3788 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); | 3787 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); |
| 3789 | 3788 |
| 3790 // Check that the size of the boilerplate matches our | 3789 // Check that the size of the boilerplate matches our |
| 3791 // expectations. The ArgumentsAccessStub::GenerateNewObject relies | 3790 // expectations. The ArgumentsAccessStub::GenerateNewObject relies |
| 3792 // on the size being a known constant. | 3791 // on the size being a known constant. |
| (...skipping 510 matching lines...) | |
| 4303 JSObject* jsobj = JSObject::cast(object); | 4302 JSObject* jsobj = JSObject::cast(object); |
| 4304 | 4303 |
| 4305 // Reinitialize the object from the constructor map. | 4304 // Reinitialize the object from the constructor map. |
| 4306 InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map); | 4305 InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map); |
| 4307 | 4306 |
| 4308 // Functions require some minimal initialization. | 4307 // Functions require some minimal initialization. |
| 4309 if (type == JS_FUNCTION_TYPE) { | 4308 if (type == JS_FUNCTION_TYPE) { |
| 4310 map->set_function_with_prototype(true); | 4309 map->set_function_with_prototype(true); |
| 4311 InitializeFunction(JSFunction::cast(object), shared, the_hole_value()); | 4310 InitializeFunction(JSFunction::cast(object), shared, the_hole_value()); |
| 4312 JSFunction::cast(object)->set_context( | 4311 JSFunction::cast(object)->set_context( |
| 4313 isolate()->context()->native_context()); | 4312 isolate()->context()->global_context()); |
| 4314 } | 4313 } |
| 4315 | 4314 |
| 4316 // Put in filler if the new object is smaller than the old. | 4315 // Put in filler if the new object is smaller than the old. |
| 4317 if (size_difference > 0) { | 4316 if (size_difference > 0) { |
| 4318 CreateFillerObjectAt( | 4317 CreateFillerObjectAt( |
| 4319 object->address() + map->instance_size(), size_difference); | 4318 object->address() + map->instance_size(), size_difference); |
| 4320 } | 4319 } |
| 4321 | 4320 |
| 4322 return object; | 4321 return object; |
| 4323 } | 4322 } |
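The `CreateFillerObjectAt` call above exists because the heap must stay linearly walkable: when reinitialization shrinks an object in place, the leftover tail is stamped as a filler pseudo-object of exactly `size_difference` bytes. A sketch of the arithmetic only (hypothetical names):

```cpp
#include <cstddef>

struct FillerSketch {
  void* start;       // models object->address() + map->instance_size()
  std::size_t size;  // models size_difference
};

FillerSketch PlanFiller(char* object_address, std::size_t old_size,
                        std::size_t new_size) {
  // Caller invokes this only when the object shrank (new_size < old_size),
  // mirroring the size_difference > 0 guard above.
  return FillerSketch{object_address + new_size, old_size - new_size};
}
```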
| (...skipping 274 matching lines...) | |
| 4598 String::cast(result)->set_length(length); | 4597 String::cast(result)->set_length(length); |
| 4599 String::cast(result)->set_hash_field(String::kEmptyHashField); | 4598 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 4600 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 4599 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| 4601 return result; | 4600 return result; |
| 4602 } | 4601 } |
| 4603 | 4602 |
| 4604 | 4603 |
| 4605 MaybeObject* Heap::AllocateJSArray( | 4604 MaybeObject* Heap::AllocateJSArray( |
| 4606 ElementsKind elements_kind, | 4605 ElementsKind elements_kind, |
| 4607 PretenureFlag pretenure) { | 4606 PretenureFlag pretenure) { |
| 4608 Context* native_context = isolate()->context()->native_context(); | 4607 Context* global_context = isolate()->context()->global_context(); |
| 4609 JSFunction* array_function = native_context->array_function(); | 4608 JSFunction* array_function = global_context->array_function(); |
| 4610 Map* map = array_function->initial_map(); | 4609 Map* map = array_function->initial_map(); |
| 4611 Object* maybe_map_array = native_context->js_array_maps(); | 4610 Object* maybe_map_array = global_context->js_array_maps(); |
| 4612 if (!maybe_map_array->IsUndefined()) { | 4611 if (!maybe_map_array->IsUndefined()) { |
| 4613 Object* maybe_transitioned_map = | 4612 Object* maybe_transitioned_map = |
| 4614 FixedArray::cast(maybe_map_array)->get(elements_kind); | 4613 FixedArray::cast(maybe_map_array)->get(elements_kind); |
| 4615 if (!maybe_transitioned_map->IsUndefined()) { | 4614 if (!maybe_transitioned_map->IsUndefined()) { |
| 4616 map = Map::cast(maybe_transitioned_map); | 4615 map = Map::cast(maybe_transitioned_map); |
| 4617 } | 4616 } |
| 4618 } | 4617 } |
| 4619 | 4618 |
| 4620 return AllocateJSObjectFromMap(map, pretenure); | 4619 return AllocateJSObjectFromMap(map, pretenure); |
| 4621 } | 4620 } |
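`AllocateJSArray` above consults a per-`ElementsKind` map cache hung off the global context; `undefined` at either level means the `Array` function's initial map is used. A sketch under simplified types (names illustrative; the real cache is a FixedArray of maps):

```cpp
enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_DOUBLE_ELEMENTS,
                    FAST_ELEMENTS, kElementsKindCount };

struct Map {};

Map* PickJSArrayMap(Map* initial_map,
                    Map* const* js_array_maps,  // nullptr models undefined
                    ElementsKind kind) {
  if (js_array_maps != nullptr && js_array_maps[kind] != nullptr) {
    return js_array_maps[kind];  // cached pre-transitioned map for this kind
  }
  return initial_map;  // no transition recorded yet
}
```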
| (...skipping 262 matching lines...) | |
| 4884 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 4883 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
| 4885 if (!maybe_result->ToObject(&result)) return maybe_result; | 4884 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4886 } | 4885 } |
| 4887 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 4886 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
| 4888 hash_table_map()); | 4887 hash_table_map()); |
| 4889 ASSERT(result->IsHashTable()); | 4888 ASSERT(result->IsHashTable()); |
| 4890 return result; | 4889 return result; |
| 4891 } | 4890 } |
| 4892 | 4891 |
| 4893 | 4892 |
| 4894 MaybeObject* Heap::AllocateNativeContext() { | 4893 MaybeObject* Heap::AllocateGlobalContext() { |
| 4895 Object* result; | 4894 Object* result; |
| 4896 { MaybeObject* maybe_result = | 4895 { MaybeObject* maybe_result = |
| 4897 AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS); | 4896 AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); |
| 4898 if (!maybe_result->ToObject(&result)) return maybe_result; | 4897 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4899 } | 4898 } |
| 4900 Context* context = reinterpret_cast<Context*>(result); | 4899 Context* context = reinterpret_cast<Context*>(result); |
| 4901 context->set_map_no_write_barrier(native_context_map()); | 4900 context->set_map_no_write_barrier(global_context_map()); |
| 4902 context->set_js_array_maps(undefined_value()); | 4901 context->set_js_array_maps(undefined_value()); |
| 4903 ASSERT(context->IsNativeContext()); | 4902 ASSERT(context->IsGlobalContext()); |
| 4904 ASSERT(result->IsContext()); | 4903 ASSERT(result->IsContext()); |
| 4905 return result; | 4904 return result; |
| 4906 } | 4905 } |
| 4907 | 4906 |
| 4908 | 4907 |
| 4909 MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) { | 4908 MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) { |
| 4910 Object* result; | 4909 Object* result; |
| 4911 { MaybeObject* maybe_result = | 4910 { MaybeObject* maybe_result = |
| 4912 AllocateFixedArray(scope_info->ContextLength(), TENURED); | 4911 AllocateFixedArray(scope_info->ContextLength(), TENURED); |
| 4913 if (!maybe_result->ToObject(&result)) return maybe_result; | 4912 if (!maybe_result->ToObject(&result)) return maybe_result; |
| (...skipping 10 matching lines...) | |
| 4924 ASSERT(length >= Context::MIN_CONTEXT_SLOTS); | 4923 ASSERT(length >= Context::MIN_CONTEXT_SLOTS); |
| 4925 Object* result; | 4924 Object* result; |
| 4926 { MaybeObject* maybe_result = AllocateFixedArray(length); | 4925 { MaybeObject* maybe_result = AllocateFixedArray(length); |
| 4927 if (!maybe_result->ToObject(&result)) return maybe_result; | 4926 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4928 } | 4927 } |
| 4929 Context* context = reinterpret_cast<Context*>(result); | 4928 Context* context = reinterpret_cast<Context*>(result); |
| 4930 context->set_map_no_write_barrier(function_context_map()); | 4929 context->set_map_no_write_barrier(function_context_map()); |
| 4931 context->set_closure(function); | 4930 context->set_closure(function); |
| 4932 context->set_previous(function->context()); | 4931 context->set_previous(function->context()); |
| 4933 context->set_extension(Smi::FromInt(0)); | 4932 context->set_extension(Smi::FromInt(0)); |
| 4934 context->set_global_object(function->context()->global_object()); | 4933 context->set_global(function->context()->global()); |
| 4935 return context; | 4934 return context; |
| 4936 } | 4935 } |
| 4937 | 4936 |
| 4938 | 4937 |
| 4939 MaybeObject* Heap::AllocateCatchContext(JSFunction* function, | 4938 MaybeObject* Heap::AllocateCatchContext(JSFunction* function, |
| 4940 Context* previous, | 4939 Context* previous, |
| 4941 String* name, | 4940 String* name, |
| 4942 Object* thrown_object) { | 4941 Object* thrown_object) { |
| 4943 STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX); | 4942 STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX); |
| 4944 Object* result; | 4943 Object* result; |
| 4945 { MaybeObject* maybe_result = | 4944 { MaybeObject* maybe_result = |
| 4946 AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1); | 4945 AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1); |
| 4947 if (!maybe_result->ToObject(&result)) return maybe_result; | 4946 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4948 } | 4947 } |
| 4949 Context* context = reinterpret_cast<Context*>(result); | 4948 Context* context = reinterpret_cast<Context*>(result); |
| 4950 context->set_map_no_write_barrier(catch_context_map()); | 4949 context->set_map_no_write_barrier(catch_context_map()); |
| 4951 context->set_closure(function); | 4950 context->set_closure(function); |
| 4952 context->set_previous(previous); | 4951 context->set_previous(previous); |
| 4953 context->set_extension(name); | 4952 context->set_extension(name); |
| 4954 context->set_global_object(previous->global_object()); | 4953 context->set_global(previous->global()); |
| 4955 context->set(Context::THROWN_OBJECT_INDEX, thrown_object); | 4954 context->set(Context::THROWN_OBJECT_INDEX, thrown_object); |
| 4956 return context; | 4955 return context; |
| 4957 } | 4956 } |
| 4958 | 4957 |
| 4959 | 4958 |
| 4960 MaybeObject* Heap::AllocateWithContext(JSFunction* function, | 4959 MaybeObject* Heap::AllocateWithContext(JSFunction* function, |
| 4961 Context* previous, | 4960 Context* previous, |
| 4962 JSObject* extension) { | 4961 JSObject* extension) { |
| 4963 Object* result; | 4962 Object* result; |
| 4964 { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS); | 4963 { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS); |
| 4965 if (!maybe_result->ToObject(&result)) return maybe_result; | 4964 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4966 } | 4965 } |
| 4967 Context* context = reinterpret_cast<Context*>(result); | 4966 Context* context = reinterpret_cast<Context*>(result); |
| 4968 context->set_map_no_write_barrier(with_context_map()); | 4967 context->set_map_no_write_barrier(with_context_map()); |
| 4969 context->set_closure(function); | 4968 context->set_closure(function); |
| 4970 context->set_previous(previous); | 4969 context->set_previous(previous); |
| 4971 context->set_extension(extension); | 4970 context->set_extension(extension); |
| 4972 context->set_global_object(previous->global_object()); | 4971 context->set_global(previous->global()); |
| 4973 return context; | 4972 return context; |
| 4974 } | 4973 } |
| 4975 | 4974 |
| 4976 | 4975 |
| 4977 MaybeObject* Heap::AllocateBlockContext(JSFunction* function, | 4976 MaybeObject* Heap::AllocateBlockContext(JSFunction* function, |
| 4978 Context* previous, | 4977 Context* previous, |
| 4979 ScopeInfo* scope_info) { | 4978 ScopeInfo* scope_info) { |
| 4980 Object* result; | 4979 Object* result; |
| 4981 { MaybeObject* maybe_result = | 4980 { MaybeObject* maybe_result = |
| 4982 AllocateFixedArrayWithHoles(scope_info->ContextLength()); | 4981 AllocateFixedArrayWithHoles(scope_info->ContextLength()); |
| 4983 if (!maybe_result->ToObject(&result)) return maybe_result; | 4982 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4984 } | 4983 } |
| 4985 Context* context = reinterpret_cast<Context*>(result); | 4984 Context* context = reinterpret_cast<Context*>(result); |
| 4986 context->set_map_no_write_barrier(block_context_map()); | 4985 context->set_map_no_write_barrier(block_context_map()); |
| 4987 context->set_closure(function); | 4986 context->set_closure(function); |
| 4988 context->set_previous(previous); | 4987 context->set_previous(previous); |
| 4989 context->set_extension(scope_info); | 4988 context->set_extension(scope_info); |
| 4990 context->set_global_object(previous->global_object()); | 4989 context->set_global(previous->global()); |
| 4991 return context; | 4990 return context; |
| 4992 } | 4991 } |
| 4993 | 4992 |
| 4994 | 4993 |
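The catch, with, and block context allocators above differ only in map, length, and what goes in the extension slot; the remaining header slots are filled identically, and the global object is always propagated from the enclosing context. A condensed, hypothetical struct form of that shared pattern (real contexts are FixedArrays with dedicated slot indices):

```cpp
struct ContextSketch {
  const char* map;         // catch_context_map, with_context_map, ...
  void* closure;           // owning JSFunction
  ContextSketch* previous; // enclosing context in the chain
  void* extension;         // name, extension object, or ScopeInfo
  void* global;            // always inherited from the previous context
};

ContextSketch MakeContext(const char* map, void* closure,
                          ContextSketch* previous, void* extension) {
  ContextSketch c;
  c.map = map;
  c.closure = closure;
  c.previous = previous;
  c.extension = extension;
  c.global = previous->global;  // mirrors set_global(previous->global())
  return c;
}
```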
| 4995 MaybeObject* Heap::AllocateScopeInfo(int length) { | 4994 MaybeObject* Heap::AllocateScopeInfo(int length) { |
| 4996 FixedArray* scope_info; | 4995 FixedArray* scope_info; |
| 4997 MaybeObject* maybe_scope_info = AllocateFixedArray(length, TENURED); | 4996 MaybeObject* maybe_scope_info = AllocateFixedArray(length, TENURED); |
| 4998 if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info; | 4997 if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info; |
| 4999 scope_info->set_map_no_write_barrier(scope_info_map()); | 4998 scope_info->set_map_no_write_barrier(scope_info_map()); |
| 5000 return scope_info; | 4999 return scope_info; |
| (...skipping 1156 matching lines...) | |
| 6157 } | 6156 } |
| 6158 | 6157 |
| 6159 if (create_heap_objects) { | 6158 if (create_heap_objects) { |
| 6160 // Create initial maps. | 6159 // Create initial maps. |
| 6161 if (!CreateInitialMaps()) return false; | 6160 if (!CreateInitialMaps()) return false; |
| 6162 if (!CreateApiObjects()) return false; | 6161 if (!CreateApiObjects()) return false; |
| 6163 | 6162 |
| 6164 // Create initial objects | 6163 // Create initial objects |
| 6165 if (!CreateInitialObjects()) return false; | 6164 if (!CreateInitialObjects()) return false; |
| 6166 | 6165 |
| 6167 native_contexts_list_ = undefined_value(); | 6166 global_contexts_list_ = undefined_value(); |
| 6168 } | 6167 } |
| 6169 | 6168 |
| 6170 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); | 6169 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); |
| 6171 LOG(isolate_, IntPtrTEvent("heap-available", Available())); | 6170 LOG(isolate_, IntPtrTEvent("heap-available", Available())); |
| 6172 | 6171 |
| 6173 store_buffer()->SetUp(); | 6172 store_buffer()->SetUp(); |
| 6174 | 6173 |
| 6175 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); | 6174 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); |
| 6176 | 6175 |
| 6177 return true; | 6176 return true; |
| (...skipping 506 matching lines...) | |
| 6684 MarkVisitor mark_visitor(this); | 6683 MarkVisitor mark_visitor(this); |
| 6685 MarkRecursively(root, &mark_visitor); | 6684 MarkRecursively(root, &mark_visitor); |
| 6686 | 6685 |
| 6687 UnmarkVisitor unmark_visitor(this); | 6686 UnmarkVisitor unmark_visitor(this); |
| 6688 UnmarkRecursively(root, &unmark_visitor); | 6687 UnmarkRecursively(root, &unmark_visitor); |
| 6689 | 6688 |
| 6690 ProcessResults(); | 6689 ProcessResults(); |
| 6691 } | 6690 } |
| 6692 | 6691 |
| 6693 | 6692 |
| 6694 static bool SafeIsNativeContext(HeapObject* obj) { | 6693 static bool SafeIsGlobalContext(HeapObject* obj) { |
| 6695 return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map(); | 6694 return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map(); |
| 6696 } | 6695 } |
| 6697 | 6696 |
| 6698 | 6697 |
| 6699 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) { | 6698 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) { |
| 6700 if (!(*p)->IsHeapObject()) return; | 6699 if (!(*p)->IsHeapObject()) return; |
| 6701 | 6700 |
| 6702 HeapObject* obj = HeapObject::cast(*p); | 6701 HeapObject* obj = HeapObject::cast(*p); |
| 6703 | 6702 |
| 6704 Object* map = obj->map(); | 6703 Object* map = obj->map(); |
| 6705 | 6704 |
| 6706 if (!map->IsHeapObject()) return; // visited before | 6705 if (!map->IsHeapObject()) return; // visited before |
| 6707 | 6706 |
| 6708 if (found_target_in_trace_) return; // stop if target found | 6707 if (found_target_in_trace_) return; // stop if target found |
| 6709 object_stack_.Add(obj); | 6708 object_stack_.Add(obj); |
| 6710 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) || | 6709 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) || |
| 6711 (obj == search_target_)) { | 6710 (obj == search_target_)) { |
| 6712 found_target_in_trace_ = true; | 6711 found_target_in_trace_ = true; |
| 6713 found_target_ = true; | 6712 found_target_ = true; |
| 6714 return; | 6713 return; |
| 6715 } | 6714 } |
| 6716 | 6715 |
| 6717 bool is_native_context = SafeIsNativeContext(obj); | 6716 bool is_global_context = SafeIsGlobalContext(obj); |
| 6718 | 6717 |
| 6719 // not visited yet | 6718 // not visited yet |
| 6720 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map)); | 6719 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map)); |
| 6721 | 6720 |
| 6722 Address map_addr = map_p->address(); | 6721 Address map_addr = map_p->address(); |
| 6723 | 6722 |
| 6724 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag)); | 6723 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag)); |
| 6725 | 6724 |
| 6726 // Scan the object body. | 6725 // Scan the object body. |
| 6727 if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) { | 6726 if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) { |
| 6728 // This is specialized to scan Contexts properly. | 6727 // This is specialized to scan Contexts properly. |
| 6729 Object** start = reinterpret_cast<Object**>(obj->address() + | 6728 Object** start = reinterpret_cast<Object**>(obj->address() + |
| 6730 Context::kHeaderSize); | 6729 Context::kHeaderSize); |
| 6731 Object** end = reinterpret_cast<Object**>(obj->address() + | 6730 Object** end = reinterpret_cast<Object**>(obj->address() + |
| 6732 Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize); | 6731 Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize); |
| 6733 mark_visitor->VisitPointers(start, end); | 6732 mark_visitor->VisitPointers(start, end); |
| 6734 } else { | 6733 } else { |
| 6735 obj->IterateBody(map_p->instance_type(), | 6734 obj->IterateBody(map_p->instance_type(), |
| 6736 obj->SizeFromMap(map_p), | 6735 obj->SizeFromMap(map_p), |
| 6737 mark_visitor); | 6736 mark_visitor); |
| (...skipping 525 matching lines...) | |
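`PathTracer::MarkRecursively` above stores its mark in the object itself: the map word is replaced by the map's address plus `kMarkTag`, so a tagged map no longer passes `IsHeapObject()` and the `// visited before` check falls out for free; the unmark pass subtracts the tag to restore the heap. A standalone sketch with explicit integers (relying, like the real code, on map pointers being aligned so the low bits are free):

```cpp
#include <cstdint>

constexpr std::uintptr_t kMarkTag = 2;  // matches the tag used above

struct ObjSketch {
  std::uintptr_t map_word;  // an aligned Map* address, possibly tagged
};

bool VisitedBefore(const ObjSketch* o) {
  return (o->map_word & kMarkTag) != 0;  // tagged map fails IsHeapObject()
}

void MarkVisited(ObjSketch* o) { o->map_word += kMarkTag; }    // map "invalid"
void UnmarkVisited(ObjSketch* o) { o->map_word -= kMarkTag; }  // restore map
```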
| 7263 static_cast<int>(object_sizes_last_time_[index])); | 7262 static_cast<int>(object_sizes_last_time_[index])); |
| 7264 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7263 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7265 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7264 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7266 | 7265 |
| 7267 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7266 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7268 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7267 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7269 ClearObjectStats(); | 7268 ClearObjectStats(); |
| 7270 } | 7269 } |
| 7271 | 7270 |
| 7272 } } // namespace v8::internal | 7271 } } // namespace v8::internal |