Chromium Code Reviews
| Index: src/heap.cc |
| diff --git a/src/heap.cc b/src/heap.cc |
| index 0245cf7ee4b250e7c67630aecc00580669b76742..4c4d740bc0fe9d77755144ab00c7f7a980bdae3f 100644 |
| --- a/src/heap.cc |
| +++ b/src/heap.cc |
| @@ -3910,50 +3910,36 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { |
| } |
| -MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) { |
| +MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, |
| + AllocationSiteMode mode, |
| + Handle<Object>* allocation_site_info_payload) { |
| ASSERT(gc_state_ == NOT_IN_GC); |
| ASSERT(map->instance_type() != MAP_TYPE); |
| + ASSERT(allocation_site_info_payload != NULL || |
| + mode == DONT_TRACK_ALLOCATION_SITE); |
| + ASSERT(space == NEW_SPACE || mode == DONT_TRACK_ALLOCATION_SITE); |
| // If allocation failures are disallowed, we may allocate in a different |
| // space when new space is full and the object is not a large object. |
| AllocationSpace retry_space = |
| (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| - Object* result; |
| - { MaybeObject* maybe_result = |
| - AllocateRaw(map->instance_size(), space, retry_space); |
| - if (!maybe_result->ToObject(&result)) return maybe_result; |
| + int size = map->instance_size(); |
| + if (mode == TRACK_ALLOCATION_SITE) { |
| + size += AllocationSiteInfo::kSize; |
| } |
| - // No need for write barrier since object is white and map is in old space. |
| - HeapObject::cast(result)->set_map_no_write_barrier(map); |
| - return result; |
| -} |
| - |
| - |
| -// TODO(mvstanton): consolidate this with the function above. |
| -MaybeObject* Heap::AllocateWithAllocationSiteInfo(Map* map, |
| - AllocationSpace space, |
| - Handle<Object>* allocation_site_info_payload) { |
| - ASSERT(gc_state_ == NOT_IN_GC); |
| - ASSERT(map->instance_type() != MAP_TYPE); |
| - // If allocation failures are disallowed, we may allocate in a different |
| - // space when new space is full and the object is not a large object. |
| - AllocationSpace retry_space = |
| - (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| Object* result; |
| - { MaybeObject* maybe_result = |
| - AllocateRaw(map->instance_size() + AllocationSiteInfo::kSize, |
| - space, |
| - retry_space); |
| - if (!maybe_result->ToObject(&result)) return maybe_result; |
| - } |
| + MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| + if (!maybe_result->ToObject(&result)) return maybe_result; |
| // No need for write barrier since object is white and map is in old space. |
| HeapObject::cast(result)->set_map_no_write_barrier(map); |
| - Object* allocation_pointer = reinterpret_cast<Object*>( |
| - reinterpret_cast<Address>(result) + map->instance_size()); |
| - HeapObject::cast(allocation_pointer)->set_map_no_write_barrier( |
| - allocation_site_info_map()); |
| - AllocationSiteInfo* allocation_site_info = |
| - AllocationSiteInfo::cast(allocation_pointer); |
| - allocation_site_info->set_payload(**allocation_site_info_payload); |
| + if (mode == TRACK_ALLOCATION_SITE) { |
| + Object* allocation_pointer = reinterpret_cast<Object*>( |
| + reinterpret_cast<Address>(result) + map->instance_size()); |
| + HeapObject::cast(allocation_pointer)->set_map_no_write_barrier( |
| + allocation_site_info_map()); |
| + AllocationSiteInfo* allocation_site_info = |
| + AllocationSiteInfo::cast(allocation_pointer); |
| + allocation_site_info->set_payload(**allocation_site_info_payload); |
| + } |
| return result; |
| } |
| @@ -4196,6 +4182,8 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj, |
| MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure, |
| AllocationSiteMode mode, |
| Handle<Object>* allocation_site_info_payload) { |
| + ASSERT(allocation_site_info_payload != NULL || |
| + mode == DONT_TRACK_ALLOCATION_SITE); |
| ASSERT(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE); |
| // JSFunctions should be allocated using AllocateFunction to be |
| // properly initialized. |
| @@ -4222,16 +4210,9 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure, |
| (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; |
| Object* obj; |
| - { |
| - MaybeObject* maybe_obj; |
| - if (mode == TRACK_ALLOCATION_SITE) { |
| - maybe_obj = AllocateWithAllocationSiteInfo(map, space, |
| - allocation_site_info_payload); |
| - } else { |
| - maybe_obj = Allocate(map, space); |
| - } |
| - if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| - } |
| + MaybeObject* maybe_obj = Allocate(map, space, mode, |
| + allocation_site_info_payload); |
| + if (!maybe_obj->To(&obj)) return maybe_obj; |
| // Initialize the JSObject. |
| InitializeJSObjectFromMap(JSObject::cast(obj), |
| @@ -4246,6 +4227,9 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| PretenureFlag pretenure, |
| AllocationSiteMode mode, |
| Handle<Object>* allocation_site_info_payload) { |
| + ASSERT(allocation_site_info_payload != NULL || |
| + mode == DONT_TRACK_ALLOCATION_SITE); |
| + ASSERT(pretenure == NOT_TENURED || mode == DONT_TRACK_ALLOCATION_SITE); |
| // Allocate the initial map if absent. |
| if (!constructor->has_initial_map()) { |
| Object* initial_map; |
| @@ -4259,20 +4243,16 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, |
| // advice |
| Map* initial_map = constructor->initial_map(); |
| if (mode == TRACK_ALLOCATION_SITE) { |
| - ASSERT(allocation_site_info_payload != NULL); |
| - ASSERT((*allocation_site_info_payload)->IsJSGlobalPropertyCell()); |
| JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast( |
| **allocation_site_info_payload); |
| - ASSERT(cell->value()->IsSmi()); |
| Smi* smi = Smi::cast(cell->value()); |
| ElementsKind to_kind = static_cast<ElementsKind>(smi->value()); |
| if (to_kind != initial_map->elements_kind()) { |
| - initial_map = initial_map->LookupElementsTransitionMap(to_kind); |
| - // TODO(mvstanton): I may have to allocate this transition, right? |
| - ASSERT(initial_map != NULL); |
| - // constructor->set_initial_map(Map::cast(initial_map)); |
| - // Map::cast(initial_map)->set_constructor(constructor); |
| - mode = DONT_TRACK_ALLOCATION_SITE; |
| + MaybeObject* maybe_new_map = constructor->GetElementsTransitionMap( |
| + isolate(), to_kind); |
| + if (!maybe_new_map->To(&initial_map)) return maybe_new_map; |
| + // TODO(mvstanton): Is the line below useful for performance? |
| + // mode = DONT_TRACK_ALLOCATION_SITE; |
|
Toon Verwaest
2013/02/21 12:13:03
Use GetMode(to_kind); here to decide whether or not to track the allocation site.
mvstanton
2013/02/27 14:37:07
Done.
|
| } |
| } |
| @@ -4311,18 +4291,25 @@ MaybeObject* Heap::AllocateJSArrayAndStorage( |
| Handle<Object> *allocation_site_payload, |
| ArrayStorageAllocationMode mode, |
| PretenureFlag pretenure) { |
| - ASSERT(capacity >= length); |
| ASSERT(allocation_site_payload != NULL || |
| allocation_site_info_mode == DONT_TRACK_ALLOCATION_SITE); |
| - if (pretenure == TENURED && |
| - allocation_site_info_mode == TRACK_ALLOCATION_SITE) { |
| - PrintF("Sorry, can't track yet in tenured space\n"); |
| - } |
| + ASSERT(pretenure == NOT_TENURED || |
| + allocation_site_info_mode == DONT_TRACK_ALLOCATION_SITE); |
| MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure, |
| allocation_site_info_mode, |
| allocation_site_payload); |
| JSArray* array; |
| if (!maybe_array->To(&array)) return maybe_array; |
| + return AllocateJSArrayStorage(array, length, capacity, mode); |
| +} |
| + |
| + |
| +MaybeObject* Heap::AllocateJSArrayStorage( |
| + JSArray* array, |
| + int length, |
| + int capacity, |
| + ArrayStorageAllocationMode mode) { |
| + ASSERT(capacity >= length); |
| if (capacity == 0) { |
| array->set_length(Smi::FromInt(0)); |
| @@ -4332,6 +4319,7 @@ MaybeObject* Heap::AllocateJSArrayAndStorage( |
| FixedArrayBase* elms; |
| MaybeObject* maybe_elms = NULL; |
| + ElementsKind elements_kind = array->GetElementsKind(); |
| if (IsFastDoubleElementsKind(elements_kind)) { |
| if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { |
| maybe_elms = AllocateUninitializedFixedDoubleArray(capacity); |