OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1939 matching lines...)
1950 | 1950 |
1951 // Take another spin if there are now unswept objects in new space | 1951 // Take another spin if there are now unswept objects in new space |
1952 // (there are currently no more unswept promoted objects). | 1952 // (there are currently no more unswept promoted objects). |
1953 } while (new_space_front != new_space_.top()); | 1953 } while (new_space_front != new_space_.top()); |
1954 | 1954 |
1955 return new_space_front; | 1955 return new_space_front; |
1956 } | 1956 } |
1957 | 1957 |
1958 | 1958 |
1959 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); | 1959 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
| 1960 STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
1960 | 1961 |
1961 | 1962 |
1962 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, | 1963 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, |
1963 HeapObject* object, | 1964 HeapObject* object, |
1964 int size)); | 1965 int size)); |
1965 | 1966 |
1966 static HeapObject* EnsureDoubleAligned(Heap* heap, | 1967 static HeapObject* EnsureDoubleAligned(Heap* heap, |
1967 HeapObject* object, | 1968 HeapObject* object, |
1968 int size) { | 1969 int size) { |
1969 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1970 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { |
(...skipping 681 matching lines...)
2651 set_undetectable_ascii_string_map(Map::cast(obj)); | 2652 set_undetectable_ascii_string_map(Map::cast(obj)); |
2652 Map::cast(obj)->set_is_undetectable(); | 2653 Map::cast(obj)->set_is_undetectable(); |
2653 | 2654 |
2654 { MaybeObject* maybe_obj = | 2655 { MaybeObject* maybe_obj = |
2655 AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel); | 2656 AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel); |
2656 if (!maybe_obj->ToObject(&obj)) return false; | 2657 if (!maybe_obj->ToObject(&obj)) return false; |
2657 } | 2658 } |
2658 set_fixed_double_array_map(Map::cast(obj)); | 2659 set_fixed_double_array_map(Map::cast(obj)); |
2659 | 2660 |
2660 { MaybeObject* maybe_obj = | 2661 { MaybeObject* maybe_obj = |
| 2662 AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel); |
| 2663 if (!maybe_obj->ToObject(&obj)) return false; |
| 2664 } |
| 2665 set_constant_pool_array_map(Map::cast(obj)); |
| 2666 |
| 2667 { MaybeObject* maybe_obj = |
2661 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); | 2668 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); |
2662 if (!maybe_obj->ToObject(&obj)) return false; | 2669 if (!maybe_obj->ToObject(&obj)) return false; |
2663 } | 2670 } |
2664 set_byte_array_map(Map::cast(obj)); | 2671 set_byte_array_map(Map::cast(obj)); |
2665 | 2672 |
2666 { MaybeObject* maybe_obj = | 2673 { MaybeObject* maybe_obj = |
2667 AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel); | 2674 AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel); |
2668 if (!maybe_obj->ToObject(&obj)) return false; | 2675 if (!maybe_obj->ToObject(&obj)) return false; |
2669 } | 2676 } |
2670 set_free_space_map(Map::cast(obj)); | 2677 set_free_space_map(Map::cast(obj)); |
(...skipping 2706 matching lines...)
5377 HeapObject* dst = HeapObject::cast(obj); | 5384 HeapObject* dst = HeapObject::cast(obj); |
5378 dst->set_map_no_write_barrier(map); | 5385 dst->set_map_no_write_barrier(map); |
5379 CopyBlock( | 5386 CopyBlock( |
5380 dst->address() + FixedDoubleArray::kLengthOffset, | 5387 dst->address() + FixedDoubleArray::kLengthOffset, |
5381 src->address() + FixedDoubleArray::kLengthOffset, | 5388 src->address() + FixedDoubleArray::kLengthOffset, |
5382 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | 5389 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
5383 return obj; | 5390 return obj; |
5384 } | 5391 } |
5385 | 5392 |
5386 | 5393 |
| 5394 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, |
| 5395 Map* map) { |
| 5396 int int64_entries = src->count_of_int64_entries(); |
| 5397 int ptr_entries = src->count_of_ptr_entries(); |
| 5398 int int32_entries = src->count_of_int32_entries(); |
| 5399 Object* obj; |
| 5400 { MaybeObject* maybe_obj = |
| 5401 AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries); |
| 5402 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 5403 } |
| 5404 HeapObject* dst = HeapObject::cast(obj); |
| 5405 dst->set_map_no_write_barrier(map); |
| 5406 CopyBlock( |
| 5407 dst->address() + ConstantPoolArray::kLengthOffset, |
| 5408 src->address() + ConstantPoolArray::kLengthOffset, |
| 5409 ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries) |
| 5410 - ConstantPoolArray::kLengthOffset); |
| 5411 return obj; |
| 5412 } |
| 5413 |
| 5414 |
5387 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5415 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
5388 if (length < 0 || length > FixedArray::kMaxLength) { | 5416 if (length < 0 || length > FixedArray::kMaxLength) { |
5389 return Failure::OutOfMemoryException(0xe); | 5417 return Failure::OutOfMemoryException(0xe); |
5390 } | 5418 } |
5391 int size = FixedArray::SizeFor(length); | 5419 int size = FixedArray::SizeFor(length); |
5392 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 5420 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
5393 | 5421 |
5394 return AllocateRaw(size, space, OLD_POINTER_SPACE); | 5422 return AllocateRaw(size, space, OLD_POINTER_SPACE); |
5395 } | 5423 } |
5396 | 5424 |
(...skipping 111 matching lines...)
5508 | 5536 |
5509 HeapObject* object; | 5537 HeapObject* object; |
5510 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); | 5538 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); |
5511 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5539 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
5512 } | 5540 } |
5513 | 5541 |
5514 return EnsureDoubleAligned(this, object, size); | 5542 return EnsureDoubleAligned(this, object, size); |
5515 } | 5543 } |
5516 | 5544 |
5517 | 5545 |
| 5546 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, |
| 5547 int number_of_ptr_entries, |
| 5548 int number_of_int32_entries) { |
| 5549 ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 || |
| 5550 number_of_int32_entries > 0); |
| 5551 int size = ConstantPoolArray::SizeFor(number_of_int64_entries, |
| 5552 number_of_ptr_entries, |
| 5553 number_of_int32_entries); |
| 5554 #ifndef V8_HOST_ARCH_64_BIT |
| 5555 size += kPointerSize; |
| 5556 #endif |
| 5557 |
| 5558 HeapObject* object; |
| 5559 { MaybeObject* maybe_object = old_pointer_space_->AllocateRaw(size); |
| 5560 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5561 } |
| 5562 object = EnsureDoubleAligned(this, object, size); |
| 5563 HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); |
| 5564 |
| 5565 ConstantPoolArray* constant_pool = |
| 5566 reinterpret_cast<ConstantPoolArray*>(object); |
| 5567 constant_pool->SetEntryCounts(number_of_int64_entries, |
| 5568 number_of_ptr_entries, |
| 5569 number_of_int32_entries); |
| 5570 MemsetPointer( |
| 5571 HeapObject::RawField( |
| 5572 constant_pool, |
| 5573 constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())), |
| 5574 undefined_value(), |
| 5575 number_of_ptr_entries); |
| 5576 return constant_pool; |
| 5577 } |
| 5578 |
| 5579 |
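Note: AllocateConstantPoolArray grows the request by kPointerSize on non-64-bit hosts so that EnsureDoubleAligned can always find an 8-byte-aligned start address inside the block, with the skipped word becoming filler. A standalone sketch of that over-allocate-then-align trick (the word size, mask value, and simulated misalignment below are assumptions for illustration, not V8 code):

// double_align_sketch.cc -- illustrative only.
#include <cassert>
#include <cstdint>

const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte alignment, as in the patch
const int kWordSize = 4;                   // pretend 32-bit host word

// Given a block that was over-allocated by one word, return an 8-byte-aligned
// start address inside it; the real heap turns the skipped word into a filler
// object so heap iteration still works.
char* EnsureDoubleAlignedSketch(char* block) {
  if ((reinterpret_cast<uintptr_t>(block) & kDoubleAlignmentMask) != 0) {
    return block + kWordSize;
  }
  return block;
}

int main() {
  alignas(8) char arena[64];
  // Simulate a 4-aligned (but not 8-aligned) allocation top on a 32-bit heap.
  char* raw = arena + 4;
  char* aligned = EnsureDoubleAlignedSketch(raw);
  assert((reinterpret_cast<uintptr_t>(aligned) & kDoubleAlignmentMask) == 0);
  return 0;
}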
5518 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5580 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
5519 Object* result; | 5581 Object* result; |
5520 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 5582 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
5521 if (!maybe_result->ToObject(&result)) return maybe_result; | 5583 if (!maybe_result->ToObject(&result)) return maybe_result; |
5522 } | 5584 } |
5523 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 5585 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
5524 hash_table_map()); | 5586 hash_table_map()); |
5525 ASSERT(result->IsHashTable()); | 5587 ASSERT(result->IsHashTable()); |
5526 return result; | 5588 return result; |
5527 } | 5589 } |
(...skipping 2382 matching lines...)
7910 if (FLAG_concurrent_recompilation) { | 7972 if (FLAG_concurrent_recompilation) { |
7911 heap_->relocation_mutex_->Lock(); | 7973 heap_->relocation_mutex_->Lock(); |
7912 #ifdef DEBUG | 7974 #ifdef DEBUG |
7913 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7975 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
7914 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7976 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
7915 #endif // DEBUG | 7977 #endif // DEBUG |
7916 } | 7978 } |
7917 } | 7979 } |
7918 | 7980 |
7919 } } // namespace v8::internal | 7981 } } // namespace v8::internal |