Chromium Code Reviews

Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 22dbbebe887ad64404ce0948795c07b104130b09..f53c84ef4ad1779571c020ddb73855ff2d65f5fd 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1484,6 +1484,22 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 }
 
 
+#ifndef V8_HOST_ARCH_64_BIT
+INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                              HeapObject* object,
+                                              int size)) {

Erik Corry
2012/04/16 14:35:19
This assumes that there is an even number of heade…

Vyacheslav Egorov (Chromium)
2012/04/30 14:39:11
Done.

+  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
+    heap->CreateFillerObjectAt(object->address(), kPointerSize);
+    return HeapObject::FromAddress(object->address() + kPointerSize);
+  } else {
+    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
+                               kPointerSize);
+    return object;
+  }
+}
+#endif
+
+
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
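
The helper relies on its caller having already padded the allocation by one
word; EnsureDoubleAligned only decides whether that spare word becomes a
leading or a trailing filler. A minimal standalone sketch of the same trick,
using plain pointers instead of V8's Heap/HeapObject types (all names and the
test harness below are illustrative, not code from this CL):

#include <cassert>
#include <cstddef>
#include <cstdint>

const uintptr_t kWordSize = 4;             // pointer size on a 32-bit host
const uintptr_t kDoubleAlignmentMask = 7;  // doubles want 8-byte alignment

// 'raw' points at size + kWordSize usable bytes.
char* EnsureDoubleAlignedSketch(char* raw, std::size_t size) {
  (void)size;  // real V8 code uses it to place the trailing filler object
  if ((reinterpret_cast<uintptr_t>(raw) & kDoubleAlignmentMask) != 0) {
    // Misaligned: the first word becomes filler (V8 writes a real filler
    // object there so the heap stays linearly iterable) and the object
    // starts one word later.
    return raw + kWordSize;
  }
  // Already aligned: the spare word at raw + size becomes trailing filler.
  return raw;
}

int main() {
  alignas(8) char buffer[64];
  // Word-aligned candidates are either 0 or 4 mod 8; both cases must yield
  // an 8-byte-aligned object start.
  for (std::size_t offset = 0; offset <= 12; offset += 4) {
    char* object = EnsureDoubleAlignedSketch(buffer + offset, 16);
    assert((reinterpret_cast<uintptr_t>(object) & kDoubleAlignmentMask) == 0);
  }
  return 0;
}
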
@@ -1557,6 +1573,8 @@ class ScavengingVisitor : public StaticVisitorBase {
  private:
   enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+  enum AlignmentRequirement { POINTER_ALIGNED, DOUBLE_ALIGNED };

Erik Corry
2012/04/16 14:35:19
It seems to me that if you use the actual alignmen…

+
 
   static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
     bool should_record = false;
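
Erik's truncated comment above, read together with his later "if
(kPointerSize != alig…" remark below, presumably suggests keying the template
on the required alignment value in bytes instead of a two-value enum, so the
padding guard becomes a plain comparison. A hedged sketch of that reading
(the helper name and the local constants are assumptions, not code from this
CL):

// V8 already defines kPointerSize per architecture; it is repeated here
// only to keep the sketch self-contained.
static const int kPointerSize = 4;  // 32-bit host
static const int kDoubleSize = 8;

template<int alignment>  // e.g. kPointerSize or kDoubleSize, in bytes
static inline int PaddedAllocationSize(int object_size) {
  if (alignment != kPointerSize) {
    // Reserve one extra word so the object can be shifted into alignment.
    return object_size + kPointerSize;
  }
  return object_size;
}
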
@@ -1607,7 +1625,10 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
   }
 
-  template<ObjectContents object_contents, SizeRestriction size_restriction>
+
+  template<ObjectContents object_contents,
+           SizeRestriction size_restriction,
+           AlignmentRequirement alignment>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
@@ -1616,19 +1637,26 @@ class ScavengingVisitor : public StaticVisitorBase {
                 (object_size <= Page::kMaxNonCodeHeapObjectSize));
     SLOW_ASSERT(object->Size() == object_size);
+#ifndef V8_HOST_ARCH_64_BIT
+    int allocation_size = object_size;
+    if (alignment == DOUBLE_ALIGNED) {

Erik Corry
2012/04/16 14:35:19
You can replace this with
if (kPointerSize != alig…

Vyacheslav Egorov (Chromium)
2012/04/30 14:39:11
Done.

+      allocation_size += kPointerSize;
+    }
+#endif
+
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;
       if ((size_restriction != SMALL) &&
-          (object_size > Page::kMaxNonCodeHeapObjectSize)) {
-        maybe_result = heap->lo_space()->AllocateRaw(object_size,
+          (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
                                                      NOT_EXECUTABLE);
       } else {
         if (object_contents == DATA_OBJECT) {
-          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
         } else {
-          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
         }
       }
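
The padding is confined to 32-bit hosts because there the allocator only
guarantees pointer (4-byte) alignment, so a fresh address is 8-byte aligned
only half the time; on 64-bit hosts every allocation is already double
aligned. A worked example, assuming the 32-bit FixedDoubleArray layout (map
word + length word = 8-byte header):

// Illustrative arithmetic only, for a FixedDoubleArray of length 2:
//   object_size     = 8 (map + length) + 2 * 8 (doubles) = 24 bytes
//   allocation_size = 24 + kPointerSize                  = 28 bytes
//
// Raw result at 0x1004 (4 mod 8): the word at 0x1004 becomes a filler and
// the object shifts to 0x1008, so the doubles land at 0x1010 and 0x1018,
// both 8-byte aligned.
// Raw result at 0x1008 (already 8-aligned): the object stays put and the
// spare word at 0x1008 + 24 = 0x1020 becomes the trailing filler.
//
// Note that the header must span a whole number of double words for the
// element payload to be aligned along with the object start, which appears
// to be what the first review comment on EnsureDoubleAligned is about.
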
@@ -1636,6 +1664,12 @@ class ScavengingVisitor : public StaticVisitorBase {
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
+#ifndef V8_HOST_ARCH_64_BIT
+        if (alignment == DOUBLE_ALIGNED) {
+          target = EnsureDoubleAligned(heap, target, allocation_size);
+        }
+#endif
+
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
@@ -1650,11 +1684,17 @@ class ScavengingVisitor : public StaticVisitorBase {
           return;
         }
       }
 
-    MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);
+#ifndef V8_HOST_ARCH_64_BIT
+    if (alignment == DOUBLE_ALIGNED) {
+      target = EnsureDoubleAligned(heap, target, allocation_size);
+    }
+#endif
+
     // Order is important: slot might be inside of the target if target
     // was allocated over a dead object and slot comes from the store
     // buffer.
@@ -1690,7 +1730,7 @@ class ScavengingVisitor : public StaticVisitorBase {
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, POINTER_ALIGNED>(map,
                                                  slot,

Erik Corry
2012/04/16 14:35:19
Ironically enough, the alignment is wrong here.

                                                  object,
                                                  object_size);
@@ -1702,10 +1742,11 @@ class ScavengingVisitor : public StaticVisitorBase {
                                               HeapObject* object) {
     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
     int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
-                                              slot,
-                                              object,
-                                              object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, DOUBLE_ALIGNED>(
+        map,
+        slot,
+        object,
+        object_size);
   }
@@ -1713,7 +1754,7 @@ class ScavengingVisitor : public StaticVisitorBase {
                                        HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, POINTER_ALIGNED>(map, slot, object, object_size);
   }
@@ -1722,7 +1763,7 @@ class ScavengingVisitor : public StaticVisitorBase {
                                             HeapObject* object) {
     int object_size = SeqAsciiString::cast(object)->
         SeqAsciiStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, POINTER_ALIGNED>(map, slot, object, object_size);
   }
@@ -1731,7 +1772,7 @@ class ScavengingVisitor : public StaticVisitorBase {
                                               HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, POINTER_ALIGNED>(map, slot, object, object_size);
   }
@@ -1774,7 +1815,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
 
     int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+    EvacuateObject<POINTER_OBJECT, SMALL, POINTER_ALIGNED>(map, slot, object, object_size);
   }
 
   template<ObjectContents object_contents>
@@ -1784,14 +1825,14 @@ class ScavengingVisitor : public StaticVisitorBase {
     static inline void VisitSpecialized(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, POINTER_ALIGNED>(map, slot, object, object_size);
     }
 
     static inline void Visit(Map* map,
                              HeapObject** slot,
                              HeapObject* object) {
       int object_size = map->instance_size();
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, POINTER_ALIGNED>(map, slot, object, object_size);
     }
   };
@@ -4656,6 +4697,11 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = FixedDoubleArray::SizeFor(length);
+
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+
   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
     // Too big for new space.
     space = LO_SPACE;
@@ -4668,7 +4714,16 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
   AllocationSpace retry_space =
       (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
 
-  return AllocateRaw(size, space, retry_space);
+  HeapObject* object;
+  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+  }
+
+#ifndef V8_HOST_ARCH_64_BIT
+  return EnsureDoubleAligned(this, object, size);
+#else
+  return object;
+#endif
 }
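
Taken together, the change gives the allocator an over-allocate-then-align
protocol on 32-bit hosts. A hedged usage sketch of what a caller can rely on
afterwards (this exact caller is illustrative and error handling is
abbreviated; the V8 names mirror the diff above):

// Inside some Heap member function; 'length' is the element count.
MaybeObject* maybe = AllocateRawFixedDoubleArray(length, NOT_TENURED);
Object* result;
if (!maybe->ToObject(&result)) return maybe;  // retry / OOM path
HeapObject* array = HeapObject::cast(result);
// After EnsureDoubleAligned the object start sits on an 8-byte boundary
// even on a 32-bit host, and because the header is a whole number of
// double words, so does every double element:
ASSERT((OffsetFrom(array->address()) & kDoubleAlignmentMask) == 0);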