Index: src/heap.cc
===================================================================
--- src/heap.cc (revision 11348)
+++ src/heap.cc (working copy)
@@ -1124,6 +1124,27 @@
 }


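+// A WeakObjectRetainer for use right after a scavenge: objects outside
+// from-space are retained as-is, evacuated objects are replaced by their
+// forwarding address, and objects with no forwarding address are dead.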
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
@@ -1222,6 +1243,9 @@
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();

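+  // Fix up the weak list of global contexts now that the scavenge is
+  // complete: moved entries are rewritten, dead entries are dropped.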
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());

   // Set age mark.
@@ -1308,7 +1332,8 @@

 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
@@ -1325,6 +1350,12 @@
       // Subsequent elements in the list.
       ASSERT(tail != NULL);
       tail->set_next_function_link(retain);
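+      // The write barrier is a no-op while the collector is compacting,
+      // so record the updated slot for the mark-compact collector by hand.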
+      if (record_slots) {
+        Object** next_function =
+            HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+        heap->mark_compact_collector()->RecordSlot(
+            next_function, next_function, retain);
+      }
     }
     // Retained function is new tail.
     candidate_function = reinterpret_cast<JSFunction*>(retain);
@@ -1353,6 +1384,15 @@
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete the mark-compact cycle.
+  // Note that the write barrier has no effect in the middle of a compacting
+  // mark-sweep cycle, so we have to record slots manually.
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
@@ -1368,6 +1408,14 @@
                                        Context::NEXT_CONTEXT_LINK,
                                        retain,
                                        UPDATE_WRITE_BARRIER);
+
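+        // As above, the rewritten NEXT_CONTEXT_LINK slot has to be
+        // recorded manually while the collector is compacting.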
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
@@ -1380,11 +1428,19 @@
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }

     // Move to next element in the list.
@@ -1484,6 +1540,27 @@
 }


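+// EnsureDoubleAligned (below) aligns the start of an object; the doubles in
+// a FixedDoubleArray are then aligned only because the header size is a
+// multiple of kDoubleAlignment, which this assertion guarantees.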
+STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+
+
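+// Callers over-allocate by kPointerSize whenever double alignment may be
+// needed. If the allocated address is misaligned, shift the object up one
+// word and fill the hole with a one-word filler; otherwise place the filler
+// in the unused word at the end. Either way the heap stays iterable.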
+INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                              HeapObject* object,
+                                              int size));
+
+static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                       HeapObject* object,
+                                       int size) {
+  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
+    heap->CreateFillerObjectAt(object->address(), kPointerSize);
+    return HeapObject::FromAddress(object->address() + kPointerSize);
+  } else {
+    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
+                               kPointerSize);
+    return object;
+  }
+}
+
+
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
@@ -1607,7 +1684,10 @@
     }
   }

-  template<ObjectContents object_contents, SizeRestriction size_restriction>
+
+  template<ObjectContents object_contents,
+           SizeRestriction size_restriction,
+           int alignment>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
@@ -1616,19 +1696,26 @@
                 (object_size <= Page::kMaxNonCodeHeapObjectSize));
     SLOW_ASSERT(object->Size() == object_size);

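+    // Objects that need double alignment are allocated one word larger;
+    // EnsureDoubleAligned uses the slack to realign the object if needed.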
+    int allocation_size = object_size;
+    if (alignment != kObjectAlignment) {
+      ASSERT(alignment == kDoubleAlignment);
+      allocation_size += kPointerSize;
+    }
+
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;

       if ((size_restriction != SMALL) &&
-          (object_size > Page::kMaxNonCodeHeapObjectSize)) {
-        maybe_result = heap->lo_space()->AllocateRaw(object_size,
+          (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
                                                      NOT_EXECUTABLE);
       } else {
         if (object_contents == DATA_OBJECT) {
-          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
         } else {
-          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
+          maybe_result =
+              heap->old_pointer_space()->AllocateRaw(allocation_size);
         }
       }

@@ -1636,6 +1723,10 @@
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);

+        if (alignment != kObjectAlignment) {
+          target = EnsureDoubleAligned(heap, target, allocation_size);
+        }
+
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
@@ -1643,18 +1734,27 @@
       MigrateObject(heap, object, target, object_size);

       if (object_contents == POINTER_OBJECT) {
-        heap->promotion_queue()->insert(target, object_size);
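+          // Iterate a promoted function only up to its non-weak fields:
+          // the weak next-function link is updated by
+          // ProcessWeakReferences, not by the scavenger.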
+        if (map->instance_type() == JS_FUNCTION_TYPE) {
+          heap->promotion_queue()->insert(
+              target, JSFunction::kNonWeakFieldsEndOffset);
+        } else {
+          heap->promotion_queue()->insert(target, object_size);
+        }
       }

       heap->tracer()->increment_promoted_objects_size(object_size);
       return;
     }
   }
-  MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+  MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
   heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
   Object* result = allocation->ToObjectUnchecked();
   HeapObject* target = HeapObject::cast(result);

+  if (alignment != kObjectAlignment) {
+    target = EnsureDoubleAligned(heap, target, allocation_size);
+  }
+
   // Order is important: slot might be inside of the target if target
   // was allocated over a dead object and slot comes from the store
   // buffer.
@@ -1690,7 +1790,7 @@
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
                                                  slot,
                                                  object,
                                                  object_size);
@@ -1702,10 +1802,11 @@
                                               HeapObject* object) {
     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
     int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
-                                              slot,
-                                              object,
-                                              object_size);
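+    // Evacuate with kDoubleAlignment so the unboxed doubles in the copy
+    // stay double aligned.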
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
+        map,
+        slot,
+        object,
+        object_size);
   }


@@ -1713,7 +1814,8 @@
                                        HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }


@@ -1722,7 +1824,8 @@
                                               HeapObject* object) {
     int object_size = SeqAsciiString::cast(object)->
         SeqAsciiStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }


@@ -1731,7 +1834,8 @@
                                                 HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }


@@ -1774,7 +1878,8 @@
     }

     int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+    EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+        map, slot, object, object_size);
   }

  template<ObjectContents object_contents>
@@ -1784,14 +1889,16 @@
    static inline void VisitSpecialized(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
    }

    static inline void Visit(Map* map,
                             HeapObject** slot,
                             HeapObject* object) {
      int object_size = map->instance_size();
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
    }
  };

@@ -3827,6 +3934,16 @@
 }


+MaybeObject* Heap::AllocateJSModule() {
+  // Allocate a fresh map. Modules do not have a prototype.
+  Map* map;
+  MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
+  if (!maybe_map->To(&map)) return maybe_map;
+  // Allocate the object based on the map.
+  return AllocateJSObjectFromMap(map, TENURED);
+}
+
+
 MaybeObject* Heap::AllocateJSArrayAndStorage(
     ElementsKind elements_kind,
     int length,
@@ -3963,7 +4080,7 @@
   // Fill these accessors into the dictionary.
   DescriptorArray* descs = map->instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    PropertyDetails details(descs->GetDetails(i));
+    PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
     PropertyDetails d =
         PropertyDetails(details.attributes(), CALLBACKS, details.index());
@@ -4656,6 +4773,11 @@
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = FixedDoubleArray::SizeFor(length);
+
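+  // 32-bit allocations are only pointer aligned, so reserve an extra word
+  // for EnsureDoubleAligned below; 64-bit allocations are already double
+  // aligned.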
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+
   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
     // Too big for new space.
     space = LO_SPACE;
@@ -4668,7 +4790,12 @@
   AllocationSpace retry_space =
       (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;

-  return AllocateRaw(size, space, retry_space);
+  HeapObject* object;
+  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+  }
+
+  return EnsureDoubleAligned(this, object, size);
 }


@@ -4701,6 +4828,22 @@
 }


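+// Allocates a module context. It is tenured and stores the module's
+// ScopeInfo in its extension slot.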
+MaybeObject* Heap::AllocateModuleContext(Context* previous,
+                                         ScopeInfo* scope_info) {
+  Object* result;
+  { MaybeObject* maybe_result =
+        AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
+  Context* context = reinterpret_cast<Context*>(result);
+  context->set_map_no_write_barrier(module_context_map());
+  context->set_previous(previous);
+  context->set_extension(scope_info);
+  context->set_global(previous->global());
+  return context;
+}
+
+
 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
   Object* result;
