Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 35103002: Align double array backing store during compaction and mark-sweep promotion. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebase (created 7 years ago)
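What this CL does: V8's normal object alignment (kObjectAlignment) is one word, which on 32-bit targets is only 4 bytes, while unboxed doubles want 8-byte alignment. The patch over-allocates double-backed arrays by one word and then plants a one-word filler object either before the payload (if the raw address is misaligned) or after it (if it is already aligned). A minimal standalone sketch of that scheme, using illustrative constants rather than V8's actual declarations:

#include <cassert>
#include <cstdint>

// Illustrative constants for a 32-bit host; not V8's declarations.
const uintptr_t kPointerSize = 4;
const uintptr_t kDoubleAlignment = 8;
const uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

// Given a raw allocation that is one word larger than needed, return the
// 8-byte-aligned payload address; the spare word is left for a filler
// either at the front or at the back.
uintptr_t EnsureDoubleAlignedSketch(uintptr_t address) {
  if ((address & kDoubleAlignmentMask) != 0) {
    return address + kPointerSize;  // filler goes in front of the payload
  }
  return address;                   // filler goes behind the payload
}

int main() {
  assert(EnsureDoubleAlignedSketch(0x1004) == 0x1008);  // was misaligned
  assert(EnsureDoubleAlignedSketch(0x1008) == 0x1008);  // already aligned
  return 0;
}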
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1999 matching lines...)
@@ -2010,42 +2010,20 @@
     }

     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_.top());

   return new_space_front;
 }


-STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
-STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0);
-
-
-INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
-                                              HeapObject* object,
-                                              int size));
-
-static HeapObject* EnsureDoubleAligned(Heap* heap,
-                                       HeapObject* object,
-                                       int size) {
-  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
-    heap->CreateFillerObjectAt(object->address(), kPointerSize);
-    return HeapObject::FromAddress(object->address() + kPointerSize);
-  } else {
-    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
-                               kPointerSize);
-    return object;
-  }
-}
-
-
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
 };


 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };


 template<MarksHandling marks_handling,
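The hunk above removes the file-static EnsureDoubleAligned helper along with its two STATIC_ASSERTs; the new call sites below use heap->EnsureDoubleAligned(...), so the CL turns it into a Heap method, presumably defined in src/heap-inl.h given the changed-file list. The STATIC_ASSERTs encode a subtle precondition: aligning the object's base address to 8 bytes only aligns the doubles if the header that precedes them is itself a multiple of 8 bytes long. A sketch with illustrative sizes, not V8's actual layout:

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kDoubleAlignmentMask = 7;
  // Illustrative 32-bit layout: map word (4) + length word (4) = 8 bytes.
  const uintptr_t kHeaderSize = 8;
  assert((kHeaderSize & kDoubleAlignmentMask) == 0);  // the STATIC_ASSERT

  // Once EnsureDoubleAligned has put the base on an 8-byte boundary, the
  // first double element lands on one too, because the header size is a
  // multiple of 8.
  uintptr_t base = 0x1008;
  uintptr_t first_double = base + kHeaderSize;
  assert((first_double & kDoubleAlignmentMask) == 0);
  return 0;
}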
(...skipping 158 matching lines...)
@@ -2210,50 +2188,50 @@
       } else {
         ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
         maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
       }

       Object* result = NULL;  // Initialization to please compiler.
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);

         if (alignment != kObjectAlignment) {
-          target = EnsureDoubleAligned(heap, target, allocation_size);
+          target = heap->EnsureDoubleAligned(target, allocation_size);
         }

         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
         *slot = target;
         MigrateObject(heap, object, target, object_size);

         if (object_contents == POINTER_OBJECT) {
           if (map->instance_type() == JS_FUNCTION_TYPE) {
             heap->promotion_queue()->insert(
                 target, JSFunction::kNonWeakFieldsEndOffset);
           } else {
             heap->promotion_queue()->insert(target, object_size);
           }
         }

         heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
     ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
     MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);

     if (alignment != kObjectAlignment) {
-      target = EnsureDoubleAligned(heap, target, allocation_size);
+      target = heap->EnsureDoubleAligned(target, allocation_size);
     }

     // Order is important: slot might be inside of the target if target
     // was allocated over a dead object and slot comes from the store
     // buffer.
     *slot = target;
     MigrateObject(heap, object, target, object_size);
     return;
   }

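The "Order is important" comment in the hunk above deserves unpacking: the slot was recorded in the store buffer and may point into memory that has since died and been handed out again as the promotion target. Storing *slot before calling MigrateObject means that if the two alias, the migrated payload overwrites the now-meaningless slot store rather than the other way around. A standalone illustration of that ordering, not V8 code:

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  uintptr_t old_object[4] = {1, 2, 3, 4};  // payload to migrate
  uintptr_t target[4] = {0, 0, 0, 0};      // reused memory for the copy
  uintptr_t* slot = &target[2];            // stale slot aliasing the target

  // 1. Update the slot first ...
  *slot = reinterpret_cast<uintptr_t>(target);
  // 2. ... then migrate; the copied payload wins where the two overlap.
  std::memcpy(target, old_object, sizeof(old_object));

  assert(target[2] == 3);  // the reverse order would have corrupted this word
  return 0;
}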
(...skipping 3186 matching lines...)
@@ -5446,42 +5424,42 @@
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
   AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);

   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }

-  return EnsureDoubleAligned(this, object, size);
+  return EnsureDoubleAligned(object, size);
 }


 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
                                              int number_of_ptr_entries,
                                              int number_of_int32_entries) {
   ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 ||
          number_of_int32_entries > 0);
   int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
                                         number_of_ptr_entries,
                                         number_of_int32_entries);
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
   AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);

   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }
-  object = EnsureDoubleAligned(this, object, size);
+  object = EnsureDoubleAligned(object, size);
   HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map());

   ConstantPoolArray* constant_pool =
       reinterpret_cast<ConstantPoolArray*>(object);
   constant_pool->SetEntryCounts(number_of_int64_entries,
                                 number_of_ptr_entries,
                                 number_of_int32_entries);
   MemsetPointer(
       HeapObject::RawField(
           constant_pool,
(...skipping 2471 matching lines...)
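Both allocators in the hunk above add kPointerSize to the requested size on 32-bit hosts so that EnsureDoubleAligned always has one spare word to convert into a filler; 64-bit hosts already hand out 8-byte-aligned memory, so no slack is needed. The sizing rule, sketched with an explicit pointer-size parameter in place of V8's #ifdef (the helper name is hypothetical):

#include <cassert>
#include <cstddef>

// Hypothetical helper mirroring the #ifndef V8_HOST_ARCH_64_BIT branches.
size_t AlignedAllocationSize(size_t size, size_t pointer_size) {
  if (pointer_size == 4) {
    return size + pointer_size;  // slack word for the front/back filler
  }
  return size;  // 64-bit allocations are already double-aligned
}

int main() {
  assert(AlignedAllocationSize(24, 4) == 28);  // 32-bit host
  assert(AlignedAllocationSize(24, 8) == 24);  // 64-bit host
  return 0;
}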
@@ -7959,10 +7937,10 @@
         static_cast<int>(object_sizes_last_time_[index]));
   CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT

   OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }

 } }  // namespace v8::internal