Chromium Code Reviews

Side by Side Diff: src/mark-compact.cc

Issue 35103002: Align double array backing store during compaction and mark-sweep promotion. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebase (created 7 years ago)
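The pattern applied at each allocation site below is the same: when Heap::MustBeDoubleAligned(object) reports that the object (a double array backing store) needs 8-byte alignment, the requested size is padded by one kPointerSize word and Heap::EnsureDoubleAligned() is called on the raw allocation before the object is migrated. Both helpers live outside this file (the CL also touches src/heap-inl.h). A minimal sketch of what EnsureDoubleAligned is assumed to do, using the heap's filler-object mechanism; the actual implementation in heap.cc may differ in detail:

  // Sketch only (assumed behaviour, not the code under review).
  // |size| is the padded allocation size, i.e. it already includes the
  // extra kPointerSize word reserved by the caller.
  HeapObject* Heap::EnsureDoubleAligned(HeapObject* object, int size) {
    if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
      // Unaligned start: turn the first word into a one-word filler and
      // shift the object up to the next 8-byte boundary.
      CreateFillerObjectAt(object->address(), kPointerSize);
      return HeapObject::FromAddress(object->address() + kPointerSize);
    } else {
      // Already aligned: the spare word at the end becomes the filler.
      CreateFillerObjectAt(object->address() + size - kPointerSize,
                           kPointerSize);
      return object;
    }
  }

Either way the padding word ends up as a filler that the collector treats as dead space, so the migrated object itself keeps its original size.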
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1996 matching lines...)
       Heap::UpdateAllocationSiteFeedback(object);

       offset++;
       current_cell >>= 1;
       // Aggressively promote young survivors to the old space.
       if (TryPromoteObject(object, size)) {
         continue;
       }

       // Promotion failed. Just migrate object to another semispace.
-      MaybeObject* allocation = new_space->AllocateRaw(size);
+      int allocation_size = size;
+      if (Heap::MustBeDoubleAligned(object)) {
+        ASSERT(kObjectAlignment != kDoubleAlignment);
+        allocation_size += kPointerSize;
+      }
+      MaybeObject* allocation = new_space->AllocateRaw(allocation_size);
       if (allocation->IsFailure()) {
         if (!new_space->AddFreshPage()) {
           // Shouldn't happen. We are sweeping linearly, and to-space
           // has the same number of pages as from-space, so there is
           // always room.
           UNREACHABLE();
         }
-        allocation = new_space->AllocateRaw(size);
+        allocation = new_space->AllocateRaw(allocation_size);
         ASSERT(!allocation->IsFailure());
       }
-      Object* target = allocation->ToObjectUnchecked();
-
-      MigrateObject(HeapObject::cast(target)->address(),
+      Object* result = allocation->ToObjectUnchecked();
+      HeapObject* target = HeapObject::cast(result);
+      if (Heap::MustBeDoubleAligned(object)) {
+        target = heap()->EnsureDoubleAligned(target, allocation_size);
+      }
+      MigrateObject(target->address(),
                     object->address(),
                     size,
                     NEW_SPACE);
     }
     *cells = 0;
   }
   return survivors_size;
 }

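Note that in this semispace-copy path MigrateObject() is still given the original size rather than allocation_size: only the object's own bytes are copied, and the padding word is assumed to have already been converted into a filler by EnsureDoubleAligned(), so it never becomes part of the migrated object.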
(...skipping 897 matching lines...)
   if (map_word.IsForwardingAddress()) {
     return String::cast(map_word.ToForwardingAddress());
   }

   return String::cast(*p);
 }


 bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
                                             int object_size) {
+  int allocation_size = object_size;
+  if (Heap::MustBeDoubleAligned(object)) {
+    ASSERT(kObjectAlignment != kDoubleAlignment);
+    allocation_size += kPointerSize;
+  }
+
   // TODO(hpayer): Replace that check with an assert.
-  CHECK(object_size <= Page::kMaxNonCodeHeapObjectSize);
+  CHECK(allocation_size <= Page::kNonCodeObjectAreaSize);

   OldSpace* target_space = heap()->TargetSpace(object);

   ASSERT(target_space == heap()->old_pointer_space() ||
          target_space == heap()->old_data_space());
   Object* result;
-  MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
+  MaybeObject* maybe_result = target_space->AllocateRaw(allocation_size);
   if (maybe_result->ToObject(&result)) {
     HeapObject* target = HeapObject::cast(result);
+    if (Heap::MustBeDoubleAligned(object)) {
+      target = heap()->EnsureDoubleAligned(target, allocation_size);
+    }
     MigrateObject(target->address(),
                   object->address(),
                   object_size,
                   target_space->identity());
     heap()->mark_compact_collector()->tracer()->
         increment_promoted_objects_size(object_size);
     return true;
   }

   return false;
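In the promotion path the bound check now covers the padded allocation_size, compared against Page::kNonCodeObjectAreaSize, while the promoted-objects counter keeps using object_size, since the alignment word is not live data.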
(...skipping 50 matching lines...)
     if (*cell == 0) continue;

     int live_objects = MarkWordToObjectStarts(*cell, offsets);
     for (int i = 0; i < live_objects; i++) {
       Address object_addr = cell_base + offsets[i] * kPointerSize;
       HeapObject* object = HeapObject::FromAddress(object_addr);
       ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object)));

       int size = object->Size();

-      MaybeObject* target = space->AllocateRaw(size);
+      int allocation_size = size;
+      if (Heap::MustBeDoubleAligned(object)) {
+        ASSERT(kObjectAlignment != kDoubleAlignment);
+        allocation_size += kPointerSize;
+      }
+      MaybeObject* target = space->AllocateRaw(allocation_size);
       if (target->IsFailure()) {
         // OS refused to give us memory.
         V8::FatalProcessOutOfMemory("Evacuation");
         return;
       }

-      Object* target_object = target->ToObjectUnchecked();
-
-      MigrateObject(HeapObject::cast(target_object)->address(),
+      Object* result = target->ToObjectUnchecked();
+      HeapObject* target_object = HeapObject::cast(result);
+      if (Heap::MustBeDoubleAligned(object)) {
+        target_object =
+            heap()->EnsureDoubleAligned(target_object, allocation_size);
+      }
+      MigrateObject(target_object->address(),
                     object_addr,
                     size,
                     space->identity());
       ASSERT(object->map_word().IsForwardingAddress());
     }

     // Clear marking bits for current cell.
     *cell = 0;
   }
   p->ResetLiveBytes();
(...skipping 1312 matching lines...)
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }


 } }  // namespace v8::internal