Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(7)

Side by Side Diff: src/heap.cc

Issue 9452002: Ensure that executable pages are properly guarded. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | src/mark-compact.cc » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1074 matching lines...) Expand 10 before | Expand all | Expand 10 after
1085 guard_ = false; 1085 guard_ = false;
1086 } 1086 }
1087 1087
1088 1088
1089 void PromotionQueue::RelocateQueueHead() { 1089 void PromotionQueue::RelocateQueueHead() {
1090 ASSERT(emergency_stack_ == NULL); 1090 ASSERT(emergency_stack_ == NULL);
1091 1091
1092 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); 1092 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
1093 intptr_t* head_start = rear_; 1093 intptr_t* head_start = rear_;
1094 intptr_t* head_end = 1094 intptr_t* head_end =
1095 Min(front_, reinterpret_cast<intptr_t*>(p->body_limit())); 1095 Min(front_, reinterpret_cast<intptr_t*>(p->area_end()));
1096 1096
1097 int entries_count = 1097 int entries_count =
1098 static_cast<int>(head_end - head_start) / kEntrySizeInWords; 1098 static_cast<int>(head_end - head_start) / kEntrySizeInWords;
1099 1099
1100 emergency_stack_ = new List<Entry>(2 * entries_count); 1100 emergency_stack_ = new List<Entry>(2 * entries_count);
1101 1101
1102 while (head_start != head_end) { 1102 while (head_start != head_end) {
1103 int size = static_cast<int>(*(head_start++)); 1103 int size = static_cast<int>(*(head_start++));
1104 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++)); 1104 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++));
1105 emergency_stack_->Add(Entry(obj, size)); 1105 emergency_stack_->Add(Entry(obj, size));
(...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after
1428 // The addresses new_space_front and new_space_.top() define a 1428 // The addresses new_space_front and new_space_.top() define a
1429 // queue of unprocessed copied objects. Process them until the 1429 // queue of unprocessed copied objects. Process them until the
1430 // queue is empty. 1430 // queue is empty.
1431 while (new_space_front != new_space_.top()) { 1431 while (new_space_front != new_space_.top()) {
1432 if (!NewSpacePage::IsAtEnd(new_space_front)) { 1432 if (!NewSpacePage::IsAtEnd(new_space_front)) {
1433 HeapObject* object = HeapObject::FromAddress(new_space_front); 1433 HeapObject* object = HeapObject::FromAddress(new_space_front);
1434 new_space_front += 1434 new_space_front +=
1435 NewSpaceScavenger::IterateBody(object->map(), object); 1435 NewSpaceScavenger::IterateBody(object->map(), object);
1436 } else { 1436 } else {
1437 new_space_front = 1437 new_space_front =
1438 NewSpacePage::FromLimit(new_space_front)->next_page()->body(); 1438 NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
1439 } 1439 }
1440 } 1440 }
1441 1441
1442 // Promote and process all the to-be-promoted objects. 1442 // Promote and process all the to-be-promoted objects.
1443 { 1443 {
1444 StoreBufferRebuildScope scope(this, 1444 StoreBufferRebuildScope scope(this,
1445 store_buffer(), 1445 store_buffer(),
1446 &ScavengeStoreBufferCallback); 1446 &ScavengeStoreBufferCallback);
1447 while (!promotion_queue()->is_empty()) { 1447 while (!promotion_queue()->is_empty()) {
1448 HeapObject* target; 1448 HeapObject* target;
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
1590 } 1590 }
1591 } 1591 }
1592 } 1592 }
1593 1593
1594 template<ObjectContents object_contents, SizeRestriction size_restriction> 1594 template<ObjectContents object_contents, SizeRestriction size_restriction>
1595 static inline void EvacuateObject(Map* map, 1595 static inline void EvacuateObject(Map* map,
1596 HeapObject** slot, 1596 HeapObject** slot,
1597 HeapObject* object, 1597 HeapObject* object,
1598 int object_size) { 1598 int object_size) {
1599 SLOW_ASSERT((size_restriction != SMALL) || 1599 SLOW_ASSERT((size_restriction != SMALL) ||
1600 (object_size <= Page::kMaxHeapObjectSize)); 1600 (object_size <= Page::kMaxNonCodeHeapObjectSize));
1601 SLOW_ASSERT(object->Size() == object_size); 1601 SLOW_ASSERT(object->Size() == object_size);
1602 1602
1603 Heap* heap = map->GetHeap(); 1603 Heap* heap = map->GetHeap();
1604 if (heap->ShouldBePromoted(object->address(), object_size)) { 1604 if (heap->ShouldBePromoted(object->address(), object_size)) {
1605 MaybeObject* maybe_result; 1605 MaybeObject* maybe_result;
1606 1606
1607 if ((size_restriction != SMALL) && 1607 if ((size_restriction != SMALL) &&
1608 (object_size > Page::kMaxHeapObjectSize)) { 1608 (object_size > Page::kMaxNonCodeHeapObjectSize)) {
1609 maybe_result = heap->lo_space()->AllocateRaw(object_size, 1609 maybe_result = heap->lo_space()->AllocateRaw(object_size,
1610 NOT_EXECUTABLE); 1610 NOT_EXECUTABLE);
1611 } else { 1611 } else {
1612 if (object_contents == DATA_OBJECT) { 1612 if (object_contents == DATA_OBJECT) {
1613 maybe_result = heap->old_data_space()->AllocateRaw(object_size); 1613 maybe_result = heap->old_data_space()->AllocateRaw(object_size);
1614 } else { 1614 } else {
1615 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size); 1615 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
1616 } 1616 }
1617 } 1617 }
1618 1618
(...skipping 638 matching lines...) Expand 10 before | Expand all | Expand 10 after
2257 set_message_object_map(Map::cast(obj)); 2257 set_message_object_map(Map::cast(obj));
2258 2258
2259 ASSERT(!InNewSpace(empty_fixed_array())); 2259 ASSERT(!InNewSpace(empty_fixed_array()));
2260 return true; 2260 return true;
2261 } 2261 }
2262 2262
2263 2263
2264 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 2264 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2265 // Statically ensure that it is safe to allocate heap numbers in paged 2265 // Statically ensure that it is safe to allocate heap numbers in paged
2266 // spaces. 2266 // spaces.
2267 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 2267 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize);
2268 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2268 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2269 2269
2270 Object* result; 2270 Object* result;
2271 { MaybeObject* maybe_result = 2271 { MaybeObject* maybe_result =
2272 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); 2272 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
2273 if (!maybe_result->ToObject(&result)) return maybe_result; 2273 if (!maybe_result->ToObject(&result)) return maybe_result;
2274 } 2274 }
2275 2275
2276 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); 2276 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2277 HeapNumber::cast(result)->set_value(value); 2277 HeapNumber::cast(result)->set_value(value);
2278 return result; 2278 return result;
2279 } 2279 }
2280 2280
2281 2281
2282 MaybeObject* Heap::AllocateHeapNumber(double value) { 2282 MaybeObject* Heap::AllocateHeapNumber(double value) {
2283 // Use general version, if we're forced to always allocate. 2283 // Use general version, if we're forced to always allocate.
2284 if (always_allocate()) return AllocateHeapNumber(value, TENURED); 2284 if (always_allocate()) return AllocateHeapNumber(value, TENURED);
2285 2285
2286 // This version of AllocateHeapNumber is optimized for 2286 // This version of AllocateHeapNumber is optimized for
2287 // allocation in new space. 2287 // allocation in new space.
2288 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 2288 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize);
2289 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 2289 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2290 Object* result; 2290 Object* result;
2291 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize); 2291 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
2292 if (!maybe_result->ToObject(&result)) return maybe_result; 2292 if (!maybe_result->ToObject(&result)) return maybe_result;
2293 } 2293 }
2294 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); 2294 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2295 HeapNumber::cast(result)->set_value(value); 2295 HeapNumber::cast(result)->set_value(value);
2296 return result; 2296 return result;
2297 } 2297 }
2298 2298
(...skipping 550 matching lines...) Expand 10 before | Expand all | Expand 10 after
2849 return Smi::FromInt(int_value); 2849 return Smi::FromInt(int_value);
2850 } 2850 }
2851 2851
2852 // Materialize the value in the heap. 2852 // Materialize the value in the heap.
2853 return AllocateHeapNumber(value, pretenure); 2853 return AllocateHeapNumber(value, pretenure);
2854 } 2854 }
2855 2855
2856 2856
2857 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { 2857 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
2858 // Statically ensure that it is safe to allocate foreigns in paged spaces. 2858 // Statically ensure that it is safe to allocate foreigns in paged spaces.
2859 STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize); 2859 STATIC_ASSERT(Foreign::kSize <= Page::kMaxNonCodeHeapObjectSize);
2860 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2860 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2861 Foreign* result; 2861 Foreign* result;
2862 MaybeObject* maybe_result = Allocate(foreign_map(), space); 2862 MaybeObject* maybe_result = Allocate(foreign_map(), space);
2863 if (!maybe_result->To(&result)) return maybe_result; 2863 if (!maybe_result->To(&result)) return maybe_result;
2864 result->set_foreign_address(address); 2864 result->set_foreign_address(address);
2865 return result; 2865 return result;
2866 } 2866 }
2867 2867
2868 2868
2869 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { 2869 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
(...skipping 397 matching lines...) Expand 10 before | Expand all | Expand 10 after
3267 3267
3268 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 3268 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
3269 if (length < 0 || length > ByteArray::kMaxLength) { 3269 if (length < 0 || length > ByteArray::kMaxLength) {
3270 return Failure::OutOfMemoryException(); 3270 return Failure::OutOfMemoryException();
3271 } 3271 }
3272 if (pretenure == NOT_TENURED) { 3272 if (pretenure == NOT_TENURED) {
3273 return AllocateByteArray(length); 3273 return AllocateByteArray(length);
3274 } 3274 }
3275 int size = ByteArray::SizeFor(length); 3275 int size = ByteArray::SizeFor(length);
3276 Object* result; 3276 Object* result;
3277 { MaybeObject* maybe_result = (size <= MaxObjectSizeInPagedSpace()) 3277 { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
3278 ? old_data_space_->AllocateRaw(size) 3278 ? old_data_space_->AllocateRaw(size)
3279 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); 3279 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
3280 if (!maybe_result->ToObject(&result)) return maybe_result; 3280 if (!maybe_result->ToObject(&result)) return maybe_result;
3281 } 3281 }
3282 3282
3283 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( 3283 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
3284 byte_array_map()); 3284 byte_array_map());
3285 reinterpret_cast<ByteArray*>(result)->set_length(length); 3285 reinterpret_cast<ByteArray*>(result)->set_length(length);
3286 return result; 3286 return result;
3287 } 3287 }
3288 3288
3289 3289
3290 MaybeObject* Heap::AllocateByteArray(int length) { 3290 MaybeObject* Heap::AllocateByteArray(int length) {
3291 if (length < 0 || length > ByteArray::kMaxLength) { 3291 if (length < 0 || length > ByteArray::kMaxLength) {
3292 return Failure::OutOfMemoryException(); 3292 return Failure::OutOfMemoryException();
3293 } 3293 }
3294 int size = ByteArray::SizeFor(length); 3294 int size = ByteArray::SizeFor(length);
3295 AllocationSpace space = 3295 AllocationSpace space =
3296 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE; 3296 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
3297 Object* result; 3297 Object* result;
3298 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3298 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
3299 if (!maybe_result->ToObject(&result)) return maybe_result; 3299 if (!maybe_result->ToObject(&result)) return maybe_result;
3300 } 3300 }
3301 3301
3302 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( 3302 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
3303 byte_array_map()); 3303 byte_array_map());
3304 reinterpret_cast<ByteArray*>(result)->set_length(length); 3304 reinterpret_cast<ByteArray*>(result)->set_length(length);
3305 return result; 3305 return result;
3306 } 3306 }
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
3352 MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); 3352 MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
3353 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info; 3353 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;
3354 3354
3355 // Compute size. 3355 // Compute size.
3356 int body_size = RoundUp(desc.instr_size, kObjectAlignment); 3356 int body_size = RoundUp(desc.instr_size, kObjectAlignment);
3357 int obj_size = Code::SizeFor(body_size); 3357 int obj_size = Code::SizeFor(body_size);
3358 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); 3358 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
3359 MaybeObject* maybe_result; 3359 MaybeObject* maybe_result;
3360 // Large code objects and code objects which should stay at a fixed address 3360 // Large code objects and code objects which should stay at a fixed address
3361 // are allocated in large object space. 3361 // are allocated in large object space.
3362 if (obj_size > MaxObjectSizeInPagedSpace() || immovable) { 3362 if (obj_size > code_space()->AreaSize() || immovable) {
3363 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); 3363 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3364 } else { 3364 } else {
3365 maybe_result = code_space_->AllocateRaw(obj_size); 3365 maybe_result = code_space_->AllocateRaw(obj_size);
3366 } 3366 }
3367 3367
3368 Object* result; 3368 Object* result;
3369 if (!maybe_result->ToObject(&result)) return maybe_result; 3369 if (!maybe_result->ToObject(&result)) return maybe_result;
3370 3370
3371 // Initialize the object 3371 // Initialize the object
3372 HeapObject::cast(result)->set_map_no_write_barrier(code_map()); 3372 HeapObject::cast(result)->set_map_no_write_barrier(code_map());
(...skipping 28 matching lines...) Expand all
3401 } 3401 }
3402 #endif 3402 #endif
3403 return code; 3403 return code;
3404 } 3404 }
3405 3405
3406 3406
3407 MaybeObject* Heap::CopyCode(Code* code) { 3407 MaybeObject* Heap::CopyCode(Code* code) {
3408 // Allocate an object the same size as the code object. 3408 // Allocate an object the same size as the code object.
3409 int obj_size = code->Size(); 3409 int obj_size = code->Size();
3410 MaybeObject* maybe_result; 3410 MaybeObject* maybe_result;
3411 if (obj_size > MaxObjectSizeInPagedSpace()) { 3411 if (obj_size > code_space()->AreaSize()) {
3412 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); 3412 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3413 } else { 3413 } else {
3414 maybe_result = code_space_->AllocateRaw(obj_size); 3414 maybe_result = code_space_->AllocateRaw(obj_size);
3415 } 3415 }
3416 3416
3417 Object* result; 3417 Object* result;
3418 if (!maybe_result->ToObject(&result)) return maybe_result; 3418 if (!maybe_result->ToObject(&result)) return maybe_result;
3419 3419
3420 // Copy code object. 3420 // Copy code object.
3421 Address old_addr = code->address(); 3421 Address old_addr = code->address();
(...skipping 22 matching lines...) Expand all
3444 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); 3444 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
3445 3445
3446 int new_obj_size = Code::SizeFor(new_body_size); 3446 int new_obj_size = Code::SizeFor(new_body_size);
3447 3447
3448 Address old_addr = code->address(); 3448 Address old_addr = code->address();
3449 3449
3450 size_t relocation_offset = 3450 size_t relocation_offset =
3451 static_cast<size_t>(code->instruction_end() - old_addr); 3451 static_cast<size_t>(code->instruction_end() - old_addr);
3452 3452
3453 MaybeObject* maybe_result; 3453 MaybeObject* maybe_result;
3454 if (new_obj_size > MaxObjectSizeInPagedSpace()) { 3454 if (new_obj_size > code_space()->AreaSize()) {
3455 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); 3455 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
3456 } else { 3456 } else {
3457 maybe_result = code_space_->AllocateRaw(new_obj_size); 3457 maybe_result = code_space_->AllocateRaw(new_obj_size);
3458 } 3458 }
3459 3459
3460 Object* result; 3460 Object* result;
3461 if (!maybe_result->ToObject(&result)) return maybe_result; 3461 if (!maybe_result->ToObject(&result)) return maybe_result;
3462 3462
3463 // Copy code object. 3463 // Copy code object.
3464 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 3464 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
(...skipping 300 matching lines...) Expand 10 before | Expand all | Expand 10 after
3765 map->inobject_properties(); 3765 map->inobject_properties();
3766 ASSERT(prop_size >= 0); 3766 ASSERT(prop_size >= 0);
3767 Object* properties; 3767 Object* properties;
3768 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); 3768 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
3769 if (!maybe_properties->ToObject(&properties)) return maybe_properties; 3769 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3770 } 3770 }
3771 3771
3772 // Allocate the JSObject. 3772 // Allocate the JSObject.
3773 AllocationSpace space = 3773 AllocationSpace space =
3774 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 3774 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3775 if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE; 3775 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
3776 Object* obj; 3776 Object* obj;
3777 { MaybeObject* maybe_obj = Allocate(map, space); 3777 { MaybeObject* maybe_obj = Allocate(map, space);
3778 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 3778 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3779 } 3779 }
3780 3780
3781 // Initialize the JSObject. 3781 // Initialize the JSObject.
3782 InitializeJSObjectFromMap(JSObject::cast(obj), 3782 InitializeJSObjectFromMap(JSObject::cast(obj),
3783 FixedArray::cast(properties), 3783 FixedArray::cast(properties),
3784 map); 3784 map);
3785 ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() || 3785 ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() ||
(...skipping 487 matching lines...) Expand 10 before | Expand all | Expand 10 after
4273 } else { 4273 } else {
4274 if (chars > SeqTwoByteString::kMaxLength) { 4274 if (chars > SeqTwoByteString::kMaxLength) {
4275 return Failure::OutOfMemoryException(); 4275 return Failure::OutOfMemoryException();
4276 } 4276 }
4277 map = symbol_map(); 4277 map = symbol_map();
4278 size = SeqTwoByteString::SizeFor(chars); 4278 size = SeqTwoByteString::SizeFor(chars);
4279 } 4279 }
4280 4280
4281 // Allocate string. 4281 // Allocate string.
4282 Object* result; 4282 Object* result;
4283 { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace()) 4283 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
4284 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE) 4284 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
4285 : old_data_space_->AllocateRaw(size); 4285 : old_data_space_->AllocateRaw(size);
4286 if (!maybe_result->ToObject(&result)) return maybe_result; 4286 if (!maybe_result->ToObject(&result)) return maybe_result;
4287 } 4287 }
4288 4288
4289 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); 4289 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
4290 // Set length and hash fields of the allocated string. 4290 // Set length and hash fields of the allocated string.
4291 String* answer = String::cast(result); 4291 String* answer = String::cast(result);
4292 answer->set_length(chars); 4292 answer->set_length(chars);
4293 answer->set_hash_field(hash_field); 4293 answer->set_hash_field(hash_field);
(...skipping 16 matching lines...) Expand all
4310 int size = SeqAsciiString::SizeFor(length); 4310 int size = SeqAsciiString::SizeFor(length);
4311 ASSERT(size <= SeqAsciiString::kMaxSize); 4311 ASSERT(size <= SeqAsciiString::kMaxSize);
4312 4312
4313 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 4313 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4314 AllocationSpace retry_space = OLD_DATA_SPACE; 4314 AllocationSpace retry_space = OLD_DATA_SPACE;
4315 4315
4316 if (space == NEW_SPACE) { 4316 if (space == NEW_SPACE) {
4317 if (size > kMaxObjectSizeInNewSpace) { 4317 if (size > kMaxObjectSizeInNewSpace) {
4318 // Allocate in large object space, retry space will be ignored. 4318 // Allocate in large object space, retry space will be ignored.
4319 space = LO_SPACE; 4319 space = LO_SPACE;
4320 } else if (size > MaxObjectSizeInPagedSpace()) { 4320 } else if (size > Page::kMaxNonCodeHeapObjectSize) {
4321 // Allocate in new space, retry in large object space. 4321 // Allocate in new space, retry in large object space.
4322 retry_space = LO_SPACE; 4322 retry_space = LO_SPACE;
4323 } 4323 }
4324 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { 4324 } else if (space == OLD_DATA_SPACE &&
4325 size > Page::kMaxNonCodeHeapObjectSize) {
4325 space = LO_SPACE; 4326 space = LO_SPACE;
4326 } 4327 }
4327 Object* result; 4328 Object* result;
4328 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 4329 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4329 if (!maybe_result->ToObject(&result)) return maybe_result; 4330 if (!maybe_result->ToObject(&result)) return maybe_result;
4330 } 4331 }
4331 4332
4332 // Partially initialize the object. 4333 // Partially initialize the object.
4333 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); 4334 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
4334 String::cast(result)->set_length(length); 4335 String::cast(result)->set_length(length);
(...skipping 10 matching lines...) Expand all
4345 } 4346 }
4346 int size = SeqTwoByteString::SizeFor(length); 4347 int size = SeqTwoByteString::SizeFor(length);
4347 ASSERT(size <= SeqTwoByteString::kMaxSize); 4348 ASSERT(size <= SeqTwoByteString::kMaxSize);
4348 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 4349 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4349 AllocationSpace retry_space = OLD_DATA_SPACE; 4350 AllocationSpace retry_space = OLD_DATA_SPACE;
4350 4351
4351 if (space == NEW_SPACE) { 4352 if (space == NEW_SPACE) {
4352 if (size > kMaxObjectSizeInNewSpace) { 4353 if (size > kMaxObjectSizeInNewSpace) {
4353 // Allocate in large object space, retry space will be ignored. 4354 // Allocate in large object space, retry space will be ignored.
4354 space = LO_SPACE; 4355 space = LO_SPACE;
4355 } else if (size > MaxObjectSizeInPagedSpace()) { 4356 } else if (size > Page::kMaxNonCodeHeapObjectSize) {
4356 // Allocate in new space, retry in large object space. 4357 // Allocate in new space, retry in large object space.
4357 retry_space = LO_SPACE; 4358 retry_space = LO_SPACE;
4358 } 4359 }
4359 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { 4360 } else if (space == OLD_DATA_SPACE &&
4361 size > Page::kMaxNonCodeHeapObjectSize) {
4360 space = LO_SPACE; 4362 space = LO_SPACE;
4361 } 4363 }
4362 Object* result; 4364 Object* result;
4363 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 4365 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4364 if (!maybe_result->ToObject(&result)) return maybe_result; 4366 if (!maybe_result->ToObject(&result)) return maybe_result;
4365 } 4367 }
4366 4368
4367 // Partially initialize the object. 4369 // Partially initialize the object.
4368 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); 4370 HeapObject::cast(result)->set_map_no_write_barrier(string_map());
4369 String::cast(result)->set_length(length); 4371 String::cast(result)->set_length(length);
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
4488 return Failure::OutOfMemoryException(); 4490 return Failure::OutOfMemoryException();
4489 } 4491 }
4490 4492
4491 AllocationSpace space = 4493 AllocationSpace space =
4492 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 4494 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
4493 int size = FixedArray::SizeFor(length); 4495 int size = FixedArray::SizeFor(length);
4494 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { 4496 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4495 // Too big for new space. 4497 // Too big for new space.
4496 space = LO_SPACE; 4498 space = LO_SPACE;
4497 } else if (space == OLD_POINTER_SPACE && 4499 } else if (space == OLD_POINTER_SPACE &&
4498 size > MaxObjectSizeInPagedSpace()) { 4500 size > Page::kMaxNonCodeHeapObjectSize) {
4499 // Too big for old pointer space. 4501 // Too big for old pointer space.
4500 space = LO_SPACE; 4502 space = LO_SPACE;
4501 } 4503 }
4502 4504
4503 AllocationSpace retry_space = 4505 AllocationSpace retry_space =
4504 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE; 4506 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
4505 4507
4506 return AllocateRaw(size, space, retry_space); 4508 return AllocateRaw(size, space, retry_space);
4507 } 4509 }
4508 4510
4509 4511
4510 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( 4512 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
4511 Heap* heap, 4513 Heap* heap,
4512 int length, 4514 int length,
4513 PretenureFlag pretenure, 4515 PretenureFlag pretenure,
4514 Object* filler) { 4516 Object* filler) {
(...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after
4621 return Failure::OutOfMemoryException(); 4623 return Failure::OutOfMemoryException();
4622 } 4624 }
4623 4625
4624 AllocationSpace space = 4626 AllocationSpace space =
4625 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 4627 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4626 int size = FixedDoubleArray::SizeFor(length); 4628 int size = FixedDoubleArray::SizeFor(length);
4627 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { 4629 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4628 // Too big for new space. 4630 // Too big for new space.
4629 space = LO_SPACE; 4631 space = LO_SPACE;
4630 } else if (space == OLD_DATA_SPACE && 4632 } else if (space == OLD_DATA_SPACE &&
4631 size > MaxObjectSizeInPagedSpace()) { 4633 size > Page::kMaxNonCodeHeapObjectSize) {
4632 // Too big for old data space. 4634 // Too big for old data space.
4633 space = LO_SPACE; 4635 space = LO_SPACE;
4634 } 4636 }
4635 4637
4636 AllocationSpace retry_space = 4638 AllocationSpace retry_space =
4637 (size <= MaxObjectSizeInPagedSpace()) ? OLD_DATA_SPACE : LO_SPACE; 4639 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
4638 4640
4639 return AllocateRaw(size, space, retry_space); 4641 return AllocateRaw(size, space, retry_space);
4640 } 4642 }
4641 4643
4642 4644
4643 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { 4645 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
4644 Object* result; 4646 Object* result;
4645 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); 4647 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
4646 if (!maybe_result->ToObject(&result)) return maybe_result; 4648 if (!maybe_result->ToObject(&result)) return maybe_result;
4647 } 4649 }
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
4756 #define MAKE_CASE(NAME, Name, name) \ 4758 #define MAKE_CASE(NAME, Name, name) \
4757 case NAME##_TYPE: map = name##_map(); break; 4759 case NAME##_TYPE: map = name##_map(); break;
4758 STRUCT_LIST(MAKE_CASE) 4760 STRUCT_LIST(MAKE_CASE)
4759 #undef MAKE_CASE 4761 #undef MAKE_CASE
4760 default: 4762 default:
4761 UNREACHABLE(); 4763 UNREACHABLE();
4762 return Failure::InternalError(); 4764 return Failure::InternalError();
4763 } 4765 }
4764 int size = map->instance_size(); 4766 int size = map->instance_size();
4765 AllocationSpace space = 4767 AllocationSpace space =
4766 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; 4768 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
4767 Object* result; 4769 Object* result;
4768 { MaybeObject* maybe_result = Allocate(map, space); 4770 { MaybeObject* maybe_result = Allocate(map, space);
4769 if (!maybe_result->ToObject(&result)) return maybe_result; 4771 if (!maybe_result->ToObject(&result)) return maybe_result;
4770 } 4772 }
4771 Struct::cast(result)->InitializeBody(size); 4773 Struct::cast(result)->InitializeBody(size);
4772 return result; 4774 return result;
4773 } 4775 }
4774 4776
4775 4777
4776 bool Heap::IsHeapIterable() { 4778 bool Heap::IsHeapIterable() {
(...skipping 426 matching lines...) Expand 10 before | Expand all | Expand 10 after
5203 return symbol_table()->LookupSymbolIfExists(string, symbol); 5205 return symbol_table()->LookupSymbolIfExists(string, symbol);
5204 } 5206 }
5205 5207
5206 5208
#ifdef DEBUG
// Debug-only helper: overwrites every pointer-sized slot of every from-space
// page with kFromSpaceZapValue, so that any stale reference into from-space
// left behind after a scavenge is immediately recognizable in a debugger.
void Heap::ZapFromSpace() {
  NewSpacePageIterator it(new_space_.FromSpaceStart(),
                          new_space_.FromSpaceEnd());
  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address slot = page->area_start();
    Address end = page->area_end();
    // Stamp the zap pattern into each word of the page's object area.
    while (slot < end) {
      Memory::Address_at(slot) = kFromSpaceZapValue;
      slot += kPointerSize;
    }
  }
}
#endif  // DEBUG
5221 5223
5222 5224
5223 void Heap::IterateAndMarkPointersToFromSpace(Address start, 5225 void Heap::IterateAndMarkPointersToFromSpace(Address start,
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
5342 // scanning a page and ensuring that all pointers to young space are in the 5344 // scanning a page and ensuring that all pointers to young space are in the
5343 // store buffer. 5345 // store buffer.
5344 void Heap::OldPointerSpaceCheckStoreBuffer() { 5346 void Heap::OldPointerSpaceCheckStoreBuffer() {
5345 OldSpace* space = old_pointer_space(); 5347 OldSpace* space = old_pointer_space();
5346 PageIterator pages(space); 5348 PageIterator pages(space);
5347 5349
5348 store_buffer()->SortUniq(); 5350 store_buffer()->SortUniq();
5349 5351
5350 while (pages.has_next()) { 5352 while (pages.has_next()) {
5351 Page* page = pages.next(); 5353 Page* page = pages.next();
5352 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); 5354 Object** current = reinterpret_cast<Object**>(page->area_start());
5353 5355
5354 Address end = page->ObjectAreaEnd(); 5356 Address end = page->area_end();
5355 5357
5356 Object*** store_buffer_position = store_buffer()->Start(); 5358 Object*** store_buffer_position = store_buffer()->Start();
5357 Object*** store_buffer_top = store_buffer()->Top(); 5359 Object*** store_buffer_top = store_buffer()->Top();
5358 5360
5359 Object** limit = reinterpret_cast<Object**>(end); 5361 Object** limit = reinterpret_cast<Object**>(end);
5360 CheckStoreBuffer(this, 5362 CheckStoreBuffer(this,
5361 current, 5363 current,
5362 limit, 5364 limit,
5363 &store_buffer_position, 5365 &store_buffer_position,
5364 store_buffer_top, 5366 store_buffer_top,
5365 &EverythingsAPointer, 5367 &EverythingsAPointer,
5366 space->top(), 5368 space->top(),
5367 space->limit()); 5369 space->limit());
5368 } 5370 }
5369 } 5371 }
5370 5372
5371 5373
5372 void Heap::MapSpaceCheckStoreBuffer() { 5374 void Heap::MapSpaceCheckStoreBuffer() {
5373 MapSpace* space = map_space(); 5375 MapSpace* space = map_space();
5374 PageIterator pages(space); 5376 PageIterator pages(space);
5375 5377
5376 store_buffer()->SortUniq(); 5378 store_buffer()->SortUniq();
5377 5379
5378 while (pages.has_next()) { 5380 while (pages.has_next()) {
5379 Page* page = pages.next(); 5381 Page* page = pages.next();
5380 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); 5382 Object** current = reinterpret_cast<Object**>(page->area_start());
5381 5383
5382 Address end = page->ObjectAreaEnd(); 5384 Address end = page->area_end();
5383 5385
5384 Object*** store_buffer_position = store_buffer()->Start(); 5386 Object*** store_buffer_position = store_buffer()->Start();
5385 Object*** store_buffer_top = store_buffer()->Top(); 5387 Object*** store_buffer_top = store_buffer()->Top();
5386 5388
5387 Object** limit = reinterpret_cast<Object**>(end); 5389 Object** limit = reinterpret_cast<Object**>(end);
5388 CheckStoreBuffer(this, 5390 CheckStoreBuffer(this,
5389 current, 5391 current,
5390 limit, 5392 limit,
5391 &store_buffer_position, 5393 &store_buffer_position,
5392 store_buffer_top, 5394 store_buffer_top,
(...skipping 1535 matching lines...) Expand 10 before | Expand all | Expand 10 after
6928 isolate_->heap()->store_buffer()->Compact(); 6930 isolate_->heap()->store_buffer()->Compact();
6929 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); 6931 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
6930 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { 6932 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6931 next = chunk->next_chunk(); 6933 next = chunk->next_chunk();
6932 isolate_->memory_allocator()->Free(chunk); 6934 isolate_->memory_allocator()->Free(chunk);
6933 } 6935 }
6934 chunks_queued_for_free_ = NULL; 6936 chunks_queued_for_free_ = NULL;
6935 } 6937 }
6936 6938
6937 } } // namespace v8::internal 6939 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | src/mark-compact.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698