Chromium Code Reviews

Diff: src/heap.cc

Issue 10105026: Version 3.10.3 (Closed)
Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 8 years, 8 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1106 matching lines...)
 
   while (head_start != head_end) {
     int size = static_cast<int>(*(head_start++));
     HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++));
     emergency_stack_->Add(Entry(obj, size));
   }
   rear_ = head_end;
 }
 
 
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
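A note on the pattern, not part of the patch: a WeakObjectRetainer is applied to every element of an intrusive weak list, and RetainAs either keeps the element, redirects to the copy the GC moved it to, or returns NULL to drop it. A minimal standalone sketch of that relink-or-drop loop, using hypothetical stand-in types rather than V8's API:

    #include <cstddef>

    struct Node { Node* next; };

    struct Retainer {
      // Returns the node to keep (possibly a moved copy), or NULL to drop it.
      virtual Node* RetainAs(Node* n) = 0;
      virtual ~Retainer() {}
    };

    // Rebuilds a weak list in place, keeping only retained nodes.
    Node* ProcessWeakList(Node* head, Retainer* retainer) {
      Node* new_head = NULL;
      Node* tail = NULL;
      for (Node* n = head; n != NULL; ) {
        Node* next = n->next;   // read before relinking
        Node* kept = retainer->RetainAs(n);
        if (kept != NULL) {
          if (tail == NULL) {
            new_head = kept;    // first survivor becomes the new head
          } else {
            tail->next = kept;  // splice out any dropped nodes
          }
          tail = kept;
        }
        n = next;
      }
      if (tail != NULL) tail->next = NULL;  // terminate the rebuilt list
      return new_head;
    }

ScavengeWeakObjectRetainer above plays the Retainer role for the global-context list after a scavenge: objects outside from-space are kept, objects with forwarding addresses are redirected to their copies, and everything else is dropped as dead.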
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
 
   gc_state_ = SCAVENGE;
 
   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));
 
(...skipping 78 matching lines...)
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
 
   promotion_queue_.Destroy();
 
   LiveObjectList::UpdateReferencesForScavengeGC();
   if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
 
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());
 
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
 
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
(...skipping 66 matching lines...)
     Object** end = start + external_string_table_.old_space_strings_.length();
     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }
 
   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 }
 
 
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
   Object* candidate = function;
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
       if (head == undefined) {
         // First element in the list.
         head = retain;
       } else {
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_next_function_link(retain);
+        if (record_slots) {
+          Object** next_function =
+              HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+          heap->mark_compact_collector()->RecordSlot(
+              next_function, next_function, retain);
+        }
       }
       // Retained function is new tail.
       candidate_function = reinterpret_cast<JSFunction*>(retain);
       tail = candidate_function;
 
       ASSERT(retain->IsUndefined() || retain->IsJSFunction());
 
       if (retain == undefined) break;
     }
 
     // Move to next element in the list.
     candidate = candidate_function->next_function_link();
   }
 
   // Terminate the list if there is one or more elements.
   if (tail != NULL) {
     tail->set_next_function_link(undefined);
   }
 
   return head;
 }
 
 
 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* undefined = undefined_value();
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete mark-compact cycle.
+  // Note that write barrier has no effect if we are already in the middle of
+  // compacting mark-sweep cycle and we have to record slots manually.
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
       if (head == undefined) {
         // First element in the list.
         head = retain;
       } else {
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_unchecked(this,
                             Context::NEXT_CONTEXT_LINK,
                             retain,
                             UPDATE_WRITE_BARRIER);
+
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
       tail = candidate_context;
 
       if (retain == undefined) break;
 
       // Process the weak list of optimized functions for the context.
       Object* function_list_head =
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }
 
     // Move to next element in the list.
     candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
   }
 
   // Terminate the list if there is one or more elements.
   if (tail != NULL) {
     tail->set_unchecked(this,
                         Context::NEXT_CONTEXT_LINK,
(...skipping 79 matching lines...)
     }
 
     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_.top());
 
   return new_space_front;
 }
 
 
+STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+
+
+INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                              HeapObject* object,
+                                              int size));
+
+static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                       HeapObject* object,
+                                       int size) {
+  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
+    heap->CreateFillerObjectAt(object->address(), kPointerSize);
+    return HeapObject::FromAddress(object->address() + kPointerSize);
+  } else {
+    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
+                               kPointerSize);
+    return object;
+  }
+}
+
+
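The new EnsureDoubleAligned helper assumes its caller requested one extra word, so either branch can carve a one-word filler out of the allocation and still leave size bytes usable for the object. A standalone illustration of the address arithmetic, assuming a 32-bit layout where kPointerSize is 4 and kDoubleAlignmentMask is 7 (assumed constants, not quoted from the patch):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uintptr_t kPointerSize = 4;
      const uintptr_t kDoubleAlignmentMask = 7;

      // Case 1: allocation starts misaligned (0x1004). A filler occupies the
      // first word and the object begins one word later, 8-byte aligned.
      uintptr_t misaligned = 0x1004;
      assert((misaligned & kDoubleAlignmentMask) != 0);
      assert(((misaligned + kPointerSize) & kDoubleAlignmentMask) == 0);

      // Case 2: allocation is already aligned (0x1008). The object stays put
      // and the spare word at the end becomes the filler instead.
      uintptr_t aligned = 0x1008;
      assert((aligned & kDoubleAlignmentMask) == 0);

      return 0;
    }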
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
 };
 
 
 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
 
 
 template<MarksHandling marks_handling,
(...skipping 103 matching lines...)
       }
     }
 
     if (marks_handling == TRANSFER_MARKS) {
       if (Marking::TransferColor(source, target)) {
         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
       }
     }
   }
 
-  template<ObjectContents object_contents, SizeRestriction size_restriction>
+
+  template<ObjectContents object_contents,
+           SizeRestriction size_restriction,
+           int alignment>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
                                     int object_size) {
     SLOW_ASSERT((size_restriction != SMALL) ||
                 (object_size <= Page::kMaxNonCodeHeapObjectSize));
     SLOW_ASSERT(object->Size() == object_size);
 
+    int allocation_size = object_size;
+    if (alignment != kObjectAlignment) {
+      ASSERT(alignment == kDoubleAlignment);
+      allocation_size += kPointerSize;
+    }
+
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;
 
       if ((size_restriction != SMALL) &&
-          (object_size > Page::kMaxNonCodeHeapObjectSize)) {
-        maybe_result = heap->lo_space()->AllocateRaw(object_size,
+          (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
                                                      NOT_EXECUTABLE);
       } else {
         if (object_contents == DATA_OBJECT) {
-          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
         } else {
-          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
+          maybe_result =
+              heap->old_pointer_space()->AllocateRaw(allocation_size);
         }
       }
 
       Object* result = NULL;  // Initialization to please compiler.
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
 
+        if (alignment != kObjectAlignment) {
+          target = EnsureDoubleAligned(heap, target, allocation_size);
+        }
+
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
         *slot = target;
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          heap->promotion_queue()->insert(target, object_size);
+          if (map->instance_type() == JS_FUNCTION_TYPE) {
+            heap->promotion_queue()->insert(
+                target, JSFunction::kNonWeakFieldsEndOffset);
+          } else {
+            heap->promotion_queue()->insert(target, object_size);
+          }
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
-    MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);
 
+    if (alignment != kObjectAlignment) {
+      target = EnsureDoubleAligned(heap, target, allocation_size);
+    }
+
     // Order is important: slot might be inside of the target if target
     // was allocated over a dead object and slot comes from the store
     // buffer.
     *slot = target;
     MigrateObject(heap, object, target, object_size);
     return;
   }
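The other change in EvacuateObject is the promotion-queue entry for JSFunctions: recording JSFunction::kNonWeakFieldsEndOffset instead of the full object size means the later pointer walk over the promoted function stops before its weak next_function_link field, so the link is not visited as a strong reference. A sketch of the idea with an illustrative layout (not V8's real JSFunction):

    #include <cstddef>
    #include <cstdio>

    // Illustrative layout only: a weak link stored after the strong fields.
    struct FakeFunction {
      void* map;                 // strong field
      void* prototype;           // strong field
      void* next_function_link;  // weak list link: must not keep objects alive
    };

    int main() {
      // Analogous to kNonWeakFieldsEndOffset: visit pointer fields only up
      // to the weak link rather than the whole object.
      size_t non_weak_end = offsetof(FakeFunction, next_function_link);
      std::printf("visit %zu of %zu bytes\n", non_weak_end,
                  sizeof(FakeFunction));
      return 0;
    }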
 
 
   static inline void EvacuateJSFunction(Map* map,
(...skipping 15 matching lines...)
       map->GetHeap()->mark_compact_collector()->
           RecordCodeEntrySlot(code_entry_slot, code);
     }
   }
 
 
   static inline void EvacuateFixedArray(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
-                                                 slot,
-                                                 object,
-                                                 object_size);
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
+                                                                   slot,
+                                                                   object,
+                                                                   object_size);
   }
 
 
   static inline void EvacuateFixedDoubleArray(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
     int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
-                                              slot,
-                                              object,
-                                              object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
+        map,
+        slot,
+        object,
+        object_size);
   }
 
 
   static inline void EvacuateByteArray(Map* map,
                                        HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
   static inline void EvacuateSeqAsciiString(Map* map,
                                             HeapObject** slot,
                                             HeapObject* object) {
     int object_size = SeqAsciiString::cast(object)->
         SeqAsciiStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
   static inline void EvacuateSeqTwoByteString(Map* map,
                                               HeapObject** slot,
                                               HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
   static inline bool IsShortcutCandidate(int type) {
     return ((type & kShortcutTypeMask) == kShortcutTypeTag);
   }
 
   static inline void EvacuateShortcutCandidate(Map* map,
                                                HeapObject** slot,
                                                HeapObject* object) {
(...skipping 22 matching lines...)
         object->set_map_word(MapWord::FromForwardingAddress(target));
         return;
       }
 
       heap->DoScavengeObject(first->map(), slot, first);
       object->set_map_word(MapWord::FromForwardingAddress(*slot));
       return;
     }
 
     int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+    EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
   template<ObjectContents object_contents>
   class ObjectEvacuationStrategy {
    public:
     template<int object_size>
     static inline void VisitSpecialized(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
     }
 
     static inline void Visit(Map* map,
                              HeapObject** slot,
                              HeapObject* object) {
       int object_size = map->instance_size();
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
     }
   };
 
   static VisitorDispatchTable<ScavengingCallback> table_;
 };
 
 
 template<MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
 VisitorDispatchTable<ScavengingCallback>
(...skipping 2015 matching lines...)
       constructor->initial_map(), pretenure);
 #ifdef DEBUG
   // Make sure result is NOT a global object if valid.
   Object* non_failure;
   ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
 #endif
   return result;
 }
 
 
+MaybeObject* Heap::AllocateJSModule() {
+  // Allocate a fresh map. Modules do not have a prototype.
+  Map* map;
+  MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
+  if (!maybe_map->To(&map)) return maybe_map;
+  // Allocate the object based on the map.
+  return AllocateJSObjectFromMap(map, TENURED);
+}
+
+
 MaybeObject* Heap::AllocateJSArrayAndStorage(
     ElementsKind elements_kind,
     int length,
     int capacity,
     ArrayStorageAllocationMode mode,
     PretenureFlag pretenure) {
   ASSERT(capacity >= length);
   MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
   JSArray* array;
   if (!maybe_array->To(&array)) return maybe_array;
(...skipping 116 matching lines...)
         StringDictionary::Allocate(
             map->NumberOfDescribedProperties() * 2 + initial_size);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   StringDictionary* dictionary = StringDictionary::cast(obj);
 
   // The global object might be created from an object template with accessors.
   // Fill these accessors into the dictionary.
   DescriptorArray* descs = map->instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    PropertyDetails details(descs->GetDetails(i));
+    PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
     PropertyDetails d =
         PropertyDetails(details.attributes(), CALLBACKS, details.index());
     Object* value = descs->GetCallbacksObject(i);
     { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
       if (!maybe_value->ToObject(&value)) return maybe_value;
     }
 
     Object* result;
     { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
(...skipping 672 matching lines...)
 
 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
                                                PretenureFlag pretenure) {
   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
     return Failure::OutOfMemoryException();
   }
 
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = FixedDoubleArray::SizeFor(length);
+
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+
   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
     // Too big for new space.
     space = LO_SPACE;
   } else if (space == OLD_DATA_SPACE &&
              size > Page::kMaxNonCodeHeapObjectSize) {
     // Too big for old data space.
     space = LO_SPACE;
   }
 
   AllocationSpace retry_space =
       (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
 
-  return AllocateRaw(size, space, retry_space);
+  HeapObject* object;
+  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+  }
+
+  return EnsureDoubleAligned(this, object, size);
 }
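The #ifndef V8_HOST_ARCH_64_BIT branch above pads the request because 32-bit V8 only guarantees pointer (4-byte) alignment, while the doubles payload wants 8-byte alignment; on 64-bit hosts allocations are already 8-byte aligned, so no spare word is needed. A worked size computation, assuming a 32-bit layout with an 8-byte FixedDoubleArray header (assumed constants, for illustration only):

    #include <cstdio>

    int main() {
      // Assumed 32-bit constants, not quoted from the patch.
      const int kHeaderSize = 8;   // map word + length word
      const int kDoubleSize = 8;
      const int kPointerSize = 4;

      int length = 3;
      int size = kHeaderSize + length * kDoubleSize;  // SizeFor(3) == 32
      size += kPointerSize;  // 36: one spare word for EnsureDoubleAligned
      std::printf("request %d bytes for %d doubles\n", size, length);
      return 0;
    }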
 
 
 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
   Object* result;
   { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
       hash_table_map());
(...skipping 12 matching lines...)
   context->set_map_no_write_barrier(global_context_map());
   context->set_smi_js_array_map(undefined_value());
   context->set_double_js_array_map(undefined_value());
   context->set_object_js_array_map(undefined_value());
   ASSERT(context->IsGlobalContext());
   ASSERT(result->IsContext());
   return result;
 }
 
 
+MaybeObject* Heap::AllocateModuleContext(Context* previous,
+                                         ScopeInfo* scope_info) {
+  Object* result;
+  { MaybeObject* maybe_result =
+        AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
+  Context* context = reinterpret_cast<Context*>(result);
+  context->set_map_no_write_barrier(module_context_map());
+  context->set_previous(previous);
+  context->set_extension(scope_info);
+  context->set_global(previous->global());
+  return context;
+}
+
+
 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
   Object* result;
   { MaybeObject* maybe_result = AllocateFixedArray(length);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
   context->set_map_no_write_barrier(function_context_map());
   context->set_closure(function);
   context->set_previous(function->context());
(...skipping 2266 matching lines...)
   } else {
     p ^= 0x1d1ed & (Page::kPageSize - 1);  // I died.
   }
   remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
       reinterpret_cast<Address>(p);
   remembered_unmapped_pages_index_++;
   remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
 }
 
 } }  // namespace v8::internal