OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1035 matching lines...)
1046 public: | 1046 public: |
1047 static inline void Visit(Map* map, HeapObject* obj); | 1047 static inline void Visit(Map* map, HeapObject* obj); |
1048 }; | 1048 }; |
1049 | 1049 |
1050 static void Initialize(); | 1050 static void Initialize(); |
1051 | 1051 |
1052 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 1052 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
1053 MarkObjectByPointer(heap->mark_compact_collector(), p, p); | 1053 MarkObjectByPointer(heap->mark_compact_collector(), p, p); |
1054 } | 1054 } |
1055 | 1055 |
1056 INLINE(static void VisitPointers(Heap* heap, | 1056 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
1057 Object** anchor, | |
1058 Object** start, | |
1059 Object** end)) { | |
1060 // Mark all objects pointed to in [start, end). | 1057 // Mark all objects pointed to in [start, end). |
1061 const int kMinRangeForMarkingRecursion = 64; | 1058 const int kMinRangeForMarkingRecursion = 64; |
1062 if (end - start >= kMinRangeForMarkingRecursion) { | 1059 if (end - start >= kMinRangeForMarkingRecursion) { |
1063 if (VisitUnmarkedObjects(heap, anchor, start, end)) return; | 1060 if (VisitUnmarkedObjects(heap, start, end)) return; |
1064 // We are close to a stack overflow, so just mark the objects. | 1061 // We are close to a stack overflow, so just mark the objects. |
1065 } | 1062 } |
1066 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1063 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1067 for (Object** p = start; p < end; p++) { | 1064 for (Object** p = start; p < end; p++) { |
1068 MarkObjectByPointer(collector, anchor, p); | 1065 MarkObjectByPointer(collector, start, p); |
1069 } | 1066 } |
1070 } | 1067 } |
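The range visit above tries a recursive walk first and, when VisitUnmarkedObjects reports that the stack is nearly exhausted, falls back to plainly marking the slots. A minimal standalone sketch of that guard-then-fallback shape; the depth budget is a hypothetical stand-in for V8's StackLimitCheck, and the counters stand in for the marking deque:

#include <cstdio>

// Hypothetical stand-ins: a depth budget instead of StackLimitCheck, and a
// counter instead of the marking deque.
const int kMaxDepth = 64;

// Recursive fast path; reports failure instead of risking a stack overflow.
bool VisitRecursively(int depth, int remaining) {
  if (depth > kMaxDepth) return false;
  if (remaining == 0) return true;
  return VisitRecursively(depth + 1, remaining - 1);
}

// Iterative fallback: just "mark" the entries without recursing.
int MarkIteratively(int remaining) {
  int marked = 0;
  for (int i = 0; i < remaining; i++) marked++;
  return marked;
}

void VisitRange(int count) {
  if (VisitRecursively(0, count)) return;       // recursive path succeeded
  std::printf("fallback marked %d\n", MarkIteratively(count));
}

int main() {
  VisitRange(10);     // small range: handled recursively
  VisitRange(1000);   // exceeds the depth budget, takes the iterative fallback
  return 0;
}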
1071 | 1068 |
1072 static void VisitHugeFixedArray(Heap* heap, FixedArray* array, int length); | |
1073 | |
1074 // The deque is contiguous and allocated in new space, so it is contained |
1075 // in one page minus the header. Its size is also a power of two, which |
1076 // makes it half the size of a page. We want to scan fewer array |
1077 // entries at a time than the deque can hold, so we divide by 2 |
1078 // once more. |
1079 static const int kScanningChunk = Page::kPageSize / 4 / kPointerSize; | |
1080 | |
1081 INLINE(static void VisitFixedArray(Map* map, HeapObject* object)) { | |
1082 FixedArray* array = FixedArray::cast(object); | |
1083 int length = array->length(); | |
1084 Heap* heap = map->GetHeap(); | |
1085 | |
1086 if (length < kScanningChunk || | |
1087 MemoryChunk::FromAddress(array->address())->owner()->identity() != | |
1088 LO_SPACE) { | |
1089 Object** start_slot = array->data_start(); | |
1090 VisitPointers(heap, start_slot, start_slot, start_slot + length); | |
1091 } else { | |
1092 VisitHugeFixedArray(heap, array, length); | |
1093 } | |
1094 } | |
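The kScanningChunk constant defined above is derived by plain integer division; a minimal standalone sketch of the arithmetic, assuming a 1 MB page and 8-byte pointers (both values depend on the actual build configuration):

#include <cstdio>

int main() {
  // Assumed values; the real ones come from Page::kPageSize and kPointerSize.
  const int kPageSize = 1 << 20;   // 1 MB page (assumption)
  const int kPointerSize = 8;      // 64-bit pointers (assumption)
  // The deque fills half a page; scan half of its entry capacity per step.
  const int kScanningChunk = kPageSize / 4 / kPointerSize;
  std::printf("entries per scanning chunk: %d\n", kScanningChunk);  // 32768
  return 0;
}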
1095 | |
1096 // Marks the object black and pushes it on the marking stack. | 1069 // Marks the object black and pushes it on the marking stack. |
1097 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { | 1070 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
1098 MarkBit mark = Marking::MarkBitFrom(object); | 1071 MarkBit mark = Marking::MarkBitFrom(object); |
1099 heap->mark_compact_collector()->MarkObject(object, mark); | 1072 heap->mark_compact_collector()->MarkObject(object, mark); |
1100 } | 1073 } |
1101 | 1074 |
1102 // Marks the object black without pushing it on the marking stack. | 1075 // Marks the object black without pushing it on the marking stack. |
1103 // Returns true if object needed marking and false otherwise. | 1076 // Returns true if object needed marking and false otherwise. |
1104 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { | 1077 INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { |
1105 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1078 MarkBit mark_bit = Marking::MarkBitFrom(object); |
(...skipping 26 matching lines...)
1132 Map* map = obj->map(); | 1105 Map* map = obj->map(); |
1133 Heap* heap = obj->GetHeap(); | 1106 Heap* heap = obj->GetHeap(); |
1134 MarkBit mark = Marking::MarkBitFrom(obj); | 1107 MarkBit mark = Marking::MarkBitFrom(obj); |
1135 heap->mark_compact_collector()->SetMark(obj, mark); | 1108 heap->mark_compact_collector()->SetMark(obj, mark); |
1136 // Mark the map pointer and the body. | 1109 // Mark the map pointer and the body. |
1137 MarkBit map_mark = Marking::MarkBitFrom(map); | 1110 MarkBit map_mark = Marking::MarkBitFrom(map); |
1138 heap->mark_compact_collector()->MarkObject(map, map_mark); | 1111 heap->mark_compact_collector()->MarkObject(map, map_mark); |
1139 IterateBody(map, obj); | 1112 IterateBody(map, obj); |
1140 } | 1113 } |
1141 | 1114 |
1142 // Visit all unmarked objects pointed to by [start_slot, end_slot). | 1115 // Visit all unmarked objects pointed to by [start, end). |
1143 // Returns false if the operation fails (lack of stack space). | 1116 // Returns false if the operation fails (lack of stack space). |
1144 static inline bool VisitUnmarkedObjects(Heap* heap, | 1117 static inline bool VisitUnmarkedObjects(Heap* heap, |
1145 Object** anchor_slot, | 1118 Object** start, |
1146 Object** start_slot, | 1119 Object** end) { |
1147 Object** end_slot) { | |
1148 // Return false if we are close to the stack limit. | 1120 // Return false if we are close to the stack limit. |
1149 StackLimitCheck check(heap->isolate()); | 1121 StackLimitCheck check(heap->isolate()); |
1150 if (check.HasOverflowed()) return false; | 1122 if (check.HasOverflowed()) return false; |
1151 | 1123 |
1152 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1124 MarkCompactCollector* collector = heap->mark_compact_collector(); |
1153 // Visit the unmarked objects. | 1125 // Visit the unmarked objects. |
1154 for (Object** p = start_slot; p < end_slot; p++) { | 1126 for (Object** p = start; p < end; p++) { |
1155 Object* o = *p; | 1127 Object* o = *p; |
1156 if (!o->IsHeapObject()) continue; | 1128 if (!o->IsHeapObject()) continue; |
1157 collector->RecordSlot(anchor_slot, p, o); | 1129 collector->RecordSlot(start, p, o); |
1158 HeapObject* obj = HeapObject::cast(o); | 1130 HeapObject* obj = HeapObject::cast(o); |
1159 MarkBit mark = Marking::MarkBitFrom(obj); | 1131 MarkBit mark = Marking::MarkBitFrom(obj); |
1160 if (mark.Get()) continue; | 1132 if (mark.Get()) continue; |
1161 VisitUnmarkedObject(collector, obj); | 1133 VisitUnmarkedObject(collector, obj); |
1162 } | 1134 } |
1163 return true; | 1135 return true; |
1164 } | 1136 } |
1165 | 1137 |
1166 static void VisitJSWeakMap(Map* map, HeapObject* object) { | 1138 static void VisitJSWeakMap(Map* map, HeapObject* object) { |
1167 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | 1139 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
(...skipping 300 matching lines...)
1468 reinterpret_cast<JSFunction*>(object), | 1440 reinterpret_cast<JSFunction*>(object), |
1469 false); | 1441 false); |
1470 } | 1442 } |
1471 | 1443 |
1472 | 1444 |
1473 static inline void VisitJSFunctionFields(Map* map, | 1445 static inline void VisitJSFunctionFields(Map* map, |
1474 JSFunction* object, | 1446 JSFunction* object, |
1475 bool flush_code_candidate) { | 1447 bool flush_code_candidate) { |
1476 Heap* heap = map->GetHeap(); | 1448 Heap* heap = map->GetHeap(); |
1477 | 1449 |
1478 Object** start_slot = | 1450 VisitPointers(heap, |
1479 HeapObject::RawField(object, JSFunction::kPropertiesOffset); | 1451 HeapObject::RawField(object, JSFunction::kPropertiesOffset), |
1480 Object** end_slot = | 1452 HeapObject::RawField(object, JSFunction::kCodeEntryOffset)); |
1481 HeapObject::RawField(object, JSFunction::kCodeEntryOffset); | |
1482 VisitPointers(heap, start_slot, start_slot, end_slot); | |
1483 | 1453 |
1484 if (!flush_code_candidate) { | 1454 if (!flush_code_candidate) { |
1485 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); | 1455 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); |
1486 } else { | 1456 } else { |
1487 // Don't visit code object. | 1457 // Don't visit code object. |
1488 | 1458 |
1489 // Visit shared function info to avoid double checking of its | 1459 // Visit shared function info to avoid double checking of its |
1490 // flushability. | 1460 // flushability. |
1491 SharedFunctionInfo* shared_info = object->unchecked_shared(); | 1461 SharedFunctionInfo* shared_info = object->unchecked_shared(); |
1492 MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info); | 1462 MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info); |
1493 if (!shared_info_mark.Get()) { | 1463 if (!shared_info_mark.Get()) { |
1494 Map* shared_info_map = shared_info->map(); | 1464 Map* shared_info_map = shared_info->map(); |
1495 MarkBit shared_info_map_mark = | 1465 MarkBit shared_info_map_mark = |
1496 Marking::MarkBitFrom(shared_info_map); | 1466 Marking::MarkBitFrom(shared_info_map); |
1497 heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark); | 1467 heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark); |
1498 heap->mark_compact_collector()->MarkObject(shared_info_map, | 1468 heap->mark_compact_collector()->MarkObject(shared_info_map, |
1499 shared_info_map_mark); | 1469 shared_info_map_mark); |
1500 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, | 1470 VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, |
1501 shared_info, | 1471 shared_info, |
1502 true); | 1472 true); |
1503 } | 1473 } |
1504 } | 1474 } |
1505 | 1475 |
1506 start_slot = | 1476 VisitPointers( |
| 1477 heap, |
1507 HeapObject::RawField(object, | 1478 HeapObject::RawField(object, |
1508 JSFunction::kCodeEntryOffset + kPointerSize); | 1479 JSFunction::kCodeEntryOffset + kPointerSize), |
1509 end_slot = | 1480 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset)); |
1510 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); | |
1511 VisitPointers(heap, start_slot, start_slot, end_slot); | |
1512 } | 1481 } |
1513 | 1482 |
1514 | 1483 |
1515 static void VisitSharedFunctionInfoFields(Heap* heap, | 1484 static void VisitSharedFunctionInfoFields(Heap* heap, |
1516 HeapObject* object, | 1485 HeapObject* object, |
1517 bool flush_code_candidate) { | 1486 bool flush_code_candidate) { |
1518 VisitPointer(heap, | 1487 VisitPointer(heap, |
1519 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset)); | 1488 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset)); |
1520 | 1489 |
1521 if (!flush_code_candidate) { | 1490 if (!flush_code_candidate) { |
1522 VisitPointer(heap, | 1491 VisitPointer(heap, |
1523 HeapObject::RawField(object, | 1492 HeapObject::RawField(object, |
1524 SharedFunctionInfo::kCodeOffset)); | 1493 SharedFunctionInfo::kCodeOffset)); |
1525 } | 1494 } |
1526 | 1495 |
1527 Object** start_slot = | 1496 VisitPointers( |
| 1497 heap, |
1528 HeapObject::RawField(object, | 1498 HeapObject::RawField(object, |
1529 SharedFunctionInfo::kOptimizedCodeMapOffset); | 1499 SharedFunctionInfo::kOptimizedCodeMapOffset), |
1530 Object** end_slot = | 1500 HeapObject::RawField(object, SharedFunctionInfo::kSize)); |
1531 HeapObject::RawField(object, SharedFunctionInfo::kSize); | |
1532 | |
1533 VisitPointers(heap, start_slot, start_slot, end_slot); | |
1534 } | 1501 } |
1535 | 1502 |
1536 static VisitorDispatchTable<Callback> non_count_table_; | 1503 static VisitorDispatchTable<Callback> non_count_table_; |
1537 }; | 1504 }; |
1538 | 1505 |
1539 | 1506 |
1540 void MarkCompactMarkingVisitor::VisitHugeFixedArray(Heap* heap, | |
1541 FixedArray* array, | |
1542 int length) { | |
1543 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address()); | |
1544 | |
1545 ASSERT(chunk->owner()->identity() == LO_SPACE); | |
1546 | |
1547 Object** start_slot = array->data_start(); | |
1548 int from = | |
1549 chunk->IsPartiallyScanned() ? chunk->PartiallyScannedProgress() : 0; | |
1550 int to = Min(from + kScanningChunk, length); | |
1551 VisitPointers(heap, start_slot, start_slot + from, start_slot + to); | |
1552 | |
1553 if (to == length) { | |
1554 chunk->SetCompletelyScanned(); | |
1555 } else { | |
1556 chunk->SetPartiallyScannedProgress(to); | |
1557 } | |
1558 } | |
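VisitHugeFixedArray above scans a large array one slice at a time and stores the resume point on the page, so later drain passes can pick up where the previous one stopped. A self-contained sketch of that resume-from-progress pattern, using hypothetical standalone types (Chunk, ScanSlice) rather than the V8 classes:

#include <algorithm>
#include <cstdio>
#include <vector>

// Illustrative model of chunked scanning with saved progress; the names here
// are hypothetical and not part of V8.
struct Chunk {
  int progress = 0;   // index of the first unscanned entry
  bool done = false;
};

const int kChunkEntries = 4;  // stand-in for kScanningChunk

void ScanSlice(const std::vector<int>& array, Chunk* chunk) {
  int from = chunk->progress;
  int to = std::min(from + kChunkEntries, static_cast<int>(array.size()));
  for (int i = from; i < to; i++) {
    std::printf("visit entry %d\n", i);  // stands in for MarkObjectByPointer
  }
  if (to == static_cast<int>(array.size())) {
    chunk->done = true;        // corresponds to SetCompletelyScanned()
  } else {
    chunk->progress = to;      // corresponds to SetPartiallyScannedProgress()
  }
}

int main() {
  std::vector<int> array(10);
  Chunk chunk;
  while (!chunk.done) ScanSlice(array, &chunk);  // resumed across passes
  return 0;
}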
1559 | |
1560 | |
1561 void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray( | 1507 void MarkCompactMarkingVisitor::ObjectStatsCountFixedArray( |
1562 FixedArrayBase* fixed_array, | 1508 FixedArrayBase* fixed_array, |
1563 FixedArraySubInstanceType fast_type, | 1509 FixedArraySubInstanceType fast_type, |
1564 FixedArraySubInstanceType dictionary_type) { | 1510 FixedArraySubInstanceType dictionary_type) { |
1565 Heap* heap = fixed_array->map()->GetHeap(); | 1511 Heap* heap = fixed_array->map()->GetHeap(); |
1566 if (fixed_array->map() != heap->fixed_cow_array_map() && | 1512 if (fixed_array->map() != heap->fixed_cow_array_map() && |
1567 fixed_array->map() != heap->fixed_double_array_map() && | 1513 fixed_array->map() != heap->fixed_double_array_map() && |
1568 fixed_array != heap->empty_fixed_array()) { | 1514 fixed_array != heap->empty_fixed_array()) { |
1569 if (fixed_array->IsDictionary()) { | 1515 if (fixed_array->IsDictionary()) { |
1570 heap->RecordObjectStats(FIXED_ARRAY_TYPE, | 1516 heap->RecordObjectStats(FIXED_ARRAY_TYPE, |
(...skipping 121 matching lines...)
1692 | 1638 |
1693 table_.Register(kVisitSharedFunctionInfo, | 1639 table_.Register(kVisitSharedFunctionInfo, |
1694 &VisitSharedFunctionInfoAndFlushCode); | 1640 &VisitSharedFunctionInfoAndFlushCode); |
1695 | 1641 |
1696 table_.Register(kVisitJSFunction, | 1642 table_.Register(kVisitJSFunction, |
1697 &VisitJSFunctionAndFlushCode); | 1643 &VisitJSFunctionAndFlushCode); |
1698 | 1644 |
1699 table_.Register(kVisitJSRegExp, | 1645 table_.Register(kVisitJSRegExp, |
1700 &VisitRegExpAndFlushCode); | 1646 &VisitRegExpAndFlushCode); |
1701 | 1647 |
1702 table_.Register(kVisitFixedArray, | |
1703 &VisitFixedArray); | |
1704 | |
1705 if (FLAG_track_gc_object_stats) { | 1648 if (FLAG_track_gc_object_stats) { |
1706 // Copy the visitor table to make call-through possible. | 1649 // Copy the visitor table to make call-through possible. |
1707 non_count_table_.CopyFrom(&table_); | 1650 non_count_table_.CopyFrom(&table_); |
1708 #define VISITOR_ID_COUNT_FUNCTION(id) \ | 1651 #define VISITOR_ID_COUNT_FUNCTION(id) \ |
1709 table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit); | 1652 table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit); |
1710 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) | 1653 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) |
1711 #undef VISITOR_ID_COUNT_FUNCTION | 1654 #undef VISITOR_ID_COUNT_FUNCTION |
1712 } | 1655 } |
1713 } | 1656 } |
1714 | 1657 |
1715 | 1658 |
1716 VisitorDispatchTable<MarkCompactMarkingVisitor::Callback> | 1659 VisitorDispatchTable<MarkCompactMarkingVisitor::Callback> |
1717 MarkCompactMarkingVisitor::non_count_table_; | 1660 MarkCompactMarkingVisitor::non_count_table_; |
1718 | 1661 |
1719 | 1662 |
1720 class MarkingVisitor : public ObjectVisitor { | 1663 class MarkingVisitor : public ObjectVisitor { |
1721 public: | 1664 public: |
1722 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } | 1665 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } |
1723 | 1666 |
1724 void VisitPointer(Object** p) { | 1667 void VisitPointer(Object** p) { |
1725 MarkCompactMarkingVisitor::VisitPointer(heap_, p); | 1668 MarkCompactMarkingVisitor::VisitPointer(heap_, p); |
1726 } | 1669 } |
1727 | 1670 |
1728 void VisitPointers(Object** start_slot, Object** end_slot) { | 1671 void VisitPointers(Object** start, Object** end) { |
1729 MarkCompactMarkingVisitor::VisitPointers( | 1672 MarkCompactMarkingVisitor::VisitPointers(heap_, start, end); |
1730 heap_, start_slot, start_slot, end_slot); | |
1731 } | 1673 } |
1732 | 1674 |
1733 private: | 1675 private: |
1734 Heap* heap_; | 1676 Heap* heap_; |
1735 }; | 1677 }; |
1736 | 1678 |
1737 | 1679 |
1738 class CodeMarkingVisitor : public ThreadVisitor { | 1680 class CodeMarkingVisitor : public ThreadVisitor { |
1739 public: | 1681 public: |
1740 explicit CodeMarkingVisitor(MarkCompactCollector* collector) | 1682 explicit CodeMarkingVisitor(MarkCompactCollector* collector) |
1741 : collector_(collector) {} | 1683 : collector_(collector) {} |
1742 | 1684 |
1743 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { | 1685 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { |
1744 collector_->PrepareThreadForCodeFlushing(isolate, top); | 1686 collector_->PrepareThreadForCodeFlushing(isolate, top); |
1745 } | 1687 } |
1746 | 1688 |
1747 private: | 1689 private: |
1748 MarkCompactCollector* collector_; | 1690 MarkCompactCollector* collector_; |
1749 }; | 1691 }; |
1750 | 1692 |
1751 | 1693 |
1752 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { | 1694 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { |
1753 public: | 1695 public: |
1754 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector) | 1696 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector) |
1755 : collector_(collector) {} | 1697 : collector_(collector) {} |
1756 | 1698 |
1757 void VisitPointers(Object** start_slot, Object** end_slot) { | 1699 void VisitPointers(Object** start, Object** end) { |
1758 for (Object** p = start_slot; p < end_slot; p++) VisitPointer(p); | 1700 for (Object** p = start; p < end; p++) VisitPointer(p); |
1759 } | 1701 } |
1760 | 1702 |
1761 void VisitPointer(Object** slot) { | 1703 void VisitPointer(Object** slot) { |
1762 Object* obj = *slot; | 1704 Object* obj = *slot; |
1763 if (obj->IsSharedFunctionInfo()) { | 1705 if (obj->IsSharedFunctionInfo()) { |
1764 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); | 1706 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); |
1765 MarkBit shared_mark = Marking::MarkBitFrom(shared); | 1707 MarkBit shared_mark = Marking::MarkBitFrom(shared); |
1766 MarkBit code_mark = Marking::MarkBitFrom(shared->code()); | 1708 MarkBit code_mark = Marking::MarkBitFrom(shared->code()); |
1767 collector_->MarkObject(shared->code(), code_mark); | 1709 collector_->MarkObject(shared->code(), code_mark); |
1768 collector_->MarkObject(shared, shared_mark); | 1710 collector_->MarkObject(shared, shared_mark); |
(...skipping 90 matching lines...)
1859 // Visitor class for marking heap roots. | 1801 // Visitor class for marking heap roots. |
1860 class RootMarkingVisitor : public ObjectVisitor { | 1802 class RootMarkingVisitor : public ObjectVisitor { |
1861 public: | 1803 public: |
1862 explicit RootMarkingVisitor(Heap* heap) | 1804 explicit RootMarkingVisitor(Heap* heap) |
1863 : collector_(heap->mark_compact_collector()) { } | 1805 : collector_(heap->mark_compact_collector()) { } |
1864 | 1806 |
1865 void VisitPointer(Object** p) { | 1807 void VisitPointer(Object** p) { |
1866 MarkObjectByPointer(p); | 1808 MarkObjectByPointer(p); |
1867 } | 1809 } |
1868 | 1810 |
1869 void VisitPointers(Object** start_slot, Object** end_slot) { | 1811 void VisitPointers(Object** start, Object** end) { |
1870 for (Object** p = start_slot; p < end_slot; p++) MarkObjectByPointer(p); | 1812 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
1871 } | 1813 } |
1872 | 1814 |
1873 private: | 1815 private: |
1874 void MarkObjectByPointer(Object** p) { | 1816 void MarkObjectByPointer(Object** p) { |
1875 if (!(*p)->IsHeapObject()) return; | 1817 if (!(*p)->IsHeapObject()) return; |
1876 | 1818 |
1877 // Replace flat cons strings in place. | 1819 // Replace flat cons strings in place. |
1878 HeapObject* object = ShortCircuitConsString(p); | 1820 HeapObject* object = ShortCircuitConsString(p); |
1879 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1821 MarkBit mark_bit = Marking::MarkBitFrom(object); |
1880 if (mark_bit.Get()) return; | 1822 if (mark_bit.Get()) return; |
(...skipping 15 matching lines...)
1896 MarkCompactCollector* collector_; | 1838 MarkCompactCollector* collector_; |
1897 }; | 1839 }; |
1898 | 1840 |
1899 | 1841 |
1900 // Helper class for pruning the symbol table. | 1842 // Helper class for pruning the symbol table. |
1901 class SymbolTableCleaner : public ObjectVisitor { | 1843 class SymbolTableCleaner : public ObjectVisitor { |
1902 public: | 1844 public: |
1903 explicit SymbolTableCleaner(Heap* heap) | 1845 explicit SymbolTableCleaner(Heap* heap) |
1904 : heap_(heap), pointers_removed_(0) { } | 1846 : heap_(heap), pointers_removed_(0) { } |
1905 | 1847 |
1906 virtual void VisitPointers(Object** start_slot, Object** end_slot) { | 1848 virtual void VisitPointers(Object** start, Object** end) { |
1907 // Visit all HeapObject pointers in [start_slot, end_slot). | 1849 // Visit all HeapObject pointers in [start, end). |
1908 for (Object** p = start_slot; p < end_slot; p++) { | 1850 for (Object** p = start; p < end; p++) { |
1909 Object* o = *p; | 1851 Object* o = *p; |
1910 if (o->IsHeapObject() && | 1852 if (o->IsHeapObject() && |
1911 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { | 1853 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) { |
1912 // Check if the symbol being pruned is an external symbol. We need to | 1854 // Check if the symbol being pruned is an external symbol. We need to |
1913 // delete the associated external data as this symbol is going away. | 1855 // delete the associated external data as this symbol is going away. |
1914 | 1856 |
1915 // Since no objects have yet been moved we can safely access the map of | 1857 // Since no objects have yet been moved we can safely access the map of |
1916 // the object. | 1858 // the object. |
1917 if (o->IsExternalString()) { | 1859 if (o->IsExternalString()) { |
1918 heap_->FinalizeExternalString(String::cast(*p)); | 1860 heap_->FinalizeExternalString(String::cast(*p)); |
(...skipping 260 matching lines...)
2179 ASSERT(object->IsHeapObject()); | 2121 ASSERT(object->IsHeapObject()); |
2180 ASSERT(heap()->Contains(object)); | 2122 ASSERT(heap()->Contains(object)); |
2181 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); | 2123 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); |
2182 | 2124 |
2183 Map* map = object->map(); | 2125 Map* map = object->map(); |
2184 MarkBit map_mark = Marking::MarkBitFrom(map); | 2126 MarkBit map_mark = Marking::MarkBitFrom(map); |
2185 MarkObject(map, map_mark); | 2127 MarkObject(map, map_mark); |
2186 | 2128 |
2187 MarkCompactMarkingVisitor::IterateBody(map, object); | 2129 MarkCompactMarkingVisitor::IterateBody(map, object); |
2188 } | 2130 } |
2189 ProcessLargePostponedArrays(heap(), &marking_deque_); | |
2190 | 2131 |
2191 // Process encountered weak maps, mark objects only reachable by those | 2132 // Process encountered weak maps, mark objects only reachable by those |
2192 // weak maps and repeat until fix-point is reached. | 2133 // weak maps and repeat until fix-point is reached. |
2193 ProcessWeakMaps(); | 2134 ProcessWeakMaps(); |
2194 } | 2135 } |
2195 } | 2136 } |
2196 | 2137 |
2197 | 2138 |
2198 void MarkCompactCollector::ProcessLargePostponedArrays(Heap* heap, | |
2199 MarkingDeque* deque) { | |
2200 ASSERT(deque->IsEmpty()); | |
2201 LargeObjectIterator it(heap->lo_space()); | |
2202 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | |
2203 if (!obj->IsFixedArray()) continue; | |
2204 MemoryChunk* p = MemoryChunk::FromAddress(obj->address()); | |
2205 if (p->IsPartiallyScanned()) { | |
2206 deque->PushBlack(obj); | |
2207 } | |
2208 } | |
2209 } | |
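ProcessLargePostponedArrays above walks large-object space after the deque drains and re-enqueues every fixed array whose page still carries partial-scan progress. A compact standalone sketch of that re-enqueue loop, with hypothetical PageState and worklist types in place of MemoryChunk and MarkingDeque:

#include <cstdio>
#include <deque>
#include <vector>

// Hypothetical stand-ins for MemoryChunk progress bits and the marking deque.
struct PageState {
  int id;
  bool partially_scanned;
};

int main() {
  std::vector<PageState> large_pages = {{0, false}, {1, true}, {2, true}};
  std::deque<int> worklist;  // assumed empty, as the ASSERT above requires
  // Re-enqueue every large object whose page was left partially scanned.
  for (const PageState& page : large_pages) {
    if (page.partially_scanned) worklist.push_back(page.id);
  }
  std::printf("re-enqueued %zu pages\n", worklist.size());
  return 0;
}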
2210 | |
2211 | |
2212 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2139 // Sweep the heap for overflowed objects, clear their overflow bits, and |
2213 // push them on the marking stack. Stop early if the marking stack fills | 2140 // push them on the marking stack. Stop early if the marking stack fills |
2214 // before sweeping completes. If sweeping completes, there are no remaining | 2141 // before sweeping completes. If sweeping completes, there are no remaining |
2215 // overflowed objects in the heap so the overflow flag on the marking stack | 2142 // overflowed objects in the heap so the overflow flag on the marking stack |
2216 // is cleared. | 2143 // is cleared. |
2217 void MarkCompactCollector::RefillMarkingDeque() { | 2144 void MarkCompactCollector::RefillMarkingDeque() { |
2218 if (FLAG_trace_gc) { | |
2219 PrintPID("Marking queue overflowed\n"); | |
2220 } | |
2221 ASSERT(marking_deque_.overflowed()); | 2145 ASSERT(marking_deque_.overflowed()); |
2222 | 2146 |
2223 SemiSpaceIterator new_it(heap()->new_space()); | 2147 SemiSpaceIterator new_it(heap()->new_space()); |
2224 DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &new_it); | 2148 DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &new_it); |
2225 if (marking_deque_.IsFull()) return; | 2149 if (marking_deque_.IsFull()) return; |
2226 | 2150 |
2227 DiscoverGreyObjectsInSpace(heap(), | 2151 DiscoverGreyObjectsInSpace(heap(), |
2228 &marking_deque_, | 2152 &marking_deque_, |
2229 heap()->old_pointer_space()); | 2153 heap()->old_pointer_space()); |
2230 if (marking_deque_.IsFull()) return; | 2154 if (marking_deque_.IsFull()) return; |
(...skipping 470 matching lines...)
2701 // Visitor for updating pointers from live objects in old spaces to new space. | 2625 // Visitor for updating pointers from live objects in old spaces to new space. |
2702 // It does not expect to encounter pointers to dead objects. | 2626 // It does not expect to encounter pointers to dead objects. |
2703 class PointersUpdatingVisitor: public ObjectVisitor { | 2627 class PointersUpdatingVisitor: public ObjectVisitor { |
2704 public: | 2628 public: |
2705 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) { } | 2629 explicit PointersUpdatingVisitor(Heap* heap) : heap_(heap) { } |
2706 | 2630 |
2707 void VisitPointer(Object** p) { | 2631 void VisitPointer(Object** p) { |
2708 UpdatePointer(p); | 2632 UpdatePointer(p); |
2709 } | 2633 } |
2710 | 2634 |
2711 void VisitPointers(Object** start_slot, Object** end_slot) { | 2635 void VisitPointers(Object** start, Object** end) { |
2712 for (Object** p = start_slot; p < end_slot; p++) UpdatePointer(p); | 2636 for (Object** p = start; p < end; p++) UpdatePointer(p); |
2713 } | 2637 } |
2714 | 2638 |
2715 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 2639 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
2716 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 2640 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
2717 Object* target = rinfo->target_object(); | 2641 Object* target = rinfo->target_object(); |
2718 VisitPointer(&target); | 2642 VisitPointer(&target); |
2719 rinfo->set_target_object(target); | 2643 rinfo->set_target_object(target); |
2720 } | 2644 } |
2721 | 2645 |
2722 void VisitCodeTarget(RelocInfo* rinfo) { | 2646 void VisitCodeTarget(RelocInfo* rinfo) { |
(...skipping 1432 matching lines...)
4155 while (buffer != NULL) { | 4079 while (buffer != NULL) { |
4156 SlotsBuffer* next_buffer = buffer->next(); | 4080 SlotsBuffer* next_buffer = buffer->next(); |
4157 DeallocateBuffer(buffer); | 4081 DeallocateBuffer(buffer); |
4158 buffer = next_buffer; | 4082 buffer = next_buffer; |
4159 } | 4083 } |
4160 *buffer_address = NULL; | 4084 *buffer_address = NULL; |
4161 } | 4085 } |
4162 | 4086 |
4163 | 4087 |
4164 } } // namespace v8::internal | 4088 } } // namespace v8::internal |