| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 920 matching lines...) |
| 931 // page dirty marks. Therefore, we only replace the string with its left | 931 // page dirty marks. Therefore, we only replace the string with its left |
| 932 // substring when page dirty marks do not change. | 932 // substring when page dirty marks do not change. |
| 933 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); | 933 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); |
| 934 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; | 934 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; |
| 935 | 935 |
| 936 *p = first; | 936 *p = first; |
| 937 return HeapObject::cast(first); | 937 return HeapObject::cast(first); |
| 938 } | 938 } |
| 939 | 939 |
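
The short-circuit above replaces a slot pointing at a shortcut cons string (one whose right half is the empty string) with the string's left half, so the dead wrapper can later be swept. A minimal sketch of the idea, using a hypothetical `ConsNode` in place of V8's `ConsString`:

```cpp
#include <memory>
#include <string>

// Hypothetical stand-in for V8's ConsString: a concatenation node whose
// empty |second| half makes it semantically identical to |first|.
struct ConsNode {
  std::shared_ptr<ConsNode> first;
  std::string second;  // "" marks a shortcut candidate
  std::string leaf;    // payload when |first| is null (a flat string)
};

// Rewrites the slot itself, as ShortCircuitConsString does with *p, so
// every later visitor of the same slot sees the flattened form and the
// cons wrapper becomes garbage.
void ShortCircuit(std::shared_ptr<ConsNode>* slot) {
  while (*slot && (*slot)->first && (*slot)->second.empty()) {
    *slot = (*slot)->first;
  }
}
```
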
| 940 | 940 |
| 941 class StaticMarkingVisitor : public StaticVisitorBase { | 941 class MarkCompactMarkingVisitor |
| 942 : public StaticMarkingVisitor<MarkCompactMarkingVisitor> { |
| 942 public: | 943 public: |
| 943 static inline void IterateBody(Map* map, HeapObject* obj) { | |
| 944 table_.GetVisitor(map)(map, obj); | |
| 945 } | |
| 946 | |
| 947 static void Initialize(); | 944 static void Initialize(); |
| 948 | 945 |
| 949 INLINE(static void VisitPointer(Heap* heap, Object** p)) { | 946 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
| 950 MarkObjectByPointer(heap->mark_compact_collector(), p, p); | 947 MarkObjectByPointer(heap->mark_compact_collector(), p, p); |
| 951 } | 948 } |
| 952 | 949 |
| 953 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { | 950 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
| 954 // Mark all objects pointed to in [start, end). | 951 // Mark all objects pointed to in [start, end). |
| 955 const int kMinRangeForMarkingRecursion = 64; | 952 const int kMinRangeForMarkingRecursion = 64; |
| 956 if (end - start >= kMinRangeForMarkingRecursion) { | 953 if (end - start >= kMinRangeForMarkingRecursion) { |
| 957 if (VisitUnmarkedObjects(heap, start, end)) return; | 954 if (VisitUnmarkedObjects(heap, start, end)) return; |
| 958 // We are close to a stack overflow, so just mark the objects. | 955 // We are close to a stack overflow, so just mark the objects. |
| 959 } | 956 } |
| 960 MarkCompactCollector* collector = heap->mark_compact_collector(); | 957 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 961 for (Object** p = start; p < end; p++) { | 958 for (Object** p = start; p < end; p++) { |
| 962 MarkObjectByPointer(collector, start, p); | 959 MarkObjectByPointer(collector, start, p); |
| 963 } | 960 } |
| 964 } | 961 } |
| 965 | 962 |
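
`VisitPointers` above only attempts eager depth-first marking for ranges of at least `kMinRangeForMarkingRecursion` slots, and `VisitUnmarkedObjects` bails out near stack overflow so the slow mark-and-defer path can finish the job. A sketch of that guard pattern, with a hypothetical explicit depth budget standing in for V8's stack check:

```cpp
#include <cstddef>
#include <vector>

struct Node {
  bool marked = false;
  std::vector<Node*> children;
};

constexpr int kDepthBudget = 64;  // hypothetical stand-in for the stack check
constexpr std::ptrdiff_t kMinRangeForMarkingRecursion = 64;

// Depth-first marking; when the budget runs out, the node goes on
// |worklist| so a later drain loop still traces its children.
void MarkRecursively(Node* n, int depth, std::vector<Node*>* worklist) {
  if (n == nullptr || n->marked) return;
  n->marked = true;
  if (depth >= kDepthBudget) {
    worklist->push_back(n);  // defer the children instead of overflowing
    return;
  }
  for (Node* child : n->children) {
    MarkRecursively(child, depth + 1, worklist);
  }
}

// Large ranges are worth tracing eagerly; small ones are just marked
// and deferred to the worklist.
void VisitPointers(Node** start, Node** end, std::vector<Node*>* worklist) {
  if (end - start >= kMinRangeForMarkingRecursion) {
    for (Node** p = start; p < end; p++) {
      MarkRecursively(*p, 0, worklist);
    }
    return;
  }
  for (Node** p = start; p < end; p++) {
    if (*p != nullptr && !(*p)->marked) {
      (*p)->marked = true;
      worklist->push_back(*p);
    }
  }
}
```
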
| 966 static void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) { | 963 INLINE(static void MarkObject(Heap* heap, HeapObject* object)) { |
| 967 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); | 964 MarkBit mark = Marking::MarkBitFrom(object); |
| 968 JSGlobalPropertyCell* cell = | 965 heap->mark_compact_collector()->MarkObject(object, mark); |
| 969 JSGlobalPropertyCell::cast(rinfo->target_cell()); | |
| 970 MarkBit mark = Marking::MarkBitFrom(cell); | |
| 971 heap->mark_compact_collector()->MarkObject(cell, mark); | |
| 972 } | 966 } |
| 973 | 967 |
| 974 static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) { | 968 static inline void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo) { |
| 975 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 969 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 976 // TODO(mstarzinger): We do not short-circuit cons strings here, verify | 970 // TODO(mstarzinger): We do not short-circuit cons strings here, verify |
| 977 // that there can be no such embedded pointers and add assertion here. | 971 // that there can be no such embedded pointers and add assertion here. |
| 978 HeapObject* object = HeapObject::cast(rinfo->target_object()); | 972 HeapObject* object = HeapObject::cast(rinfo->target_object()); |
| 979 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); | 973 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object); |
| 980 MarkBit mark = Marking::MarkBitFrom(object); | 974 MarkObject(heap, object); |
| 981 heap->mark_compact_collector()->MarkObject(object, mark); | |
| 982 } | 975 } |
| 983 | 976 |
| 984 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) { | 977 static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) { |
| 985 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 978 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 986 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 979 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 987 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() | 980 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() |
| 988 && (target->ic_state() == MEGAMORPHIC || | 981 && (target->ic_state() == MEGAMORPHIC || |
| 989 heap->mark_compact_collector()->flush_monomorphic_ics_ || | 982 heap->mark_compact_collector()->flush_monomorphic_ics_ || |
| 990 target->ic_age() != heap->global_ic_age())) { | 983 target->ic_age() != heap->global_ic_age())) { |
| 991 IC::Clear(rinfo->pc()); | 984 IC::Clear(rinfo->pc()); |
| 992 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 985 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 993 } | 986 } |
| 994 MarkBit code_mark = Marking::MarkBitFrom(target); | |
| 995 heap->mark_compact_collector()->MarkObject(target, code_mark); | |
| 996 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 987 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
| 997 } | 988 MarkObject(heap, target); |
| 998 | |
| 999 static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) { | |
| 1000 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | |
| 1001 rinfo->IsPatchedReturnSequence()) || | |
| 1002 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | |
| 1003 rinfo->IsPatchedDebugBreakSlotSequence())); | |
| 1004 Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | |
| 1005 MarkBit code_mark = Marking::MarkBitFrom(target); | |
| 1006 heap->mark_compact_collector()->MarkObject(target, code_mark); | |
| 1007 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target); | |
| 1008 } | 989 } |
| 1009 | 990 |
| 1010 // Mark object pointed to by p. | 991 // Mark object pointed to by p. |
| 1011 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, | 992 INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector, |
| 1012 Object** anchor_slot, | 993 Object** anchor_slot, |
| 1013 Object** p)) { | 994 Object** p)) { |
| 1014 if (!(*p)->IsHeapObject()) return; | 995 if (!(*p)->IsHeapObject()) return; |
| 1015 HeapObject* object = ShortCircuitConsString(p); | 996 HeapObject* object = ShortCircuitConsString(p); |
| 1016 collector->RecordSlot(anchor_slot, p, object); | 997 collector->RecordSlot(anchor_slot, p, object); |
| 1017 MarkBit mark = Marking::MarkBitFrom(object); | 998 MarkBit mark = Marking::MarkBitFrom(object); |
| (...skipping 34 matching lines...) |
| 1052 if (!o->IsHeapObject()) continue; | 1033 if (!o->IsHeapObject()) continue; |
| 1053 collector->RecordSlot(start, p, o); | 1034 collector->RecordSlot(start, p, o); |
| 1054 HeapObject* obj = HeapObject::cast(o); | 1035 HeapObject* obj = HeapObject::cast(o); |
| 1055 MarkBit mark = Marking::MarkBitFrom(obj); | 1036 MarkBit mark = Marking::MarkBitFrom(obj); |
| 1056 if (mark.Get()) continue; | 1037 if (mark.Get()) continue; |
| 1057 VisitUnmarkedObject(collector, obj); | 1038 VisitUnmarkedObject(collector, obj); |
| 1058 } | 1039 } |
| 1059 return true; | 1040 return true; |
| 1060 } | 1041 } |
| 1061 | 1042 |
| 1062 static inline void VisitExternalReference(Address* p) { } | 1043 static void VisitCode(Map* map, HeapObject* object) { |
| 1063 static inline void VisitExternalReference(RelocInfo* rinfo) { } | 1044 Heap* heap = map->GetHeap(); |
| 1064 static inline void VisitRuntimeEntry(RelocInfo* rinfo) { } | 1045 Code* code = reinterpret_cast<Code*>(object); |
| 1065 | 1046 if (FLAG_cleanup_code_caches_at_gc) { |
| 1066 private: | 1047 code->ClearTypeFeedbackCells(heap); |
| 1067 class DataObjectVisitor { | |
| 1068 public: | |
| 1069 template<int size> | |
| 1070 static void VisitSpecialized(Map* map, HeapObject* object) { | |
| 1071 } | 1048 } |
| 1072 | 1049 code->CodeIterateBody<MarkCompactMarkingVisitor>(heap); |
| 1073 static void Visit(Map* map, HeapObject* object) { | 1050 } |
| 1074 } | |
| 1075 }; | |
| 1076 | |
| 1077 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | |
| 1078 JSObject::BodyDescriptor, | |
| 1079 void> JSObjectVisitor; | |
| 1080 | |
| 1081 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | |
| 1082 StructBodyDescriptor, | |
| 1083 void> StructObjectVisitor; | |
| 1084 | |
| 1085 template<int id> | |
| 1086 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); | |
| 1087 | 1051 |
| 1088 static void VisitJSWeakMap(Map* map, HeapObject* object) { | 1052 static void VisitJSWeakMap(Map* map, HeapObject* object) { |
| 1089 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | 1053 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
| 1090 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); | 1054 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); |
| 1091 | 1055 |
| 1092 // Enqueue weak map in linked list of encountered weak maps. | 1056 // Enqueue weak map in linked list of encountered weak maps. |
| 1093 if (weak_map->next() == Smi::FromInt(0)) { | 1057 if (weak_map->next() == Smi::FromInt(0)) { |
| 1094 weak_map->set_next(collector->encountered_weak_maps()); | 1058 weak_map->set_next(collector->encountered_weak_maps()); |
| 1095 collector->set_encountered_weak_maps(weak_map); | 1059 collector->set_encountered_weak_maps(weak_map); |
| 1096 } | 1060 } |
| 1097 | 1061 |
| 1098 // Skip visiting the backing hash table containing the mappings. | 1062 // Skip visiting the backing hash table containing the mappings. |
| 1099 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); | 1063 int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object); |
| 1100 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( | 1064 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( |
| 1101 map->GetHeap(), | 1065 map->GetHeap(), |
| 1102 object, | 1066 object, |
| 1103 JSWeakMap::BodyDescriptor::kStartOffset, | 1067 JSWeakMap::BodyDescriptor::kStartOffset, |
| 1104 JSWeakMap::kTableOffset); | 1068 JSWeakMap::kTableOffset); |
| 1105 BodyVisitorBase<StaticMarkingVisitor>::IteratePointers( | 1069 BodyVisitorBase<MarkCompactMarkingVisitor>::IteratePointers( |
| 1106 map->GetHeap(), | 1070 map->GetHeap(), |
| 1107 object, | 1071 object, |
| 1108 JSWeakMap::kTableOffset + kPointerSize, | 1072 JSWeakMap::kTableOffset + kPointerSize, |
| 1109 object_size); | 1073 object_size); |
| 1110 | 1074 |
| 1111 // Mark the backing hash table without pushing it on the marking stack. | 1075 // Mark the backing hash table without pushing it on the marking stack. |
| 1112 Object* table_object = weak_map->table(); | 1076 Object* table_object = weak_map->table(); |
| 1113 if (!table_object->IsHashTable()) return; | 1077 if (!table_object->IsHashTable()) return; |
| 1114 ObjectHashTable* table = ObjectHashTable::cast(table_object); | 1078 ObjectHashTable* table = ObjectHashTable::cast(table_object); |
| 1115 Object** table_slot = | 1079 Object** table_slot = |
| 1116 HeapObject::RawField(weak_map, JSWeakMap::kTableOffset); | 1080 HeapObject::RawField(weak_map, JSWeakMap::kTableOffset); |
| 1117 MarkBit table_mark = Marking::MarkBitFrom(table); | 1081 MarkBit table_mark = Marking::MarkBitFrom(table); |
| 1118 collector->RecordSlot(table_slot, table_slot, table); | 1082 collector->RecordSlot(table_slot, table_slot, table); |
| 1119 if (!table_mark.Get()) collector->SetMark(table, table_mark); | 1083 if (!table_mark.Get()) collector->SetMark(table, table_mark); |
| 1120 // Recording the map slot can be skipped, because maps are not compacted. | 1084 // Recording the map slot can be skipped, because maps are not compacted. |
| 1121 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); | 1085 collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map())); |
| 1122 ASSERT(MarkCompactCollector::IsMarked(table->map())); | 1086 ASSERT(MarkCompactCollector::IsMarked(table->map())); |
| 1123 } | 1087 } |
| 1124 | 1088 |
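
`VisitJSWeakMap` above does three things: it threads the weak map onto a collector-owned list, visits every field except the backing-table slot, and marks the table itself without pushing it on the marking deque, so the table's entries are not traced strongly. A sketch of that strategy over simplified, hypothetical types:

```cpp
#include <unordered_map>

struct Object {
  bool marked = false;
};

// Simplified weak map: keys are weak, and a value stays alive only as
// long as its key does (ephemeron semantics).
struct WeakMap {
  Object self;                                   // the map object itself
  Object table;                                  // backing-store header
  std::unordered_map<Object*, Object*> entries;  // deliberately not traced here
  WeakMap* next = nullptr;
  bool enqueued = false;  // V8 encodes this in |next| with a Smi sentinel
};

struct Collector {
  WeakMap* encountered = nullptr;

  void VisitWeakMap(WeakMap* wm) {
    // Thread the map onto the "encountered" list for the later
    // ProcessWeakMaps pass.
    if (!wm->enqueued) {
      wm->enqueued = true;
      wm->next = encountered;
      encountered = wm;
    }
    // Mark the backing table shallowly, without pushing it on the
    // marking deque: its entries must not keep keys or values alive.
    wm->table.marked = true;
  }
};
```
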
| 1125 static void VisitCode(Map* map, HeapObject* object) { | 1089 private: |
| 1126 Heap* heap = map->GetHeap(); | 1090 template<int id> |
| 1127 Code* code = reinterpret_cast<Code*>(object); | 1091 static inline void TrackObjectStatsAndVisit(Map* map, HeapObject* obj); |
| 1128 if (FLAG_cleanup_code_caches_at_gc) { | |
| 1129 code->ClearTypeFeedbackCells(heap); | |
| 1130 } | |
| 1131 code->CodeIterateBody<StaticMarkingVisitor>(heap); | |
| 1132 } | |
| 1133 | 1092 |
| 1134 // Code flushing support. | 1093 // Code flushing support. |
| 1135 | 1094 |
| 1136 // How many collections newly compiled code object will survive before being | 1095 // How many collections newly compiled code object will survive before being |
| 1137 // flushed. | 1096 // flushed. |
| 1138 static const int kCodeAgeThreshold = 5; | 1097 static const int kCodeAgeThreshold = 5; |
| 1139 | 1098 |
| 1140 static const int kRegExpCodeThreshold = 5; | 1099 static const int kRegExpCodeThreshold = 5; |
| 1141 | 1100 |
| 1142 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { | 1101 inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| (...skipping 94 matching lines...) |
| 1237 | 1196 |
| 1238 static inline bool IsValidNotBuiltinContext(Object* ctx) { | 1197 static inline bool IsValidNotBuiltinContext(Object* ctx) { |
| 1239 return ctx->IsContext() && | 1198 return ctx->IsContext() && |
| 1240 !Context::cast(ctx)->global()->IsJSBuiltinsObject(); | 1199 !Context::cast(ctx)->global()->IsJSBuiltinsObject(); |
| 1241 } | 1200 } |
| 1242 | 1201 |
| 1243 | 1202 |
| 1244 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { | 1203 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { |
| 1245 SharedFunctionInfo::cast(object)->BeforeVisitingPointers(); | 1204 SharedFunctionInfo::cast(object)->BeforeVisitingPointers(); |
| 1246 | 1205 |
| 1247 FixedBodyVisitor<StaticMarkingVisitor, | 1206 FixedBodyVisitor<MarkCompactMarkingVisitor, |
| 1248 SharedFunctionInfo::BodyDescriptor, | 1207 SharedFunctionInfo::BodyDescriptor, |
| 1249 void>::Visit(map, object); | 1208 void>::Visit(map, object); |
| 1250 } | 1209 } |
| 1251 | 1210 |
| 1252 | 1211 |
| 1253 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, | 1212 static void UpdateRegExpCodeAgeAndFlush(Heap* heap, |
| 1254 JSRegExp* re, | 1213 JSRegExp* re, |
| 1255 bool is_ascii) { | 1214 bool is_ascii) { |
| 1256 // Make sure that the fixed array is in fact initialized on the RegExp. | 1215 // Make sure that the fixed array is in fact initialized on the RegExp. |
| 1257 // We could potentially trigger a GC when initializing the RegExp. | 1216 // We could potentially trigger a GC when initializing the RegExp. |
| (...skipping 47 matching lines...) |
| 1305 | 1264 |
| 1306 // Works by setting the current sweep_generation (as a smi) in the | 1265 // Works by setting the current sweep_generation (as a smi) in the |
| 1307 // code object place in the data array of the RegExp and keeps a copy | 1266 // code object place in the data array of the RegExp and keeps a copy |
| 1308 // around that can be reinstated if we reuse the RegExp before flushing. | 1267 // around that can be reinstated if we reuse the RegExp before flushing. |
| 1309 // If we did not use the code for kRegExpCodeThreshold mark sweep GCs | 1268 // If we did not use the code for kRegExpCodeThreshold mark sweep GCs |
| 1310 // we flush the code. | 1269 // we flush the code. |
| 1311 static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) { | 1270 static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) { |
| 1312 Heap* heap = map->GetHeap(); | 1271 Heap* heap = map->GetHeap(); |
| 1313 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1272 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1314 if (!collector->is_code_flushing_enabled()) { | 1273 if (!collector->is_code_flushing_enabled()) { |
| 1315 VisitJSRegExpFields(map, object); | 1274 VisitJSRegExp(map, object); |
| 1316 return; | 1275 return; |
| 1317 } | 1276 } |
| 1318 JSRegExp* re = reinterpret_cast<JSRegExp*>(object); | 1277 JSRegExp* re = reinterpret_cast<JSRegExp*>(object); |
| 1319 // Flush code or set age on both ASCII and two byte code. | 1278 // Flush code or set age on both ASCII and two byte code. |
| 1320 UpdateRegExpCodeAgeAndFlush(heap, re, true); | 1279 UpdateRegExpCodeAgeAndFlush(heap, re, true); |
| 1321 UpdateRegExpCodeAgeAndFlush(heap, re, false); | 1280 UpdateRegExpCodeAgeAndFlush(heap, re, false); |
| 1322 // Visit the fields of the RegExp, including the updated FixedArray. | 1281 // Visit the fields of the RegExp, including the updated FixedArray. |
| 1323 VisitJSRegExpFields(map, object); | 1282 VisitJSRegExp(map, object); |
| 1324 } | 1283 } |
| 1325 | 1284 |
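
Per the comment above, the RegExp's code slot is replaced by the current sweep generation (stored as a smi) while a copy of the code is kept for cheap reinstatement; once the stamp is `kRegExpCodeThreshold` generations old, the code is flushed for real. A sketch of that aging scheme, with an `int` handle standing in for a `Code` object:

```cpp
#include <variant>

// Hypothetical slot that holds either compiled code (here just an int
// handle) or the sweep generation at which the code was set aside.
struct RegExpSlot {
  std::variant<int /*code*/, unsigned /*generation stamp*/> data{0};
  int saved_code = 0;  // copy kept so the code can be reinstated on reuse
};

constexpr unsigned kRegExpCodeThreshold = 5;

// Called once per mark-sweep GC for each RegExp, mirroring the role of
// UpdateRegExpCodeAgeAndFlush.
void AgeRegExpCode(RegExpSlot* slot, unsigned sweep_generation) {
  if (int* code = std::get_if<int>(&slot->data)) {
    // First GC without use: stamp the slot with the current generation
    // and remember the code for quick reinstatement.
    slot->saved_code = *code;
    slot->data = sweep_generation;
  } else if (sweep_generation - std::get<unsigned>(slot->data) >=
             kRegExpCodeThreshold) {
    slot->saved_code = 0;  // unused long enough: drop the code for real
  }
}

// On execution, the runtime would reinstate the saved code, e.g.:
//   if (slot->saved_code != 0) slot->data = slot->saved_code;
```
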
| 1326 | 1285 |
| 1327 static void VisitSharedFunctionInfoAndFlushCode(Map* map, | 1286 static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| 1328 HeapObject* object) { | 1287 HeapObject* object) { |
| 1329 Heap* heap = map->GetHeap(); | 1288 Heap* heap = map->GetHeap(); |
| 1330 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 1289 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 1331 if (shared->ic_age() != heap->global_ic_age()) { | 1290 if (shared->ic_age() != heap->global_ic_age()) { |
| 1332 shared->ResetForNewContext(heap->global_ic_age()); | 1291 shared->ResetForNewContext(heap->global_ic_age()); |
| 1333 } | 1292 } |
| (...skipping 18 matching lines...) |
| 1352 known_flush_code_candidate = IsFlushable(heap, shared); | 1311 known_flush_code_candidate = IsFlushable(heap, shared); |
| 1353 if (known_flush_code_candidate) { | 1312 if (known_flush_code_candidate) { |
| 1354 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared); | 1313 heap->mark_compact_collector()->code_flusher()->AddCandidate(shared); |
| 1355 } | 1314 } |
| 1356 } | 1315 } |
| 1357 | 1316 |
| 1358 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate); | 1317 VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate); |
| 1359 } | 1318 } |
| 1360 | 1319 |
| 1361 | 1320 |
| 1362 static void VisitCodeEntry(Heap* heap, Address entry_address) { | |
| 1363 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); | |
| 1364 MarkBit mark = Marking::MarkBitFrom(code); | |
| 1365 heap->mark_compact_collector()->MarkObject(code, mark); | |
| 1366 heap->mark_compact_collector()-> | |
| 1367 RecordCodeEntrySlot(entry_address, code); | |
| 1368 } | |
| 1369 | |
| 1370 static void VisitGlobalContext(Map* map, HeapObject* object) { | |
| 1371 FixedBodyVisitor<StaticMarkingVisitor, | |
| 1372 Context::MarkCompactBodyDescriptor, | |
| 1373 void>::Visit(map, object); | |
| 1374 | |
| 1375 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); | |
| 1376 for (int idx = Context::FIRST_WEAK_SLOT; | |
| 1377 idx < Context::GLOBAL_CONTEXT_SLOTS; | |
| 1378 ++idx) { | |
| 1379 Object** slot = | |
| 1380 HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx)); | |
| 1381 collector->RecordSlot(slot, slot, *slot); | |
| 1382 } | |
| 1383 } | |
| 1384 | |
| 1385 static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) { | 1321 static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) { |
| 1386 Heap* heap = map->GetHeap(); | 1322 Heap* heap = map->GetHeap(); |
| 1387 MarkCompactCollector* collector = heap->mark_compact_collector(); | 1323 MarkCompactCollector* collector = heap->mark_compact_collector(); |
| 1388 if (!collector->is_code_flushing_enabled()) { | 1324 if (!collector->is_code_flushing_enabled()) { |
| 1389 VisitJSFunction(map, object); | 1325 VisitJSFunction(map, object); |
| 1390 return; | 1326 return; |
| 1391 } | 1327 } |
| 1392 | 1328 |
| 1393 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); | 1329 JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); |
| 1394 // The function must have a valid context and not be a builtin. | 1330 // The function must have a valid context and not be a builtin. |
| (...skipping 61 matching lines...) |
| 1456 } | 1392 } |
| 1457 | 1393 |
| 1458 VisitPointers( | 1394 VisitPointers( |
| 1459 heap, | 1395 heap, |
| 1460 HeapObject::RawField(object, | 1396 HeapObject::RawField(object, |
| 1461 JSFunction::kCodeEntryOffset + kPointerSize), | 1397 JSFunction::kCodeEntryOffset + kPointerSize), |
| 1462 HeapObject::RawField(object, | 1398 HeapObject::RawField(object, |
| 1463 JSFunction::kNonWeakFieldsEndOffset)); | 1399 JSFunction::kNonWeakFieldsEndOffset)); |
| 1464 } | 1400 } |
| 1465 | 1401 |
| 1466 static inline void VisitJSRegExpFields(Map* map, | |
| 1467 HeapObject* object) { | |
| 1468 int last_property_offset = | |
| 1469 JSRegExp::kSize + kPointerSize * map->inobject_properties(); | |
| 1470 VisitPointers(map->GetHeap(), | |
| 1471 SLOT_ADDR(object, JSRegExp::kPropertiesOffset), | |
| 1472 SLOT_ADDR(object, last_property_offset)); | |
| 1473 } | |
| 1474 | |
| 1475 | 1402 |
| 1476 static void VisitSharedFunctionInfoFields(Heap* heap, | 1403 static void VisitSharedFunctionInfoFields(Heap* heap, |
| 1477 HeapObject* object, | 1404 HeapObject* object, |
| 1478 bool flush_code_candidate) { | 1405 bool flush_code_candidate) { |
| 1479 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); | 1406 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset)); |
| 1480 | 1407 |
| 1481 if (!flush_code_candidate) { | 1408 if (!flush_code_candidate) { |
| 1482 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); | 1409 VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset)); |
| 1483 } | 1410 } |
| 1484 | 1411 |
| 1485 VisitPointers(heap, | 1412 VisitPointers(heap, |
| 1486 SLOT_ADDR(object, SharedFunctionInfo::kOptimizedCodeMapOffset), | 1413 SLOT_ADDR(object, SharedFunctionInfo::kOptimizedCodeMapOffset), |
| 1487 SLOT_ADDR(object, SharedFunctionInfo::kSize)); | 1414 SLOT_ADDR(object, SharedFunctionInfo::kSize)); |
| 1488 } | 1415 } |
| 1489 | 1416 |
| 1490 #undef SLOT_ADDR | 1417 #undef SLOT_ADDR |
| 1491 | 1418 |
| 1492 typedef void (*Callback)(Map* map, HeapObject* object); | |
| 1493 | |
| 1494 static VisitorDispatchTable<Callback> table_; | |
| 1495 static VisitorDispatchTable<Callback> non_count_table_; | 1419 static VisitorDispatchTable<Callback> non_count_table_; |
| 1496 }; | 1420 }; |
| 1497 | 1421 |
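
The core of this change is visible in the class header above: the old monolithic `StaticMarkingVisitor` became a reusable `StaticMarkingVisitor<T>` base plus this `MarkCompactMarkingVisitor` subclass, using the curiously recurring template pattern so the shared base can dispatch through a per-visitor table without virtual calls. A sketch of that shape, with hypothetical visitor ids and callbacks:

```cpp
#include <cstdio>

struct HeapObject {
  int visitor_id;  // stand-in for the id V8 stores on the object's map
};

// Shared CRTP base: owns the dispatch table and the IterateBody entry
// point; each concrete visitor populates (and may override) the table.
template <typename ConcreteVisitor>
class StaticVisitorBase {
 public:
  using Callback = void (*)(HeapObject*);
  static constexpr int kVisitorIdCount = 2;

  static void IterateBody(HeapObject* obj) {
    table_[obj->visitor_id](obj);  // one indirect call, no vtable
  }

 protected:
  static Callback table_[kVisitorIdCount];
};

template <typename ConcreteVisitor>
typename StaticVisitorBase<ConcreteVisitor>::Callback
    StaticVisitorBase<ConcreteVisitor>::table_[kVisitorIdCount];

// Concrete visitor registers its specific callbacks, the way
// MarkCompactMarkingVisitor::Initialize() overrides kVisitJSRegExp etc.
class MarkingVisitorSketch : public StaticVisitorBase<MarkingVisitorSketch> {
 public:
  static void Initialize() {
    table_[0] = &VisitData;
    table_[1] = &VisitCodeLike;
  }

 private:
  static void VisitData(HeapObject*) { /* nothing to trace */ }
  static void VisitCodeLike(HeapObject*) { std::puts("tracing code body"); }
};
```

A caller would run `MarkingVisitorSketch::Initialize()` once and then dispatch with `MarkingVisitorSketch::IterateBody(&obj)`; instantiating the table per concrete visitor is what lets one marking framework be shared between the full collector and the incremental marker that this patch also initializes.
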
| 1498 | 1422 |
| 1499 template<int id> | 1423 template<int id> |
| 1500 void StaticMarkingVisitor::TrackObjectStatsAndVisit(Map* map, HeapObject* obj) { | 1424 void MarkCompactMarkingVisitor::TrackObjectStatsAndVisit(Map* map, |
| 1425 HeapObject* obj) { |
| 1501 Heap* heap = map->GetHeap(); | 1426 Heap* heap = map->GetHeap(); |
| 1502 int object_size = obj->Size(); | 1427 int object_size = obj->Size(); |
| 1503 heap->RecordObjectStats(map->instance_type(), -1, object_size); | 1428 heap->RecordObjectStats(map->instance_type(), -1, object_size); |
| 1504 non_count_table_.GetVisitorById(static_cast<VisitorId>(id))(map, obj); | 1429 non_count_table_.GetVisitorById(static_cast<VisitorId>(id))(map, obj); |
| 1505 } | 1430 } |
| 1506 | 1431 |
| 1507 | 1432 |
| 1508 template<> | 1433 template<> |
| 1509 void StaticMarkingVisitor::TrackObjectStatsAndVisit< | 1434 void MarkCompactMarkingVisitor::TrackObjectStatsAndVisit< |
| 1510 StaticMarkingVisitor::kVisitCode>(Map* map, HeapObject* obj) { | 1435 MarkCompactMarkingVisitor::kVisitCode>(Map* map, HeapObject* obj) { |
| 1511 Heap* heap = map->GetHeap(); | 1436 Heap* heap = map->GetHeap(); |
| 1512 int object_size = obj->Size(); | 1437 int object_size = obj->Size(); |
| 1513 ASSERT(map->instance_type() == CODE_TYPE); | 1438 ASSERT(map->instance_type() == CODE_TYPE); |
| 1514 heap->RecordObjectStats(CODE_TYPE, -1, object_size); | 1439 heap->RecordObjectStats(CODE_TYPE, -1, object_size); |
| 1515 heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size); | 1440 heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size); |
| 1516 non_count_table_.GetVisitorById(static_cast<VisitorId>(kVisitCode))(map, obj); | 1441 non_count_table_.GetVisitorById(static_cast<VisitorId>(kVisitCode))(map, obj); |
| 1517 } | 1442 } |
| 1518 | 1443 |
| 1519 | 1444 |
| 1520 void StaticMarkingVisitor::Initialize() { | 1445 void MarkCompactMarkingVisitor::Initialize() { |
| 1521 table_.Register(kVisitShortcutCandidate, | 1446 StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize(); |
| 1522 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1523 ConsString::BodyDescriptor, | |
| 1524 void>::Visit); | |
| 1525 | |
| 1526 table_.Register(kVisitConsString, | |
| 1527 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1528 ConsString::BodyDescriptor, | |
| 1529 void>::Visit); | |
| 1530 | |
| 1531 table_.Register(kVisitSlicedString, | |
| 1532 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1533 SlicedString::BodyDescriptor, | |
| 1534 void>::Visit); | |
| 1535 | |
| 1536 table_.Register(kVisitFixedArray, | |
| 1537 &FlexibleBodyVisitor<StaticMarkingVisitor, | |
| 1538 FixedArray::BodyDescriptor, | |
| 1539 void>::Visit); | |
| 1540 | |
| 1541 table_.Register(kVisitGlobalContext, &VisitGlobalContext); | |
| 1542 | |
| 1543 table_.Register(kVisitFixedDoubleArray, DataObjectVisitor::Visit); | |
| 1544 | |
| 1545 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit); | |
| 1546 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit); | |
| 1547 table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit); | |
| 1548 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit); | |
| 1549 | |
| 1550 table_.Register(kVisitJSWeakMap, &VisitJSWeakMap); | |
| 1551 | |
| 1552 table_.Register(kVisitOddball, | |
| 1553 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1554 Oddball::BodyDescriptor, | |
| 1555 void>::Visit); | |
| 1556 table_.Register(kVisitMap, | |
| 1557 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1558 Map::BodyDescriptor, | |
| 1559 void>::Visit); | |
| 1560 | |
| 1561 table_.Register(kVisitCode, &VisitCode); | |
| 1562 | 1447 |
| 1563 table_.Register(kVisitSharedFunctionInfo, | 1448 table_.Register(kVisitSharedFunctionInfo, |
| 1564 &VisitSharedFunctionInfoAndFlushCode); | 1449 &VisitSharedFunctionInfoAndFlushCode); |
| 1565 | 1450 |
| 1566 table_.Register(kVisitJSFunction, | 1451 table_.Register(kVisitJSFunction, |
| 1567 &VisitJSFunctionAndFlushCode); | 1452 &VisitJSFunctionAndFlushCode); |
| 1568 | 1453 |
| 1569 table_.Register(kVisitJSRegExp, | 1454 table_.Register(kVisitJSRegExp, |
| 1570 &VisitRegExpAndFlushCode); | 1455 &VisitRegExpAndFlushCode); |
| 1571 | 1456 |
| 1572 table_.Register(kVisitPropertyCell, | |
| 1573 &FixedBodyVisitor<StaticMarkingVisitor, | |
| 1574 JSGlobalPropertyCell::BodyDescriptor, | |
| 1575 void>::Visit); | |
| 1576 | |
| 1577 table_.RegisterSpecializations<DataObjectVisitor, | |
| 1578 kVisitDataObject, | |
| 1579 kVisitDataObjectGeneric>(); | |
| 1580 | |
| 1581 table_.RegisterSpecializations<JSObjectVisitor, | |
| 1582 kVisitJSObject, | |
| 1583 kVisitJSObjectGeneric>(); | |
| 1584 | |
| 1585 table_.RegisterSpecializations<StructObjectVisitor, | |
| 1586 kVisitStruct, | |
| 1587 kVisitStructGeneric>(); | |
| 1588 | |
| 1589 if (FLAG_track_gc_object_stats) { | 1457 if (FLAG_track_gc_object_stats) { |
| 1590 // Copy the visitor table to make call-through possible. | 1458 // Copy the visitor table to make call-through possible. |
| 1591 non_count_table_.CopyFrom(&table_); | 1459 non_count_table_.CopyFrom(&table_); |
| 1592 #define VISITOR_ID_COUNT_FUNCTION(id) \ | 1460 #define VISITOR_ID_COUNT_FUNCTION(id) \ |
| 1593 table_.Register(kVisit##id, TrackObjectStatsAndVisit<kVisit##id>); | 1461 table_.Register(kVisit##id, TrackObjectStatsAndVisit<kVisit##id>); |
| 1594 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) | 1462 VISITOR_ID_LIST(VISITOR_ID_COUNT_FUNCTION) |
| 1595 #undef VISITOR_ID_COUNT_FUNCTION | 1463 #undef VISITOR_ID_COUNT_FUNCTION |
| 1596 } | 1464 } |
| 1597 } | 1465 } |
| 1598 | 1466 |
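
When `FLAG_track_gc_object_stats` is set, `Initialize` above first snapshots the fully populated dispatch table into `non_count_table_` and then re-registers every id with a counting wrapper that records the object's size and calls through the snapshot; copying first is what makes call-through possible without the wrapper recursing into itself. A sketch of the pattern:

```cpp
#include <cstdio>

struct Obj { int id; int size; };
using Callback = void (*)(Obj*);

constexpr int kIdCount = 3;
Callback table[kIdCount];
Callback non_count_table[kIdCount];  // pristine copy used for call-through

void VisitA(Obj*) {}
void VisitB(Obj*) {}
void VisitC(Obj*) {}

template <int id>
void TrackStatsAndVisit(Obj* obj) {
  std::printf("id=%d size=%d\n", id, obj->size);  // record the stat
  non_count_table[id](obj);                       // then do the real visit
}

void Initialize(bool track_stats) {
  table[0] = &VisitA;
  table[1] = &VisitB;
  table[2] = &VisitC;
  if (track_stats) {
    // Copy first so the wrappers call the real visitors, not themselves.
    for (int i = 0; i < kIdCount; i++) non_count_table[i] = table[i];
    table[0] = &TrackStatsAndVisit<0>;
    table[1] = &TrackStatsAndVisit<1>;
    table[2] = &TrackStatsAndVisit<2>;
  }
}
```
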
| 1599 | 1467 |
| 1600 VisitorDispatchTable<StaticMarkingVisitor::Callback> | 1468 VisitorDispatchTable<MarkCompactMarkingVisitor::Callback> |
| 1601 StaticMarkingVisitor::table_; | 1469 MarkCompactMarkingVisitor::non_count_table_; |
| 1602 VisitorDispatchTable<StaticMarkingVisitor::Callback> | |
| 1603 StaticMarkingVisitor::non_count_table_; | |
| 1604 | 1470 |
| 1605 | 1471 |
| 1606 class MarkingVisitor : public ObjectVisitor { | 1472 class MarkingVisitor : public ObjectVisitor { |
| 1607 public: | 1473 public: |
| 1608 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } | 1474 explicit MarkingVisitor(Heap* heap) : heap_(heap) { } |
| 1609 | 1475 |
| 1610 void VisitPointer(Object** p) { | 1476 void VisitPointer(Object** p) { |
| 1611 StaticMarkingVisitor::VisitPointer(heap_, p); | 1477 MarkCompactMarkingVisitor::VisitPointer(heap_, p); |
| 1612 } | 1478 } |
| 1613 | 1479 |
| 1614 void VisitPointers(Object** start, Object** end) { | 1480 void VisitPointers(Object** start, Object** end) { |
| 1615 StaticMarkingVisitor::VisitPointers(heap_, start, end); | 1481 MarkCompactMarkingVisitor::VisitPointers(heap_, start, end); |
| 1616 } | 1482 } |
| 1617 | 1483 |
| 1618 private: | 1484 private: |
| 1619 Heap* heap_; | 1485 Heap* heap_; |
| 1620 }; | 1486 }; |
| 1621 | 1487 |
| 1622 | 1488 |
| 1623 class CodeMarkingVisitor : public ThreadVisitor { | 1489 class CodeMarkingVisitor : public ThreadVisitor { |
| 1624 public: | 1490 public: |
| 1625 explicit CodeMarkingVisitor(MarkCompactCollector* collector) | 1491 explicit CodeMarkingVisitor(MarkCompactCollector* collector) |
| (...skipping 138 matching lines...) |
| 1764 MarkBit mark_bit = Marking::MarkBitFrom(object); | 1630 MarkBit mark_bit = Marking::MarkBitFrom(object); |
| 1765 if (mark_bit.Get()) return; | 1631 if (mark_bit.Get()) return; |
| 1766 | 1632 |
| 1767 Map* map = object->map(); | 1633 Map* map = object->map(); |
| 1768 // Mark the object. | 1634 // Mark the object. |
| 1769 collector_->SetMark(object, mark_bit); | 1635 collector_->SetMark(object, mark_bit); |
| 1770 | 1636 |
| 1771 // Mark the map pointer and body, and push them on the marking stack. | 1637 // Mark the map pointer and body, and push them on the marking stack. |
| 1772 MarkBit map_mark = Marking::MarkBitFrom(map); | 1638 MarkBit map_mark = Marking::MarkBitFrom(map); |
| 1773 collector_->MarkObject(map, map_mark); | 1639 collector_->MarkObject(map, map_mark); |
| 1774 StaticMarkingVisitor::IterateBody(map, object); | 1640 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 1775 | 1641 |
| 1776 // Mark all the objects reachable from the map and body. May leave | 1642 // Mark all the objects reachable from the map and body. May leave |
| 1777 // overflowed objects in the heap. | 1643 // overflowed objects in the heap. |
| 1778 collector_->EmptyMarkingDeque(); | 1644 collector_->EmptyMarkingDeque(); |
| 1779 } | 1645 } |
| 1780 | 1646 |
| 1781 MarkCompactCollector* collector_; | 1647 MarkCompactCollector* collector_; |
| 1782 }; | 1648 }; |
| 1783 | 1649 |
| 1784 | 1650 |
| (...skipping 449 matching lines...) |
| 2234 while (!marking_deque_.IsEmpty()) { | 2100 while (!marking_deque_.IsEmpty()) { |
| 2235 HeapObject* object = marking_deque_.Pop(); | 2101 HeapObject* object = marking_deque_.Pop(); |
| 2236 ASSERT(object->IsHeapObject()); | 2102 ASSERT(object->IsHeapObject()); |
| 2237 ASSERT(heap()->Contains(object)); | 2103 ASSERT(heap()->Contains(object)); |
| 2238 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); | 2104 ASSERT(Marking::IsBlack(Marking::MarkBitFrom(object))); |
| 2239 | 2105 |
| 2240 Map* map = object->map(); | 2106 Map* map = object->map(); |
| 2241 MarkBit map_mark = Marking::MarkBitFrom(map); | 2107 MarkBit map_mark = Marking::MarkBitFrom(map); |
| 2242 MarkObject(map, map_mark); | 2108 MarkObject(map, map_mark); |
| 2243 | 2109 |
| 2244 StaticMarkingVisitor::IterateBody(map, object); | 2110 MarkCompactMarkingVisitor::IterateBody(map, object); |
| 2245 } | 2111 } |
| 2246 | 2112 |
| 2247 // Process encountered weak maps, mark objects only reachable by those | 2113 // Process encountered weak maps, mark objects only reachable by those |
| 2248 // weak maps and repeat until fix-point is reached. | 2114 // weak maps and repeat until fix-point is reached. |
| 2249 ProcessWeakMaps(); | 2115 ProcessWeakMaps(); |
| 2250 } | 2116 } |
| 2251 } | 2117 } |
| 2252 | 2118 |
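
The loop above is the marker's inner engine: pop an object, keep its map alive, scan its body (pushing newly discovered grey objects), and repeat until the deque drains. A compact sketch of that drain loop, over hypothetical simplified types:

```cpp
#include <vector>

struct Map;

struct HeapObj {
  bool marked = false;
  Map* map = nullptr;
  std::vector<HeapObj*> fields;
};

struct Map {
  HeapObj header;  // maps are heap objects too and must survive marking
};

// Mirrors the shape of EmptyMarkingDeque: each popped object keeps its
// map alive and has its body scanned, which may push more grey objects.
void EmptyMarkingDeque(std::vector<HeapObj*>* deque) {
  while (!deque->empty()) {
    HeapObj* obj = deque->back();
    deque->pop_back();
    if (obj->map != nullptr && !obj->map->header.marked) {
      obj->map->header.marked = true;
      deque->push_back(&obj->map->header);
    }
    // IterateBody, radically simplified: push unmarked referents.
    for (HeapObj* f : obj->fields) {
      if (f != nullptr && !f->marked) {
        f->marked = true;
        deque->push_back(f);
      }
    }
  }
}
```
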
| 2253 | 2119 |
| 2254 // Sweep the heap for overflowed objects, clear their overflow bits, and | 2120 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| (...skipping 118 matching lines...) |
| 2373 if (was_marked_incrementally_) { | 2239 if (was_marked_incrementally_) { |
| 2374 // There is no write barrier on cells so we have to scan them now at the end | 2240 // There is no write barrier on cells so we have to scan them now at the end |
| 2375 // of the incremental marking. | 2241 // of the incremental marking. |
| 2376 { | 2242 { |
| 2377 HeapObjectIterator cell_iterator(heap()->cell_space()); | 2243 HeapObjectIterator cell_iterator(heap()->cell_space()); |
| 2378 HeapObject* cell; | 2244 HeapObject* cell; |
| 2379 while ((cell = cell_iterator.Next()) != NULL) { | 2245 while ((cell = cell_iterator.Next()) != NULL) { |
| 2380 ASSERT(cell->IsJSGlobalPropertyCell()); | 2246 ASSERT(cell->IsJSGlobalPropertyCell()); |
| 2381 if (IsMarked(cell)) { | 2247 if (IsMarked(cell)) { |
| 2382 int offset = JSGlobalPropertyCell::kValueOffset; | 2248 int offset = JSGlobalPropertyCell::kValueOffset; |
| 2383 StaticMarkingVisitor::VisitPointer( | 2249 MarkCompactMarkingVisitor::VisitPointer( |
| 2384 heap(), | 2250 heap(), |
| 2385 reinterpret_cast<Object**>(cell->address() + offset)); | 2251 reinterpret_cast<Object**>(cell->address() + offset)); |
| 2386 } | 2252 } |
| 2387 } | 2253 } |
| 2388 } | 2254 } |
| 2389 } | 2255 } |
| 2390 | 2256 |
| 2391 RootMarkingVisitor root_visitor(heap()); | 2257 RootMarkingVisitor root_visitor(heap()); |
| 2392 MarkRoots(&root_visitor); | 2258 MarkRoots(&root_visitor); |
| 2393 | 2259 |
| (...skipping 238 matching lines...) |
| 2632 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2498 Object** anchor = reinterpret_cast<Object**>(table->address()); |
| 2633 for (int i = 0; i < table->Capacity(); i++) { | 2499 for (int i = 0; i < table->Capacity(); i++) { |
| 2634 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2500 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
| 2635 Object** key_slot = | 2501 Object** key_slot = |
| 2636 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( | 2502 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( |
| 2637 ObjectHashTable::EntryToIndex(i))); | 2503 ObjectHashTable::EntryToIndex(i))); |
| 2638 RecordSlot(anchor, key_slot, *key_slot); | 2504 RecordSlot(anchor, key_slot, *key_slot); |
| 2639 Object** value_slot = | 2505 Object** value_slot = |
| 2640 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( | 2506 HeapObject::RawField(table, FixedArray::OffsetOfElementAt( |
| 2641 ObjectHashTable::EntryToValueIndex(i))); | 2507 ObjectHashTable::EntryToValueIndex(i))); |
| 2642 StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot); | 2508 MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot); |
| 2643 } | 2509 } |
| 2644 } | 2510 } |
| 2645 weak_map_obj = weak_map->next(); | 2511 weak_map_obj = weak_map->next(); |
| 2646 } | 2512 } |
| 2647 } | 2513 } |
| 2648 | 2514 |
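
`ProcessWeakMaps` above only marks a value whose key is already marked, and the caller alternates it with deque draining until a pass discovers nothing new; values whose keys stay unmarked are later dropped by `ClearWeakMaps`. A self-contained sketch of that ephemeron fixed point:

```cpp
#include <utility>
#include <vector>

struct Obj {
  bool marked = false;
  std::vector<Obj*> refs;  // strong references
};

using Ephemeron = std::pair<Obj*, Obj*>;  // (weak key, value)

// Drain the worklist, tracing strong references transitively.
void Drain(std::vector<Obj*>* worklist) {
  while (!worklist->empty()) {
    Obj* n = worklist->back();
    worklist->pop_back();
    for (Obj* r : n->refs) {
      if (!r->marked) {
        r->marked = true;
        worklist->push_back(r);
      }
    }
  }
}

// Alternate between draining and the ephemeron rule "key marked =>
// mark value" until a pass discovers nothing new; values whose keys
// stayed unmarked would then be cleared, as in ClearWeakMaps.
void MarkWithEphemerons(std::vector<Obj*>* roots,
                        std::vector<Ephemeron>* ephemerons) {
  std::vector<Obj*> worklist;
  for (Obj* r : *roots) {
    if (!r->marked) {
      r->marked = true;
      worklist.push_back(r);
    }
  }
  bool changed = true;
  while (changed) {
    Drain(&worklist);
    changed = false;
    for (const Ephemeron& e : *ephemerons) {
      if (e.first->marked && !e.second->marked) {
        e.second->marked = true;
        worklist.push_back(e.second);
        changed = true;
      }
    }
  }
}
```
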
| 2649 | 2515 |
| 2650 void MarkCompactCollector::ClearWeakMaps() { | 2516 void MarkCompactCollector::ClearWeakMaps() { |
| 2651 Object* weak_map_obj = encountered_weak_maps(); | 2517 Object* weak_map_obj = encountered_weak_maps(); |
| 2652 while (weak_map_obj != Smi::FromInt(0)) { | 2518 while (weak_map_obj != Smi::FromInt(0)) { |
| (...skipping 1378 matching lines...) |
| 4031 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); | 3897 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); |
| 4032 } | 3898 } |
| 4033 #endif | 3899 #endif |
| 4034 if (obj->IsCode()) { | 3900 if (obj->IsCode()) { |
| 4035 PROFILE(isolate, CodeDeleteEvent(obj->address())); | 3901 PROFILE(isolate, CodeDeleteEvent(obj->address())); |
| 4036 } | 3902 } |
| 4037 } | 3903 } |
| 4038 | 3904 |
| 4039 | 3905 |
| 4040 void MarkCompactCollector::Initialize() { | 3906 void MarkCompactCollector::Initialize() { |
| 4041 StaticMarkingVisitor::Initialize(); | 3907 MarkCompactMarkingVisitor::Initialize(); |
| 3908 IncrementalMarking::Initialize(); |
| 4042 } | 3909 } |
| 4043 | 3910 |
| 4044 | 3911 |
| 4045 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { | 3912 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { |
| 4046 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; | 3913 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; |
| 4047 } | 3914 } |
| 4048 | 3915 |
| 4049 | 3916 |
| 4050 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, | 3917 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, |
| 4051 SlotsBuffer** buffer_address, | 3918 SlotsBuffer** buffer_address, |
| (...skipping 124 matching lines...) |
| 4176 while (buffer != NULL) { | 4043 while (buffer != NULL) { |
| 4177 SlotsBuffer* next_buffer = buffer->next(); | 4044 SlotsBuffer* next_buffer = buffer->next(); |
| 4178 DeallocateBuffer(buffer); | 4045 DeallocateBuffer(buffer); |
| 4179 buffer = next_buffer; | 4046 buffer = next_buffer; |
| 4180 } | 4047 } |
| 4181 *buffer_address = NULL; | 4048 *buffer_address = NULL; |
| 4182 } | 4049 } |
| 4183 | 4050 |
| 4184 | 4051 |
| 4185 } } // namespace v8::internal | 4052 } } // namespace v8::internal |