| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 64 matching lines...) |
| 75 class VerifyMarkingVisitor: public ObjectVisitor { | 75 class VerifyMarkingVisitor: public ObjectVisitor { |
| 76 public: | 76 public: |
| 77 void VisitPointers(Object** start, Object** end) { | 77 void VisitPointers(Object** start, Object** end) { |
| 78 for (Object** current = start; current < end; current++) { | 78 for (Object** current = start; current < end; current++) { |
| 79 if ((*current)->IsHeapObject()) { | 79 if ((*current)->IsHeapObject()) { |
| 80 HeapObject* object = HeapObject::cast(*current); | 80 HeapObject* object = HeapObject::cast(*current); |
| 81 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 81 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); |
| 82 } | 82 } |
| 83 } | 83 } |
| 84 } | 84 } |
| 85 |
| 86 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
| 87 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 88 if (rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || |
| 89 !rinfo->target_object()->IsMap() || |
| 90 !Map::cast(rinfo->target_object())->CanTransition()) { |
| 91 VisitPointer(rinfo->target_object_address()); |
| 92 } |
| 93 } |
| 85 }; | 94 }; |
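
The new VisitEmbeddedPointer carves one exemption out of the marking check: a transitionable Map embedded in optimized code is allowed to be unmarked, because this CL treats such embedded maps as weak references (the dependent code is deoptimized instead of keeping the map alive). The negated three-way guard is easier to read in positive form; a sketch of the equivalent predicate (illustrative helper, not part of the CL):

    // Weak iff: optimized code embeds a map that can still transition.
    static bool IsWeakEmbeddedPointer(Code* host, Object* target) {
      return host->kind() == Code::OPTIMIZED_FUNCTION &&
             target->IsMap() &&
             Map::cast(target)->CanTransition();
    }
    // VisitEmbeddedPointer then verifies only the non-weak case:
    //   if (!IsWeakEmbeddedPointer(rinfo->host(), rinfo->target_object()))
    //     VisitPointer(rinfo->target_object_address());

By De Morgan this is exactly the || condition in the method above.
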
| 86 | 95 |
| 87 | 96 |
| 88 static void VerifyMarking(Address bottom, Address top) { | 97 static void VerifyMarking(Address bottom, Address top) { |
| 89 VerifyMarkingVisitor visitor; | 98 VerifyMarkingVisitor visitor; |
| 90 HeapObject* object; | 99 HeapObject* object; |
| 91 Address next_object_must_be_here_or_later = bottom; | 100 Address next_object_must_be_here_or_later = bottom; |
| 92 | 101 |
| 93 for (Address current = bottom; | 102 for (Address current = bottom; |
| 94 current < top; | 103 current < top; |
| (...skipping 280 matching lines...) |
| 375 | 384 |
| 376 void MarkCompactCollector::CollectGarbage() { | 385 void MarkCompactCollector::CollectGarbage() { |
| 377 // Make sure that Prepare() has been called. The individual steps below will | 386 // Make sure that Prepare() has been called. The individual steps below will |
| 378 // update the state as they proceed. | 387 // update the state as they proceed. |
| 379 ASSERT(state_ == PREPARE_GC); | 388 ASSERT(state_ == PREPARE_GC); |
| 380 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); | 389 ASSERT(encountered_weak_maps_ == Smi::FromInt(0)); |
| 381 | 390 |
| 382 MarkLiveObjects(); | 391 MarkLiveObjects(); |
| 383 ASSERT(heap_->incremental_marking()->IsStopped()); | 392 ASSERT(heap_->incremental_marking()->IsStopped()); |
| 384 | 393 |
| 385 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 394 if (FLAG_collect_maps) ClearNonLiveReferences(); |
| 386 | 395 |
| 387 ClearWeakMaps(); | 396 ClearWeakMaps(); |
| 388 | 397 |
| 389 #ifdef VERIFY_HEAP | 398 #ifdef VERIFY_HEAP |
| 390 if (FLAG_verify_heap) { | 399 if (FLAG_verify_heap) { |
| 391 VerifyMarking(heap_); | 400 VerifyMarking(heap_); |
| 392 } | 401 } |
| 393 #endif | 402 #endif |
| 394 | 403 |
| 395 SweepSpaces(); | 404 SweepSpaces(); |
| (...skipping 420 matching lines...) |
| 816 space->PrepareForMarkCompact(); | 825 space->PrepareForMarkCompact(); |
| 817 } | 826 } |
| 818 | 827 |
| 819 #ifdef VERIFY_HEAP | 828 #ifdef VERIFY_HEAP |
| 820 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 829 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
| 821 VerifyMarkbitsAreClean(); | 830 VerifyMarkbitsAreClean(); |
| 822 } | 831 } |
| 823 #endif | 832 #endif |
| 824 } | 833 } |
| 825 | 834 |
| 835 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { |
| 836 public: |
| 837 virtual bool TakeFunction(JSFunction* function) { |
| 838 return function->code()->marked_for_deoptimization(); |
| 839 } |
| 840 }; |
| 841 |
| 826 | 842 |
| 827 void MarkCompactCollector::Finish() { | 843 void MarkCompactCollector::Finish() { |
| 828 #ifdef DEBUG | 844 #ifdef DEBUG |
| 829 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 845 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
| 830 state_ = IDLE; | 846 state_ = IDLE; |
| 831 #endif | 847 #endif |
| 832 // The stub cache is not traversed during GC; clear the cache to | 848 // The stub cache is not traversed during GC; clear the cache to |
| 833 // force lazy re-initialization of it. This must be done after the | 849 // force lazy re-initialization of it. This must be done after the |
| 834 // GC, because it relies on the new address of certain old space | 850 // GC, because it relies on the new address of certain old space |
| 835 // objects (empty string, illegal builtin). | 851 // objects (empty string, illegal builtin). |
| 836 heap()->isolate()->stub_cache()->Clear(); | 852 heap()->isolate()->stub_cache()->Clear(); |
| 853 |
| 854 DeoptimizeMarkedCodeFilter filter; |
| 855 Deoptimizer::DeoptimizeAllFunctionsWith(&filter); |
| 837 } | 856 } |
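
Finish() is where flagged code actually gets deoptimized: ClearAndDeoptimizeDependentCodes (further down) only sets marked_for_deoptimization while the collector runs, and here, after sweeping, the Deoptimizer walks all optimized functions and takes those the filter accepts. A minimal stand-alone sketch of the filter-object pattern, with stand-in types (the real traversal lives in Deoptimizer::DeoptimizeAllFunctionsWith):

    struct Function { bool marked_for_deopt; };

    class FunctionFilter {  // stand-in for OptimizedFunctionFilter
     public:
      virtual ~FunctionFilter() {}
      virtual bool TakeFunction(Function* function) = 0;
    };

    // The traversal owns the iteration order; the filter owns the policy.
    static void DeoptimizeAllWith(Function** begin, Function** end,
                                  FunctionFilter* filter) {
      for (Function** f = begin; f != end; ++f) {
        if (filter->TakeFunction(*f)) {
          // ... enqueue *f for lazy deoptimization ...
        }
      }
    }

Keeping the policy in a virtual filter lets other callers reuse the same traversal with different selection criteria.
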
| 838 | 857 |
| 839 | 858 |
| 840 // ------------------------------------------------------------------------- | 859 // ------------------------------------------------------------------------- |
| 841 // Phase 1: tracing and marking live objects. | 860 // Phase 1: tracing and marking live objects. |
| 842 // before: all objects are in normal state. | 861 // before: all objects are in normal state. |
| 843 // after: a live object's map pointer is marked as '00'. | 862 // after: a live object's map pointer is marked as '00'. |
| 844 | 863 |
| 845 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 864 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
| 846 // collection. Before marking, all objects are in their normal state. After | 865 // collection. Before marking, all objects are in their normal state. After |
| (...skipping 1312 matching lines...) |
| 2159 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | 2178 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); |
| 2160 if (map->instance_type() < FIRST_JS_RECEIVER_TYPE) continue; | 2179 if (map->instance_type() < FIRST_JS_RECEIVER_TYPE) continue; |
| 2161 | 2180 |
| 2162 if (map->attached_to_shared_function_info()) { | 2181 if (map->attached_to_shared_function_info()) { |
| 2163 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); | 2182 JSFunction::cast(map->constructor())->shared()->AttachInitialMap(map); |
| 2164 } | 2183 } |
| 2165 } | 2184 } |
| 2166 } | 2185 } |
| 2167 | 2186 |
| 2168 | 2187 |
| 2169 void MarkCompactCollector::ClearNonLiveTransitions() { | 2188 void MarkCompactCollector::ClearNonLiveReferences() { |
| 2170 HeapObjectIterator map_iterator(heap()->map_space()); | 2189 HeapObjectIterator map_iterator(heap()->map_space()); |
| 2171 // Iterate over the map space, setting map transitions that go from | 2190 // Iterate over the map space, setting map transitions that go from |
| 2172 // a marked map to an unmarked map to null transitions. This action | 2191 // a marked map to an unmarked map to null transitions. This action |
| 2173 // is carried out only on maps of JSObjects and related subtypes. | 2192 // is carried out only on maps of JSObjects and related subtypes. |
| 2174 for (HeapObject* obj = map_iterator.Next(); | 2193 for (HeapObject* obj = map_iterator.Next(); |
| 2175 obj != NULL; obj = map_iterator.Next()) { | 2194 obj != NULL; obj = map_iterator.Next()) { |
| 2176 Map* map = reinterpret_cast<Map*>(obj); | 2195 Map* map = reinterpret_cast<Map*>(obj); |
| 2177 MarkBit map_mark = Marking::MarkBitFrom(map); | 2196 MarkBit map_mark = Marking::MarkBitFrom(map); |
| 2178 if (map->IsFreeSpace()) continue; | 2197 if (map->IsFreeSpace()) continue; |
| 2179 | 2198 |
| 2180 ASSERT(map->IsMap()); | 2199 ASSERT(map->IsMap()); |
| 2181 // Only JSObject and subtypes have map transitions and back pointers. | 2200 if (!map->CanTransition()) continue; |
| 2182 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE); | |
| 2183 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; | |
| 2184 | 2201 |
| 2185 if (map_mark.Get() && | 2202 if (map_mark.Get() && |
| 2186 map->attached_to_shared_function_info()) { | 2203 map->attached_to_shared_function_info()) { |
| 2187 // This map is used for inobject slack tracking and has been detached | 2204 // This map is used for inobject slack tracking and has been detached |
| 2188 // from SharedFunctionInfo during the mark phase. | 2205 // from SharedFunctionInfo during the mark phase. |
| 2189 // Since it survived the GC, reattach it now. | 2206 // Since it survived the GC, reattach it now. |
| 2190 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); | 2207 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); |
| 2191 } | 2208 } |
| 2192 | 2209 |
| 2193 ClearNonLivePrototypeTransitions(map); | 2210 ClearNonLivePrototypeTransitions(map); |
| 2194 ClearNonLiveMapTransitions(map, map_mark); | 2211 ClearNonLiveMapTransitions(map, map_mark); |
| 2212 |
| 2213 if (map_mark.Get()) { |
| 2214 ClearNonLiveDependentCodes(map); |
| 2215 } else { |
| 2216 ClearAndDeoptimizeDependentCodes(map); |
| 2217 } |
| 2195 } | 2218 } |
| 2196 } | 2219 } |
| 2197 | 2220 |
| 2198 | 2221 |
| 2199 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { | 2222 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { |
| 2200 int number_of_transitions = map->NumberOfProtoTransitions(); | 2223 int number_of_transitions = map->NumberOfProtoTransitions(); |
| 2201 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); | 2224 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); |
| 2202 | 2225 |
| 2203 int new_number_of_transitions = 0; | 2226 int new_number_of_transitions = 0; |
| 2204 const int header = Map::kProtoTransitionHeaderSize; | 2227 const int header = Map::kProtoTransitionHeaderSize; |
| (...skipping 48 matching lines...) |
| 2253 // Follow the back pointer and check whether this is a map transition | 2276 // Follow the back pointer and check whether this is a map transition |
| 2254 // from a live map to a dead path; if so, clear the parent's transitions. | 2277 // from a live map to a dead path; if so, clear the parent's transitions. |
| 2255 bool current_is_alive = map_mark.Get(); | 2278 bool current_is_alive = map_mark.Get(); |
| 2256 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); | 2279 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
| 2257 if (!current_is_alive && parent_is_alive) { | 2280 if (!current_is_alive && parent_is_alive) { |
| 2258 parent->ClearNonLiveTransitions(heap()); | 2281 parent->ClearNonLiveTransitions(heap()); |
| 2259 } | 2282 } |
| 2260 } | 2283 } |
| 2261 | 2284 |
| 2262 | 2285 |
| 2286 void MarkCompactCollector::ClearAndDeoptimizeDependentCodes(Map* map) { |
| 2287 AssertNoAllocation no_allocation_scope; |
| 2288 DependentCodes* codes = map->dependent_codes(); |
| 2289 int number_of_codes = codes->number_of_codes(); |
| 2290 if (number_of_codes == 0) return; |
| 2291 for (int i = 0; i < number_of_codes; i++) { |
| 2292 Code* code = codes->code_at(i); |
| 2293 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
| 2294 code->set_marked_for_deoptimization(true); |
| 2295 } |
| 2296 codes->clear_code_at(i); |
| 2297 } |
| 2298 map->set_dependent_codes(DependentCodes::cast(heap()->empty_fixed_array())); |
| 2299 } |
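
Because this pass runs inside the collector (note the AssertNoAllocation scope), it must not allocate or patch code; it only records intent by flagging still-live code, clears every slot, and detaches the list from the dead map. The flag is consumed later by DeoptimizeMarkedCodeFilter in Finish(). A self-contained sketch of this deferred-action idiom (hypothetical types, not the V8 API):

    struct CodeObj { bool live; bool marked_for_deopt; };

    // GC phase: restricted environment -- record intent only.
    static void OnDeadMap(CodeObj** dependents, int count) {
      for (int i = 0; i < count; i++) {
        if (dependents[i]->live) dependents[i]->marked_for_deopt = true;
        dependents[i] = 0;  // drop the reference; the whole list is dead
      }
    }

    // Post-GC phase: unrestricted -- act on the recorded intent.
    static void DeoptimizeMarked(CodeObj** all, int count) {
      for (int i = 0; i < count; i++) {
        if (all[i]->marked_for_deopt) {
          // ... safe to patch/deoptimize here ...
        }
      }
    }
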
| 2300 |
| 2301 |
| 2302 void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) { |
| 2303 AssertNoAllocation no_allocation_scope; |
| 2304 DependentCodes* codes = map->dependent_codes(); |
| 2305 int number_of_codes = codes->number_of_codes(); |
| 2306 if (number_of_codes == 0) return; |
| 2307 int new_number_of_codes = 0; |
| 2308 for (int i = 0; i < number_of_codes; i++) { |
| 2309 Code* code = codes->code_at(i); |
| 2310 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
| 2311 if (new_number_of_codes != i) { |
| 2312 codes->set_code_at(new_number_of_codes, code); |
| 2313 } |
| 2314 Object** slot = codes->code_slot_at(new_number_of_codes); |
| 2315 RecordSlot(slot, slot, code); |
| 2316 new_number_of_codes++; |
| 2317 } |
| 2318 } |
| 2319 for (int i = new_number_of_codes; i < number_of_codes; i++) { |
| 2320 codes->clear_code_at(i); |
| 2321 } |
| 2322 codes->set_number_of_codes(new_number_of_codes); |
| 2323 } |
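
This loop is the standard in-place filter-and-compact idiom: index i scans, new_number_of_codes counts survivors, and the count must advance for every kept entry whether or not it moved (likewise, the slot must be recorded for every survivor). Gating the increment on the position check would silently drop any run of survivors already sitting at the front of the list. A self-contained illustration in plain C++ (not V8 types):

    #include <cassert>
    #include <vector>

    // Keep entries satisfying `alive`, preserve order, zero the tail.
    static size_t CompactInPlace(std::vector<int>& entries,
                                 bool (*alive)(int)) {
      size_t kept = 0;
      for (size_t i = 0; i < entries.size(); i++) {
        if (!alive(entries[i])) continue;
        if (kept != i) entries[kept] = entries[i];  // move only when needed
        kept++;                                     // but always count
      }
      for (size_t i = kept; i < entries.size(); i++) entries[i] = 0;
      return kept;
    }

    int main() {
      std::vector<int> xs = {1, 2, 3, 4, 5};
      size_t n = CompactInPlace(xs, [](int x) { return x % 2 == 1; });
      assert(n == 3 && xs[0] == 1 && xs[1] == 3 && xs[2] == 5);
    }
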
| 2324 |
| 2325 |
| 2263 void MarkCompactCollector::ProcessWeakMaps() { | 2326 void MarkCompactCollector::ProcessWeakMaps() { |
| 2264 Object* weak_map_obj = encountered_weak_maps(); | 2327 Object* weak_map_obj = encountered_weak_maps(); |
| 2265 while (weak_map_obj != Smi::FromInt(0)) { | 2328 while (weak_map_obj != Smi::FromInt(0)) { |
| 2266 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2329 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); |
| 2267 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2330 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
| 2268 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2331 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); |
| 2269 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2332 Object** anchor = reinterpret_cast<Object**>(table->address()); |
| 2270 for (int i = 0; i < table->Capacity(); i++) { | 2333 for (int i = 0; i < table->Capacity(); i++) { |
| 2271 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2334 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
| 2272 Object** key_slot = | 2335 Object** key_slot = |
| (...skipping 1575 matching lines...) |
| 3848 while (buffer != NULL) { | 3911 while (buffer != NULL) { |
| 3849 SlotsBuffer* next_buffer = buffer->next(); | 3912 SlotsBuffer* next_buffer = buffer->next(); |
| 3850 DeallocateBuffer(buffer); | 3913 DeallocateBuffer(buffer); |
| 3851 buffer = next_buffer; | 3914 buffer = next_buffer; |
| 3852 } | 3915 } |
| 3853 *buffer_address = NULL; | 3916 *buffer_address = NULL; |
| 3854 } | 3917 } |
| 3855 | 3918 |
| 3856 | 3919 |
| 3857 } } // namespace v8::internal | 3920 } } // namespace v8::internal |