OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 65 matching lines...)
76 class VerifyMarkingVisitor: public ObjectVisitor { | 76 class VerifyMarkingVisitor: public ObjectVisitor { |
77 public: | 77 public: |
78 void VisitPointers(Object** start, Object** end) { | 78 void VisitPointers(Object** start, Object** end) { |
79 for (Object** current = start; current < end; current++) { | 79 for (Object** current = start; current < end; current++) { |
80 if ((*current)->IsHeapObject()) { | 80 if ((*current)->IsHeapObject()) { |
81 HeapObject* object = HeapObject::cast(*current); | 81 HeapObject* object = HeapObject::cast(*current); |
82 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 82 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); |
83 } | 83 } |
84 } | 84 } |
85 } | 85 } |
| 86 |
| 87 |
| 88 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
| 89 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 90 if (rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || |
| 91 !rinfo->target_object()->IsMap() || |
| 92 !Map::cast(rinfo->target_object())->CanTransition()) { |
| 93 VisitPointer(rinfo->target_object_address()); |
| 94 } |
| 95 } |
86 }; | 96 }; |
87 | 97 |
88 | 98 |
89 static void VerifyMarking(Address bottom, Address top) { | 99 static void VerifyMarking(Address bottom, Address top) { |
90 VerifyMarkingVisitor visitor; | 100 VerifyMarkingVisitor visitor; |
91 HeapObject* object; | 101 HeapObject* object; |
92 Address next_object_must_be_here_or_later = bottom; | 102 Address next_object_must_be_here_or_later = bottom; |
93 | 103 |
94 for (Address current = bottom; | 104 for (Address current = bottom; |
95 current < top; | 105 current < top; |
(...skipping 721 matching lines...)
817 space->PrepareForMarkCompact(); | 827 space->PrepareForMarkCompact(); |
818 } | 828 } |
819 | 829 |
820 #ifdef VERIFY_HEAP | 830 #ifdef VERIFY_HEAP |
821 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 831 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
822 VerifyMarkbitsAreClean(); | 832 VerifyMarkbitsAreClean(); |
823 } | 833 } |
824 #endif | 834 #endif |
825 } | 835 } |
826 | 836 |
| 837 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { |
| 838 public: |
| 839 virtual bool TakeFunction(JSFunction* function) { |
| 840 return function->code()->marked_for_deoptimization(); |
| 841 } |
| 842 }; |
| 843 |
827 | 844 |
828 void MarkCompactCollector::Finish() { | 845 void MarkCompactCollector::Finish() { |
829 #ifdef DEBUG | 846 #ifdef DEBUG |
830 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 847 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
831 state_ = IDLE; | 848 state_ = IDLE; |
832 #endif | 849 #endif |
833 // The stub cache is not traversed during GC; clear the cache to | 850 // The stub cache is not traversed during GC; clear the cache to |
834 // force lazy re-initialization of it. This must be done after the | 851 // force lazy re-initialization of it. This must be done after the |
835 // GC, because it relies on the new address of certain old space | 852 // GC, because it relies on the new address of certain old space |
836 // objects (empty string, illegal builtin). | 853 // objects (empty string, illegal builtin). |
837 heap()->isolate()->stub_cache()->Clear(); | 854 heap()->isolate()->stub_cache()->Clear(); |
| 855 |
| 856 DeoptimizeMarkedCodeFilter filter; |
| 857 Deoptimizer::DeoptimizeAllFunctionsWith(&filter); |
838 } | 858 } |
839 | 859 |
840 | 860 |
841 // ------------------------------------------------------------------------- | 861 // ------------------------------------------------------------------------- |
842 // Phase 1: tracing and marking live objects. | 862 // Phase 1: tracing and marking live objects. |
843 // before: all objects are in normal state. | 863 // before: all objects are in normal state. |
844 // after: a live object's map pointer is marked as '00'. | 864 // after: a live object's map pointer is marked as '00'. |
845 | 865 |
846 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 866 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
847 // collection. Before marking, all objects are in their normal state. After | 867 // collection. Before marking, all objects are in their normal state. After |
(...skipping 1285 matching lines...)
2133 // Iterate over the map space, setting map transitions that go from | 2153 // Iterate over the map space, setting map transitions that go from |
2134 // a marked map to an unmarked map to null transitions. This action | 2154 // a marked map to an unmarked map to null transitions. This action |
2135 // is carried out only on maps of JSObjects and related subtypes. | 2155 // is carried out only on maps of JSObjects and related subtypes. |
2136 for (HeapObject* obj = map_iterator.Next(); | 2156 for (HeapObject* obj = map_iterator.Next(); |
2137 obj != NULL; obj = map_iterator.Next()) { | 2157 obj != NULL; obj = map_iterator.Next()) { |
2138 Map* map = reinterpret_cast<Map*>(obj); | 2158 Map* map = reinterpret_cast<Map*>(obj); |
2139 MarkBit map_mark = Marking::MarkBitFrom(map); | 2159 MarkBit map_mark = Marking::MarkBitFrom(map); |
2140 if (map->IsFreeSpace()) continue; | 2160 if (map->IsFreeSpace()) continue; |
2141 | 2161 |
2142 ASSERT(map->IsMap()); | 2162 ASSERT(map->IsMap()); |
2143 // Only JSObject and subtypes have map transitions and back pointers. | 2163 if (!map->CanTransition()) continue; |
2144 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE); | |
2145 if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue; | |
2146 | 2164 |
2147 if (map_mark.Get() && | 2165 if (map_mark.Get() && |
2148 map->attached_to_shared_function_info()) { | 2166 map->attached_to_shared_function_info()) { |
2149 // This map is used for inobject slack tracking and has been detached | 2167 // This map is used for inobject slack tracking and has been detached |
2150 // from SharedFunctionInfo during the mark phase. | 2168 // from SharedFunctionInfo during the mark phase. |
2151 // Since it survived the GC, reattach it now. | 2169 // Since it survived the GC, reattach it now. |
2152 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); | 2170 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); |
2153 } | 2171 } |
2154 | 2172 |
2155 ClearNonLivePrototypeTransitions(map); | 2173 ClearNonLivePrototypeTransitions(map); |
2156 ClearNonLiveMapTransitions(map, map_mark); | 2174 ClearNonLiveMapTransitions(map, map_mark); |
| 2175 |
| 2176 if (map_mark.Get()) { |
| 2177 ClearNonLiveDependentCodes(map); |
| 2178 } else { |
| 2179 DeoptimizeLiveDependentCodes(map); |
| 2180 } |
2157 } | 2181 } |
2158 } | 2182 } |
2159 | 2183 |
2160 | 2184 |
2161 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { | 2185 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { |
2162 int number_of_transitions = map->NumberOfProtoTransitions(); | 2186 int number_of_transitions = map->NumberOfProtoTransitions(); |
2163 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); | 2187 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); |
2164 | 2188 |
2165 int new_number_of_transitions = 0; | 2189 int new_number_of_transitions = 0; |
2166 const int header = Map::kProtoTransitionHeaderSize; | 2190 const int header = Map::kProtoTransitionHeaderSize; |
(...skipping 48 matching lines...)
2215   // Follow the back pointer to check whether this is a map transition from | 2239   // Follow the back pointer to check whether this is a map transition from |
2216   // a live map to a dead path; in that case clear the parent's transitions. | 2240   // a live map to a dead path; in that case clear the parent's transitions. |
2217 bool current_is_alive = map_mark.Get(); | 2241 bool current_is_alive = map_mark.Get(); |
2218 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); | 2242 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
2219 if (!current_is_alive && parent_is_alive) { | 2243 if (!current_is_alive && parent_is_alive) { |
2220 parent->ClearNonLiveTransitions(heap()); | 2244 parent->ClearNonLiveTransitions(heap()); |
2221 } | 2245 } |
2222 } | 2246 } |
2223 | 2247 |
2224 | 2248 |
| 2249 void MarkCompactCollector::DeoptimizeLiveDependentCodes(Map* map) { |
| 2250 AssertNoAllocation no_allocation_scope; |
| 2251 DependentCodes* codes = map->dependent_codes(); |
| 2252 int number_of_codes = codes->number_of_codes(); |
| 2253 if (number_of_codes == 0) return; |
| 2254 for (int i = 0; i < number_of_codes; i++) { |
| 2255 Code* code = codes->code_at(i); |
| 2256 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
| 2257 code->set_marked_for_deoptimization(true); |
| 2258 } |
| 2259 codes->clear_code_at(i); |
| 2260 } |
| 2261 map->set_dependent_codes(DependentCodes::cast(heap()->empty_fixed_array())); |
| 2262 } |
| 2263 |
| 2264 |
| 2265 void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) { |
| 2266 AssertNoAllocation no_allocation_scope; |
| 2267 DependentCodes* codes = map->dependent_codes(); |
| 2268 int number_of_codes = codes->number_of_codes(); |
| 2269 if (number_of_codes == 0) return; |
| 2270 int new_number_of_codes = 0; |
| 2271 for (int i = 0; i < number_of_codes; i++) { |
| 2272 Code* code = codes->code_at(i); |
| 2273 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
| 2274 if (new_number_of_codes != i) { |
| 2275 codes->set_code_at(new_number_of_codes, code); |
| 2276 Object** slot = codes->code_slot_at(new_number_of_codes); |
| 2277 RecordSlot(slot, slot, code); |
| 2278 new_number_of_codes++; |
| 2279 } |
| 2280 } |
| 2281 } |
| 2282 for (int i = new_number_of_codes; i < number_of_codes; i++) { |
| 2283 codes->clear_code_at(i); |
| 2284 } |
| 2285 codes->set_number_of_codes(new_number_of_codes); |
| 2286 } |
| 2287 |
| 2288 |
2225 void MarkCompactCollector::ProcessWeakMaps() { | 2289 void MarkCompactCollector::ProcessWeakMaps() { |
2226 Object* weak_map_obj = encountered_weak_maps(); | 2290 Object* weak_map_obj = encountered_weak_maps(); |
2227 while (weak_map_obj != Smi::FromInt(0)) { | 2291 while (weak_map_obj != Smi::FromInt(0)) { |
2228 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2292 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); |
2229 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2293 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
2230 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2294 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); |
2231 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2295 Object** anchor = reinterpret_cast<Object**>(table->address()); |
2232 for (int i = 0; i < table->Capacity(); i++) { | 2296 for (int i = 0; i < table->Capacity(); i++) { |
2233 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { | 2297 if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) { |
2234 Object** key_slot = | 2298 Object** key_slot = |
(...skipping 1579 matching lines...)
3814 while (buffer != NULL) { | 3878 while (buffer != NULL) { |
3815 SlotsBuffer* next_buffer = buffer->next(); | 3879 SlotsBuffer* next_buffer = buffer->next(); |
3816 DeallocateBuffer(buffer); | 3880 DeallocateBuffer(buffer); |
3817 buffer = next_buffer; | 3881 buffer = next_buffer; |
3818 } | 3882 } |
3819 *buffer_address = NULL; | 3883 *buffer_address = NULL; |
3820 } | 3884 } |
3821 | 3885 |
3822 | 3886 |
3823 } } // namespace v8::internal | 3887 } } // namespace v8::internal |