OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 67 matching lines...)
78 for (Object** current = start; current < end; current++) { | 78 for (Object** current = start; current < end; current++) { |
79 if ((*current)->IsHeapObject()) { | 79 if ((*current)->IsHeapObject()) { |
80 HeapObject* object = HeapObject::cast(*current); | 80 HeapObject* object = HeapObject::cast(*current); |
81 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 81 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); |
82 } | 82 } |
83 } | 83 } |
84 } | 84 } |
85 | 85 |
86 void VisitEmbeddedPointer(RelocInfo* rinfo) { | 86 void VisitEmbeddedPointer(RelocInfo* rinfo) { |
87 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 87 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
88 if (!FLAG_weak_embedded_maps_in_optimized_code || | 88 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps || |
89 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || | 89 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION || |
90 !rinfo->target_object()->IsMap() || | 90 !rinfo->target_object()->IsMap() || |
91 !Map::cast(rinfo->target_object())->CanTransition()) { | 91 !Map::cast(rinfo->target_object())->CanTransition()) { |
92 VisitPointer(rinfo->target_object_address()); | 92 VisitPointer(rinfo->target_object_address()); |
93 } | 93 } |
94 } | 94 } |
95 }; | 95 }; |
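Note on the guard above: with the added `!FLAG_collect_maps` check, the verifier skips the strong-marking check for an embedded pointer only when weak embedded maps and map collection are both enabled, the host is optimized code, and the target is a map that can transition; in every other case the target must be marked. A small illustrative helper restating the negated condition in positive form (hypothetical names, not part of the patch):

```cpp
#include <cstdio>

// Illustrative restatement of the guard in VisitEmbeddedPointer above.
// Parameter names are hypothetical; this is not V8 API.
static bool IsPotentiallyWeakEmbeddedMap(bool weak_embedded_maps_flag,
                                         bool collect_maps_flag,
                                         bool host_is_optimized_code,
                                         bool target_is_transitionable_map) {
  // Only when all four conditions hold does the verifier skip the
  // IsMarked() check; otherwise the embedded object must be strongly marked.
  return weak_embedded_maps_flag && collect_maps_flag &&
         host_is_optimized_code && target_is_transitionable_map;
}

int main() {
  // With map collection disabled the pointer is still verified as strong.
  std::printf("%d\n", IsPotentiallyWeakEmbeddedMap(true, false, true, true));  // 0
  return 0;
}
```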
96 | 96 |
97 | 97 |
98 static void VerifyMarking(Address bottom, Address top) { | 98 static void VerifyMarking(Address bottom, Address top) { |
(...skipping 727 matching lines...)
826 space->PrepareForMarkCompact(); | 826 space->PrepareForMarkCompact(); |
827 } | 827 } |
828 | 828 |
829 #ifdef VERIFY_HEAP | 829 #ifdef VERIFY_HEAP |
830 if (!was_marked_incrementally_ && FLAG_verify_heap) { | 830 if (!was_marked_incrementally_ && FLAG_verify_heap) { |
831 VerifyMarkbitsAreClean(); | 831 VerifyMarkbitsAreClean(); |
832 } | 832 } |
833 #endif | 833 #endif |
834 } | 834 } |
835 | 835 |
836 #ifdef VERIFY_HEAP | |
837 static void VerifyWeakEmbeddedMapsInOptimizedCode(Heap* heap) { | |
Michael Starzinger (2013/01/31 14:27:50): Move this function up to the other verifiers at th…
ulan (2013/02/04 09:54:06): Done.
838 HeapObjectIterator code_iterator(heap->code_space()); | |
839 for (HeapObject* obj = code_iterator.Next(); | |
840 obj != NULL; | |
841 obj = code_iterator.Next()) { | |
842 Code* code = Code::cast(obj); | |
843 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; | |
844 if (code->marked_for_deoptimization()) continue; | |
845 code->VerifyEmbeddedMaps(); | |
846 } | |
847 } | |
848 #endif | |
849 | |
836 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { | 850 class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter { |
837 public: | 851 public: |
838 virtual bool TakeFunction(JSFunction* function) { | 852 virtual bool TakeFunction(JSFunction* function) { |
839 return function->code()->marked_for_deoptimization(); | 853 return function->code()->marked_for_deoptimization(); |
840 } | 854 } |
841 }; | 855 }; |
842 | 856 |
843 | 857 |
844 void MarkCompactCollector::Finish() { | 858 void MarkCompactCollector::Finish() { |
845 #ifdef DEBUG | 859 #ifdef DEBUG |
846 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 860 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
847 state_ = IDLE; | 861 state_ = IDLE; |
848 #endif | 862 #endif |
849 // The stub cache is not traversed during GC; clear the cache to | 863 // The stub cache is not traversed during GC; clear the cache to |
850 // force lazy re-initialization of it. This must be done after the | 864 // force lazy re-initialization of it. This must be done after the |
851 // GC, because it relies on the new address of certain old space | 865 // GC, because it relies on the new address of certain old space |
852 // objects (empty string, illegal builtin). | 866 // objects (empty string, illegal builtin). |
853 heap()->isolate()->stub_cache()->Clear(); | 867 heap()->isolate()->stub_cache()->Clear(); |
854 | 868 |
869 #ifdef VERIFY_HEAP | |
870 if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code && | |
Michael Starzinger (2013/01/31 14:27:50): Move this call into MarkCompactCollector::CollectG…
ulan (2013/02/04 09:54:06): Done.
871 heap()->weak_embedded_maps_verification_enabled()) { | |
872 VerifyWeakEmbeddedMapsInOptimizedCode(heap()); | |
873 } | |
874 #endif | |
875 | |
855 DeoptimizeMarkedCodeFilter filter; | 876 DeoptimizeMarkedCodeFilter filter; |
856 Deoptimizer::DeoptimizeAllFunctionsWith(&filter); | 877 Deoptimizer::DeoptimizeAllFunctionsWith(&filter); |
857 } | 878 } |
858 | 879 |
859 | 880 |
860 // ------------------------------------------------------------------------- | 881 // ------------------------------------------------------------------------- |
861 // Phase 1: tracing and marking live objects. | 882 // Phase 1: tracing and marking live objects. |
862 // before: all objects are in normal state. | 883 // before: all objects are in normal state. |
863 // after: a live object's map pointer is marked as '00'. | 884 // after: a live object's map pointer is marked as '00'. |
864 | 885 |
(...skipping 1438 matching lines...)
2303 AssertNoAllocation no_allocation_scope; | 2324 AssertNoAllocation no_allocation_scope; |
2304 DependentCodes* codes = map->dependent_codes(); | 2325 DependentCodes* codes = map->dependent_codes(); |
2305 int number_of_codes = codes->number_of_codes(); | 2326 int number_of_codes = codes->number_of_codes(); |
2306 if (number_of_codes == 0) return; | 2327 if (number_of_codes == 0) return; |
2307 int new_number_of_codes = 0; | 2328 int new_number_of_codes = 0; |
2308 for (int i = 0; i < number_of_codes; i++) { | 2329 for (int i = 0; i < number_of_codes; i++) { |
2309 Code* code = codes->code_at(i); | 2330 Code* code = codes->code_at(i); |
2310 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2331 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
2311 if (new_number_of_codes != i) { | 2332 if (new_number_of_codes != i) { |
2312 codes->set_code_at(new_number_of_codes, code); | 2333 codes->set_code_at(new_number_of_codes, code); |
2313 Object** slot = codes->code_slot_at(new_number_of_codes); | |
2314 RecordSlot(slot, slot, code); | |
2315 new_number_of_codes++; | |
2316 } | 2334 } |
2335 Object** slot = codes->code_slot_at(new_number_of_codes); | |
ulan (2013/01/29 15:02:58): This was the bug that led to crashes.
Michael Starzinger (2013/01/31 14:27:50): Ouch, nice catch, I should have seen that in my in…
2336 RecordSlot(slot, slot, code); | |
2337 new_number_of_codes++; | |
2317 } | 2338 } |
2318 } | 2339 } |
2319 for (int i = new_number_of_codes; i < number_of_codes; i++) { | 2340 for (int i = new_number_of_codes; i < number_of_codes; i++) { |
2320 codes->clear_code_at(i); | 2341 codes->clear_code_at(i); |
2321 } | 2342 } |
2322 codes->set_number_of_codes(new_number_of_codes); | 2343 codes->set_number_of_codes(new_number_of_codes); |
2344 number_of_codes = codes->number_of_codes(); | |
Michael Starzinger (2013/01/31 14:27:50): This call seems to be obsolete.
ulan (2013/02/04 09:54:06): Done.
2323 } | 2345 } |
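For context, the crash fix above moves the `RecordSlot` call and the `new_number_of_codes++` increment out of the `new_number_of_codes != i` branch, so both now run for every surviving code object rather than only for entries that actually move. A minimal, self-contained sketch of the same compaction idiom (hypothetical `Entry`/`CompactLiveEntries` names, not V8 code), showing why the bookkeeping must sit outside the relocation check:

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-in for a DependentCodes entry; not a V8 type.
struct Entry { bool live; };

// Compact the array in place, keeping only live entries, and return the
// new count. The write-back is needed only when an entry actually moves,
// but the counter increment (and, in the patch, RecordSlot) must run for
// every survivor.
static int CompactLiveEntries(std::vector<Entry*>* entries) {
  int kept = 0;
  for (size_t i = 0; i < entries->size(); i++) {
    Entry* e = (*entries)[i];
    if (!e->live) continue;                 // dead entries are dropped
    if (kept != static_cast<int>(i)) {
      (*entries)[kept] = e;                 // move only when positions differ
    }
    // In the old code the increment sat inside the `kept != i` branch, so a
    // survivor already at its final index was neither counted nor recorded.
    kept++;
  }
  entries->resize(kept);
  return kept;
}

int main() {
  std::vector<Entry> pool = {{true}, {true}, {false}, {true}};
  std::vector<Entry*> entries = {&pool[0], &pool[1], &pool[2], &pool[3]};
  std::printf("kept %d of 4\n", CompactLiveEntries(&entries));  // kept 3 of 4
  return 0;
}
```

With the old placement, an unmoved survivor was silently skipped, the final count came out too low, and the trailing clear loop then wiped live entries, which is consistent with the crashes the reviewers discuss.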
2324 | 2346 |
2325 | 2347 |
2326 void MarkCompactCollector::ProcessWeakMaps() { | 2348 void MarkCompactCollector::ProcessWeakMaps() { |
2327 Object* weak_map_obj = encountered_weak_maps(); | 2349 Object* weak_map_obj = encountered_weak_maps(); |
2328 while (weak_map_obj != Smi::FromInt(0)) { | 2350 while (weak_map_obj != Smi::FromInt(0)) { |
2329 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2351 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); |
2330 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2352 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
2331 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2353 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); |
2332 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2354 Object** anchor = reinterpret_cast<Object**>(table->address()); |
(...skipping 1578 matching lines...)
3911 while (buffer != NULL) { | 3933 while (buffer != NULL) { |
3912 SlotsBuffer* next_buffer = buffer->next(); | 3934 SlotsBuffer* next_buffer = buffer->next(); |
3913 DeallocateBuffer(buffer); | 3935 DeallocateBuffer(buffer); |
3914 buffer = next_buffer; | 3936 buffer = next_buffer; |
3915 } | 3937 } |
3916 *buffer_address = NULL; | 3938 *buffer_address = NULL; |
3917 } | 3939 } |
3918 | 3940 |
3919 | 3941 |
3920 } } // namespace v8::internal | 3942 } } // namespace v8::internal |