| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 160 matching lines...) |
| 171 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); | 171 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
| 172 Object* target = rinfo->target_object(); | 172 Object* target = rinfo->target_object(); |
| 173 if (target->NonFailureIsHeapObject()) { | 173 if (target->NonFailureIsHeapObject()) { |
| 174 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target); | 174 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, target); |
| 175 MarkObject(target); | 175 MarkObject(target); |
| 176 } | 176 } |
| 177 } | 177 } |
| 178 | 178 |
| 179 void VisitCodeTarget(RelocInfo* rinfo) { | 179 void VisitCodeTarget(RelocInfo* rinfo) { |
| 180 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 180 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 181 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 181 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 182 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() |
| 183 && (target->ic_age() != heap_->global_ic_age())) { |
| 184 IC::Clear(rinfo->pc()); |
| 185 target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 186 } |
| 182 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target)); | 187 heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target)); |
| 183 MarkObject(target); | 188 MarkObject(target); |
| 184 } | 189 } |
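
Note on the hunk above: with `FLAG_cleanup_code_caches_at_gc` enabled, an inline-cache stub whose recorded `ic_age` no longer matches the heap's global IC age is cleared before being marked, and the target is then re-read, since `IC::Clear` patches the call site to point at a different stub. A minimal, self-contained sketch of this age-gated invalidation pattern (the `Stub` and `CallSite` types and the globals here are hypothetical stand-ins, not the real V8 classes):

```cpp
#include <cstdio>

// Stand-ins for the real flag and heap-wide IC age.
static bool FLAG_cleanup_code_caches_at_gc = true;
static int global_ic_age = 2;

struct Stub {
  bool is_inline_cache_stub;
  int ic_age;
};

struct CallSite {
  Stub uninitialized{false, 0};   // stub installed by Clear()
  Stub* target = &uninitialized;
  void Clear() { target = &uninitialized; }  // models IC::Clear patching
};

// Mirrors the new hunk: clear a stale IC, then re-read the target,
// because clearing rewrites the call site to a different stub.
Stub* TargetForMarking(CallSite* site) {
  Stub* target = site->target;
  if (FLAG_cleanup_code_caches_at_gc &&
      target->is_inline_cache_stub &&
      target->ic_age != global_ic_age) {
    site->Clear();           // patches the call site...
    target = site->target;   // ...so the target must be re-read
  }
  return target;
}

int main() {
  CallSite site;
  Stub stale{true, 1};  // an IC stub recorded under an older IC age
  site.target = &stale;
  Stub* marked = TargetForMarking(&site);
  std::printf("re-read target is an IC stub: %s\n",
              marked->is_inline_cache_stub ? "yes" : "no");
  return 0;
}
```

The re-read is the easy thing to miss: marking the stale pointer after `IC::Clear` would record the wrong object in the reloc slot.
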
| 185 | 190 |
| 186 void VisitDebugTarget(RelocInfo* rinfo) { | 191 void VisitDebugTarget(RelocInfo* rinfo) { |
| 187 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 192 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 188 rinfo->IsPatchedReturnSequence()) || | 193 rinfo->IsPatchedReturnSequence()) || |
| 189 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 194 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 190 rinfo->IsPatchedDebugBreakSlotSequence())); | 195 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 191 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 196 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| (...skipping 596 matching lines...) |
| 788 Map* global_context_map = heap_->global_context_map(); | 793 Map* global_context_map = heap_->global_context_map(); |
| 789 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); | 794 IncrementalMarkingMarkingVisitor marking_visitor(heap_, this); |
| 790 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { | 795 while (!marking_deque_.IsEmpty() && bytes_to_process > 0) { |
| 791 HeapObject* obj = marking_deque_.Pop(); | 796 HeapObject* obj = marking_deque_.Pop(); |
| 792 | 797 |
| 793 // Explicitly skip one word fillers. Incremental markbit patterns are | 798 // Explicitly skip one word fillers. Incremental markbit patterns are |
| 794 // correct only for objects that occupy at least two words. | 799 // correct only for objects that occupy at least two words. |
| 795 Map* map = obj->map(); | 800 Map* map = obj->map(); |
| 796 if (map == filler_map) continue; | 801 if (map == filler_map) continue; |
| 797 | 802 |
| 803 if (obj->IsMap()) { |
| 804 Map* map = Map::cast(obj); |
| 805 heap_->ClearCacheOnMap(map); |
| 806 } |
| 807 |
| 808 |
| 798 int size = obj->SizeFromMap(map); | 809 int size = obj->SizeFromMap(map); |
| 799 bytes_to_process -= size; | 810 bytes_to_process -= size; |
| 800 MarkBit map_mark_bit = Marking::MarkBitFrom(map); | 811 MarkBit map_mark_bit = Marking::MarkBitFrom(map); |
| 801 if (Marking::IsWhite(map_mark_bit)) { | 812 if (Marking::IsWhite(map_mark_bit)) { |
| 802 WhiteToGreyAndPush(map, map_mark_bit); | 813 WhiteToGreyAndPush(map, map_mark_bit); |
| 803 } | 814 } |
| 804 | 815 |
| 805 // TODO(gc) switch to static visitor instead of normal visitor. | 816 // TODO(gc) switch to static visitor instead of normal visitor. |
| 806 if (map == global_context_map) { | 817 if (map == global_context_map) { |
| 807 // Global contexts have weak fields. | 818 // Global contexts have weak fields. |
| (...skipping 109 matching lines...) |
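
Note on the marking-step hunk above: the added lines special-case `Map` objects popped from the marking deque and call `heap_->ClearCacheOnMap(map)`, presumably so a map's code cache cannot keep otherwise-unreachable code alive across the incremental cycle. A rough stand-alone model of the step (the `HeapObj`/`Tag` types and `Step` are hypothetical stand-ins for the V8 types):

```cpp
#include <cstdio>
#include <deque>

enum Tag { kFiller, kMap, kOther };

struct HeapObj {
  Tag tag;
  int size;
  int cache_entries;  // models a map's per-map code cache
};

// Drain objects until the byte budget runs out, as in Step() above.
void Step(std::deque<HeapObj*>* deque, int bytes_to_process) {
  while (!deque->empty() && bytes_to_process > 0) {
    HeapObj* obj = deque->front();
    deque->pop_front();
    // One-word fillers carry no valid incremental mark bits; skip them.
    if (obj->tag == kFiller) continue;
    // New behaviour from the hunk: drop a map's code cache so it does
    // not retain stale entries through this marking cycle.
    if (obj->tag == kMap) obj->cache_entries = 0;
    bytes_to_process -= obj->size;
  }
}

int main() {
  HeapObj filler{kFiller, 4, 0}, map{kMap, 32, 7}, plain{kOther, 16, 0};
  std::deque<HeapObj*> d{&filler, &map, &plain};
  Step(&d, 64);
  std::printf("map cache entries after step: %d\n", map.cache_entries);
  return 0;
}
```

Doing the cache clear here, as each map is popped, spreads the cleanup cost across incremental steps instead of paying it all at the final pause.
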
| 917 allocation_marking_factor_ = kInitialAllocationMarkingFactor; | 928 allocation_marking_factor_ = kInitialAllocationMarkingFactor; |
| 918 bytes_scanned_ = 0; | 929 bytes_scanned_ = 0; |
| 919 } | 930 } |
| 920 | 931 |
| 921 | 932 |
| 922 int64_t IncrementalMarking::SpaceLeftInOldSpace() { | 933 int64_t IncrementalMarking::SpaceLeftInOldSpace() { |
| 923 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); | 934 return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize(); |
| 924 } | 935 } |
| 925 | 936 |
| 926 } } // namespace v8::internal | 937 } } // namespace v8::internal |