| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 34 matching lines...) |
| 45 | 45 |
| 46 | 46 |
| 47 void MarkCompactCollector::SetFlags(int flags) { | 47 void MarkCompactCollector::SetFlags(int flags) { |
| 48 sweep_precisely_ = ((flags & Heap::kSweepPreciselyMask) != 0); | 48 sweep_precisely_ = ((flags & Heap::kSweepPreciselyMask) != 0); |
| 49 reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0); | 49 reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0); |
| 50 abort_incremental_marking_ = | 50 abort_incremental_marking_ = |
| 51 ((flags & Heap::kAbortIncrementalMarkingMask) != 0); | 51 ((flags & Heap::kAbortIncrementalMarkingMask) != 0); |
| 52 } | 52 } |
| 53 | 53 |
| 54 | 54 |
| 55 void MarkCompactCollector::ClearCacheOnMap(Map* map) { | |
| 56 if (FLAG_cleanup_code_caches_at_gc) { | |
| 57 map->ClearCodeCache(heap()); | |
| 58 } | |
| 59 } | |
| 60 | |
| 61 | |
| 62 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) { | 55 void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) { |
| 63 ASSERT(Marking::MarkBitFrom(obj) == mark_bit); | 56 ASSERT(Marking::MarkBitFrom(obj) == mark_bit); |
| 64 if (!mark_bit.Get()) { | 57 if (!mark_bit.Get()) { |
| 65 mark_bit.Set(); | 58 mark_bit.Set(); |
| 66 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); | 59 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); |
| 67 ProcessNewlyMarkedObject(obj); | 60 ProcessNewlyMarkedObject(obj); |
| 68 } | 61 } |
| 69 } | 62 } |
| 70 | 63 |
| 71 | 64 |
| 72 bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) { | 65 bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) { |
| 73 MarkBit mark = Marking::MarkBitFrom(object); | 66 MarkBit mark = Marking::MarkBitFrom(object); |
| 74 bool old_mark = mark.Get(); | 67 bool old_mark = mark.Get(); |
| 75 if (!old_mark) SetMark(object, mark); | 68 if (!old_mark) SetMark(object, mark); |
| 76 return old_mark; | 69 return old_mark; |
| 77 } | 70 } |
| 78 | 71 |
| 79 | 72 |
| 80 void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) { | 73 void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) { |
| 81 if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object); | 74 if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object); |
| 82 } | 75 } |
| 83 | 76 |
| 84 | 77 |
| 85 void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) { | 78 void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) { |
| 86 ASSERT(!mark_bit.Get()); | 79 ASSERT(!mark_bit.Get()); |
| 87 ASSERT(Marking::MarkBitFrom(obj) == mark_bit); | 80 ASSERT(Marking::MarkBitFrom(obj) == mark_bit); |
| 88 mark_bit.Set(); | 81 mark_bit.Set(); |
| 89 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); | 82 MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size()); |
| 90 if (obj->IsMap()) { | 83 if (obj->IsMap()) { |
| 91 ClearCacheOnMap(Map::cast(obj)); | 84 heap_->ClearCacheOnMap(Map::cast(obj)); |
| 92 } | 85 } |
| 93 } | 86 } |
| 94 | 87 |
| 95 | 88 |
| 96 bool MarkCompactCollector::IsMarked(Object* obj) { | 89 bool MarkCompactCollector::IsMarked(Object* obj) { |
| 97 ASSERT(obj->IsHeapObject()); | 90 ASSERT(obj->IsHeapObject()); |
| 98 HeapObject* heap_object = HeapObject::cast(obj); | 91 HeapObject* heap_object = HeapObject::cast(obj); |
| 99 return Marking::MarkBitFrom(heap_object).Get(); | 92 return Marking::MarkBitFrom(heap_object).Get(); |
| 100 } | 93 } |
| 101 | 94 |
| (...skipping 10 matching lines...) |
| 112 SlotsBuffer::FAIL_ON_OVERFLOW)) { | 105 SlotsBuffer::FAIL_ON_OVERFLOW)) { |
| 113 EvictEvacuationCandidate(object_page); | 106 EvictEvacuationCandidate(object_page); |
| 114 } | 107 } |
| 115 } | 108 } |
| 116 } | 109 } |
| 117 | 110 |
| 118 | 111 |
| 119 } } // namespace v8::internal | 112 } } // namespace v8::internal |
| 120 | 113 |
| 121 #endif // V8_MARK_COMPACT_INL_H_ | 114 #endif // V8_MARK_COMPACT_INL_H_ |
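The hunk above deletes MarkCompactCollector::ClearCacheOnMap (old lines 55-59) and has SetMark call heap_->ClearCacheOnMap(Map::cast(obj)) instead, so the code-cache clearing now lives on Heap. The Heap-side counterpart is not part of this diff; a minimal sketch of what it presumably looks like, assuming it keeps the same FLAG_cleanup_code_caches_at_gc guard and passes itself where the collector method passed heap():

    // Hypothetical Heap-side counterpart -- not shown in this diff. Assumes the
    // body is carried over unchanged from the deleted collector method.
    void Heap::ClearCacheOnMap(Map* map) {
      if (FLAG_cleanup_code_caches_at_gc) {
        // Drop the map's code cache so it does not keep otherwise-dead code
        // objects alive across this GC cycle.
        map->ClearCodeCache(this);
      }
    }

Keeping the flag check inside the Heap method rather than at each call site would preserve the old behaviour: callers such as SetMark stay unconditional and the guard lives in one place.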