Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 29 matching lines...) | |
| 40 MarkBit Marking::MarkBitFrom(Address addr) { | 40 MarkBit Marking::MarkBitFrom(Address addr) { |
| 41 MemoryChunk* p = MemoryChunk::FromAddress(addr); | 41 MemoryChunk* p = MemoryChunk::FromAddress(addr); |
| 42 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr), | 42 return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr), |
| 43 p->ContainsOnlyData()); | 43 p->ContainsOnlyData()); |
| 44 } | 44 } |
| 45 | 45 |
| 46 | 46 |
| 47 void MarkCompactCollector::SetFlags(int flags) { | 47 void MarkCompactCollector::SetFlags(int flags) { |
| 48 sweep_precisely_ = ((flags & Heap::kMakeHeapIterableMask) != 0); | 48 sweep_precisely_ = ((flags & Heap::kMakeHeapIterableMask) != 0); |
| 49 reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0); | 49 reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0); |
| 50 // Precise sweeping requires us to abort any incremental marking as well. | |
| 51 abort_incremental_marking_ = ((flags & (Heap::kAbortIncrementalMarkingMask \| | |
| 52 Heap::kMakeHeapIterableMask)) != 0); | |

ulan (2012/03/07 17:22:26): If kMakeHeapIterableMask implies abort_incremental
Michael Starzinger (2012/03/07 17:50:38): Done.

| 50 } | 53 } |
| 51 | 54 |
| 52 | 55 |
| 53 void MarkCompactCollector::ClearCacheOnMap(Map* map) { | 56 void MarkCompactCollector::ClearCacheOnMap(Map* map) { |
| 54 if (FLAG_cleanup_code_caches_at_gc) { | 57 if (FLAG_cleanup_code_caches_at_gc) { |
| 55 map->ClearCodeCache(heap()); | 58 map->ClearCodeCache(heap()); |
| 56 } | 59 } |
| 57 } | 60 } |
| 58 | 61 |
| 59 | 62 |
| (...skipping 50 matching lines...) | |
| 110 SlotsBuffer::FAIL_ON_OVERFLOW)) { | 113 SlotsBuffer::FAIL_ON_OVERFLOW)) { |
| 111 EvictEvacuationCandidate(object_page); | 114 EvictEvacuationCandidate(object_page); |
| 112 } | 115 } |
| 113 } | 116 } |
| 114 } | 117 } |
| 115 | 118 |
| 116 | 119 |
| 117 } } // namespace v8::internal | 120 } } // namespace v8::internal |
| 118 | 121 |
| 119 #endif // V8_MARK_COMPACT_INL_H_ | 122 #endif // V8_MARK_COMPACT_INL_H_ |
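Context for the review thread above: the OLD side of SetFlags folded Heap::kMakeHeapIterableMask into the abort_incremental_marking_ test, and ulan's comment (truncated in this export) questions where that implication should live. The sketch below is a minimal, hypothetical illustration of the flag-implication pattern under discussion; the GCFlags enum, the Collector class, and all values are stand-ins invented for this example, not V8's actual API, and since the comment is cut off it does not show the patch's actual resolution.

```cpp
// Hypothetical stand-in, not V8 code: illustrates a SetFlags-style decoder
// where one flag (make-heap-iterable) implies another behavior (aborting
// incremental marking), and where that implication can be expressed either
// inside the setter or at the call site.
#include <cstdio>

namespace example {

enum GCFlags {
  kNoGCFlags                   = 0,
  kMakeHeapIterableMask        = 1 << 0,
  kReduceMemoryFootprintMask   = 1 << 1,
  kAbortIncrementalMarkingMask = 1 << 2
};

class Collector {
 public:
  void SetFlags(int flags) {
    sweep_precisely_ = (flags & kMakeHeapIterableMask) != 0;
    reduce_memory_footprint_ = (flags & kReduceMemoryFootprintMask) != 0;
    // Variant A (what the OLD lines in the diff did): encode the implication
    // here by also testing kMakeHeapIterableMask.
    //   abort_incremental_marking_ =
    //       (flags & (kAbortIncrementalMarkingMask | kMakeHeapIterableMask)) != 0;
    // Variant B: keep this a plain decode and require callers that need an
    // iterable heap to pass kAbortIncrementalMarkingMask as well.
    abort_incremental_marking_ = (flags & kAbortIncrementalMarkingMask) != 0;
  }

  bool sweep_precisely_ = false;
  bool reduce_memory_footprint_ = false;
  bool abort_incremental_marking_ = false;
};

}  // namespace example

int main() {
  example::Collector collector;
  // Under variant B, a caller that wants an iterable heap spells out both flags.
  collector.SetFlags(example::kMakeHeapIterableMask |
                     example::kAbortIncrementalMarkingMask);
  std::printf("sweep precisely: %d, abort incremental marking: %d\n",
              collector.sweep_precisely_, collector.abort_incremental_marking_);
  return 0;
}
```

Variant A centralizes the rule in one place but hides it from callers; variant B keeps SetFlags a literal decode of the mask at the cost of every caller having to remember the pairing. The diff above only shows the OLD lines being removed, so which variant the final patch adopted is not visible in this excerpt.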